Use seek() instead of writing 0s

test_ZipWrite_large_file needs to write a file larger than 2 GiB into a
zip archive for testing. Instead of writing a long sequence of 0s to the
test file, use seek() so the unwritten ranges are left as holes (sparse)
on disk. The expected SHA-1 digest is now computed by reading the test
file back from disk in 4 MiB chunks (new hash_file() helper) instead of
hashing the generated contents in memory.
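
A minimal sketch of the idea (the helper name below is illustrative,
not part of this change): seeking past the data leaves a hole, so the
skipped ranges read back as zeros without ever being written.

    import os
    import tempfile

    def make_sparse_file(size, block=4 * 1024, step=4 * 1024 * 1024):
      # Reserve the full length up front; truncate() on a new file only
      # records the size, it does not write any blocks.
      tmp = tempfile.NamedTemporaryFile()
      tmp.truncate(size)
      for _ in range(0, size, step):
        tmp.write(os.urandom(block))         # a small block of real data
        tmp.seek(step - block, os.SEEK_CUR)  # skip ahead, leaving a hole
      return tmp

On filesystems without sparse-file support the gaps are still
zero-filled by the OS, but Python never has to generate or write them.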

Test: th
Bug: 298138109
Change-Id: I886bfa0c3f21664e529ba7de9688909518148f61
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 86fb480..c69a13d 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -15,14 +15,13 @@
 #
 
 import copy
-import json
 import os
 import subprocess
 import tempfile
-import time
 import unittest
 import zipfile
 from hashlib import sha1
+from typing import BinaryIO
 
 import common
 import test_utils
@@ -36,14 +35,24 @@
 GiB = 1024 * MiB
 
 
-def get_2gb_string():
+def get_2gb_file():
   size = int(2 * GiB + 1)
   block_size = 4 * KiB
   step_size = 4 * MiB
-  # Generate a long string with holes, e.g. 'xyz\x00abc\x00...'.
+  tmpfile = tempfile.NamedTemporaryFile()
+  tmpfile.truncate(size)
   for _ in range(0, size, step_size):
-    yield os.urandom(block_size)
-    yield b'\0' * (step_size - block_size)
+    tmpfile.write(os.urandom(block_size))
+    tmpfile.seek(step_size - block_size, os.SEEK_CUR)
+  return tmpfile
+
+
+def hash_file(filename):
+  sha1_hash = sha1()
+  with open(filename, "rb") as fp:
+    for data in iter(lambda: fp.read(4 * MiB), b''):
+      sha1_hash.update(data)
+  return sha1_hash
 
 
 class BuildInfoTest(test_utils.ReleaseToolsTestCase):
@@ -222,17 +231,17 @@
     info_dict = copy.deepcopy(self.TEST_INFO_FINGERPRINT_DICT)
     build_info = common.BuildInfo(info_dict)
     self.assertEqual(
-      'product-brand/product-name/product-device:version-release/build-id/'
-      'version-incremental:build-type/build-tags', build_info.fingerprint)
+        'product-brand/product-name/product-device:version-release/build-id/'
+        'version-incremental:build-type/build-tags', build_info.fingerprint)
 
     build_props = info_dict['build.prop'].build_props
     del build_props['ro.build.id']
     build_props['ro.build.legacy.id'] = 'legacy-build-id'
     build_info = common.BuildInfo(info_dict, use_legacy_id=True)
     self.assertEqual(
-      'product-brand/product-name/product-device:version-release/'
-      'legacy-build-id/version-incremental:build-type/build-tags',
-      build_info.fingerprint)
+        'product-brand/product-name/product-device:version-release/'
+        'legacy-build-id/version-incremental:build-type/build-tags',
+        build_info.fingerprint)
 
     self.assertRaises(common.ExternalError, common.BuildInfo, info_dict, None,
                       False)
@@ -241,9 +250,9 @@
     info_dict['vbmeta_digest'] = 'abcde12345'
     build_info = common.BuildInfo(info_dict, use_legacy_id=False)
     self.assertEqual(
-      'product-brand/product-name/product-device:version-release/'
-      'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
-      build_info.fingerprint)
+        'product-brand/product-name/product-device:version-release/'
+        'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
+        build_info.fingerprint)
 
   def test___getitem__(self):
     target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
@@ -376,7 +385,7 @@
     info_dict['build.prop'].build_props[
         'ro.product.property_source_order'] = 'bad-source'
     with self.assertRaisesRegexp(common.ExternalError,
-        'Invalid ro.product.property_source_order'):
+                                 'Invalid ro.product.property_source_order'):
       info = common.BuildInfo(info_dict, None)
       info.GetBuildProp('ro.product.device')
 
@@ -429,6 +438,13 @@
     self.assertIsNone(zip_file.testzip())
 
   def _test_ZipWrite(self, contents, extra_zipwrite_args=None):
+    with tempfile.NamedTemporaryFile() as test_file:
+      for data in contents:
+        test_file.write(bytes(data))
+      test_file.flush()  # make the data visible when reopened by name
+      return self._test_ZipWriteFile(test_file.name, extra_zipwrite_args)
+
+  def _test_ZipWriteFile(self, test_file_name, extra_zipwrite_args=None):
     extra_zipwrite_args = dict(extra_zipwrite_args or {})
 
     test_file = tempfile.NamedTemporaryFile(delete=False)
@@ -441,17 +457,12 @@
     arcname = extra_zipwrite_args.get("arcname", test_file_name)
     if arcname[0] == "/":
       arcname = arcname[1:]
+    sha1_hash = hash_file(test_file_name)
 
     zip_file.close()
     zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
 
     try:
-      sha1_hash = sha1()
-      for data in contents:
-        sha1_hash.update(bytes(data))
-        test_file.write(bytes(data))
-      test_file.close()
-
       expected_mode = extra_zipwrite_args.get("perms", 0o644)
       expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                        zipfile.ZIP_STORED)
@@ -467,7 +478,6 @@
                    test_file_name, expected_stat, expected_mode,
                    expected_compress_type)
     finally:
-      os.remove(test_file_name)
       os.remove(zip_file_name)
 
   def _test_ZipWriteStr(self, zinfo_or_arcname, contents, extra_args=None):
@@ -502,14 +512,13 @@
     finally:
       os.remove(zip_file_name)
 
-  def _test_ZipWriteStr_large_file(self, large, small, extra_args=None):
+  def _test_ZipWriteStr_large_file(self, large_file: BinaryIO, small, extra_args=None):
     extra_args = dict(extra_args or {})
 
     zip_file = tempfile.NamedTemporaryFile(delete=False)
     zip_file_name = zip_file.name
 
-    test_file = tempfile.NamedTemporaryFile(delete=False)
-    test_file_name = test_file.name
+    test_file_name = large_file.name
 
     arcname_large = test_file_name
     arcname_small = "bar"
@@ -522,11 +531,7 @@
     zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
 
     try:
-      sha1_hash = sha1()
-      for data in large:
-        sha1_hash.update(data)
-        test_file.write(data)
-      test_file.close()
+      sha1_hash = hash_file(test_file_name)
 
       # Arbitrary timestamp, just to make sure common.ZipWrite() restores
       # the timestamp after writing.
@@ -551,7 +556,6 @@
                    expected_compress_type=expected_compress_type)
     finally:
       os.remove(zip_file_name)
-      os.remove(test_file_name)
 
   def _test_reset_ZIP64_LIMIT(self, func, *args):
     default_limit = (1 << 31) - 1
@@ -577,10 +581,10 @@
     })
 
   def test_ZipWrite_large_file(self):
-    file_contents = get_2gb_string()
-    self._test_ZipWrite(file_contents, {
-        "compress_type": zipfile.ZIP_DEFLATED,
-    })
+    with get_2gb_file() as tmpfile:
+      self._test_ZipWriteFile(tmpfile.name, {
+          "compress_type": zipfile.ZIP_DEFLATED,
+      })
 
   def test_ZipWrite_resets_ZIP64_LIMIT(self):
     self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")
@@ -627,11 +631,11 @@
     # zipfile.writestr() doesn't work when the str size is over 2GiB even with
     # the workaround. We will only test the case of writing a string into a
     # large archive.
-    long_string = get_2gb_string()
     short_string = os.urandom(1024)
-    self._test_ZipWriteStr_large_file(long_string, short_string, {
-        "compress_type": zipfile.ZIP_DEFLATED,
-    })
+    with get_2gb_file() as large_file:
+      self._test_ZipWriteStr_large_file(large_file, short_string, {
+          "compress_type": zipfile.ZIP_DEFLATED,
+      })
 
   def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
     self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
@@ -821,9 +825,9 @@
   )
 
   APKCERTS_CERTMAP1 = {
-      'RecoveryLocalizer.apk' : 'certs/devkey',
-      'Settings.apk' : 'build/make/target/product/security/platform',
-      'TV.apk' : 'PRESIGNED',
+      'RecoveryLocalizer.apk': 'certs/devkey',
+      'Settings.apk': 'build/make/target/product/security/platform',
+      'TV.apk': 'PRESIGNED',
   }
 
   APKCERTS_TXT2 = (
@@ -838,10 +842,10 @@
   )
 
   APKCERTS_CERTMAP2 = {
-      'Compressed1.apk' : 'certs/compressed1',
-      'Compressed2a.apk' : 'certs/compressed2',
-      'Compressed2b.apk' : 'certs/compressed2',
-      'Compressed3.apk' : 'certs/compressed3',
+      'Compressed1.apk': 'certs/compressed1',
+      'Compressed2a.apk': 'certs/compressed2',
+      'Compressed2b.apk': 'certs/compressed2',
+      'Compressed3.apk': 'certs/compressed3',
   }
 
   APKCERTS_TXT3 = (
@@ -850,7 +854,7 @@
   )
 
   APKCERTS_CERTMAP3 = {
-      'Compressed4.apk' : 'certs/compressed4',
+      'Compressed4.apk': 'certs/compressed4',
   }
 
   # Test parsing with no optional fields, both optional fields, and only the
@@ -867,9 +871,9 @@
   )
 
   APKCERTS_CERTMAP4 = {
-      'RecoveryLocalizer.apk' : 'certs/devkey',
-      'Settings.apk' : 'build/make/target/product/security/platform',
-      'TV.apk' : 'PRESIGNED',
+      'RecoveryLocalizer.apk': 'certs/devkey',
+      'Settings.apk': 'build/make/target/product/security/platform',
+      'TV.apk': 'PRESIGNED',
   }
 
   def setUp(self):
@@ -973,7 +977,7 @@
     extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
     extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
     with open(extracted_from_privkey, 'rb') as privkey_fp, \
-        open(extracted_from_pubkey, 'rb') as pubkey_fp:
+            open(extracted_from_pubkey, 'rb') as pubkey_fp:
       self.assertEqual(privkey_fp.read(), pubkey_fp.read())
 
   def test_ParseCertificate(self):
@@ -1237,7 +1241,8 @@
     self.assertEqual(
         '1-5 9-10',
         sparse_image.file_map['//system/file1'].extra['text_str'])
-    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
+    self.assertTrue(
+        sparse_image.file_map['//system/file2'].extra['incomplete'])
     self.assertTrue(
         sparse_image.file_map['/system/app/file3'].extra['incomplete'])
 
@@ -1345,7 +1350,7 @@
       'recovery_api_version': 3,
       'fstab_version': 2,
       'system_root_image': 'true',
-      'no_recovery' : 'true',
+      'no_recovery': 'true',
       'recovery_as_boot': 'true',
   }
 
@@ -1667,6 +1672,7 @@
     self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
                       test_file.name, 'generic_kernel')
 
+
 class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
   """Checks the format of install-recovery.sh.
 
@@ -1676,7 +1682,7 @@
   def setUp(self):
     self._tempdir = common.MakeTempDir()
     # Create a fake dict that contains the fstab info for boot&recovery.
-    self._info = {"fstab" : {}}
+    self._info = {"fstab": {}}
     fake_fstab = [
         "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
         "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
@@ -2023,11 +2029,11 @@
           input_zip, 'odm', placeholder_values)
 
     self.assertEqual({
-      'ro.odm.build.date.utc': '1578430045',
-      'ro.odm.build.fingerprint':
-      'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
-      'ro.product.odm.device': 'coral',
-      'ro.product.odm.name': 'product1',
+        'ro.odm.build.date.utc': '1578430045',
+        'ro.odm.build.fingerprint':
+        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+        'ro.product.odm.device': 'coral',
+        'ro.product.odm.name': 'product1',
     }, partition_props.build_props)
 
     with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
@@ -2210,8 +2216,8 @@
 
     copied_props = copy.deepcopy(partition_props)
     self.assertEqual({
-      'ro.odm.build.date.utc': '1578430045',
-      'ro.odm.build.fingerprint':
-      'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
-      'ro.product.odm.device': 'coral',
+        'ro.odm.build.date.utc': '1578430045',
+        'ro.odm.build.fingerprint':
+        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+        'ro.product.odm.device': 'coral',
     }, copied_props.build_props)