autotest: reduce memory usage when downloading huge files

Uncompressed video files may be huge and exceed the RAM size of the
DUT. This CL splits the file content into blocks for writing and
computing the MD5 hash.

BUG=chromium:717973
TEST=video_HangoutHardwarePerf on low-end devices

Change-Id: I1100b9628745db5d1d81aa262a13b3f4e45ed539
Reviewed-on: https://chromium-review.googlesource.com/497346
Commit-Ready: Kuang-che Wu <kcwu@chromium.org>
Tested-by: Kuang-che Wu <kcwu@chromium.org>
Reviewed-by: Pin-chih Lin <johnylin@chromium.org>
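
For illustration, the streaming pattern applied in both hunks below can be
sketched standalone. This is a minimal sketch, assuming Python 3's
urllib.request and a hypothetical download_and_md5() helper with a 128 KiB
block size; it is not the autotest file_utils API, it only mirrors the
block-by-block read/write/hash idea of the change.

    import hashlib
    import urllib.request

    BLOCK_SIZE = 128 * 1024  # 128 KiB chunks, matching the diff below

    def download_and_md5(url, local_path):
        """Stream a remote file to disk and hash it without loading it all into RAM."""
        md5 = hashlib.md5()
        with urllib.request.urlopen(url) as remote_file, \
                open(local_path, 'wb') as local_file:
            while True:
                block = remote_file.read(BLOCK_SIZE)
                if not block:
                    break
                local_file.write(block)   # write this block to disk
                md5.update(block)         # fold it into the running MD5
        return md5.hexdigest()

Hashing each block as it is written keeps the peak memory footprint bounded
by the block size rather than the file size.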
diff --git a/client/common_lib/file_utils.py b/client/common_lib/file_utils.py
index b17fdab..73715ec 100644
--- a/client/common_lib/file_utils.py
+++ b/client/common_lib/file_utils.py
@@ -165,4 +165,8 @@
         raise
 
     with open(local_path, 'wb') as local_file:
-        local_file.write(remote_file.read())
\ No newline at end of file
+        while True:
+            block = remote_file.read(128 * 1024)
+            if not block:
+                break
+            local_file.write(block)
diff --git a/client/site_tests/video_HangoutHardwarePerf/video_HangoutHardwarePerf.py b/client/site_tests/video_HangoutHardwarePerf/video_HangoutHardwarePerf.py
index 5edf9e8..d94d1c3 100644
--- a/client/site_tests/video_HangoutHardwarePerf/video_HangoutHardwarePerf.py
+++ b/client/site_tests/video_HangoutHardwarePerf/video_HangoutHardwarePerf.py
@@ -165,7 +165,11 @@
         file_utils.download_file(url, tmp.name)
         md5 = hashlib.md5()
         with open(tmp.name, 'r') as r:
-            md5.update(r.read())
+            while True:
+                block = r.read(128 * 1024)
+                if not block:
+                    break
+                md5.update(block)
 
         filename = os.path.basename(remote_path)
         m = RE_VERSIONING_FILE.match(filename)