torch.package - a way to package models and code (#45015)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/45015

torch.package allows you to write packages of code, pickled python data, and
arbitrary binary and text resources into a self-contained package.

torch.package.PackageExporter writes the packages and
torch.package.PackageImporter reads them.

The importers can load this code in a hermetic way, such that code is loaded
from the package rather than the normal python import system. This allows
for the packaging of PyTorch model code and data so that it can be run
on a server or used in the future for transfer learning.
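
For example, a minimal round trip might look like this (a sketch; the file
name and the `my_model` object are illustrative, not part of the API):

    from torch.package import PackageExporter, PackageImporter

    # write a package containing a pickled object plus the code it needs
    with PackageExporter('my_package.zip', verbose=False) as e:
        e.save_pickle('model', 'model.pkl', my_model)

    # later, possibly in a different process or on another machine,
    # load it back without going through the normal import system
    i = PackageImporter('my_package.zip')
    loaded = i.load_pickle('model', 'model.pkl')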

The code contained in a package is copied file-by-file from the original
source when the package is created. The file format is a specially organized
zip file, so future users of a package can unzip it and edit the code
to make custom modifications.
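
As a sketch of the layout (the contents shown are illustrative; the paths
follow how the exporter maps module names to archive records):

    my_package.zip
        extern_modules          # newline-separated names of external modules
        module_a.py             # save_source_*('module_a', ...)
        package_a/__init__.py   # packages become directories with __init__.py
        package_a/obj.pkl       # save_pickle('package_a', 'obj.pkl', ...)
        data/<storage_key>      # tensor storages written when the exporter closes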

The importer for packages ensures that code in the package can only be loaded from
within the package, except for modules explicitly listed as external using :meth:`extern_module`.
The file `extern_modules` in the zip archive lists all the modules that a package externally depends on.
This prevents "implicit" dependencies, where the package runs locally because it is importing
a locally-installed package but then fails when copied to another machine.
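
For example (a sketch; the module names are illustrative):

    with PackageExporter('pkg.zip', verbose=False) as e:
        # 'numpy' is recorded in extern_modules and will be imported from
        # the loading process's environment, not copied into the package
        e.extern_module('numpy')
        e.save_module('my_model_code')

    # PackageImporter raises ImportError at open time if a required extern
    # module is rejected by the module_allowed callback
    i = PackageImporter('pkg.zip', module_allowed=lambda name: name != 'evil_module')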

Test Plan: Imported from OSS

Reviewed By: SplitInfinity

Differential Revision: D23824337

Pulled By: zdevito

fbshipit-source-id: 1247c34ba9b656f9db68a83e31f2a0fbe3bea6bd
diff --git a/test/module_a.py b/test/module_a.py
new file mode 100644
index 0000000..685af9b
--- /dev/null
+++ b/test/module_a.py
@@ -0,0 +1 @@
+result = 'module_a'
diff --git a/test/namespace_b/subpackage.py b/test/namespace_b/subpackage.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/namespace_b/subpackage.py
diff --git a/test/package_a/__init__.py b/test/package_a/__init__.py
new file mode 100644
index 0000000..4761b3d
--- /dev/null
+++ b/test/package_a/__init__.py
@@ -0,0 +1,7 @@
+result = 'package_a'
+
+class PackageAObject:
+    __slots__ = ['obj']
+
+    def __init__(self, obj):
+        self.obj = obj
diff --git a/test/package_a/subpackage.py b/test/package_a/subpackage.py
new file mode 100644
index 0000000..46f729d
--- /dev/null
+++ b/test/package_a/subpackage.py
@@ -0,0 +1,3 @@
+result = 'package_a.subpackage'
+class PackageASubpackageObject:
+    pass
diff --git a/test/run_test.py b/test/run_test.py
index 606e20a..d63fc37 100755
--- a/test/run_test.py
+++ b/test/run_test.py
@@ -89,7 +89,8 @@
     'test_determination',
     'test_futures',
     'test_fx',
-    'test_functional_autograd_benchmark'
+    'test_functional_autograd_benchmark',
+    'test_package',
 ]
 
 WINDOWS_BLOCKLIST = [
diff --git a/test/test_package.py b/test/test_package.py
new file mode 100644
index 0000000..a25726a
--- /dev/null
+++ b/test/test_package.py
@@ -0,0 +1,309 @@
+from unittest import main, skipIf
+from torch.testing._internal.common_utils import TestCase, IS_WINDOWS
+from tempfile import NamedTemporaryFile
+from torch.package import PackageExporter, PackageImporter
+from pathlib import Path
+from tempfile import TemporaryDirectory
+import torch
+from sys import version_info
+
+try:
+    from torchvision.models import resnet18
+    HAS_TORCHVISION = True
+except ImportError:
+    HAS_TORCHVISION = False
+skipIfNoTorchVision = skipIf(not HAS_TORCHVISION, "no torchvision")
+
+
+
+packaging_directory = Path(__file__).parent
+
+class PackagingTest(TestCase):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._temporary_files = []
+
+    def temp(self):
+        t = NamedTemporaryFile()
+        name = t.name
+        if IS_WINDOWS:
+            t.close()  # can't read an open file on Windows
+        else:
+            self._temporary_files.append(t)
+        return name
+
+    def tearDown(self):
+        for t in self._temporary_files:
+            t.close()
+        self._temporary_files = []
+
+    def test_saving_source(self):
+        filename = self.temp()
+        with PackageExporter(filename, verbose=False) as he:
+            he.save_source_file('foo', str(packaging_directory / 'module_a.py'))
+            he.save_source_file('foodir', str(packaging_directory / 'package_a'))
+        hi = PackageImporter(filename)
+        foo = hi.import_module('foo')
+        s = hi.import_module('foodir.subpackage')
+        self.assertEqual(foo.result, 'module_a')
+        self.assertEqual(s.result, 'package_a.subpackage')
+
+    def test_saving_string(self):
+        filename = self.temp()
+        with PackageExporter(filename, verbose=False) as he:
+            src = """\
+import math
+the_math = math
+"""
+            he.save_source_string('my_mod', src)
+        hi = PackageImporter(filename)
+        m = hi.import_module('math')
+        import math
+        self.assertIs(m, math)
+        my_mod = hi.import_module('my_mod')
+        self.assertIs(my_mod.math, math)
+
+    def test_save_module(self):
+        filename = self.temp()
+        with PackageExporter(filename, verbose=False) as he:
+            import module_a
+            import package_a
+            he.save_module(module_a.__name__)
+            he.save_module(package_a.__name__)
+        hi = PackageImporter(filename)
+        module_a_i = hi.import_module('module_a')
+        self.assertEqual(module_a_i.result, 'module_a')
+        self.assertIsNot(module_a, module_a_i)
+        package_a_i = hi.import_module('package_a')
+        self.assertEqual(package_a_i.result, 'package_a')
+        self.assertIsNot(package_a_i, package_a)
+
+    def test_pickle(self):
+        import package_a.subpackage
+        obj = package_a.subpackage.PackageASubpackageObject()
+        obj2 = package_a.PackageAObject(obj)
+
+        filename = self.temp()
+        with PackageExporter(filename, verbose=False) as he:
+            he.save_pickle('obj', 'obj.pkl', obj2)
+        hi = PackageImporter(filename)
+
+        # check we got dependencies
+        sp = hi.import_module('package_a.subpackage')
+        # check we didn't get other stuff
+        with self.assertRaises(ImportError):
+            hi.import_module('module_a')
+
+        obj_loaded = hi.load_pickle('obj', 'obj.pkl')
+        self.assertIsNot(obj2, obj_loaded)
+        self.assertIsInstance(obj_loaded.obj, sp.PackageASubpackageObject)
+        self.assertIsNot(package_a.subpackage.PackageASubpackageObject, sp.PackageASubpackageObject)
+
+    def test_resources(self):
+        filename = self.temp()
+        with PackageExporter(filename, verbose=False) as he:
+            he.save_text('main', 'main', "my string")
+            he.save_binary('main', 'main_binary', "my string".encode('utf-8'))
+            src = """\
+import resources
+t = resources.load_text('main', 'main')
+b = resources.load_binary('main', 'main_binary')
+"""
+            he.save_source_string('main', src, is_package=True)
+        hi = PackageImporter(filename)
+        m = hi.import_module('main')
+        self.assertEqual(m.t, "my string")
+        self.assertEqual(m.b, "my string".encode('utf-8'))
+
+    def test_extern(self):
+        filename = self.temp()
+        with PackageExporter(filename, verbose=False) as he:
+            he.extern_modules(['package_a.subpackage', 'module_a'])
+            he.save_module('package_a')
+        hi = PackageImporter(filename)
+        import package_a.subpackage
+        import module_a
+
+        module_a_im = hi.import_module('module_a')
+        hi.import_module('package_a.subpackage')
+        package_a_im = hi.import_module('package_a')
+
+        self.assertIs(module_a, module_a_im)
+        self.assertIsNot(package_a, package_a_im)
+        self.assertIs(package_a.subpackage, package_a_im.subpackage)
+
+    @skipIf(version_info < (3, 7), 'mock uses module __getattr__, a 3.7 feature')
+    def test_mock(self):
+        filename = self.temp()
+        with PackageExporter(filename, verbose=False) as he:
+            he.mock_modules(['package_a.subpackage', 'module_a'])
+            he.save_module('package_a')
+        hi = PackageImporter(filename)
+        import package_a.subpackage
+        _ = package_a.subpackage
+        import module_a
+        _ = module_a
+
+        m = hi.import_module('package_a.subpackage')
+        r = m.result
+        with self.assertRaisesRegex(NotImplementedError, 'was mocked out'):
+            r()
+
+    @skipIf(version_info < (3, 7), 'mock uses module __getattr__, a 3.7 feature')
+    def test_custom_requires(self):
+        filename = self.temp()
+
+        class Custom(PackageExporter):
+            def require_module(self, name, dependencies):
+                if name == 'module_a':
+                    self.mock_module('module_a')
+                elif name == 'package_a':
+                    self.save_source_string('package_a', 'import module_a\nresult = 5\n')
+                else:
+                    raise NotImplementedError('wat')
+
+        with Custom(filename, verbose=False) as he:
+            he.save_source_string('main', 'import package_a\n')
+
+        hi = PackageImporter(filename)
+        hi.import_module('module_a').should_be_mocked
+        bar = hi.import_module('package_a')
+        self.assertEqual(bar.result, 5)
+
+    @skipIfNoTorchVision
+    def test_resnet(self):
+        resnet = resnet18()
+
+        f1 = self.temp()
+
+        # create a package that will save it along with its code
+        with PackageExporter(f1, verbose=False) as e:
+            # put the pickled resnet in the package, by default
+            # this will also save all the code files referenced by
+            # the objects in the pickle
+            e.save_pickle('model', 'model.pkl', resnet)
+
+        # we can now load the saved model
+        i = PackageImporter(f1)
+        r2 = i.load_pickle('model', 'model.pkl')
+
+        # test that it works
+        input = torch.rand(1, 3, 224, 224)
+        ref = resnet(input)
+        self.assertTrue(torch.allclose(r2(input), ref))
+
+        # functions exist also to get at the private modules in each package
+        torchvision = i.import_module('torchvision')
+
+        f2 = self.temp()
+        # if we are doing transfer learning we might want to re-save
+        # things that were loaded from a package
+        with PackageExporter(f2, verbose=False) as e:
+            # We need to tell the exporter about any modules that
+            # came from imported packages so that it can resolve
+            # class names like torchvision.models.resnet.ResNet
+            # to their source code.
+
+            e.importers.insert(0, i.import_module)
+
+            # e.importers is a list of module importing functions
+            # that by default contains importlib.import_module.
+            # it is searched in order until the first success and
+            # that module is taken to be what torchvision.models.resnet
+            # should be in this code package. In the case of name collisions,
+            # such as trying to save a ResNet from two different packages,
+            # we take the first thing found in the path, so only ResNet objects from
+            # one importer will work. This avoids a bunch of name mangling in
+            # the source code. If you need to actually mix ResNet objects,
+            # we suggest reconstructing the model objects using code from a single package
+            # using functions like save_state_dict and load_state_dict to transfer state
+            # to the correct code objects.
+            e.save_pickle('model', 'model.pkl', r2)
+
+        i2 = PackageImporter(f2)
+        r3 = i2.load_pickle('model', 'model.pkl')
+        self.assertTrue(torch.allclose(r3(input), ref))
+
+        # test we can load from a directory
+        import zipfile
+        zf = zipfile.ZipFile(f1, 'r')
+
+        with TemporaryDirectory() as td:
+            zf.extractall(path=td)
+            iz = PackageImporter(str(Path(td) / Path(f1).name))
+            r4 = iz.load_pickle('model', 'model.pkl')
+            self.assertTrue(torch.allclose(r4(input), ref))
+
+    @skipIfNoTorchVision
+    def test_model_save(self):
+
+        # This example shows how you might package a model
+        # so that the creator of the model has flexibility about
+        # how they want to save it but the 'server' can always
+        # use the same API to load the package.
+
+        # The convention is for each model to provide a
+        # 'model' package with a 'load' function that actually
+        # reads the model out of the archive.
+
+        # How the load function is implemented is up to
+        # the packager.
+
+        # get our normal torchvision resnet
+        resnet = resnet18()
+
+
+        f1 = self.temp()
+        # Option 1: save by pickling the whole model
+        # + single-line, similar to torch.jit.save
+        # - more difficult to edit the code after the model is created
+        with PackageExporter(f1, verbose=False) as e:
+            e.save_pickle('model', 'pickled', resnet)
+            # note that this source is the same for all models in this approach
+            # so it can be made part of an API that just takes the model and
+            # packages it with this source.
+            src = """\
+import resources # gives you access to the importer from within the package
+
+# server knows to call model.load() to get the model,
+# maybe in the future it passes options as arguments by convention
+def load():
+    return resources.load_pickle('model', 'pickled')
+        """
+            e.save_source_string('model', src, is_package=True)
+
+        f2 = self.temp()
+        # Option 2: save with state dict
+        # - more code to write to save/load the model
+        # + but this code can be edited later to adapt the model
+        with PackageExporter(f2, verbose=False) as e:
+            e.save_pickle('model', 'state_dict', resnet.state_dict())
+            src = """\
+import resources # gives you access to the importer from within the package
+from torchvision.models.resnet import resnet18
+def load():
+    # if you want, you can later edit how resnet is constructed here
+    # to adjust the model in the package, while still loading the original
+    # state dict weights
+    r = resnet18()
+    state_dict = resources.load_pickle('model', 'state_dict')
+    r.load_state_dict(state_dict)
+    return r
+        """
+            e.save_source_string('model', src, is_package=True)
+
+
+
+        # regardless of how we chose to package, we can now use the model on a server in the same way
+        input = torch.rand(1, 3, 224, 224)
+        results = []
+        for m in [f1, f2]:
+            importer = PackageImporter(m)
+            the_model = importer.import_module('model').load()
+            r = the_model(input)
+            results.append(r)
+
+        self.assertTrue(torch.allclose(*results))
+
+if __name__ == '__main__':
+    main()
diff --git a/torch/package/__init__.py b/torch/package/__init__.py
new file mode 100644
index 0000000..be7159a
--- /dev/null
+++ b/torch/package/__init__.py
@@ -0,0 +1,2 @@
+from .importer import PackageImporter
+from .exporter import PackageExporter
diff --git a/torch/package/_custom_import_pickler.py b/torch/package/_custom_import_pickler.py
new file mode 100644
index 0000000..fd5787b
--- /dev/null
+++ b/torch/package/_custom_import_pickler.py
@@ -0,0 +1,78 @@
+from pickle import _Pickler, _getattribute, whichmodule, _extension_registry, _compat_pickle  # type: ignore
+from pickle import GLOBAL, STACK_GLOBAL, EXT1, EXT2, EXT4, PicklingError
+from struct import pack
+
+class CustomImportPickler(_Pickler):
+    def __init__(self, import_module, *args, **kwargs):
+        self.import_module = import_module
+        super().__init__(*args, **kwargs)
+
+    def save_global(self, obj, name=None):
+        # unfortunately the pickler code is factored in a way that
+        # forces us to copy/paste this function. The only change is marked
+        # CHANGED below.
+        write = self.write
+        memo = self.memo
+
+        if name is None:
+            name = getattr(obj, '__qualname__', None)
+        if name is None:
+            name = obj.__name__
+
+        module_name = whichmodule(obj, name)
+        try:
+            # CHANGED: self.import_module rather than
+            # __import__
+            module = self.import_module(module_name)
+            obj2, parent = _getattribute(module, name)
+        except (ImportError, KeyError, AttributeError):
+            raise PicklingError(
+                "Can't pickle %r: it's not found as %s.%s" %
+                (obj, module_name, name)) from None
+        else:
+            if obj2 is not obj:
+                raise PicklingError(
+                    "Can't pickle %r: it's not the same object as %s.%s" %
+                    (obj, module_name, name))
+
+        if self.proto >= 2:
+            code = _extension_registry.get((module_name, name))
+            if code:
+                assert code > 0
+                if code <= 0xff:
+                    write(EXT1 + pack("<B", code))
+                elif code <= 0xffff:
+                    write(EXT2 + pack("<H", code))
+                else:
+                    write(EXT4 + pack("<i", code))
+                return
+        lastname = name.rpartition('.')[2]
+        if parent is module:
+            name = lastname
+        # Non-ASCII identifiers are supported only with protocols >= 3.
+        if self.proto >= 4:
+            self.save(module_name)
+            self.save(name)
+            write(STACK_GLOBAL)
+        elif parent is not module:
+            self.save_reduce(getattr, (parent, lastname))
+        elif self.proto >= 3:
+            write(GLOBAL + bytes(module_name, "utf-8") + b'\n' +
+                  bytes(name, "utf-8") + b'\n')
+        else:
+            if self.fix_imports:
+                r_name_mapping = _compat_pickle.REVERSE_NAME_MAPPING
+                r_import_mapping = _compat_pickle.REVERSE_IMPORT_MAPPING
+                if (module_name, name) in r_name_mapping:
+                    module_name, name = r_name_mapping[(module_name, name)]
+                elif module_name in r_import_mapping:
+                    module_name = r_import_mapping[module_name]
+            try:
+                write(GLOBAL + bytes(module_name, "ascii") + b'\n' +
+                      bytes(name, "ascii") + b'\n')
+            except UnicodeEncodeError:
+                raise PicklingError(
+                    "can't pickle global identifier '%s.%s' using "
+                    "pickle protocol %i" % (module, name, self.proto)) from None
+
+        self.memoize(obj)
diff --git a/torch/package/_importlib.py b/torch/package/_importlib.py
new file mode 100644
index 0000000..1b521ca
--- /dev/null
+++ b/torch/package/_importlib.py
@@ -0,0 +1,83 @@
+import _warnings
+import os.path
+# note: implementations copied
+# from CPython's import code
+
+
+# _zip_searchorder defines how we search for a module in the zip
+# archive: we first search for a package __init__.py, then for a
+# non-package .py entry. (The CPython original also searched .pyc
+# entries and swapped them via initzipimport() in optimized mode;
+# this copy deals only with source files.)
+
+_zip_searchorder = (
+    ('/__init__.py', True),
+    ('.py', False),
+)
+
+# Replace any occurrences of '\r\n?' in the input string with '\n'.
+# This converts DOS and Mac line endings to Unix line endings.
+def _normalize_line_endings(source):
+    source = source.replace(b'\r\n', b'\n')
+    source = source.replace(b'\r', b'\n')
+    return source
+
+def _resolve_name(name, package, level):
+    """Resolve a relative module name to an absolute one."""
+    bits = package.rsplit('.', level - 1)
+    if len(bits) < level:
+        raise ValueError('attempted relative import beyond top-level package')
+    base = bits[0]
+    return '{}.{}'.format(base, name) if name else base
+
+def _sanity_check(name, package, level):
+    """Verify arguments are "sane"."""
+    if not isinstance(name, str):
+        raise TypeError('module name must be str, not {}'.format(type(name)))
+    if level < 0:
+        raise ValueError('level must be >= 0')
+    if level > 0:
+        if not isinstance(package, str):
+            raise TypeError('__package__ not set to a string')
+        elif not package:
+            raise ImportError('attempted relative import with no known parent '
+                              'package')
+    if not name and level == 0:
+        raise ValueError('Empty module name')
+
+def _calc___package__(globals):
+    """Calculate what __package__ should be.
+
+    __package__ is not guaranteed to be defined or could be set to None
+    to represent that its proper value is unknown.
+
+    """
+    package = globals.get('__package__')
+    spec = globals.get('__spec__')
+    if package is not None:
+        if spec is not None and package != spec.parent:
+            _warnings.warn("__package__ != __spec__.parent "
+                           f"({package!r} != {spec.parent!r})",
+                           ImportWarning, stacklevel=3)
+        return package
+    elif spec is not None:
+        return spec.parent
+    else:
+        _warnings.warn("can't resolve package from __spec__ or __package__, "
+                       "falling back on __name__ and __path__",
+                       ImportWarning, stacklevel=3)
+        package = globals['__name__']
+        if '__path__' not in globals:
+            package = package.rpartition('.')[0]
+    return package
+
+def _normalize_path(path):
+    """Normalize a path by ensuring it is a string.
+
+    If the resulting string contains path separators, an exception is raised.
+    """
+    parent, file_name = os.path.split(path)
+    if parent:
+        raise ValueError('{!r} must be only a file name'.format(path))
+    else:
+        return file_name
diff --git a/torch/package/_mock.py b/torch/package/_mock.py
new file mode 100644
index 0000000..d291bb5
--- /dev/null
+++ b/torch/package/_mock.py
@@ -0,0 +1,39 @@
+
+_magic_methods = ['__subclasscheck__', '__hex__', '__rmul__',
+                  '__float__', '__idiv__', '__setattr__', '__div__', '__invert__',
+                  '__nonzero__', '__rshift__',
+                  '__eq__', '__pos__', '__round__',
+                  '__rand__', '__or__', '__complex__', '__divmod__',
+                  '__len__', '__reversed__', '__copy__', '__reduce__',
+                  '__deepcopy__', '__rdivmod__', '__rrshift__', '__ifloordiv__',
+                  '__hash__', '__iand__', '__xor__', '__isub__', '__oct__',
+                  '__ceil__', '__imod__', '__add__', '__truediv__',
+                  '__unicode__', '__le__', '__delitem__', '__sizeof__', '__sub__',
+                  '__ne__', '__pow__', '__bytes__', '__mul__',
+                  '__itruediv__', '__bool__', '__iter__', '__abs__',
+                  '__gt__', '__iadd__', '__enter__',
+                  '__floordiv__', '__call__', '__neg__',
+                  '__and__', '__ixor__', '__getitem__', '__exit__', '__cmp__',
+                  '__getstate__', '__index__', '__contains__', '__floor__', '__lt__', '__getattr__',
+                  '__mod__', '__trunc__', '__delattr__', '__instancecheck__', '__setitem__', '__ipow__',
+                  '__ilshift__', '__long__', '__irshift__', '__imul__',
+                  '__lshift__', '__dir__', '__ge__', '__int__', '__ior__']
+
+
+class MockedObject:
+    _name: str
+
+    def __init__(self, name):
+        self.__dict__['_name'] = name
+
+    def __repr__(self):
+        return f"MockedObject({self._name})"
+
+
+def install_method(method_name):
+    def _not_implemented(self, *args, **kwargs):
+        raise NotImplementedError(f"Object '{self._name}' was mocked out during packaging but it is being used in {method_name}")
+    setattr(MockedObject, method_name, _not_implemented)
+
+for method_name in _magic_methods:
+    install_method(method_name)
diff --git a/torch/package/_mock_zipreader.py b/torch/package/_mock_zipreader.py
new file mode 100644
index 0000000..b273d41
--- /dev/null
+++ b/torch/package/_mock_zipreader.py
@@ -0,0 +1,48 @@
+import torch
+from glob import glob
+import os.path
+from typing import List, Any
+
+_storages : List[Any] = [
+    torch.DoubleStorage,
+    torch.FloatStorage,
+    torch.LongStorage,
+    torch.IntStorage,
+    torch.ShortStorage,
+    torch.CharStorage,
+    torch.ByteStorage,
+    torch.BoolStorage,
+]
+_dtype_to_storage = {
+    data_type(0).dtype: data_type for data_type in _storages
+}
+
+# wrapper class: callers expect get_storage_from_record to return an object with a .storage() method
+class _HasStorage(object):
+    def __init__(self, storage):
+        self._storage = storage
+
+    def storage(self):
+        return self._storage
+
+
+class MockZipReader(object):
+    def __init__(self, directory):
+        self.directory = directory
+
+    def get_record(self, name):
+        filename = f'{self.directory}/{name}'
+        with open(filename, 'rb') as f:
+            return f.read()
+
+    def get_storage_from_record(self, name, numel, dtype):
+        storage = _dtype_to_storage[dtype]
+        filename = f'{self.directory}/{name}'
+        return _HasStorage(storage.from_file(filename=filename, size=numel))
+
+    def get_all_records(self):
+        files = []
+        for filename in glob(f'{self.directory}/**', recursive=True):
+            if not os.path.isdir(filename):
+                files.append(filename[len(self.directory) + 1:])
+        return files
diff --git a/torch/package/exporter.py b/torch/package/exporter.py
new file mode 100644
index 0000000..8530f6f
--- /dev/null
+++ b/torch/package/exporter.py
@@ -0,0 +1,435 @@
+import torch
+from torch.serialization import normalize_storage_type, location_tag, _should_read_directly
+import io
+import pickle
+import pickletools
+from .find_file_dependencies import find_files_source_depends_on
+from ._custom_import_pickler import CustomImportPickler
+from ._importlib import _normalize_path
+import types
+import importlib
+from typing import List, Any, Callable, Dict, Optional
+from distutils.sysconfig import get_python_lib
+from pathlib import Path
+import linecache
+import sys
+
+class PackageExporter:
+    """ Exporters allow you to write packages of code, pickled python data, and
+    arbitrary binary and text resources into a self-contained package.
+
+    Importers can load this code in a hermetic way, such that code is loaded
+    from the package rather than the normal python import system. This allows
+    for the packaging of PyTorch model code and data so that it can be run
+    on a server or used in the future for transfer learning.
+
+    The code contained in packages is copied file-by-file from the original
+    source when it is created, and the file format is a specially organized
+    zip file. Future users of the package can unzip the package, and edit the code
+    in order to perform custom modifications to it.
+
+    The importer for packages ensures that code in the package can only be loaded from
+    within the package, except for modules explicitly listed as external using :meth:`extern_module`.
+    The file `extern_modules` in the zip archive lists all the modules that a package externally depends on.
+    This prevents "implicit" dependencies where the package runs locally because it is importing
+    a locally-installed package, but then fails when the package is copied to another machine.
+
+
+    Dependencies
+    ------------
+
+    When source code is added to the package, the exporter can optionally scan it
+    for further code dependencies (`dependencies=True`). It looks for import statements,
+    resolves relative references to qualified module names, and calls :meth:`require_module`
+    on each module it finds, recursively resolving dependencies.
+
+    """
+
+    importers: List[Callable[[str], Any]]
+    """ A list of functions that will be called in order to find the module assocated
+    with module names referenced by other modules or by pickled objects. Initialized to
+    `[importlib.import_module]` by default. When pickling code or objects that was loaded
+    from an imported packaged, that `importer.import_module` should be put into the importer list.
+    When a name conflict occurs between importers, the first importer in the list takes precedence,
+    and only objects that refer to this first importers class can be saved
+    """
+
+
+    def __init__(self, filename: str, verbose: bool = True):
+        """
+        Create an exporter.
+
+        Args:
+            filename: e.g. my_package.zip
+            verbose: Print information about dependency resolution to stdout.
+                Useful for tracking down why certain files get included.
+        """
+        self.zip_file = torch._C.PyTorchFileWriter(filename)
+        self.serialized_storages : Dict[str, Any] = {}
+        self.external : List[str] = []
+        self.provided : Dict[str, bool] = {}
+        self.verbose = verbose
+        self.importers = [importlib.import_module]
+
+    def save_source_file(self, module_name: str, file_or_directory: str, dependencies=True):
+        """Adds the local file system `file_or_directory` to the source package to provide the code
+        for `module_name`.
+
+        Args:
+            module_name (str): e.g. `my_package.my_subpackage`; the code will be saved to provide this module.
+            file_or_directory (str): the path to a file or directory of code. When a directory, all python files in the directory
+                are recursively copied using :meth:`save_source_file`. If a file is named "__init__.py" the code is treated
+                as a package.
+            dependencies (bool, optional): If True, we scan the source for dependencies (see :ref:`Dependencies`).
+        """
+        path = Path(file_or_directory)
+        if path.is_dir():
+            to_save = []  # list of tuples with arguments to save_source_string
+            module_path = module_name.replace('.', '/')
+            for filename in path.glob('**/*.py'):
+                relative_path = filename.relative_to(path).as_posix()
+                archivename = module_path + '/' + relative_path
+                if filename.is_dir():
+                    self.provided[archivename] = True
+                else:
+                    submodule_name = None
+                    if filename.name == '__init__.py':
+                        submodule_name = archivename[:-len('/__init__.py')].replace('/', '.')
+                        is_package = True
+                    else:
+                        submodule_name = archivename[:-len('.py')].replace('/', '.')
+                        is_package = False
+
+                    self.provided[submodule_name] = True
+                    # we delay the call to save_source_string so that we record all the source files
+                    # being provided by this directory structure _before_ attempting to resolve the dependencies
+                    # on the source. This makes sure we don't try to copy over modules that will just get
+                    # overwritten by this directory blob
+                    to_save.append((submodule_name, _read_file(str(filename)), is_package, dependencies, str(filename)))
+
+            for item in to_save:
+                self.save_source_string(*item)
+        else:
+            is_package = path.name == '__init__.py'
+            self.save_source_string(module_name, _read_file(file_or_directory), is_package, dependencies, file_or_directory)
+
+    def save_source_string(self, module_name: str, src: str, is_package: bool = False,
+                           dependencies: bool = True, orig_file_name: Optional[str] = None):
+        """Adds `src` as the source code for `module_name` in the exported package.
+
+        Args:
+            module_name (str): e.g. `my_package.my_subpackage`; the code will be saved to provide this module.
+            src (str): The python source code to save for this package
+            is_package (bool, optional): If True, this module is treated as a package. Packages are allowed to have submodules
+                (e.g. my_package.my_subpackage.my_subsubpackage), and resources can be saved inside them. Defaults to False.
+            dependencies (bool, optional): If True, we scan the source for dependencies (see :ref:`Dependencies`).
+            orig_file_name (str, optional): If present, used in logging to identify where the source came from. Defaults to None.
+        """
+        self.provided[module_name] = True
+        extension = '/__init__.py' if is_package else '.py'
+        filename = module_name.replace('.', '/') + extension
+        self._write(filename, src)
+        if dependencies:
+            package = module_name if is_package else module_name.rsplit('.', maxsplit=1)[0]
+            dep_list = find_files_source_depends_on(src, package)
+            if self.verbose:
+                def fmt_dep(mod, obj):
+                    return f'{mod}' if obj is None else f'{mod}.{obj}'
+                dep_str = ''.join(f'  {fmt_dep(mod, obj)}\n' for mod, obj in dep_list)
+                file_info = f'(from file {orig_file_name}) ' if orig_file_name is not None else ''
+                print(f"{module_name} {file_info}depends on:\n{dep_str}\n")
+
+            for dep_module_name, dep_module_obj in dep_list:
+                # handle the case where someone did something like `from pack import sub`
+                # where `sub` is a submodule. In this case we don't have to save pack, just sub.
+                # this ensures we don't pick up additional dependencies on pack.
+                # However, in the case where `sub` is not a submodule but an object, then we do have
+                # to save pack.
+                if dep_module_obj is not None:
+                    possible_submodule = f'{dep_module_name}.{dep_module_obj}'
+                    if self._module_exists(possible_submodule):
+                        self.require_module_if_not_provided(possible_submodule)
+                        # we don't need to save `pack`
+                        continue
+                if self._module_exists(dep_module_name):
+                    self.require_module_if_not_provided(dep_module_name)
+
+    def _module_exists(self, module_name: str) -> bool:
+        try:
+            self._import_module(module_name)
+            return True
+        except ModuleNotFoundError:
+            return False
+
+    def _get_source_of_module(self, module: types.ModuleType) -> str:
+        filename = getattr(module, '__file__', None)
+        result = None if filename is None else linecache.getlines(filename, module.__dict__)
+        if result is None:
+            raise ValueError(f'cannot save source for module "{module.__name__}" because '
+                             f'its source file "{filename}" could not be found.')
+        return ''.join(result)
+
+    def require_module_if_not_provided(self, module_name: str, dependencies=True):
+        if self._module_is_already_provided(module_name):
+            return
+        self.require_module(module_name, dependencies)
+
+    def require_module(self, module_name: str, dependencies=True):
+        """This is called by dependencies resolution when it finds that something in the package
+        depends on the module and it is not already present. It then decides how to provide that module.
+        The default resolution rules will mark the module as extern if it is part of the standard library,
+        and call `save_module` otherwise. Clients can subclass this object
+        and override this method to provide other behavior, such as automatically mocking out a whole class
+        of modules"""
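+
+        # For example (an illustrative sketch), a subclass could mock out an
+        # entire namespace while keeping the default behavior for the rest:
+        #
+        #     class MockTestsExporter(PackageExporter):
+        #         def require_module(self, name, dependencies):
+        #             if name.startswith('tests.'):
+        #                 self.mock_module(name)
+        #             else:
+        #                 super().require_module(name, dependencies)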
+
+        root_name = module_name.split('.', maxsplit=1)[0]
+        if self._can_implicitly_extern(root_name):
+            if self.verbose:
+                print(f'implicitly adding {root_name} to external modules '
+                      f'since it is part of the standard library and is a dependency.')
+            self.extern_module(root_name)
+            return
+
+        self.save_module(module_name, dependencies)
+
+    def save_module(self, module_name: str, dependencies=True):
+        """Save the code for `module_name` into the package. Code for the module is resolved using the `importers` path to find the
+        module object, and then using its `__file__` attribute to find the source code.
+        Args:
+            module_name (str): e.g. `my_package.my_subpackage`; the code will be saved to provide this module.
+            dependencies (bool, optional): If True, we scan the source for dependencies (see :ref:`Dependencies`).
+        """
+        module = self._import_module(module_name)
+        source = self._get_source_of_module(module)
+        self.save_source_string(module_name, source, hasattr(module, '__path__'), dependencies, module.__file__)
+
+
+    def _import_module(self, module_name):
+        last_err = None
+        for import_module in self.importers:
+            try:
+                return import_module(module_name)
+            except ModuleNotFoundError as err:
+                last_err = err
+        if last_err is not None:
+            raise last_err
+        else:
+            raise ModuleNotFoundError(module_name)
+
+    def _create_pickler(self, data_buf):
+        if self.importers == [importlib.import_module]:
+            # if we are using the normal import library system, then
+            # we can use the C implementation of pickle which is faster
+            return pickle.Pickler(data_buf, protocol=3)
+        else:
+            return CustomImportPickler(self._import_module, data_buf, protocol=3)
+
+    def save_pickle(self, package: str, resource: str, obj: Any, dependencies: bool = True):
+        """Save a python object to the archive using pickle. Equivalent to :func:`torch.save` but saving into
+        the archive rather than a stand-alone file. Standard pickle does not save the code, only the objects.
+        If `dependencies` is true, this method will also scan the pickled objects for which modules are required
+        to reconstruct them and save the relevant code.
+
+        To be able to save an object whose type is `my_module.MyObject`,
+        `my_module.MyObject` must resolve to the class of the object according to the `importers` order. When saving objects that
+        have previously been packaged, the importer's `import_module` method will need to be present in the `importers` list
+        for this to work.
+
+        Args:
+            package (str): The name of the module package this resource should go in (e.g. "my_package.my_subpackage")
+            resource (str): A unique name for the resource, used to identify it when loading.
+            obj (Any): The object to save, must be picklable.
+            dependencies (bool, optional): If True, we scan the source for dependencies (see :ref:`Dependencies`).
+        """
+        filename = self._filename(package, resource)
+        # Write the pickle data for `obj`
+        data_buf = io.BytesIO()
+        pickler = self._create_pickler(data_buf)
+        pickler.persistent_id = self._persistent_id
+        pickler.dump(obj)
+        data_value = data_buf.getvalue()
+
+        if dependencies:
+            all_dependencies = []
+            for opcode, arg, pos in pickletools.genops(data_value):
+                if opcode.name == 'GLOBAL':  # a global reference
+                    assert isinstance(arg, str)
+                    module, field = arg.split(' ')
+                    if module not in all_dependencies:
+                        all_dependencies.append(module)
+
+            if self.verbose:
+                dep_string = ''.join(f'  {dep}\n' for dep in all_dependencies)
+                print(f"{resource} depends on:\n{dep_string}\n")
+
+            for module_name in all_dependencies:
+                self.require_module_if_not_provided(module_name)
+
+        self._write(filename, data_value)
+
+    def save_text(self, package: str, resource: str, text: str):
+        """Save text data to the package
+
+        Args:
+            package (str): The name of the module package this resource should go in (e.g. "my_package.my_subpackage")
+            resource (str): A unique name for the resource, used to identify it when loading.
+            text (str): The contents to save
+        """
+        return self.save_binary(package, resource, text.encode('utf-8'))
+
+    def save_binary(self, package, resource, binary: bytes):
+        """Save raw bytes to the package.
+
+        Args:
+            package (str): The name of the module package this resource should go in (e.g. "my_package.my_subpackage")
+            resource (str): A unique name for the resource, used to identify it when loading.
+            binary (bytes): The data to save.
+        """
+        filename = self._filename(package, resource)
+        self._write(filename, binary)
+
+    def extern_module(self, module_name: str):
+        """Include `module` in the list of external modules the package can import.
+        This will prevent dependency discover from saving
+        it in the package. The importer will load an external module directly from the standard import system.
+        Code for extern modules must also exist in the process loading the package.
+
+        Args:
+            module_name (str): e.g. "my_package.my_subpackage" the name of the external module
+        """
+        if module_name not in self.external:
+            self.external.append(module_name)
+
+    def extern_modules(self, module_names: List[str]):
+        """Extern a list of modules. Convience wrapper for calling :meth:`extern_module` on many items.
+
+        Args:
+            module_names (List[str]): List of module names
+        """
+        for m in module_names:
+            self.extern_module(m)
+
+    def mock_module(self, module_name: str):
+        """Replace the code for `module_name` in the package with a fake implementation. This module will return a fake
+        object for any attribute accessed from it. Because we copy file-by-file, the dependency resolution will sometimes
+        find files that are imported by model files but whose functionality is never used
+        (e.g. custom serialization code or training helpers).
+        Use this function to mock this functionality out without having to modify the original code.
+
+        Args:
+            module_name (str): e.g. "my_package.my_subpackage" the name of the module to be mocked out.
+        """
+        if '_mock' not in self.provided:
+            self.save_source_file('_mock', str(Path(__file__).parent / '_mock.py'), dependencies=False)
+        is_package = hasattr(self._import_module(module_name), '__path__')
+        self.save_source_string(module_name, _MOCK_IMPL, is_package, dependencies=False)
+
+
+    def mock_modules(self, module_names):
+        """Mock a list of modules. Convience wrapper for calling :meth:`mock_module` on many items.
+
+        Args:
+            module_names (List[str]): List of module names
+        """
+        for module_name in module_names:
+            self.mock_module(module_name)
+
+    def _module_is_already_provided(self, qualified_name: str) -> bool:
+        for mod in self.external:
+            if qualified_name == mod or qualified_name.startswith(mod + '.'):
+                return True
+        return qualified_name in self.provided
+
+    def _persistent_id(self, obj):
+        # FIXME: the docs say that persistent_id should only return a string
+        # but torch store returns tuples. This works only in the binary protocol
+        # see
+        # https://docs.python.org/2/library/pickle.html#pickling-and-unpickling-external-objects
+        # https://github.com/python/cpython/blob/master/Lib/pickle.py#L527-L537
+        if torch.is_storage(obj):
+            storage_type = normalize_storage_type(type(obj))
+            obj_key = str(obj._cdata)
+            location = location_tag(obj)
+            self.serialized_storages[obj_key] = obj
+
+            return ('storage',
+                    storage_type,
+                    obj_key,
+                    location,
+                    obj.size())
+        return None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        self.close()
+
+    def _write(self, filename, str_or_bytes):
+        if isinstance(str_or_bytes, str):
+            str_or_bytes = str_or_bytes.encode('utf-8')
+        self.zip_file.write_record(filename, str_or_bytes, len(str_or_bytes))
+
+    def close(self):
+        """Write the package to the filesystem. Any calls after close are now invalid.
+        It is preferable to use resource guard syntax instead:
+
+            with PackageExporter("file.zip") as e:
+                ...
+        """
+        # Write each tensor to a file named data/<the_tensor_key> in the zip archive
+        for key in sorted(self.serialized_storages.keys()):
+            name = 'data/{}'.format(key)
+            storage = self.serialized_storages[key]
+            if storage.device.type == 'cpu':
+                # If it's on the CPU we can directly copy it into the zip file
+                num_bytes = storage.size() * storage.element_size()
+                self.zip_file.write_record(name, storage.data_ptr(), num_bytes)
+            else:
+                # Copy to a buffer, then serialize that
+                buf = io.BytesIO()
+                storage._write_file(buf, _should_read_directly(buf))
+                buf_value = buf.getvalue()
+                self._write(name, buf_value)
+        contents = ('\n'.join(self.external) + '\n')
+        self._write('extern_modules', contents)
+        del self.zip_file
+
+    def _filename(self, package, resource):
+        package_path = package.replace('.', '/')
+        resource = _normalize_path(resource)
+        return f'{package_path}/{resource}'
+
+    def _can_implicitly_extern(self, module_name: str):
+        return module_name == 'torch' or (module_name not in _DISALLOWED_MODULES
+                                          and _is_builtin_or_stdlib_module(self._import_module(module_name)))
+
+
+# even though these are in the standard library, we do not allow them to be
+# automatically externed since they offer a lot of system level access
+_DISALLOWED_MODULES = ['sys', 'io']
+
+def _is_builtin_or_stdlib_module(module: types.ModuleType) -> bool:
+    if module.__name__ in sys.builtin_module_names:
+        return True
+    filename = module.__file__
+    if filename is None:
+        return False
+    standard_lib = get_python_lib(standard_lib=True)
+    # this is often a subdirectory of standard_lib so we have to check
+    # that the file is in the standard_lib directory but not in this one
+    installed_libs = get_python_lib(standard_lib=False)
+    in_standard_lib = filename.startswith(standard_lib + '/')
+    in_installed_libs = filename.startswith(installed_libs + '/')
+    return in_standard_lib and not in_installed_libs
+
+_MOCK_IMPL = """\
+from _mock import MockedObject
+def __getattr__(attr: str):
+    return MockedObject(__name__ + '.' + attr)
+"""
+
+def _read_file(filename: str) -> str:
+    with open(filename, 'rb') as f:
+        b = f.read()
+        return b.decode('utf-8')
diff --git a/torch/package/find_file_dependencies.py b/torch/package/find_file_dependencies.py
new file mode 100644
index 0000000..25b501e
--- /dev/null
+++ b/torch/package/find_file_dependencies.py
@@ -0,0 +1,42 @@
+from typing import List, Optional, Tuple
+import ast
+from ._importlib import _resolve_name
+
+class _ExtractModuleReferences(ast.NodeVisitor):
+    """
+    Extract the modules (and objects imported from them) that a block of source code depends on
+    """
+
+    @classmethod
+    def run(cls, src: str, package: str) -> List[Tuple[str, Optional[str]]]:
+        visitor = cls(package)
+        tree = ast.parse(src)
+        visitor.visit(tree)
+        return list(visitor.references.keys())
+
+    def __init__(self, package):
+        super().__init__()
+        self.package = package
+        self.references = {}
+
+    def _absmodule(self, module_name: str, level: int) -> str:
+        if level > 0:
+            return _resolve_name(module_name, self.package, level)
+        return module_name
+
+    def visit_Import(self, node):
+        for alias in node.names:
+            self.references[(alias.name, None)] = True
+
+    def visit_ImportFrom(self, node):
+        name = self._absmodule(node.module, 0 if node.level is None else node.level)
+        for alias in node.names:
+            # from my_package import foo
+            # foo may be a module, so we have to add it to the list of
+            # potential references, if import of it fails, we will ignore it
+            if alias.name != '*':
+                self.references[(name, alias.name)] = True
+            else:
+                self.references[(name, None)] = True
+
+find_files_source_depends_on = _ExtractModuleReferences.run
diff --git a/torch/package/importer.py b/torch/package/importer.py
new file mode 100644
index 0000000..59c7cd9
--- /dev/null
+++ b/torch/package/importer.py
@@ -0,0 +1,388 @@
+from typing import List, Callable, Dict, Optional, Any, Union
+import builtins
+import importlib
+from torch.serialization import _load
+import pickle
+import torch
+import _compat_pickle  # type: ignore
+import types
+import os.path
+
+from ._importlib import _normalize_line_endings, _resolve_name, _sanity_check, _calc___package__, \
+    _normalize_path
+from ._mock_zipreader import MockZipReader
+
+class PackageImporter:
+    """Importers allow you to load code written to packages by PackageExporter.
+    Code is loaded in a hermetic way, using files from the package
+    rather than the normal python import system. This allows
+    for the packaging of PyTorch model code and data so that it can be run
+    on a server or used in the future for transfer learning.
+
+    The importer for packages ensures that code in the package can only be loaded from
+    within the package, except for modules explicitly listed as external during export.
+    The file `extern_modules` in the zip archive lists all the modules that a package externally depends on.
+    This prevents "implicit" dependencies where the package runs locally because it is importing
+    a locally-installed package, but then fails when the package is copied to another machine.
+    """
+
+    modules : Dict[str, Optional[types.ModuleType]]
+    """The dictionary of already loaded modules from this package, equivalent to `sys.modules` but
+    local to this importer.
+    """
+
+    def __init__(self, filename: str, module_allowed: Callable[[str], bool] = lambda module_name: True):
+        """Open `filename` for importing. This checks that the imported package only requires modules
+        allowed by `module_allowed`
+
+        Args:
+            filename (str): archive to load. Can also be a directory of the unzipped files in the archive
+                for easy debugging and editing.
+            module_allowed (Callable[[str], bool], optional): A method to determine if an externally provided module
+                should be allowed. Can be used to ensure packages loaded do not depend on modules that the server
+                does not support. Defaults to allowing anything.
+
+        Raises:
+            ImportError: If the package will use a disallowed module.
+        """
+        self.filename = filename
+        self.zip_reader : Any
+        if not os.path.isdir(self.filename):
+            self.zip_reader = torch._C.PyTorchFileReader(self.filename)
+        else:
+            self.zip_reader = MockZipReader(self.filename)
+
+        self.root = _PackageNode(None)
+        self.modules = {}
+        self.extern_modules = self._read_extern()
+
+        for extern_module in self.extern_modules:
+            if not module_allowed(extern_module):
+                raise ImportError(f"package '{filename}' needs the external module '{extern_module}' "
+                                  f"but that module has been disallowed")
+            self._add_extern(extern_module)
+
+        for filename in self.zip_reader.get_all_records():
+            self._add_file(filename)
+
+        self.patched_builtins = builtins.__dict__.copy()
+        self.patched_builtins['__import__'] = self.__import__
+        # allow pickles from archive using `import resources`
+        self.modules['resources'] = self  # type: ignore
+
+        # used for torch.serialization._load
+        self.Unpickler = lambda *args, **kwargs: _UnpicklerWrapper(self, *args, **kwargs)
+
+    def import_module(self, name: str, package=None):
+        """Load a module from the package if it hasn't already been loaded, and then return
+        the module. Modules are loaded locally
+        to the importer and will appear in `self.modules` rather than `sys.modules`
+
+        Args:
+            name (str): Fully qualified name of the module to load.
+            package (Any, optional): Unused, but present to match the signature of importlib.import_module. Defaults to None.
+
+        Returns:
+            types.ModuleType: the (possibly already) loaded module.
+        """
+        return self._gcd_import(name)
+
+    def load_binary(self, package: str, resource: str) -> bytes:
+        """Load raw bytes.
+
+        Args:
+            package (str): The name of module package (e.g. "my_package.my_subpackage")
+            resource (str): The unique name for the resource.
+
+        Returns:
+            bytes: The loaded data.
+        """
+
+        path = self._zipfile_path(package, resource)
+        return self.zip_reader.get_record(path)
+
+    def load_text(self, package: str, resource: str, encoding: str = 'utf-8', errors: str = 'strict') -> str:
+        """Load a string.
+
+        Args:
+            package (str): The name of module package (e.g. "my_package.my_subpackage")
+            resource (str): The unique name for the resource.
+            encoding (str, optional): Passed to `decode`. Defaults to 'utf-8'.
+            errors (str, optional): Passed to `decode`. Defaults to 'strict'.
+
+        Returns:
+            str: The loaded text.
+        """
+        data = self.load_binary(package, resource)
+        return data.decode(encoding, errors)
+
+    def load_pickle(self, package: str, resource: str, map_location=None) -> Any:
+        """Unpickles the resource from the package, loading any modules that are needed to construct the objects
+        using :meth:`import_module`
+
+        Args:
+            package (str): The name of module package (e.g. "my_package.my_subpackage")
+            resource (str): The unique name for the resource.
+            map_location: Passed to `torch.load` to determine how tensors are mapped to devices. Defaults to None.
+
+        Returns:
+            Any: the unpickled object.
+        """
+        pickle_file = self._zipfile_path(package, resource)
+        return _load(self.zip_reader, map_location, self, pickle_file=pickle_file)
+
+
+    def _read_extern(self):
+        return self.zip_reader.get_record('extern_modules').decode('utf-8').splitlines(keepends=False)
+
+    def _make_module(self, name: str, filename: Optional[str], is_package: bool):
+        spec = importlib.machinery.ModuleSpec(name, self, is_package=is_package)  # type: ignore
+        module = importlib.util.module_from_spec(spec)
+        self.modules[name] = module
+        ns = module.__dict__
+        ns['__spec__'] = spec
+        ns['__loader__'] = self
+        ns['__file__'] = filename
+        ns['__cached__'] = None
+        ns['__builtins__'] = self.patched_builtins
+        if filename is not None:
+            code = self._compile_source(filename)
+            exec(code, ns)
+        return module
+
+    def _load_module(self, name: str):
+        cur : _PathNode = self.root
+        for atom in name.split('.'):
+            if not isinstance(cur, _PackageNode) or atom not in cur.children:
+                raise ModuleNotFoundError(
+                    f'No module named "{name}" in self-contained archive "{self.filename}"'
+                    f' and the module is also not in the list of allowed external modules: {self.extern_modules}')
+            cur = cur.children[atom]
+            if isinstance(cur, _ExternNode):
+                module = self.modules[name] = importlib.import_module(name)
+                return module
+        return self._make_module(name, cur.source_file, isinstance(cur, _PackageNode))  # type: ignore
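+
+    # For example, given an archive containing 'a/__init__.py' and 'a/b.py',
+    # importing 'a.b' walks root -> _PackageNode for 'a' -> _ModuleNode for
+    # 'a/b.py'; hitting an _ExternNode anywhere on the path defers to the
+    # normal import system instead.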
+
+    def _compile_source(self, fullpath):
+        source = self.zip_reader.get_record(fullpath)
+        source = _normalize_line_endings(source)
+        return compile(source, fullpath, 'exec', dont_inherit=True)
+
+    # note: named `get_source` so that linecache can find the source
+    # when this is the __loader__ of a module.
+    def get_source(self, module_name) -> str:
+        module = self.import_module(module_name)
+        return self.zip_reader.get_record(module.__file__).decode('utf-8')
+
+    # note: copied from cpython's import code, with call to create module replaced with _make_module
+    def _do_find_and_load(self, name):
+        path = None
+        parent = name.rpartition('.')[0]
+        if parent:
+            if parent not in self.modules:
+                self._gcd_import(parent)
+            # Crazy side-effects!
+            if name in self.modules:
+                return self.modules[name]
+            parent_module = self.modules[parent]
+            try:
+                path = parent_module.__path__  # type: ignore
+            except AttributeError:
+                msg = (_ERR_MSG + '; {!r} is not a package').format(name, parent)
+                raise ModuleNotFoundError(msg, name=name) from None
+
+        module = self._load_module(name)
+
+        if parent:
+            # Set the module as an attribute on its parent.
+            parent_module = self.modules[parent]
+            if parent_module.__loader__ is self:  # type: ignore
+                setattr(parent_module, name.rpartition('.')[2], module)
+        return module
+
+    # note: copied from cpython's import code
+    def _find_and_load(self, name):
+        module = self.modules.get(name, _NEEDS_LOADING)
+        if module is _NEEDS_LOADING:
+            return self._do_find_and_load(name)
+
+        if module is None:
+            message = ('import of {} halted; '
+                       'None in sys.modules'.format(name))
+            raise ModuleNotFoundError(message, name=name)
+
+        return module
+
+    def _gcd_import(self, name, package=None, level=0):
+        """Import and return the module based on its name, the package the call is
+        being made from, and the level adjustment.
+
+        This function represents the greatest common denominator of functionality
+        between import_module and __import__. This includes setting __package__ if
+        the loader did not.
+
+        """
+        _sanity_check(name, package, level)
+        if level > 0:
+            name = _resolve_name(name, package, level)
+
+        return self._find_and_load(name)
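+
+    # For example, a relative import such as `from .subpackage import result`
+    # inside 'my_package/__init__.py' arrives here with name='subpackage',
+    # package='my_package', level=1, and _resolve_name produces the absolute
+    # name 'my_package.subpackage'.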
+
+    # note: copied from cpython's import code
+    def _handle_fromlist(self, module, fromlist, *, recursive=False):
+        """Figure out what __import__ should return.
+
+        The import_ parameter is a callable which takes the name of module to
+        import. It is required to decouple the function from assuming importlib's
+        import implementation is desired.
+
+        """
+        # The hell that is fromlist ...
+        # If a package was imported, try to import stuff from fromlist.
+        if hasattr(module, '__path__'):
+            for x in fromlist:
+                if not isinstance(x, str):
+                    if recursive:
+                        where = module.__name__ + '.__all__'
+                    else:
+                        where = "``from list''"
+                    raise TypeError(f"Item in {where} must be str, "
+                                    f"not {type(x).__name__}")
+                elif x == '*':
+                    if not recursive and hasattr(module, '__all__'):
+                        self._handle_fromlist(module, module.__all__,
+                                              recursive=True)
+                elif not hasattr(module, x):
+                    from_name = '{}.{}'.format(module.__name__, x)
+                    try:
+                        self._gcd_import(from_name)
+                    except ModuleNotFoundError as exc:
+                        # Backwards-compatibility dictates we ignore failed
+                        # imports triggered by fromlist for modules that don't
+                        # exist.
+                        if (exc.name == from_name and
+                           self.modules.get(from_name, _NEEDS_LOADING) is not None):
+                            continue
+                        raise
+        return module
+
+    def __import__(self, name, globals=None, locals=None, fromlist=(), level=0):
+        if level == 0:
+            module = self._gcd_import(name)
+        else:
+            globals_ = globals if globals is not None else {}
+            package = _calc___package__(globals_)
+            module = self._gcd_import(name, package, level)
+        if not fromlist:
+            # Return up to the first dot in 'name'. This is complicated by the fact
+            # that 'name' may be relative.
+            if level == 0:
+                return self._gcd_import(name.partition('.')[0])
+            elif not name:
+                return module
+            else:
+                # Figure out where to slice the module's name up to the first dot
+                # in 'name'.
+                cut_off = len(name) - len(name.partition('.')[0])
+                # Slice end needs to be positive to alleviate need to special-case
+                # when ``'.' not in name``.
+                return self.modules[module.__name__[:len(module.__name__) - cut_off]]
+        else:
+            return self._handle_fromlist(module, fromlist)
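+
+    # For example, `from my_package import my_module` compiles to
+    # __import__('my_package', ..., fromlist=('my_module',), level=0): the
+    # 'my_package' module is returned after _handle_fromlist has loaded the
+    # 'my_module' submodule from the archive.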
+
+    def _get_package(self, package):
+        """Take a package name or module object and return the module.
+
+        If a name, the module is imported.  If the passed or imported module
+        object is not a package, raise an exception.
+        """
+        if hasattr(package, '__spec__'):
+            if package.__spec__.submodule_search_locations is None:
+                raise TypeError('{!r} is not a package'.format(
+                    package.__spec__.name))
+            else:
+                return package
+        else:
+            module = self.import_module(package)
+            if module.__spec__.submodule_search_locations is None:
+                raise TypeError('{!r} is not a package'.format(package))
+            else:
+                return module
+
+    def _zipfile_path(self, package, resource):
+        package = self._get_package(package)
+        resource = _normalize_path(resource)
+        assert package.__loader__ is self
+        return f"{package.__name__.replace('.', '/')}/{resource}"
+
+    def _get_or_create_package(self, atoms: List[str]) -> 'Union[_PackageNode, _ExternNode]':
+        cur = self.root
+        for i, atom in enumerate(atoms):
+            node = cur.children.get(atom, None)
+            if node is None:
+                node = cur.children[atom] = _PackageNode(None)
+            if isinstance(node, _ExternNode):
+                return node
+            if isinstance(node, _ModuleNode):
+                name = ".".join(atoms[:i])
+                raise ImportError(f'inconsistent module structure. module {name} is not a package, but has submodules')
+            assert isinstance(node, _PackageNode)
+            cur = node
+        return cur
+
+    def _add_file(self, filename: str):
+        *prefix, last = filename.split('/')
+        package = self._get_or_create_package(prefix)
+        if isinstance(package, _ExternNode):
+            raise ImportError(f'inconsistent module structure. package contains a module file {filename}'
+                              f' that lies inside a module marked external.')
+        if last == '__init__.py':
+            package.source_file = filename
+        elif last.endswith('.py'):
+            # slice off the '.py' suffix; rstrip('.py') would wrongly strip
+            # any trailing '.', 'p', or 'y' characters from the module name
+            module_name = last[:-len('.py')]
+            package.children[module_name] = _ModuleNode(filename)
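+
+    # For example, adding 'my_package/__init__.py' records the source file on
+    # the _PackageNode for 'my_package', while adding 'my_package/utils.py'
+    # creates a _ModuleNode child named 'utils' under that package node.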
+
+    def _add_extern(self, extern_name: str):
+        *prefix, last = extern_name.split('.')
+        package = self._get_or_create_package(prefix)
+        if isinstance(package, _ExternNode):
+            return  # the shorter extern covers this extern case
+        package.children[last] = _ExternNode()
+
+
+_NEEDS_LOADING = object()
+_ERR_MSG_PREFIX = 'No module named '
+_ERR_MSG = _ERR_MSG_PREFIX + '{!r}'
+
+class _UnpicklerWrapper(pickle._Unpickler):  # type: ignore
+    def __init__(self, importer, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._importer = importer
+
+    def find_class(self, module, name):
+        # Subclasses may override this.
+        if self.proto < 3 and self.fix_imports:
+            if (module, name) in _compat_pickle.NAME_MAPPING:
+                module, name = _compat_pickle.NAME_MAPPING[(module, name)]
+            elif module in _compat_pickle.IMPORT_MAPPING:
+                module = _compat_pickle.IMPORT_MAPPING[module]
+        mod = self._importer.import_module(module)
+        return getattr(mod, name)
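+
+    # This override is what makes unpickling hermetic: each pickled class
+    # reference, e.g. ('my_package.my_module', 'MyClass') (hypothetical
+    # names), is resolved through the importer's import_module, so classes
+    # come from the archive rather than from a locally-installed copy.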
+
+class _PathNode:
+    pass
+
+class _PackageNode(_PathNode):
+    def __init__(self, source_file: Optional[str]):
+        self.source_file = source_file
+        self.children : Dict[str, _PathNode] = {}
+
+class _ModuleNode(_PathNode):
+    __slots__ = ['source_file']
+
+    def __init__(self, source_file: str):
+        self.source_file = source_file
+
+class _ExternNode(_PathNode):
+    pass
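+
+# Example tree (hypothetical) for an archive containing 'a/__init__.py' and
+# 'a/b.py', with 'torch' marked extern:
+#
+#   root (_PackageNode)
+#   ├── 'a': _PackageNode(source_file='a/__init__.py')
+#   │   └── 'b': _ModuleNode(source_file='a/b.py')
+#   └── 'torch': _ExternNode()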
diff --git a/torch/serialization.py b/torch/serialization.py
index c68c1ff..1c05767 100644
--- a/torch/serialization.py
+++ b/torch/serialization.py
@@ -821,7 +821,7 @@
     return restore_location
 
 
-def _load(zip_file, map_location, pickle_module, **pickle_load_args):
+def _load(zip_file, map_location, pickle_module, pickle_file='data.pkl', **pickle_load_args):
     restore_location = _get_restore_location(map_location)
 
     loaded_storages = {}
@@ -847,7 +847,7 @@
         return storage
 
     # Load the data (which may in turn use `persistent_load` to load tensors)
-    data_file = io.BytesIO(zip_file.get_record('data.pkl'))
+    data_file = io.BytesIO(zip_file.get_record(pickle_file))
     unpickler = pickle_module.Unpickler(data_file, **pickle_load_args)
     unpickler.persistent_load = persistent_load
     result = unpickler.load()