Set seed at top-level of common.py (#3862)

Some tests, such as test_autograd.py, include random generation at the
top level. It's going to be tough to police these files to ensure that
all randomness only happens within a test, so just set the seed as soon
as args are parsed (as well as before each test).

torch.cuda.manual_seed_all is no longer needed since torch.manual_seed
also seeds the CUDA random number generator.
diff --git a/test/common.py b/test/common.py
index a63b0ea..c8e9506 100644
--- a/test/common.py
+++ b/test/common.py
@@ -19,7 +19,6 @@
 
 torch.set_default_tensor_type('torch.DoubleTensor')
 
-# set seed one time
 parser = argparse.ArgumentParser(add_help=False)
 parser.add_argument('--seed', type=int, default=123)
 parser.add_argument('--accept', action='store_true')
@@ -27,6 +26,7 @@
 SEED = args.seed
 ACCEPT = args.accept
 UNITTEST_ARGS = [sys.argv[0]] + remaining
+torch.manual_seed(SEED)
 
 
 def run_tests():
@@ -127,8 +127,6 @@
 
     def setUp(self):
         torch.manual_seed(SEED)
-        if torch.cuda.is_available():
-            torch.cuda.manual_seed_all(SEED)
 
     def assertTensorsSlowEqual(self, x, y, prec=None, message=''):
         max_err = 0