[Lint] Address F811 (redefinition of unused name)

flake8's F811 check fires when a name is bound again before the first
binding is ever used; for test classes this means the later definition
silently replaces the earlier one and only one of the two tests runs.
Fix the remaining F811 warnings and drop the code from the ignore list
so new shadowing is caught:

* test_cuda.py, test_torch.py: give duplicated test methods distinct
  names so each definition is collected under its own name instead of
  the later one shadowing the earlier one.
* test_utils.py: drop a redundant import of sys.
* torch/autograd/variable.py: remove duplicate floor(), ceil() and
  chunk() methods (each name was defined twice in the class).
* tox.ini: stop ignoring F811.
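
For context, a minimal standalone sketch (not taken from these files) of
the failure mode F811 catches in a unittest class: Python keeps only the
last definition bound to a name, so the first test below never runs and
the suite reports one test instead of two.

    import unittest

    class TestExample(unittest.TestCase):

        def test_serialization(self):
            # Never runs: the class attribute is rebound by the second def.
            self.assertEqual(1 + 1, 2)

        def test_serialization(self):
            # flake8 reports F811 here; this is the only definition that
            # unittest discovers, so the run reports 1 test, not 2.
            self.assertEqual(2 + 2, 4)

    if __name__ == '__main__':
        unittest.main()
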
diff --git a/test/test_cuda.py b/test/test_cuda.py
index b432ae2..9cc86ea 100644
--- a/test/test_cuda.py
+++ b/test/test_cuda.py
@@ -382,7 +382,7 @@
             self.assertEqual(z.get_device(), 0)
             self.assertIs(z.cuda(0), z)
 
-    def test_serialization(self):
+    def test_serialization_array_with_storage(self):
         x = torch.randn(5, 5).cuda()
         y = torch.IntTensor(2, 5).fill_(0).cuda()
         q = [x, y, x, y.storage()]
@@ -537,7 +537,7 @@
         self.assertIs(type(x_copy), type(x))
         self.assertEqual(x_copy.get_device(), x.get_device())
 
-    def test_serialization_empty(self):
+    def test_serialization_array_with_empty(self):
         x = [torch.randn(4, 4).cuda(), torch.cuda.FloatTensor()]
         with tempfile.NamedTemporaryFile() as f:
             torch.save(x, f)
diff --git a/test/test_torch.py b/test/test_torch.py
index 8756c9c..45c4d8c 100644
--- a/test/test_torch.py
+++ b/test/test_torch.py
@@ -1655,7 +1655,7 @@
         self._test_conv_corr_eq(lambda x, k: torch.xcorr3(x, k), reference)
 
     @unittest.skip("Not implemented yet")
-    def test_xcorr3_xcorr2_eq(self):
+    def test_xcorr3_xcorr2_eq_full(self):
         def reference(x, k, o3, o32):
             for i in range(x.size(1)):
                 for j in range(k.size(1)):
@@ -1663,7 +1663,7 @@
         self._test_conv_corr_eq(lambda x, k: torch.xcorr3(x, k, 'F'), reference)
 
     @unittest.skip("Not implemented yet")
-    def test_conv3_conv2_eq(self):
+    def test_conv3_conv2_eq_valid(self):
         def reference(x, k, o3, o32):
             for i in range(o3.size(1)):
                 for j in range(k.size(1)):
diff --git a/test/test_utils.py b/test/test_utils.py
index 3141f8b..039b7c5 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -6,7 +6,6 @@
 import random
 import tempfile
 import unittest
-import sys
 import traceback
 import torch
 import torch.cuda
diff --git a/torch/autograd/variable.py b/torch/autograd/variable.py
index b5b4b17..d359fbb 100644
--- a/torch/autograd/variable.py
+++ b/torch/autograd/variable.py
@@ -430,12 +430,6 @@
     def trunc(self):
         return Trunc()(self)
 
-    def floor(self):
-        return Floor()(self)
-
-    def ceil(self):
-        return Ceil()(self)
-
     def fmod(self, value):
         return Fmod(value)(self)
 
@@ -491,9 +485,6 @@
     def split(self, split_size, dim=0):
         return torch.split(self, split_size, dim)
 
-    def chunk(self, n_chunks, dim=0):
-        return torch.chunk(self, n_chunks, dim)
-
     def repeat(self, *repeats):
         if len(repeats) == 1 and isinstance(repeats[0], torch.Size):
             repeats = repeats[0]
diff --git a/tox.ini b/tox.ini
index 2227a86..0af4b51 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,4 +1,4 @@
 [flake8]
 max-line-length = 120
-ignore = E305,E402,E721,F401,F403,F405,F811,F821,F841
-exclude = venv,docs/src
+ignore = E305,E402,E721,F401,F403,F405,F821,F841
+exclude = docs/src,venv
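
With F811 no longer ignored, the fix can be verified locally (assuming
flake8 is installed) by selecting just this check over the affected trees:

    flake8 --select=F811 torch test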