[functorch] Fix CI: xfail randomness-artifact ops (dropout, rrelu) in vjp/jvp tests; remove now-passing vmap xfails (binary_cross_entropy, diag_embed, block_diag)
diff --git a/functorch/test/test_ops.py b/functorch/test/test_ops.py
index ee81267..ad68b98 100644
--- a/functorch/test/test_ops.py
+++ b/functorch/test/test_ops.py
@@ -179,6 +179,8 @@
 
 
 vjp_fail = {
+    xfail('nn.functional.dropout'),  # randomness testing artifact
+    xfail('nn.functional.rrelu'),  # randomness testing artifact
     xfail('linalg.cholesky'),
     xfail('linalg.inv'),
     xfail('linalg.matrix_power'),
@@ -234,6 +236,9 @@
 
     @ops(functorch_lagging_op_db + additional_op_db, allowed_dtypes=(torch.float,))
     @skipOps('TestOperators', 'test_jvp', set({
+        xfail('nn.functional.dropout'),  # randomness testing artifact; not actually a problem
+        xfail('nn.functional.rrelu'),  # randomness testing artifact; not actually a problem
+
         # See https://github.com/pytorch/pytorch/issues/69034
         # RuntimeError: expected scalar type double but found float
         xfail('minimum'),
diff --git a/functorch/test/test_vmap.py b/functorch/test/test_vmap.py
index 759a0c2..9d6a1af 100644
--- a/functorch/test/test_vmap.py
+++ b/functorch/test/test_vmap.py
@@ -2873,7 +2873,6 @@
         self._test_arithmetic(lambda x, y: x / y, device)
 
     @allowVmapFallbackUsage
-    @unittest.expectedFailure
     def test_binary_cross_entropy(self, device):
         x = F.sigmoid(torch.randn(3, 2, device=device, requires_grad=True))
         target = torch.rand(3, 2, device=device)
@@ -3163,7 +3162,6 @@
         xfail('cdist'),
         xfail('complex'),
         xfail('copysign'),
-        xfail('diag_embed'),
         xfail('dsplit'),
         xfail('eig'),
         xfail('fft.fftn'),
@@ -3223,7 +3221,6 @@
         xfail('linalg.multi_dot'),
         xfail('nanmean'),
         xfail('vstack'),
-        xfail('block_diag'),
         xfail('nn.functional.dropout'),
         xfail('nn.functional.conv2d', ''),
         xfail('nn.functional.batch_norm'),