Throw ValueError if alpha is None for ELU

Signed-off-by: Yong Tang <yong.tang.github@outlook.com>
diff --git a/tensorflow/python/keras/layers/advanced_activations.py b/tensorflow/python/keras/layers/advanced_activations.py
index 5e0e5c7..f73a12f 100644
--- a/tensorflow/python/keras/layers/advanced_activations.py
+++ b/tensorflow/python/keras/layers/advanced_activations.py
@@ -209,6 +209,9 @@
 
   def __init__(self, alpha=1.0, **kwargs):
     super(ELU, self).__init__(**kwargs)
+    if alpha is None:
+      raise ValueError('alpha of ELU layer '
+                       'cannot be None. Requires a float')
     self.supports_masking = True
     self.alpha = K.cast_to_floatx(alpha)
 
diff --git a/tensorflow/python/keras/layers/advanced_activations_test.py b/tensorflow/python/keras/layers/advanced_activations_test.py
index ff3d8af..4d2c360 100644
--- a/tensorflow/python/keras/layers/advanced_activations_test.py
+++ b/tensorflow/python/keras/layers/advanced_activations_test.py
@@ -109,6 +109,7 @@
     model.fit(np.ones((10, 10)), np.ones((10, 1)), batch_size=2)
 
   def test_leaky_relu_with_invalid_alpha(self):
+    # Test case for GitHub issue 46993.
     with self.assertRaisesRegex(
         ValueError, 'alpha of leaky Relu layer cannot be None'):
       testing_utils.layer_test(keras.layers.LeakyReLU,
@@ -116,6 +117,15 @@
                                input_shape=(2, 3, 4),
                                supports_masking=True)
 
+  def test_elu_with_invalid_alpha(self):
+    # Test case for GitHub issue 46993.
+    with self.assertRaisesRegex(
+        ValueError, 'alpha of ELU layer cannot be None'):
+      testing_utils.layer_test(keras.layers.ELU,
+                               kwargs={'alpha': None},
+                               input_shape=(2, 3, 4),
+                               supports_masking=True)
+
 
 if __name__ == '__main__':
   test.main()