Fix "Placeholder tensor is empty" error for relu on MPS (#118965)
Fixes #118845
Pull Request resolved: https://github.com/pytorch/pytorch/pull/118965
Approved by: https://github.com/malfet
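
Before this change, calling `relu` (or `relu_`) on an empty tensor on the MPS backend hit the "Placeholder tensor is empty" error tracked in #118845; the new test cases below cover exactly that input. A minimal repro sketch (assumes a PyTorch build with MPS available; the tensor contents and the print are illustrative, not part of the patch):

```python
import torch

if torch.backends.mps.is_available():
    x = torch.tensor([], device="mps")  # empty tensor on the MPS device
    y = torch.relu(x)                   # previously raised the placeholder error, now returns an empty tensor
    x.relu_()                           # in-place variant, also exercised by the new tests
    print(y.numel())                    # 0
```

The updated tests can be run with something like `python test/test_mps.py -k Relu` (assuming the test runner forwards unittest's `-k` name filter).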
diff --git a/test/test_mps.py b/test/test_mps.py
index 186518b..6a8807b 100644
--- a/test/test_mps.py
+++ b/test/test_mps.py
@@ -1380,6 +1380,8 @@
             self._testReluInPlace(
                 np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
                 device="mps")
+            self._testRelu(np.array([]).astype(t), device="mps")
+            self._testReluInPlace(np.array([]).astype(t), device="mps")
 
 class MatmulTest(TestCaseMPS):
     def _helper(self, shape_tensor_1, shape_tensor_2, expand_tensor_1_shape=None, expand_tensor_2_shape=None):