@@ -135,8 +135,20 @@ def test_has_storage(self):
 
     @unittest.skipIf(not TEST_NUMPY, "Numpy not found")
     def test_has_storage_numpy(self):
-        arr = np.array([], dtype=np.float32)
-        self.assertIsNotNone(torch.Tensor(arr).storage())
+        for dtype in [np.float32, np.float64, np.int64,
+                      np.int32, np.int16, np.uint8]:
+            arr = np.array([1], dtype=dtype)
+            self.assertIsNotNone(torch.FloatTensor(arr).storage())
+            self.assertIsNotNone(torch.DoubleTensor(arr).storage())
+            self.assertIsNotNone(torch.IntTensor(arr).storage())
+            self.assertIsNotNone(torch.LongTensor(arr).storage())
+            self.assertIsNotNone(torch.ByteTensor(arr).storage())
+            if torch.cuda.is_available():
+                self.assertIsNotNone(torch.cuda.FloatTensor(arr).storage())
+                self.assertIsNotNone(torch.cuda.DoubleTensor(arr).storage())
+                self.assertIsNotNone(torch.cuda.IntTensor(arr).storage())
+                self.assertIsNotNone(torch.cuda.LongTensor(arr).storage())
+                self.assertIsNotNone(torch.cuda.ByteTensor(arr).storage())
 
     def _testSelection(self, torchfn, mathfn):
         # contiguous
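
A minimal standalone sketch (not part of the diff) of the behavior the updated test exercises: building typed tensors from a numpy array and checking that each result owns a storage. It assumes a PyTorch install where the legacy typed constructors (torch.FloatTensor and friends) accept numpy arrays; the array contents and dtype here are only for illustration.

import numpy as np
import torch

arr = np.array([1], dtype=np.int16)
for ctor in (torch.FloatTensor, torch.DoubleTensor, torch.IntTensor,
             torch.LongTensor, torch.ByteTensor):
    t = ctor(arr)                    # copies and casts the numpy data into a new tensor
    assert t.storage() is not None   # each constructed tensor should expose a storage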