Skip to content
Merged
Original file line number Diff line number Diff line change
Expand Up @@ -4487,6 +4487,17 @@ bool WarpctcOpInferSymbolicShape(
infer_context->GetShapeOrDataForValue(op->operand_source(0));
const std::vector<symbol::DimExpr> &logits_shape =
logits_shape_or_data.shape();
// Scan every dimension of the logits shape; a literal 0 in any dimension
// means the input tensor is empty (zero-size).
bool logits_0_size = false;
for (size_t i = 0; i < logits_shape.size(); ++i) {
if (logits_shape[i] == 0) {
logits_0_size = true;
break;
}
}
// Warpctc cannot operate on an empty input; fail fast with a clear error
// instead of propagating an invalid shape downstream.
if (logits_0_size) {
PADDLE_THROW(
common::errors::InvalidArgument("The input size can not be zero."));
}

symbol::DimExpr max_sequence_length, num_sequences;
symbol::DimExpr sequence_width = symbol::DimExpr(1);
Expand Down
3 changes: 3 additions & 0 deletions paddle/phi/infermeta/multiary.cc
Original file line number Diff line number Diff line change
Expand Up @@ -5637,6 +5637,9 @@ void WarpctcInferMeta(const MetaTensor& logits,
MetaTensor* loss,
MetaTensor* warpctcgrad) {
auto logits_dims = logits.dims();
// Reject empty logits up front: a product of the dims equal to 0 means at
// least one dimension is zero, i.e. the tensor holds no elements.
if (common::product(logits_dims) == 0) {
PADDLE_THROW(errors::InvalidArgument("The input size can not be zero."));
}
int num_sequences, sequence_width, max_sequence_length;

if (logits_length && labels_length) {
Expand Down
1 change: 1 addition & 0 deletions python/paddle/signal.py
Original file line number Diff line number Diff line change
Expand Up @@ -569,6 +569,7 @@ def istft(
fft_size = x.shape[-2]

if in_dynamic_mode():
assert x.size != 0, 'x should not be an empty tensor.'
if onesided:
assert (
fft_size == n_fft // 2 + 1
Expand Down
7 changes: 7 additions & 0 deletions test/legacy_test/test_signal.py
Original file line number Diff line number Diff line change
Expand Up @@ -1045,5 +1045,12 @@ def test_istft(self):
),


class TestIstftException_ZeroSize(unittest.TestCase):
    """istft must reject a zero-size input tensor with an AssertionError."""

    def test_istft(self):
        # A [5, 0] array has zero elements along its last axis.
        self.x = np.random.random([5, 0])
        empty_input = paddle.to_tensor(self.x)
        with self.assertRaises(AssertionError):
            paddle.signal.istft(empty_input, 512)


if __name__ == '__main__':
unittest.main()
16 changes: 16 additions & 0 deletions test/legacy_test/test_warpctc_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -613,8 +613,24 @@ def test_dygraph_with_lod():
reduction='none',
)

def test_dygraph_zero_size():
    # Logits with a zero-sized batch dimension: ctc_loss must reject them.
    zero_logits = np.random.uniform(0.1, 1.0, [0, 15]).astype("float32")
    # labels should not be blank
    raw_labels = np.random.randint(0, 15 - 1, [15, 1], dtype="int32")

    softmax = paddle.to_tensor(zero_logits)
    labels = paddle.to_tensor(raw_labels)

    paddle.nn.functional.ctc_loss(
        log_probs=softmax,
        labels=labels,
        input_lengths=None,
        label_lengths=None,
        reduction='none',
    )

paddle.disable_static()
self.assertRaises(ValueError, test_dygraph_with_lod)
self.assertRaises(ValueError, test_dygraph_zero_size)
paddle.enable_static()


Expand Down