20 changes: 12 additions & 8 deletions test/deprecated/legacy_test/test_program_deprecated.py
@@ -140,10 +140,12 @@ def test_copy_info_from_error(self):
 def build_program():
     main_program = paddle.static.Program()
     startup_program = paddle.static.Program()
-    with paddle.utils.unique_name.guard():
-        with paddle.static.program_guard(main_program, startup_program):
-            x = paddle.static.data(name='x', shape=[3, 2, 1])
-            out = paddle.static.nn.fc(x=x, size=1, num_flatten_dims=2)
+    with (
+        paddle.utils.unique_name.guard(),
+        paddle.static.program_guard(main_program, startup_program),
+    ):
+        x = paddle.static.data(name='x', shape=[3, 2, 1])
+        out = paddle.static.nn.fc(x=x, size=1, num_flatten_dims=2)
     return main_program
@@ -177,10 +179,12 @@ class TestProgramHash(unittest.TestCase):
     def build_program(self):
         main_program = paddle.static.Program()
         startup_program = paddle.static.Program()
-        with paddle.utils.unique_name.guard():
-            with paddle.static.program_guard(main_program, startup_program):
-                x = paddle.static.data(name='x', shape=[3, 2, 1])
-                out = paddle.static.nn.fc(x=x, size=1, num_flatten_dims=2)
+        with (
+            paddle.utils.unique_name.guard(),
+            paddle.static.program_guard(main_program, startup_program),
+        ):
+            x = paddle.static.data(name='x', shape=[3, 2, 1])
+            out = paddle.static.nn.fc(x=x, size=1, num_flatten_dims=2)
         return main_program

     def test_program_need_update(self):
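Every change in this pull request applies the same mechanical transformation: nested `with` blocks are collapsed into a single parenthesized `with` statement, which enters the same context managers in the same order but removes one indentation level per manager. Parenthesized context managers became official syntax in Python 3.10 (CPython 3.9 already accepted them thanks to the PEG parser introduced by PEP 617). A minimal, self-contained sketch of the equivalence, using `contextlib.nullcontext` rather than Paddle's guards:

    from contextlib import nullcontext

    # Before: nested form, one indentation level per context manager.
    with nullcontext("a") as a:
        with nullcontext("b") as b:
            print(a, b)

    # After: parenthesized form (Python 3.10+). Managers are entered
    # left to right and exited in reverse, exactly as when nested.
    with (
        nullcontext("a") as a,
        nullcontext("b") as b,
    ):
        print(a, b)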
@@ -579,10 +579,12 @@ def program_scope_guard(self):
         prog = base.Program()
         startup_prog = base.Program()
         scope = base.core.Scope()
-        with base.scope_guard(scope):
-            with base.program_guard(prog, startup_prog):
-                with base.unique_name.guard():
-                    yield
+        with (
+            base.scope_guard(scope),
+            base.program_guard(prog, startup_prog),
+            base.unique_name.guard(),
+        ):
+            yield


 if __name__ == '__main__':
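For context, `program_scope_guard` is a generator-based context manager (hence the bare `yield`): everything before the `yield` runs on entry, the caller's block runs while all three guards are active, and the guards unwind in reverse order on exit. A minimal sketch of that pattern, assuming the `contextlib.contextmanager` decorator (which sits above the lines shown in this hunk):

    import contextlib

    @contextlib.contextmanager
    def scope_guard_sketch():
        # Code before the yield runs on __enter__.
        print("set up scope, program, and name guards")
        try:
            yield  # the caller's with-block executes here
        finally:
            # Code after the yield runs on __exit__, even on error.
            print("tear down guards in reverse order")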
766 changes: 387 additions & 379 deletions test/deprecated/legacy_test/test_prune_deprecated.py

Large diffs are not rendered by default.

62 changes: 30 additions & 32 deletions test/deprecated/legacy_test/test_py_func_op_deprecated.py
@@ -176,38 +176,36 @@ def test_main(use_cuda, use_py_func_op):
     if use_cuda and not base.core.is_compiled_with_cuda():
         return None

-    with base.program_guard(base.Program(), base.Program()):
-        with base.scope_guard(base.core.Scope()):
-            gen = paddle.seed(1)
-            np.random.seed(1)
-            img = paddle.static.data(
-                name='image', shape=[-1, 784], dtype='float32'
-            )
-            label = paddle.static.data(
-                name='label', shape=[-1, 1], dtype='int64'
-            )
-            loss = simple_fc_net(img, label, use_py_func_op)
-            optimizer = paddle.optimizer.SGD(learning_rate=1e-3)
-            optimizer.minimize(loss)
-
-            place = base.CUDAPlace(0) if use_cuda else base.CPUPlace()
-            feeder = base.DataFeeder(feed_list=[img, label], place=place)
-            r = paddle.batch(reader, batch_size=10)
-
-            exe = base.Executor(place)
-            exe.run(base.default_startup_program())
-
-            train_cp = base.default_main_program()
-            fetch_list = [loss]
-
-            ret = []
-            for epoch_id in range(2):
-                for d in r():
-                    (L,) = exe.run(
-                        train_cp, feed=feeder.feed(d), fetch_list=fetch_list
-                    )
-                    ret.append(L)
-            return np.array(ret)
+    with (
+        base.program_guard(base.Program(), base.Program()),
+        base.scope_guard(base.core.Scope()),
+    ):
+        gen = paddle.seed(1)
+        np.random.seed(1)
+        img = paddle.static.data(name='image', shape=[-1, 784], dtype='float32')
+        label = paddle.static.data(name='label', shape=[-1, 1], dtype='int64')
+        loss = simple_fc_net(img, label, use_py_func_op)
+        optimizer = paddle.optimizer.SGD(learning_rate=1e-3)
+        optimizer.minimize(loss)
+
+        place = base.CUDAPlace(0) if use_cuda else base.CPUPlace()
+        feeder = base.DataFeeder(feed_list=[img, label], place=place)
+        r = paddle.batch(reader, batch_size=10)
+
+        exe = base.Executor(place)
+        exe.run(base.default_startup_program())
+
+        train_cp = base.default_main_program()
+        fetch_list = [loss]
+
+        ret = []
+        for epoch_id in range(2):
+            for d in r():
+                (L,) = exe.run(
+                    train_cp, feed=feeder.feed(d), fetch_list=fetch_list
+                )
+                ret.append(L)
+        return np.array(ret)


 class TestPyFuncOpUseExecutor(unittest.TestCase):
18 changes: 10 additions & 8 deletions test/deprecated/legacy_test/utils.py
@@ -184,14 +184,16 @@ def impl(*args, **kwargs):
         original_flag_value = get_flags(pt_flag)[pt_flag]
         if os.environ.get('FLAGS_use_stride_kernel', False):
             return
-        with static.scope_guard(static.Scope()):
-            with static.program_guard(static.Program()):
-                with EnvironmentVariableGuard(ENV_ENABLE_PIR_WITH_PT, True):
-                    try:
-                        set_flags({pt_flag: True})
-                        ir_outs = fn(*args, **kwargs)
-                    finally:
-                        set_flags({pt_flag: original_flag_value})
+        with (
+            static.scope_guard(static.Scope()),
+            static.program_guard(static.Program()),
+            EnvironmentVariableGuard(ENV_ENABLE_PIR_WITH_PT, True),
+        ):
+            try:
+                set_flags({pt_flag: True})
+                ir_outs = fn(*args, **kwargs)
+            finally:
+                set_flags({pt_flag: original_flag_value})
         return ir_outs

     return impl
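Because this helper composes several guards around a try/finally, it is also the clearest place to note the pre-3.10 alternative: `contextlib.ExitStack` gives the same flat composition without the parenthesized syntax. A sketch under that assumption, with `nullcontext` standing in for the real guards:

    from contextlib import ExitStack, nullcontext

    # ExitStack enters each manager in order and unwinds them LIFO,
    # matching the semantics of the parenthesized with-statement.
    with ExitStack() as stack:
        stack.enter_context(nullcontext())  # e.g. static.scope_guard(...)
        stack.enter_context(nullcontext())  # e.g. static.program_guard(...)
        stack.enter_context(nullcontext())  # e.g. EnvironmentVariableGuard(...)
        ...  # body runs with all guards active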
30 changes: 16 additions & 14 deletions test/deprecated/mkldnn/test_mkldnn_cpu_bfloat16_pass_deprecated.py
@@ -28,23 +28,25 @@
 class TestMKLDNNCpuBfloat16Pass(InferencePassTest):
     def setUp(self):
         self.init_data()
-        with paddle.pir_utils.OldIrGuard():
-            with base.program_guard(self.main_program, self.startup_program):
-                x = paddle.static.data(
-                    name='x', shape=[-1, *self.shape_x], dtype=self.d_type
-                )
+        with (
+            paddle.pir_utils.OldIrGuard(),
+            base.program_guard(self.main_program, self.startup_program),
+        ):
+            x = paddle.static.data(
+                name='x', shape=[-1, *self.shape_x], dtype=self.d_type
+            )

-                out = paddle.transpose(x, perm=[0, 1, 2, 3])
-                out = paddle.reshape(out, [0, 0, 0, 0])
+            out = paddle.transpose(x, perm=[0, 1, 2, 3])
+            out = paddle.reshape(out, [0, 0, 0, 0])

-                out = paddle.static.nn.fc(out, size=1)
+            out = paddle.static.nn.fc(out, size=1)

-                self.feeds = {
-                    "x": np.random.random([self.bs, *self.shape_x]).astype(
-                        self.d_type
-                    )
-                }
-                self.fetch_list = [out]
+            self.feeds = {
+                "x": np.random.random([self.bs, *self.shape_x]).astype(
+                    self.d_type
+                )
+            }
+            self.fetch_list = [out]

     def init_data(self):
         self.bs = 8
36 changes: 18 additions & 18 deletions test/deprecated/mkldnn/test_mkldnn_elt_act_fuse_pass_deprecated.py
@@ -33,24 +33,24 @@ class ElementwiseActivationOneDNNFusePassTest(InferencePassTest):

     def setUp(self):
         self.set_params()
-        with paddle.pir_utils.OldIrGuard():
-            with base.program_guard(self.main_program, self.startup_program):
-                data_A = paddle.static.data(
-                    name="data_A", shape=[-1, 3, 100, 100], dtype="float32"
-                )
-                data_B = paddle.static.data(
-                    name="data_B", shape=[-1, 3, 100, 100], dtype="float32"
-                )
-                elt_out = self.operand(data_A, data_B)
-                if self.act is not None:
-                    if self.act_beta is not None:
-                        elt_out = self.act(
-                            elt_out, self.act_alpha, self.act_beta
-                        )
-                    elif self.act_alpha is not None:
-                        elt_out = self.act(elt_out, self.act_alpha)
-                    else:
-                        elt_out = self.act(elt_out)
+        with (
+            paddle.pir_utils.OldIrGuard(),
+            base.program_guard(self.main_program, self.startup_program),
+        ):
+            data_A = paddle.static.data(
+                name="data_A", shape=[-1, 3, 100, 100], dtype="float32"
+            )
+            data_B = paddle.static.data(
+                name="data_B", shape=[-1, 3, 100, 100], dtype="float32"
+            )
+            elt_out = self.operand(data_A, data_B)
+            if self.act is not None:
+                if self.act_beta is not None:
+                    elt_out = self.act(elt_out, self.act_alpha, self.act_beta)
+                elif self.act_alpha is not None:
+                    elt_out = self.act(elt_out, self.act_alpha)
+                else:
+                    elt_out = self.act(elt_out)

         self.feeds = {
             "data_A": np.random.random((1, 3, 100, 100)).astype("float32"),
@@ -33,21 +33,21 @@ def init_data(self):
         self.enable_mkldnn = True

     def make_network(self):
-        with paddle.pir_utils.OldIrGuard():
-            with base.program_guard(self.main_program, self.startup_program):
-                x = paddle.static.data(
-                    name='x', shape=[-1, *self.shape_x], dtype=self.d_type
-                )
-                y = paddle.static.data(
-                    name='y', shape=[-1, *self.shape_y], dtype=self.d_type
-                )
-                out = paddle.matmul(x, y)
-                out = paddle.transpose(out, perm=[0, 2, 1, 3])
-                out = paddle.reshape(
-                    out, [0, 0, self.shape_y[0] * self.shape_y[2]]
-                )
-
-                out = F.relu(out)
+        with (
+            paddle.pir_utils.OldIrGuard(),
+            base.program_guard(self.main_program, self.startup_program),
+        ):
+            x = paddle.static.data(
+                name='x', shape=[-1, *self.shape_x], dtype=self.d_type
+            )
+            y = paddle.static.data(
+                name='y', shape=[-1, *self.shape_y], dtype=self.d_type
+            )
+            out = paddle.matmul(x, y)
+            out = paddle.transpose(out, perm=[0, 2, 1, 3])
+            out = paddle.reshape(out, [0, 0, self.shape_y[0] * self.shape_y[2]])
+
+            out = F.relu(out)
         return out

     def setUp(self):
@@ -78,18 +78,20 @@ def init_data(self):

 class TestMKLDNNMatmulOpNotFusedWrongTransposeAxis(TestMKLDNNMatmulFuseOp):
     def make_network(self):
-        with paddle.pir_utils.OldIrGuard():
-            with base.program_guard(self.main_program, self.startup_program):
-                x = paddle.static.data(
-                    name='x', shape=[-1, *self.shape_x], dtype=self.d_type
-                )
-                y = paddle.static.data(
-                    name='y', shape=[-1, *self.shape_y], dtype=self.d_type
-                )
-                out = paddle.matmul(x, y)
-                out = paddle.transpose(out, perm=[0, 1, 2, 3])
-                out = paddle.reshape(out, [0, 0, 0, 0])
-                out = paddle.static.nn.fc(out, size=1)
+        with (
+            paddle.pir_utils.OldIrGuard(),
+            base.program_guard(self.main_program, self.startup_program),
+        ):
+            x = paddle.static.data(
+                name='x', shape=[-1, *self.shape_x], dtype=self.d_type
+            )
+            y = paddle.static.data(
+                name='y', shape=[-1, *self.shape_y], dtype=self.d_type
+            )
+            out = paddle.matmul(x, y)
+            out = paddle.transpose(out, perm=[0, 1, 2, 3])
+            out = paddle.reshape(out, [0, 0, 0, 0])
+            out = paddle.static.nn.fc(out, size=1)
         return out

@@ -102,22 +104,22 @@ def init_data(self):
         self.enable_mkldnn = True

     def make_network(self):
-        with paddle.pir_utils.OldIrGuard():
-            with base.program_guard(self.main_program, self.startup_program):
-                x = paddle.static.data(
-                    name='x', shape=[-1, *self.shape_x], dtype=self.d_type
-                )
-                y = paddle.static.data(
-                    name='y', shape=[-1, *self.shape_y], dtype=self.d_type
-                )
-                out = paddle.matmul(x, y)
-                out = paddle.transpose(out, perm=[0, 2, 1, 3])
-                out = paddle.transpose(out, perm=[0, 1, 2, 3])  # breaks pattern
-                out = paddle.reshape(
-                    out, [0, 0, self.shape_y[0] * self.shape_y[2]]
-                )
-
-                out = F.relu(out)
+        with (
+            paddle.pir_utils.OldIrGuard(),
+            base.program_guard(self.main_program, self.startup_program),
+        ):
+            x = paddle.static.data(
+                name='x', shape=[-1, *self.shape_x], dtype=self.d_type
+            )
+            y = paddle.static.data(
+                name='y', shape=[-1, *self.shape_y], dtype=self.d_type
+            )
+            out = paddle.matmul(x, y)
+            out = paddle.transpose(out, perm=[0, 2, 1, 3])
+            out = paddle.transpose(out, perm=[0, 1, 2, 3])  # breaks pattern
+            out = paddle.reshape(out, [0, 0, self.shape_y[0] * self.shape_y[2]])
+
+            out = F.relu(out)
         return out

@@ -30,24 +30,26 @@ def setUp(self):
         self.set_params()
         self.transpose_perm = [0, 2, 1, 3]
         self.pass_name = 'reshape_transpose_matmul_onednn_fuse_pass'
-        with paddle.pir_utils.OldIrGuard():
-            with base.program_guard(self.main_program, self.startup_program):
-                data = paddle.static.data(
-                    name="data", shape=self.data_shape, dtype="float32"
-                )
-                weight = paddle.create_parameter(
-                    shape=self.weight_shape, dtype="float32"
-                )
-
-                reshape = paddle.reshape(data, shape=self.reshape_shape)
-                transpose = paddle.transpose(reshape, self.transpose_perm)
-
-                matmul = paddle.matmul(
-                    transpose,
-                    weight,
-                    transpose_x=self.transpose_x,
-                    transpose_y=self.transpose_y,
-                )
+        with (
+            paddle.pir_utils.OldIrGuard(),
+            base.program_guard(self.main_program, self.startup_program),
+        ):
+            data = paddle.static.data(
+                name="data", shape=self.data_shape, dtype="float32"
+            )
+            weight = paddle.create_parameter(
+                shape=self.weight_shape, dtype="float32"
+            )
+
+            reshape = paddle.reshape(data, shape=self.reshape_shape)
+            transpose = paddle.transpose(reshape, self.transpose_perm)
+
+            matmul = paddle.matmul(
+                transpose,
+                weight,
+                transpose_x=self.transpose_x,
+                transpose_y=self.transpose_y,
+            )

         self.fetch_list = [matmul]
         self.enable_mkldnn = True