Commit 454505a

add master_grad
1 parent d51edd0

1 file changed: +1 -3 lines changed

paddlenlp/trainer/training_args.py

Lines changed: 1 addition & 3 deletions
@@ -1174,9 +1174,6 @@ def is_segment_parallel_supported():
         pipeline.micro_batch_size = self.per_device_train_batch_size
         pipeline.schedule_mode = self.pipeline_schedule_mode

-        if self.amp_master_grad:
-            warnings.warn("`amp_master_grad` is not supported NOW in AutoParallel!")
-            self.amp_master_grad = False
         logger.info(f"PP configs:{strategy.pipeline}, use master_grad: {self.amp_master_grad}")

         if self.do_eval:
@@ -1260,6 +1257,7 @@ def is_segment_parallel_supported():
         amp.enable = True
         amp.dtype = "bfloat16" if self.bf16 else "float16"
         amp.level = self.fp16_opt_level.lower()
+        amp.use_master_grad = self.amp_master_grad
         amp.init_loss_scaling = self.scale_loss
         amp.custom_black_list = self.amp_custom_black_list if self.amp_custom_black_list is not None else []
         amp.custom_white_list = self.amp_custom_white_list if self.amp_custom_white_list is not None else []
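
Net effect of the commit: in auto-parallel mode, `amp_master_grad` is no longer silently disabled with a warning; it is now forwarded to the strategy's AMP config as `amp.use_master_grad`. For context, below is a minimal, framework-agnostic sketch of the master-grad idea this flag enables: gradients produced in low precision are promoted to float32 before the optimizer update, so small updates are not lost to fp16/bf16 rounding. The names here (`master_sgd_step`, the dict layout) are illustrative only, not PaddlePaddle API.

import numpy as np

def master_sgd_step(params_fp32, grads_low_precision, lr=1e-3):
    """Illustrative SGD step using "master" (float32) gradients.

    params_fp32: dict of name -> float32 parameter arrays (master weights).
    grads_low_precision: dict of name -> fp16/bf16 gradients from backward.
    """
    for name, param in params_fp32.items():
        # Promote the low-precision gradient to a float32 "master grad"
        # before it touches the optimizer update.
        master_grad = grads_low_precision[name].astype(np.float32)
        param -= lr * master_grad  # the update is carried out entirely in fp32

# Tiny demo: an fp16 gradient whose update would be flushed to zero at
# fp16 resolution (spacing near 1.0 is ~1e-3) still moves the fp32 weight.
params = {"w": np.ones(4, dtype=np.float32)}
grads = {"w": np.full(4, 1e-4, dtype=np.float16)}
master_sgd_step(params, grads)
print(params["w"])  # slightly below 1.0 -- the small update survives

In practice, a PaddleNLP auto-parallel run with `--fp16` or `--bf16` plus `amp_master_grad` now keeps float32 master gradients instead of warning and forcing the flag off.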
