Skip to content

Commit c4d489e

Browse files
sadhana01 and GitHub Enterprise
authored and committed
Merge branch 'transition-neural-parser' into transition-neural-parser-warnfix
2 parents 8cdf7b7 + 2063f03 commit c4d489e

32 files changed

+121
-177
lines changed

README.md

Lines changed: 90 additions & 142 deletions
Large diffs are not rendered by default.

configs/amr2.0-structured-bart-large-joint-voc-neur-al.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,7 @@ share_all_embeddings=1 # share encoder and decoder input embeddings
132132

133133
arch=transformer_tgt_pointer_bartsv_large
134134
# Load WatBART for initialization
135-
# initialize_with_watbart="/dccstor/phalanx/masayasu/projects/data/trained_model/bart_hap-filtered/fs/checkpoint_best.pt" # use this if using watbart on ccc
135+
# initialize_with_watbart="/path/to/checkpoint_best.pt" # use this if using watbart on ccc
136136
initialize_with_watbart="0" # for not using watbart
137137

138138

configs/amr2.0-structured-bart-large-joint-voc.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ share_all_embeddings=1 # share encoder and decoder input embeddings
129129
arch=transformer_tgt_pointer_bartsv_large
130130

131131
# Load WatBART for initialization
132-
# initialize_with_watbart="/dccstor/phalanx/masayasu/projects/data/trained_model/bart_hap-filtered/fs/checkpoint_best.pt" # use this if using watbart on ccc
132+
# initialize_with_watbart="/path/to/checkpoint_best.pt" # use this if using watbart on ccc
133133
initialize_with_watbart="0" # for not using watbart
134134

135135
initialize_with_bart=1

configs/amr2.0-structured-bart-large-neur-al-importance-sampling5.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,7 @@ share_decoder_embed=0
134134

135135
arch=transformer_tgt_pointer_bart_large
136136
# Load WatBART for initialization
137-
# initialize_with_watbart="/dccstor/phalanx/masayasu/projects/data/trained_model/bart_hap-filtered/fs/checkpoint_best.pt" # use this if using watbart on ccc
137+
# initialize_with_watbart="/path/to/checkpoint_best.pt" # use this if using watbart on ccc
138138
initialize_with_watbart="0" # for not using watbart
139139

140140

configs/amr2.0-structured-bart-large-neur-al-sampling5.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,7 @@ share_decoder_embed=0
132132

133133
arch=transformer_tgt_pointer_bart_large
134134
# Load WatBART for initialization
135-
# initialize_with_watbart="/dccstor/phalanx/masayasu/projects/data/trained_model/bart_hap-filtered/fs/checkpoint_best.pt" # use this if using watbart on ccc
135+
# initialize_with_watbart="/path/to/checkpoint_best.pt" # use this if using watbart on ccc
136136
initialize_with_watbart="0" # for not using watbart
137137

138138

configs/amr2.0-structured-bart-large-neur-al.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,7 @@ share_decoder_embed=0
131131

132132
arch=transformer_tgt_pointer_bart_large
133133
# Load WatBART for initialization
134-
# initialize_with_watbart="/dccstor/phalanx/masayasu/projects/data/trained_model/bart_hap-filtered/fs/checkpoint_best.pt" # use this if using watbart on ccc
134+
# initialize_with_watbart="/path/to/checkpoint_best.pt" # use this if using watbart on ccc
135135
initialize_with_watbart="0" # for not using watbart
136136

137137

configs/amr2.0-structured-bart-large.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ share_decoder_embed=0
128128
arch=transformer_tgt_pointer_bart_large
129129

130130
# Load WatBART for initialization
131-
# initialize_with_watbart="/dccstor/phalanx/masayasu/projects/data/trained_model/bart_hap-filtered/fs/checkpoint_best.pt" # use this if using watbart on ccc
131+
# initialize_with_watbart="/path/to/checkpoint_best.pt" # use this if using watbart on ccc
132132
initialize_with_watbart="0" # for not using watbart
133133

134134
# Standard weight initilization parameters

configs/amr2joint_ontowiki2_g2g-structured-bart-large.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ share_all_embeddings=1 # share encoder and decoder input embeddings
128128

129129
arch=transformer_tgt_pointer_bartsv_large
130130
# Load WatBART for initialization
131-
# initialize_with_watbart="/dccstor/phalanx/masayasu/projects/data/trained_model/bart_hap-filtered/fs/checkpoint_best.pt" # use this if using watbart on ccc
131+
# initialize_with_watbart="/path/to/checkpoint_best.pt" # use this if using watbart on ccc
132132
initialize_with_watbart="0" # for not using watbart
133133

134134

configs/amr3.0-structured-bart-large-doc-truncate-sliding-finetune-ws200x100.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@ share_decoder_embed=0
142142

143143
arch=transformer_tgt_pointer_bart_large
144144
# Load WatBART for initialization
145-
# initialize_with_watbart="/dccstor/phalanx/masayasu/projects/data/trained_model/bart_hap-filtered/fs/checkpoint_best.pt" # use this if using watbart on ccc
145+
# initialize_with_watbart="/path/to/checkpoint_best.pt" # use this if using watbart on ccc
146146
initialize_with_watbart="0" # for not using watbart
147147

148148

configs/amr3.0-structured-bart-large-doc-truncate-sliding-ws300x200.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -137,7 +137,7 @@ share_decoder_embed=0
137137

138138
arch=transformer_tgt_pointer_bart_large
139139
# Load WatBART for initialization
140-
# initialize_with_watbart="/dccstor/phalanx/masayasu/projects/data/trained_model/bart_hap-filtered/fs/checkpoint_best.pt" # use this if using watbart on ccc
140+
# initialize_with_watbart="/path/to/checkpoint_best.pt" # use this if using watbart on ccc
141141
initialize_with_watbart="0" # for not using watbart
142142

143143

0 commit comments

Comments (0)