Skip to content

Commit 450f038

Browse files
committed
Run make fixup
1 parent 76c7970 commit 450f038

File tree

2 files changed

+4
-4
lines changed

2 files changed

+4
-4
lines changed

src/transformers/models/olmo/convert_olmo_weights_to_hf.py

Lines changed: 3 additions & 1 deletion
```diff
@@ -183,7 +183,9 @@ def write_model(model_path, input_base_path, tokenizer_path=None, safe_serializa
     shutil.rmtree(tmp_model_path)


-def _write_tokenizer(output_path: Path, config: OLMoConfig, input_tokenizer_path: Path, fix_eos_token_id: bool = True) -> None:
+def _write_tokenizer(
+    output_path: Path, config: OLMoConfig, input_tokenizer_path: Path, fix_eos_token_id: bool = True
+) -> None:
     print(f"Saving a {GPTNeoXTokenizerFast.__name__} to {output_path}.")

     base_tokenizer = Tokenizer.from_file(str(input_tokenizer_path))
```

tests/models/olmo/test_tokenization_olmo.py

Lines changed: 1 addition & 3 deletions
```diff
@@ -182,9 +182,7 @@ def test_simple_encode_decode(self):
         rust_tokenizer = self.rust_tokenizer

         self.assertEqual(rust_tokenizer.encode("This is a test"), [1552, 310, 247, 1071])
-        self.assertEqual(
-            rust_tokenizer.decode([1552, 310, 247, 1071], skip_special_tokens=True), "This is a test"
-        )
+        self.assertEqual(rust_tokenizer.decode([1552, 310, 247, 1071], skip_special_tokens=True), "This is a test")

         # bytefallback showcase
         self.assertEqual(rust_tokenizer.encode("生活的真谛是"), [20025, 46549, 5225, 48561, 33656, 238, 12105])  # fmt: skip
```

0 commit comments

Comments (0)