Skip to content

Commit eac7b8e

Browse files
jsngn and mroeschke authored
TST: Replace ensure_clean utility function with the temp_file pytest fixture in 5 test files (pandas-dev#62481)
Co-authored-by: Matthew Roeschke <10647082+mroeschke@users.noreply.github.com>
1 parent e4e8dca commit eac7b8e

File tree

5 files changed

+137
-156
lines changed

5 files changed

+137
-156
lines changed

pandas/tests/io/formats/style/test_style.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1597,7 +1597,6 @@ def test_no_empty_apply(mi_styler):
15971597

15981598

15991599
@pytest.mark.parametrize("format", ["html", "latex", "string"])
1600-
def test_output_buffer(mi_styler, format):
1600+
def test_output_buffer(mi_styler, format, temp_file):
16011601
# gh 47053
1602-
with tm.ensure_clean(f"delete_me.{format}") as f:
1603-
getattr(mi_styler, f"to_{format}")(f)
1602+
getattr(mi_styler, f"to_{format}")(temp_file)

pandas/tests/io/formats/test_to_latex.py

Lines changed: 12 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@
88
DataFrame,
99
Series,
1010
)
11-
import pandas._testing as tm
1211

1312
pytest.importorskip("jinja2")
1413

@@ -30,27 +29,24 @@ def df_short():
3029

3130

3231
class TestToLatex:
33-
def test_to_latex_to_file(self, float_frame):
34-
with tm.ensure_clean("test.tex") as path:
35-
float_frame.to_latex(path)
36-
with open(path, encoding="utf-8") as f:
37-
assert float_frame.to_latex() == f.read()
32+
def test_to_latex_to_file(self, float_frame, temp_file):
33+
float_frame.to_latex(temp_file)
34+
with open(temp_file, encoding="utf-8") as f:
35+
assert float_frame.to_latex() == f.read()
3836

39-
def test_to_latex_to_file_utf8_with_encoding(self):
37+
def test_to_latex_to_file_utf8_with_encoding(self, temp_file):
4038
# test with utf-8 and encoding option (GH 7061)
4139
df = DataFrame([["au\xdfgangen"]])
42-
with tm.ensure_clean("test.tex") as path:
43-
df.to_latex(path, encoding="utf-8")
44-
with open(path, encoding="utf-8") as f:
45-
assert df.to_latex() == f.read()
40+
df.to_latex(temp_file, encoding="utf-8")
41+
with open(temp_file, encoding="utf-8") as f:
42+
assert df.to_latex() == f.read()
4643

47-
def test_to_latex_to_file_utf8_without_encoding(self):
44+
def test_to_latex_to_file_utf8_without_encoding(self, temp_file):
4845
# test with utf-8 without encoding option
4946
df = DataFrame([["au\xdfgangen"]])
50-
with tm.ensure_clean("test.tex") as path:
51-
df.to_latex(path)
52-
with open(path, encoding="utf-8") as f:
53-
assert df.to_latex() == f.read()
47+
df.to_latex(temp_file)
48+
with open(temp_file, encoding="utf-8") as f:
49+
assert df.to_latex() == f.read()
5450

5551
def test_to_latex_tabular_with_index(self):
5652
df = DataFrame({"a": [1, 2], "b": ["b1", "b2"]})

pandas/tests/io/json/test_readlines.py

Lines changed: 72 additions & 79 deletions
Original file line numberDiff line numberDiff line change
@@ -195,7 +195,7 @@ def test_readjson_each_chunk(request, lines_json_df, engine):
195195
assert chunks[1].shape == (1, 2)
196196

197197

198-
def test_readjson_chunks_from_file(request, engine):
198+
def test_readjson_chunks_from_file(request, engine, temp_file):
199199
if engine == "pyarrow":
200200
# GH 48893
201201
reason = (
@@ -204,41 +204,39 @@ def test_readjson_chunks_from_file(request, engine):
204204
)
205205
request.applymarker(pytest.mark.xfail(reason=reason, raises=ValueError))
206206

207-
with tm.ensure_clean("test.json") as path:
208-
df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
209-
df.to_json(path, lines=True, orient="records")
210-
with read_json(path, lines=True, chunksize=1, engine=engine) as reader:
211-
chunked = pd.concat(reader)
212-
unchunked = read_json(path, lines=True, engine=engine)
213-
tm.assert_frame_equal(unchunked, chunked)
207+
df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
208+
df.to_json(temp_file, lines=True, orient="records")
209+
with read_json(temp_file, lines=True, chunksize=1, engine=engine) as reader:
210+
chunked = pd.concat(reader)
211+
unchunked = read_json(temp_file, lines=True, engine=engine)
212+
tm.assert_frame_equal(unchunked, chunked)
214213

215214

216215
@pytest.mark.parametrize("chunksize", [None, 1])
217-
def test_readjson_chunks_closes(chunksize):
218-
with tm.ensure_clean("test.json") as path:
219-
df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
220-
df.to_json(path, lines=True, orient="records")
221-
reader = JsonReader(
222-
path,
223-
orient=None,
224-
typ="frame",
225-
dtype=True,
226-
convert_axes=True,
227-
convert_dates=True,
228-
keep_default_dates=True,
229-
precise_float=False,
230-
date_unit=None,
231-
encoding=None,
232-
lines=True,
233-
chunksize=chunksize,
234-
compression=None,
235-
nrows=None,
236-
)
237-
with reader:
238-
reader.read()
239-
assert reader.handles.handle.closed, (
240-
f"didn't close stream with chunksize = {chunksize}"
241-
)
216+
def test_readjson_chunks_closes(chunksize, temp_file):
217+
df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
218+
df.to_json(temp_file, lines=True, orient="records")
219+
reader = JsonReader(
220+
temp_file,
221+
orient=None,
222+
typ="frame",
223+
dtype=True,
224+
convert_axes=True,
225+
convert_dates=True,
226+
keep_default_dates=True,
227+
precise_float=False,
228+
date_unit=None,
229+
encoding=None,
230+
lines=True,
231+
chunksize=chunksize,
232+
compression=None,
233+
nrows=None,
234+
)
235+
with reader:
236+
reader.read()
237+
assert reader.handles.handle.closed, (
238+
f"didn't close stream with chunksize = {chunksize}"
239+
)
242240

243241

244242
@pytest.mark.parametrize("chunksize", [0, -1, 2.2, "foo"])
@@ -278,7 +276,7 @@ def test_readjson_chunks_multiple_empty_lines(chunksize):
278276
tm.assert_frame_equal(orig, test, obj=f"chunksize: {chunksize}")
279277

280278

281-
def test_readjson_unicode(request, monkeypatch, engine):
279+
def test_readjson_unicode(request, monkeypatch, engine, temp_file):
282280
if engine == "pyarrow":
283281
# GH 48893
284282
reason = (
@@ -287,14 +285,13 @@ def test_readjson_unicode(request, monkeypatch, engine):
287285
)
288286
request.applymarker(pytest.mark.xfail(reason=reason, raises=ValueError))
289287

290-
with tm.ensure_clean("test.json") as path:
291-
monkeypatch.setattr("locale.getpreferredencoding", lambda do_setlocale: "cp949")
292-
with open(path, "w", encoding="utf-8") as f:
293-
f.write('{"£©µÀÆÖÞßéöÿ":["АБВГДабвгд가"]}')
288+
monkeypatch.setattr("locale.getpreferredencoding", lambda do_setlocale: "cp949")
289+
with open(temp_file, "w", encoding="utf-8") as f:
290+
f.write('{"£©µÀÆÖÞßéöÿ":["АБВГДабвгд가"]}')
294291

295-
result = read_json(path, engine=engine)
296-
expected = DataFrame({"£©µÀÆÖÞßéöÿ": ["АБВГДабвгд가"]})
297-
tm.assert_frame_equal(result, expected)
292+
result = read_json(temp_file, engine=engine)
293+
expected = DataFrame({"£©µÀÆÖÞßéöÿ": ["АБВГДабвгд가"]})
294+
tm.assert_frame_equal(result, expected)
298295

299296

300297
@pytest.mark.parametrize("nrows", [1, 2])
@@ -441,25 +438,24 @@ def test_to_json_append_mode(mode_):
441438
df.to_json(mode=mode_, lines=False, orient="records")
442439

443440

444-
def test_to_json_append_output_consistent_columns():
441+
def test_to_json_append_output_consistent_columns(temp_file):
445442
# GH 35849
446443
# Testing that resulting output reads in as expected.
447444
# Testing same columns, new rows
448445
df1 = DataFrame({"col1": [1, 2], "col2": ["a", "b"]})
449446
df2 = DataFrame({"col1": [3, 4], "col2": ["c", "d"]})
450447

451448
expected = DataFrame({"col1": [1, 2, 3, 4], "col2": ["a", "b", "c", "d"]})
452-
with tm.ensure_clean("test.json") as path:
453-
# Save dataframes to the same file
454-
df1.to_json(path, lines=True, orient="records")
455-
df2.to_json(path, mode="a", lines=True, orient="records")
449+
# Save dataframes to the same file
450+
df1.to_json(temp_file, lines=True, orient="records")
451+
df2.to_json(temp_file, mode="a", lines=True, orient="records")
456452

457-
# Read path file
458-
result = read_json(path, lines=True)
459-
tm.assert_frame_equal(result, expected)
453+
# Read path file
454+
result = read_json(temp_file, lines=True)
455+
tm.assert_frame_equal(result, expected)
460456

461457

462-
def test_to_json_append_output_inconsistent_columns():
458+
def test_to_json_append_output_inconsistent_columns(temp_file):
463459
# GH 35849
464460
# Testing that resulting output reads in as expected.
465461
# Testing one new column, one old column, new rows
@@ -473,17 +469,16 @@ def test_to_json_append_output_inconsistent_columns():
473469
"col3": [np.nan, np.nan, "!", "#"],
474470
}
475471
)
476-
with tm.ensure_clean("test.json") as path:
477-
# Save dataframes to the same file
478-
df1.to_json(path, mode="a", lines=True, orient="records")
479-
df3.to_json(path, mode="a", lines=True, orient="records")
472+
# Save dataframes to the same file
473+
df1.to_json(temp_file, mode="a", lines=True, orient="records")
474+
df3.to_json(temp_file, mode="a", lines=True, orient="records")
480475

481-
# Read path file
482-
result = read_json(path, lines=True)
483-
tm.assert_frame_equal(result, expected)
476+
# Read path file
477+
result = read_json(temp_file, lines=True)
478+
tm.assert_frame_equal(result, expected)
484479

485480

486-
def test_to_json_append_output_different_columns():
481+
def test_to_json_append_output_different_columns(temp_file):
487482
# GH 35849
488483
# Testing that resulting output reads in as expected.
489484
# Testing same, differing and new columns
@@ -500,19 +495,18 @@ def test_to_json_append_output_different_columns():
500495
"col4": [None, None, None, None, None, None, True, False],
501496
}
502497
).astype({"col4": "float"})
503-
with tm.ensure_clean("test.json") as path:
504-
# Save dataframes to the same file
505-
df1.to_json(path, mode="a", lines=True, orient="records")
506-
df2.to_json(path, mode="a", lines=True, orient="records")
507-
df3.to_json(path, mode="a", lines=True, orient="records")
508-
df4.to_json(path, mode="a", lines=True, orient="records")
509-
510-
# Read path file
511-
result = read_json(path, lines=True)
512-
tm.assert_frame_equal(result, expected)
498+
# Save dataframes to the same file
499+
df1.to_json(temp_file, mode="a", lines=True, orient="records")
500+
df2.to_json(temp_file, mode="a", lines=True, orient="records")
501+
df3.to_json(temp_file, mode="a", lines=True, orient="records")
502+
df4.to_json(temp_file, mode="a", lines=True, orient="records")
503+
504+
# Read path file
505+
result = read_json(temp_file, lines=True)
506+
tm.assert_frame_equal(result, expected)
513507

514508

515-
def test_to_json_append_output_different_columns_reordered():
509+
def test_to_json_append_output_different_columns_reordered(temp_file):
516510
# GH 35849
517511
# Testing that resulting output reads in as expected.
518512
# Testing specific result column order.
@@ -530,13 +524,12 @@ def test_to_json_append_output_different_columns_reordered():
530524
"col1": [None, None, None, None, 3, 4, 1, 2],
531525
}
532526
).astype({"col4": "float"})
533-
with tm.ensure_clean("test.json") as path:
534-
# Save dataframes to the same file
535-
df4.to_json(path, mode="a", lines=True, orient="records")
536-
df3.to_json(path, mode="a", lines=True, orient="records")
537-
df2.to_json(path, mode="a", lines=True, orient="records")
538-
df1.to_json(path, mode="a", lines=True, orient="records")
539-
540-
# Read path file
541-
result = read_json(path, lines=True)
542-
tm.assert_frame_equal(result, expected)
527+
# Save dataframes to the same file
528+
df4.to_json(temp_file, mode="a", lines=True, orient="records")
529+
df3.to_json(temp_file, mode="a", lines=True, orient="records")
530+
df2.to_json(temp_file, mode="a", lines=True, orient="records")
531+
df1.to_json(temp_file, mode="a", lines=True, orient="records")
532+
533+
# Read path file
534+
result = read_json(temp_file, lines=True)
535+
tm.assert_frame_equal(result, expected)

0 commit comments

Comments (0)