@@ -88,82 +88,82 @@ def test_multiindex(self):
 
 class TestTableSchemaType(object):
 
-    def test_as_json_table_type_int_data(self):
+    @pytest.mark.parametrize('int_type', [
+        np.int, np.int16, np.int32, np.int64])
+    def test_as_json_table_type_int_data(self, int_type):
         int_data = [1, 2, 3]
-        int_types = [np.int, np.int16, np.int32, np.int64]
-        for t in int_types:
-            assert as_json_table_type(np.array(
-                int_data, dtype=t)) == 'integer'
+        assert as_json_table_type(np.array(
+            int_data, dtype=int_type)) == 'integer'
 
-    def test_as_json_table_type_float_data(self):
+    @pytest.mark.parametrize('float_type', [
+        np.float, np.float16, np.float32, np.float64])
+    def test_as_json_table_type_float_data(self, float_type):
         float_data = [1., 2., 3.]
-        float_types = [np.float, np.float16, np.float32, np.float64]
-        for t in float_types:
-            assert as_json_table_type(np.array(
-                float_data, dtype=t)) == 'number'
+        assert as_json_table_type(np.array(
+            float_data, dtype=float_type)) == 'number'
 
-    def test_as_json_table_type_bool_data(self):
+    @pytest.mark.parametrize('bool_type', [bool, np.bool])
+    def test_as_json_table_type_bool_data(self, bool_type):
         bool_data = [True, False]
-        bool_types = [bool, np.bool]
-        for t in bool_types:
-            assert as_json_table_type(np.array(
-                bool_data, dtype=t)) == 'boolean'
-
-    def test_as_json_table_type_date_data(self):
-        date_data = [pd.to_datetime(['2016']),
-                     pd.to_datetime(['2016'], utc=True),
-                     pd.Series(pd.to_datetime(['2016'])),
-                     pd.Series(pd.to_datetime(['2016'], utc=True)),
-                     pd.period_range('2016', freq='A', periods=3)]
-        for arr in date_data:
-            assert as_json_table_type(arr) == 'datetime'
-
-    def test_as_json_table_type_string_data(self):
-        strings = [pd.Series(['a', 'b']), pd.Index(['a', 'b'])]
-        for t in strings:
-            assert as_json_table_type(t) == 'string'
-
-    def test_as_json_table_type_categorical_data(self):
-        assert as_json_table_type(pd.Categorical(['a'])) == 'any'
-        assert as_json_table_type(pd.Categorical([1])) == 'any'
-        assert as_json_table_type(pd.Series(pd.Categorical([1]))) == 'any'
-        assert as_json_table_type(pd.CategoricalIndex([1])) == 'any'
-        assert as_json_table_type(pd.Categorical([1])) == 'any'
+        assert as_json_table_type(np.array(
+            bool_data, dtype=bool_type)) == 'boolean'
+
+    @pytest.mark.parametrize('date_data', [
+        pd.to_datetime(['2016']),
+        pd.to_datetime(['2016'], utc=True),
+        pd.Series(pd.to_datetime(['2016'])),
+        pd.Series(pd.to_datetime(['2016'], utc=True)),
+        pd.period_range('2016', freq='A', periods=3)
+    ])
+    def test_as_json_table_type_date_data(self, date_data):
+        assert as_json_table_type(date_data) == 'datetime'
+
+    @pytest.mark.parametrize('str_data', [
+        pd.Series(['a', 'b']), pd.Index(['a', 'b'])])
+    def test_as_json_table_type_string_data(self, str_data):
+        assert as_json_table_type(str_data) == 'string'
+
+    @pytest.mark.parametrize('cat_data', [
+        pd.Categorical(['a']),
+        pd.Categorical([1]),
+        pd.Series(pd.Categorical([1])),
+        pd.CategoricalIndex([1]),
+        pd.Categorical([1])])
+    def test_as_json_table_type_categorical_data(self, cat_data):
+        assert as_json_table_type(cat_data) == 'any'
 
     # ------
     # dtypes
     # ------
-    def test_as_json_table_type_int_dtypes(self):
-        integers = [np.int, np.int16, np.int32, np.int64]
-        for t in integers:
-            assert as_json_table_type(t) == 'integer'
-
-    def test_as_json_table_type_float_dtypes(self):
-        floats = [np.float, np.float16, np.float32, np.float64]
-        for t in floats:
-            assert as_json_table_type(t) == 'number'
-
-    def test_as_json_table_type_bool_dtypes(self):
-        bools = [bool, np.bool]
-        for t in bools:
-            assert as_json_table_type(t) == 'boolean'
-
-    def test_as_json_table_type_date_dtypes(self):
+    @pytest.mark.parametrize('int_dtype', [
+        np.int, np.int16, np.int32, np.int64])
+    def test_as_json_table_type_int_dtypes(self, int_dtype):
+        assert as_json_table_type(int_dtype) == 'integer'
+
+    @pytest.mark.parametrize('float_dtype', [
+        np.float, np.float16, np.float32, np.float64])
+    def test_as_json_table_type_float_dtypes(self, float_dtype):
+        assert as_json_table_type(float_dtype) == 'number'
+
+    @pytest.mark.parametrize('bool_dtype', [bool, np.bool])
+    def test_as_json_table_type_bool_dtypes(self, bool_dtype):
+        assert as_json_table_type(bool_dtype) == 'boolean'
+
+    @pytest.mark.parametrize('date_dtype', [
+        np.datetime64, np.dtype("<M8[ns]"), PeriodDtype(),
+        DatetimeTZDtype('ns', 'US/Central')])
+    def test_as_json_table_type_date_dtypes(self, date_dtype):
         # TODO: datedate.date? datetime.time?
-        dates = [np.datetime64, np.dtype("<M8[ns]"), PeriodDtype(),
-                 DatetimeTZDtype('ns', 'US/Central')]
-        for t in dates:
-            assert as_json_table_type(t) == 'datetime'
+        assert as_json_table_type(date_dtype) == 'datetime'
 
-    def test_as_json_table_type_timedelta_dtypes(self):
-        durations = [np.timedelta64, np.dtype("<m8[ns]")]
-        for t in durations:
-            assert as_json_table_type(t) == 'duration'
+    @pytest.mark.parametrize('td_dtype', [
+        np.timedelta64, np.dtype("<m8[ns]")])
+    def test_as_json_table_type_timedelta_dtypes(self, td_dtype):
+        assert as_json_table_type(td_dtype) == 'duration'
 
-    def test_as_json_table_type_string_dtypes(self):
-        strings = [object]  # TODO
-        for t in strings:
-            assert as_json_table_type(t) == 'string'
+    @pytest.mark.parametrize('str_dtype', [object])  # TODO
+    def test_as_json_table_type_string_dtypes(self, str_dtype):
+        assert as_json_table_type(str_dtype) == 'string'
 
     def test_as_json_table_type_categorical_dtypes(self):
         # TODO: I think before is_categorical_dtype(Categorical)
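Note on the pattern in the hunk above: each per-type for-loop is lifted into a @pytest.mark.parametrize decorator, so every dtype is collected as its own test case and a failure reports the offending parameter instead of aborting the whole loop. A minimal standalone sketch of the same refactor, using a hypothetical as_json_table_type_stub rather than the real pandas helper:

    import numpy as np
    import pytest


    def as_json_table_type_stub(arr):
        # hypothetical stand-in for pandas' as_json_table_type
        return 'integer' if np.issubdtype(arr.dtype, np.integer) else 'other'


    @pytest.mark.parametrize('int_type', [np.int16, np.int32, np.int64])
    def test_int_arrays_map_to_integer(int_type):
        # pytest collects one test item per dtype in the list above
        arr = np.array([1, 2, 3], dtype=int_type)
        assert as_json_table_type_stub(arr) == 'integer'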
@@ -336,61 +336,55 @@ def test_date_format_raises(self):
         self.df.to_json(orient='table', date_format='iso')
         self.df.to_json(orient='table')
 
-    def test_convert_pandas_type_to_json_field_int(self):
+    @pytest.mark.parametrize('kind', [pd.Series, pd.Index])
+    def test_convert_pandas_type_to_json_field_int(self, kind):
         data = [1, 2, 3]
-        kinds = [pd.Series(data, name='name'), pd.Index(data, name='name')]
-        for kind in kinds:
-            result = convert_pandas_type_to_json_field(kind)
-            expected = {"name": "name", "type": 'integer'}
-            assert result == expected
+        result = convert_pandas_type_to_json_field(kind(data, name='name'))
+        expected = {"name": "name", "type": "integer"}
+        assert result == expected
 
-    def test_convert_pandas_type_to_json_field_float(self):
+    @pytest.mark.parametrize('kind', [pd.Series, pd.Index])
+    def test_convert_pandas_type_to_json_field_float(self, kind):
         data = [1., 2., 3.]
-        kinds = [pd.Series(data, name='name'), pd.Index(data, name='name')]
-        for kind in kinds:
-            result = convert_pandas_type_to_json_field(kind)
-            expected = {"name": "name", "type": 'number'}
-            assert result == expected
+        result = convert_pandas_type_to_json_field(kind(data, name='name'))
+        expected = {"name": "name", "type": "number"}
+        assert result == expected
 
-    def test_convert_pandas_type_to_json_field_datetime(self):
+    @pytest.mark.parametrize('dt_args,extra_exp', [
+        ({}, {}), ({'utc': True}, {'tz': 'UTC'})])
+    @pytest.mark.parametrize('wrapper', [None, pd.Series])
+    def test_convert_pandas_type_to_json_field_datetime(self, dt_args,
+                                                        extra_exp, wrapper):
         data = [1., 2., 3.]
-        kinds = [pd.Series(pd.to_datetime(data), name='values'),
-                 pd.to_datetime(data)]
-        for kind in kinds:
-            result = convert_pandas_type_to_json_field(kind)
-            expected = {"name": "values", "type": 'datetime'}
-            assert result == expected
-
-        kinds = [pd.Series(pd.to_datetime(data, utc=True), name='values'),
-                 pd.to_datetime(data, utc=True)]
-        for kind in kinds:
-            result = convert_pandas_type_to_json_field(kind)
-            expected = {"name": "values", "type": 'datetime', "tz": "UTC"}
-            assert result == expected
+        data = pd.to_datetime(data, **dt_args)
+        if wrapper is pd.Series:
+            data = pd.Series(data, name='values')
+        result = convert_pandas_type_to_json_field(data)
+        expected = {"name": "values", "type": 'datetime'}
+        expected.update(extra_exp)
+        assert result == expected
 
+    def test_convert_pandas_type_to_json_period_range(self):
         arr = pd.period_range('2016', freq='A-DEC', periods=4)
         result = convert_pandas_type_to_json_field(arr)
         expected = {"name": "values", "type": 'datetime', "freq": "A-DEC"}
         assert result == expected
 
-    def test_convert_pandas_type_to_json_field_categorical(self):
+    @pytest.mark.parametrize('kind', [pd.Categorical, pd.CategoricalIndex])
+    @pytest.mark.parametrize('ordered', [True, False])
+    def test_convert_pandas_type_to_json_field_categorical(self, kind,
                                                            ordered):
         data = ['a', 'b', 'c']
-        ordereds = [True, False]
-
-        for ordered in ordereds:
-            arr = pd.Series(pd.Categorical(data, ordered=ordered), name='cats')
-            result = convert_pandas_type_to_json_field(arr)
-            expected = {"name": "cats", "type": "any",
-                        "constraints": {"enum": data},
-                        "ordered": ordered}
-            assert result == expected
-
-            arr = pd.CategoricalIndex(data, ordered=ordered, name='cats')
-            result = convert_pandas_type_to_json_field(arr)
-            expected = {"name": "cats", "type": "any",
-                        "constraints": {"enum": data},
-                        "ordered": ordered}
-            assert result == expected
+        if kind is pd.Categorical:
+            arr = pd.Series(kind(data, ordered=ordered), name='cats')
+        elif kind is pd.CategoricalIndex:
+            arr = kind(data, ordered=ordered, name='cats')
+
+        result = convert_pandas_type_to_json_field(arr)
+        expected = {"name": "cats", "type": "any",
+                    "constraints": {"enum": data},
+                    "ordered": ordered}
+        assert result == expected
 
     @pytest.mark.parametrize("inp,exp", [
         ({'type': 'integer'}, 'int64'),
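The datetime field test above stacks two parametrize decorators; stacked decorators combine as a cross-product, so two (dt_args, extra_exp) pairs times two wrapper values yield four test items. A small self-contained sketch of that behaviour (the names dt_args, extra_exp and wrapper mirror the test above, and the body is illustrative only):

    import pytest


    @pytest.mark.parametrize('dt_args,extra_exp', [
        ({}, {}), ({'utc': True}, {'tz': 'UTC'})])
    @pytest.mark.parametrize('wrapper', [None, 'series'])
    def test_stacked_parametrize(dt_args, extra_exp, wrapper):
        # four items are generated: each (dt_args, extra_exp) pair
        # is combined with each wrapper value
        expected = {'type': 'datetime'}
        expected.update(extra_exp)
        assert expected['type'] == 'datetime'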
@@ -440,35 +434,22 @@ def test_categorical(self):
                       OrderedDict([('idx', 2), ('values', 'a')])])])
         assert result == expected
 
-    def test_set_default_names_unset(self):
-        data = pd.Series(1, pd.Index([1]))
-        result = set_default_names(data)
-        assert result.index.name == 'index'
-
-    def test_set_default_names_set(self):
-        data = pd.Series(1, pd.Index([1], name='myname'))
-        result = set_default_names(data)
-        assert result.index.name == 'myname'
-
-    def test_set_default_names_mi_unset(self):
-        data = pd.Series(
-            1, pd.MultiIndex.from_product([('a', 'b'), ('c', 'd')]))
-        result = set_default_names(data)
-        assert result.index.names == ['level_0', 'level_1']
-
-    def test_set_default_names_mi_set(self):
-        data = pd.Series(
-            1, pd.MultiIndex.from_product([('a', 'b'), ('c', 'd')],
-                                          names=['n1', 'n2']))
-        result = set_default_names(data)
-        assert result.index.names == ['n1', 'n2']
-
-    def test_set_default_names_mi_partion(self):
-        data = pd.Series(
-            1, pd.MultiIndex.from_product([('a', 'b'), ('c', 'd')],
-                                          names=['n1', None]))
+    @pytest.mark.parametrize('idx,nm,prop', [
+        (pd.Index([1]), 'index', 'name'),
+        (pd.Index([1], name='myname'), 'myname', 'name'),
+        (pd.MultiIndex.from_product([('a', 'b'), ('c', 'd')]),
+         ['level_0', 'level_1'], 'names'),
+        (pd.MultiIndex.from_product([('a', 'b'), ('c', 'd')],
+                                    names=['n1', 'n2']),
+         ['n1', 'n2'], 'names'),
+        (pd.MultiIndex.from_product([('a', 'b'), ('c', 'd')],
+                                    names=['n1', None]),
+         ['n1', 'level_1'], 'names')
+    ])
+    def test_set_names_unset(self, idx, nm, prop):
+        data = pd.Series(1, idx)
         result = set_default_names(data)
-        assert result.index.names == ['n1', 'level_1']
+        assert getattr(result.index, prop) == nm
 
     def test_timestamp_in_columns(self):
         df = pd.DataFrame([[1, 2]], columns=[pd.Timestamp('2016'),
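test_set_names_unset above folds five index-naming tests into one by parametrizing not just the index and the expected value but also the attribute to read ('name' for a flat Index, 'names' for a MultiIndex) and comparing via getattr. A pandas-free sketch of that getattr pattern, with hypothetical FlatIndex and MultiIndexStub classes standing in for the real index types:

    import pytest


    class FlatIndex:
        def __init__(self, name):
            self.name = name


    class MultiIndexStub:
        def __init__(self, names):
            self.names = names


    @pytest.mark.parametrize('idx,expected,prop', [
        (FlatIndex('index'), 'index', 'name'),
        (MultiIndexStub(['level_0', 'level_1']), ['level_0', 'level_1'], 'names'),
    ])
    def test_default_names(idx, expected, prop):
        # the attribute name itself is a parameter, read back with getattr
        assert getattr(idx, prop) == expected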
@@ -478,20 +459,15 @@ def test_timestamp_in_columns(self):
         assert js['schema']['fields'][1]['name'] == 1451606400000
         assert js['schema']['fields'][2]['name'] == 10000
 
-    def test_overlapping_names(self):
-        cases = [
-            pd.Series([1], index=pd.Index([1], name='a'), name='a'),
-            pd.DataFrame({"A": [1]}, index=pd.Index([1], name="A")),
-            pd.DataFrame({"A": [1]}, index=pd.MultiIndex.from_arrays([
-                ['a'], [1]
-            ], names=["A", "a"])),
-        ]
-
-        for data in cases:
-            with pytest.raises(ValueError) as excinfo:
-                data.to_json(orient='table')
-
-            assert 'Overlapping' in str(excinfo.value)
+    @pytest.mark.parametrize('case', [
+        pd.Series([1], index=pd.Index([1], name='a'), name='a'),
+        pd.DataFrame({"A": [1]}, index=pd.Index([1], name="A")),
+        pd.DataFrame({"A": [1]}, index=pd.MultiIndex.from_arrays([
+            ['a'], [1]], names=["A", "a"]))
+    ])
+    def test_overlapping_names(self, case):
+        with tm.assert_raises_regex(ValueError, 'Overlapping'):
+            case.to_json(orient='table')
 
     def test_mi_falsey_name(self):
         # GH 16203
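test_overlapping_names above also replaces the pytest.raises plus manual substring check with tm.assert_raises_regex, pandas' internal test helper that asserts the exception type and a message pattern in one context manager. Outside the pandas suite the same check can be written with pytest.raises and its match argument (a regular expression), as in this sketch with a hypothetical raise_overlapping helper:

    import pytest


    def raise_overlapping():
        # hypothetical helper mimicking the error raised by to_json(orient='table')
        raise ValueError("Overlapping names between the index and columns")


    def test_overlapping_message():
        with pytest.raises(ValueError, match='Overlapping'):
            raise_overlapping()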