 import * as fs from 'fs';
 import * as path from 'path';
 
-if (!process.env.JSON_PATH || !process.env.ARROW_PATH) {
-    throw new Error('Integration tests need paths to both json and arrow files');
-}
+import Arrow from '../Arrow';
+import { zip } from 'ix/iterable/zip';
+import { toArray } from 'ix/iterable/toarray';
 
-const jsonPath = path.resolve(process.env.JSON_PATH + '');
-const arrowPath = path.resolve(process.env.ARROW_PATH + '');
+/* tslint:disable */
+const { parse: bignumJSONParse } = require('json-bignum');
 
-if (!fs.existsSync(jsonPath) || !fs.existsSync(arrowPath)) {
-    throw new Error('Integration tests need both json and arrow files to exist');
-}
+const { Table, read } = Arrow;
 
-/* tslint:disable */
-const { parse } = require('json-bignum');
+if (!process.env.JSON_PATHS || !process.env.ARROW_PATHS) {
+    throw new Error('Integration tests need paths to both json and arrow files');
+}
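+// JSON_PATHS and ARROW_PATHS are JSON-encoded values: either a single path
+// string or an array of path strings. A hypothetical invocation (file names
+// are illustrative, not part of this change):
+//   JSON_PATHS='["test/data/primitives.json"]' \
+//   ARROW_PATHS='["test/data/primitives.arrow"]' jest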
 
-const jsonData = parse(fs.readFileSync(jsonPath, 'utf8'));
-const arrowBuffers: Uint8Array[] = [fs.readFileSync(arrowPath)];
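+// Parse a JSON-encoded path (or list of paths) from an environment variable,
+// resolving each to an absolute path and mapping missing files to undefined.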
+function resolvePathArgs(paths: string) {
+    let pathsArray = JSON.parse(paths) as string | string[];
+    return (Array.isArray(pathsArray) ? pathsArray : [pathsArray])
+        .map((p) => path.resolve(p))
+        .map((p) => {
+            if (fs.existsSync(p)) {
+                return p;
+            }
+            console.warn(`Could not find file "${p}"`);
+            return undefined;
+        });
+}
 
-import Arrow from '../Arrow';
-import { zip } from 'ix/iterable/zip';
-import { toArray } from 'ix/iterable/toarray';
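+// Memoize file reads so each fixture is only loaded from disk once, even
+// though both test suites below consume the same buffers.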
+const getOrReadFileBuffer = ((cache: any) => function getFileBuffer(path: string, ...args: any[]) {
+    return cache[path] || (cache[path] = fs.readFileSync(path, ...args));
+})({});
 
-const { Table, read } = Arrow;
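+// Pair each JSON fixture with its corresponding Arrow file, dropping any
+// pair where either file could not be found.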
+const jsonAndArrowPaths = toArray(zip(
+    resolvePathArgs(process.env.JSON_PATHS!),
+    resolvePathArgs(process.env.ARROW_PATHS!)
+))
+.filter(([p1, p2]) => p1 !== undefined && p2 !== undefined) as [string, string][];
 
 expect.extend({
     toEqualVector(v1: any, v2: any) {
@@ -66,7 +79,7 @@ expect.extend({
 
         for (let i = -1, n = props.length; ++i < n;) {
             const prop = props[i];
-            if (this.utils.stringify(v1[prop]) !== this.utils.stringify(v2[prop])) {
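+            // Compare the string form of each property instead of jest's
+            // stringify, so values that coerce to the same string are equal.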
+            if (`${v1[prop]}` !== `${v2[prop]}`) {
                 propsFailures.push(`${prop}: ${format(v1[prop], v2[prop], ' !== ')}`);
             }
         }
@@ -98,35 +111,43 @@ expect.extend({
 });
 
 describe(`Integration`, () => {
-    testReaderIntegration();
-    testTableFromBuffersIntegration();
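+    // Run both suites once per (json, arrow) fixture pair, grouped under the
+    // fixture's two containing directories and file name.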
+    for (const [jsonFilePath, arrowFilePath] of jsonAndArrowPaths) {
+        let { name, dir } = path.parse(arrowFilePath);
+        dir = dir.split(path.sep).slice(-2).join(path.sep);
+        const json = bignumJSONParse(getOrReadFileBuffer(jsonFilePath, 'utf8'));
+        const arrowBuffer = getOrReadFileBuffer(arrowFilePath) as Uint8Array;
+        describe(path.join(dir, name), () => {
+            testReaderIntegration(json, arrowBuffer);
+            testTableFromBuffersIntegration(json, arrowBuffer);
+        });
+    }
 });
 
-function testReaderIntegration() {
-    test(`json and arrow buffers report the same values`, () => {
-        debugger;
+function testReaderIntegration(jsonData: any, arrowBuffer: Uint8Array) {
+    test(`json and arrow record batches report the same values`, () => {
         expect.hasAssertions();
         const jsonRecordBatches = toArray(read(jsonData));
-        const binaryRecordBatches = toArray(read(arrowBuffers));
+        const binaryRecordBatches = toArray(read(arrowBuffer));
         for (const [jsonRecordBatch, binaryRecordBatch] of zip(jsonRecordBatches, binaryRecordBatches)) {
             expect(jsonRecordBatch.length).toEqual(binaryRecordBatch.length);
             expect(jsonRecordBatch.numCols).toEqual(binaryRecordBatch.numCols);
             for (let i = -1, n = jsonRecordBatch.numCols; ++i < n;) {
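+                // Propagate the schema field name onto the JSON column so its
+                // name property matches the binary column's in toEqualVector.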
+                (jsonRecordBatch.columns[i] as any).name = jsonRecordBatch.schema.fields[i].name;
                 (expect(jsonRecordBatch.columns[i]) as any).toEqualVector(binaryRecordBatch.columns[i]);
             }
         }
     });
 }
 
-function testTableFromBuffersIntegration() {
-    test(`json and arrow buffers report the same values`, () => {
-        debugger;
+function testTableFromBuffersIntegration(jsonData: any, arrowBuffer: Uint8Array) {
+    test(`json and arrow tables report the same values`, () => {
         expect.hasAssertions();
         const jsonTable = Table.from(jsonData);
-        const binaryTable = Table.from(arrowBuffers);
+        const binaryTable = Table.from(arrowBuffer);
         expect(jsonTable.length).toEqual(binaryTable.length);
         expect(jsonTable.numCols).toEqual(binaryTable.numCols);
         for (let i = -1, n = jsonTable.numCols; ++i < n;) {
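+            // Same as above: align the JSON column's name with its schema
+            // field before comparing against the binary column.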
+            (jsonTable.columns[i] as any).name = jsonTable.schema.fields[i].name;
             (expect(jsonTable.columns[i]) as any).toEqualVector(binaryTable.columns[i]);
         }
     });