@@ -89,7 +89,10 @@ schema.plugin(filterByOrg);
 schema.plugin(addCRUDFunctions);
 
 const streamAggregation = ({ pipeline, skip, limit, batchSize, maxTimeMS, maxScan }) => {
-  let query = Statement.aggregate(pipeline).allowDiskUse(ALLOW_AGGREGATION_DISK_USE);
+  let query = Statement
+    .aggregate(pipeline)
+    .read('secondaryPreferred')
+    .allowDiskUse(ALLOW_AGGREGATION_DISK_USE);
   if (skip !== -1) query = query.skip(skip);
   if (limit !== -1) query = query.limit(limit);
   if (!query.options) {
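The new `.read('secondaryPreferred')` call routes these aggregations to secondary replica-set members when one is available, keeping long-running pipelines off the primary. A minimal sketch of the same pattern in isolation (the `Statement` model name comes from this diff; hard-coding `allowDiskUse(true)` stands in for the module's `ALLOW_AGGREGATION_DISK_USE` flag):

```js
const mongoose = require('mongoose');

// Assumes a `Statement` model has already been registered on this connection.
const Statement = mongoose.model('Statement');

const buildAggregation = pipeline =>
  Statement
    .aggregate(pipeline)
    // Prefer secondaries; fall back to the primary if none are reachable.
    .read('secondaryPreferred')
    // Let MongoDB spill large sort/group stages to disk instead of erroring.
    .allowDiskUse(true);
```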
@@ -153,19 +156,19 @@ const setCachedAggregation = ({ client, dataKey, isRunningKey, stream }) =>
  * @return {Stream} stream
  */
 schema.statics.aggregateByAuth = function aggregateByAuth(
-  authInfo,
-  pipeline = [],
-  {
-    skip = 0,
-    limit = -1,
-    cache = false,
-    batchSize = 100,
-    getStream = false,
-    maxTimeMS = MAX_TIME_MS,
-    maxScan = MAX_SCAN,
-  },
-  cb = () => {}
-) {
+  authInfo,
+  pipeline = [],
+  {
+    skip = 0,
+    limit = -1,
+    cache = false,
+    batchSize = 100,
+    getStream = false,
+    maxTimeMS = MAX_TIME_MS,
+    maxScan = MAX_SCAN,
+  },
+  cb = () => {}
+) {
   return parseQuery(pipeline, {
     organisation: getOrgFromAuthInfo(authInfo)
   }).then(async (parsedPipeline) => {
@@ -227,9 +230,9 @@ schema.statics.aggregateByAuth = function aggregateByAuth(
       }
       return dataKeyTTL >= 5 ? cachedStreamPromise : streamPromise;
     })
-      .then((stringResult) => {
-        cb(null, stringResult);
-      });
+      .then((stringResult) => {
+        cb(null, stringResult);
+      });
   }).catch(cb);
 };
 
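Given the signature and defaults visible above, a call site might look like the following sketch. The option names and their defaults come from the destructuring in this diff; the `authInfo` shape and the pipeline stages are illustrative only:

```js
// Hypothetical caller; promisifies the callback-style aggregateByAuth static.
const countVisibleStatements = authInfo =>
  new Promise((resolve, reject) => {
    Statement.aggregateByAuth(
      authInfo, // scoped to the caller's organisation via parseQuery/getOrgFromAuthInfo
      [{ $match: { voided: false } }, { $count: 'statements' }],
      {
        skip: 0,
        limit: -1,        // -1 skips the .limit() stage in streamAggregation
        cache: false,
        batchSize: 100,
        getStream: false, // the callback then receives the serialised result
      },
      (err, result) => (err ? reject(err) : resolve(result))
    );
  });
```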