[DEV-6353] Convert DataProcessor to TS

parent 2bdcacd11f
commit 6fcf72f1b6
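The change follows the usual easejs-to-TypeScript pattern: string-keyed private members and an easejs-style constructor become `private readonly` constructor parameters on a native exported class. A schematic sketch only (trimmed; the full conversion is in the diff below):

// Before (easejs):
//
//     module.exports = Class( 'DataProcessor',
//     {
//         'private _filter': null,
//         ...
//     } );
//
// After (TypeScript): dependencies are injected as private readonly
// constructor parameters and the class is exported directly.
export class DataProcessor
{
    constructor(
        private readonly _filter: any,
        private readonly _dapif: any,
        private readonly _meta_source: any,
        private readonly _stagingCtor: any,
    ) {}
}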
@@ -18,13 +18,8 @@
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

'use strict';

const { Class } = require( 'easejs' );

const { QuoteDataBucket, StagingBucket } = require( '../../' ).bucket;

import { PositiveInteger } from "../../numeric";
import { UserRequest } from "./UserRequest";

/**
 * Process data provided by the client
@@ -32,45 +27,25 @@ const { QuoteDataBucket, StagingBucket } = require( '../../' ).bucket;
 * TODO: This contains Data API and bucket merging logic that is better done
 * elsewhere.
 */
module.exports = Class( 'DataProcessor',
export class DataProcessor
{
    /**
     * Bucket filter
     * @type {Object}
     */
    'private _filter': null,

    /**
     * Construct Data API manager
     * @type {function()}
     */
    'private _dapif': null,

    /**
     * Metadata source
     * @type {DapiMetaSource}
     */
    'private _metaSource': null,


    /**
     * Initialize processor
     *
     * The staging bucket constructor will be used to wrap the bucket for
     * diff-related operations.
     *
     * @param {Object} filter bucket filter
     * @param {function()} dapif data API constructor
     * @param {DapiMetaSource} meta_source metadata source
     * @param {function(Bucket)} staging_ctor staging bucket constructor
     * @param filter - bucket filter
     * @param dapif - data API constructor
     * @param meta_source - metadata source
     * @param staging_ctor - staging bucket constructor
     */
    constructor( filter, dapif, meta_source, staging_ctor )
    {
        this._filter = filter;
        this._dapif = dapif;
        this._metaSource = meta_source;
        this._stagingCtor = staging_ctor;
    },
    constructor(
        private readonly _filter: any,
        private readonly _dapif: any,
        private readonly _meta_source: any,
        private readonly _stagingCtor: any,
    ) {}


    /**
@@ -81,37 +56,52 @@ module.exports = Class( 'DataProcessor',
     * bucket values, preventing users from using us as their own personal
     * database.
     *
     * @param {Object} data bucket diff data
     * @param {UserRequest} request submitting request
     * @param {Program} program active program
     * @param data - bucket diff data
     * @param request - submitting request
     * @param program - active program
     *
     * @return {Object} processed diff
     * @return processed diff
     */
    'public processDiff'( data, request, program, bucket, quote )
    processDiff(
        data: Record<string, any>,
        request: UserRequest,
        program: any,
        bucket: any,
        quote: any,
    ): Record<string, any>
    {
        const filtered = this.sanitizeDiff( data, request, program );
        const dapi_manager = this._dapif( program.apis, request, quote );
        const staging = this._stagingCtor( bucket );

        // forbidBypass will force diff generation on initQuote
        staging.setValues( filtered, true );
        staging.setValues( filtered );
        staging.forbidBypass();

        program.initQuote( staging, true );

        const diff = staging.getDiff();
        const rdiff: Record<string, any> = {};

        // array of promises for any dapi requests
        const [ dapis, meta_clear ] = this._triggerDapis(
            dapi_manager, program, staging.getDiff(), staging
            dapi_manager, program, diff, staging
        );

        for( let diff_key in diff )
        {
            rdiff[ diff_key ] = staging.getOriginalDataByName( diff_key );
        }

        staging.commit();

        return {
            filtered: filtered,
            dapis: dapis,
            meta_clear: meta_clear,
            rdiff: rdiff,
        };
    },
    }


    /**
@@ -124,22 +114,25 @@ module.exports = Class( 'DataProcessor',
     * `permit_null` should be used only in the case of bucket diffs, which
     * contain nulls as terminators.
     *
     * @param {Object} data client-provided data
     * @param {UserRequest} request client request
     * @param {Program} program active program
     * @param data - client-provided data
     * @param request - client request
     * @param program - active program
     *
     * @return {Object} filtered data
     * @return filtered data
     */
    'public sanitizeDiff'( data, request, program )
    sanitizeDiff(
        data: Record<string, any>,
        request: UserRequest,
        program: any,
    ): Record<string, any>
    {
        if ( !request.getSession().isInternal() )
        {
            this._cleanInternals( data, program );
        }

        const types = program.meta.qtypes;
        return this._filter.filter( data, types, {}, true );
    },
        return this._filter.filter( data, program.meta.qtypes, {}, true );
    }


    /**
@@ -147,31 +140,37 @@ module.exports = Class( 'DataProcessor',
     *
     * Internal fields are defined by the program `program`.
     *
     * @param {Object} data bucket diff data
     * @param {Program} program active program
     *
     * @return {undefined}
     * @param data - bucket diff data
     * @param program - active program
     */
    'private _cleanInternals'( data, program )
    private _cleanInternals(
        data: Record<string, any>,
        program: any,
    ): void
    {
        for ( let id in program.internal )
        {
            delete data[ id ];
        }
    },
    }


    /**
     * Trigger metadata Data API requests
     *
     * @param {DataApiManager} dapi_manager dapi manager
     * @param {Program} program active program
     * @param {Object} data client-provided data
     * @param {Bucket} bucket active bucket
     * @param dapi_manager - dapi manager
     * @param program - active program
     * @param data - client-provided data
     * @param bucket - active bucket
     *
     * @return {undefined}
     * @return an array containing the dapis and cleared meta values
     */
    'private _triggerDapis'( dapi_manager, program, data, bucket )
    private _triggerDapis(
        dapi_manager: any,
        program: any,
        data: Record<string, any>,
        bucket: any,
    ): [ any, Record<string, any> ]
    {
        const {
            mapis = {},
@@ -188,8 +187,8 @@ module.exports = Class( 'DataProcessor',
            const { dapi } = fields[ field ];
            const indexes = dapi_fields[ field ];

            return indexes.map( i =>
                this._metaSource.getFieldData(
            return indexes.map( ( i: PositiveInteger ) =>
                this._meta_source.getFieldData(
                    field,
                    i,
                    dapi_manager,
@@ -200,7 +199,7 @@ module.exports = Class( 'DataProcessor',
        } ).reduce( ( result, x ) => result.concat( x ), [] );

        return [ dapis, clear ];
    },
    }


    /**
@@ -211,15 +210,18 @@ module.exports = Class( 'DataProcessor',
     * lookup, it wouldn't be desirable to use an old rate even though data
     * used to retrieve it has since changed.
     *
     * @param {Object.<string,Array>} fields field names and array of indexes
     * @param fields - field names and array of indexes
     *
     * @return {undefined}
     * @return cleared values
     */
    'private _genClearMetaValues'( fields )
    private _genClearMetaValues(
        fields: Record<string, any>
    ): Record<string, any>
    {
        return Object.keys( fields ).reduce( ( result, field ) =>
        return Object.keys( fields ).reduce(
            ( result: Record<string, any>, field: string ) =>
        {
            result[ field ] = fields[ field ].reduce( ( values, i ) =>
            result[ field ] = fields[ field ].reduce( ( values: any, i: any ) =>
            {
                values[ i ] = "";
                return values;
@@ -227,21 +229,24 @@ module.exports = Class( 'DataProcessor',

            return result;
        }, {} );
    },
    }


    /**
     * Determine which fields require a Data API to be triggered
     *
     * @param {Object} mapis metadata dapi descriptors
     * @param {Object} data client-provided data
     * @param mapis - metadata dapi descriptors
     * @param data - client-provided data
     *
     * @return {Object} fields with indexes in need of dapi calls
     * @return fields with indexes in need of dapi calls
     */
    'private _determineDapiFields'( mapis, data )
    private _determineDapiFields(
        mapis: Record<string, string[]>,
        data: Record<string, any>
    ): Record<string, any>
    {
        return Object.keys( mapis ).reduce(
            ( result, src_field ) =>
            ( result: any, src_field: string ) =>
            {
                const fdata = data[ src_field ];
@@ -253,7 +258,7 @@ module.exports = Class( 'DataProcessor',
                const fields = mapis[ src_field ];

                // get each index that changed
                fields.forEach( field =>
                fields.forEach( (field: string) =>
                {
                    result[ field ] = result[ field ] || [];

@@ -272,25 +277,30 @@ module.exports = Class( 'DataProcessor',
            },
            {}
        );
    },
    }


    /**
     * Map data from bucket to dapi inputs
     *
     * @param {Object} dapi Data API descriptor
     * @param {Bucket} bucket active (source) bucket
     * @param {number} index field index
     * @param {Object} diff_data client-provided data
     * @param dapi - Data API descriptor
     * @param bucket - active (source) bucket
     * @param index - field index
     * @param diff_data - client-provided data
     *
     * @return {Object} key/value dapi input data
     * @return key/value dapi input data
     */
    'private _mapDapiData'( dapi, bucket, index, diff_data )
    private _mapDapiData(
        dapi: any,
        bucket: any,
        index: PositiveInteger,
        diff_data: Record<string, any>,
    ): Record<string, any>
    {
        const { mapsrc } = dapi;

        return Object.keys( mapsrc ).reduce(
            ( result, srcid ) =>
            ( result: any, srcid: any ) =>
            {
                const bucketid = mapsrc[ srcid ];
@@ -314,5 +324,5 @@ module.exports = Class( 'DataProcessor',
            },
            {}
        );
    },
} );
    }
};
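For orientation, a minimal usage sketch of the class as converted above. The collaborator objects and surrounding values are declared stand-ins (assumptions), not real liza components:

import { DataProcessor } from "./DataProcessor";  // path is illustrative

// Stand-ins for the real collaborators: bucket filter, DataApiManager
// constructor, DapiMetaSource, and StagingBucket constructor.
declare const filter: any, dapif: any, meta_source: any, staging_ctor: any;
declare const request: any, program: any, bucket: any, quote: any;

const processor = new DataProcessor( filter, dapif, meta_source, staging_ctor );

// processDiff() returns the filtered diff, an array of pending Data API
// promises, the metadata values to clear, and the reverse (original) diff.
const { filtered, dapis, meta_clear, rdiff } =
    processor.processDiff( { foo: [ "bar" ] }, request, program, bucket, quote );

Promise.all( dapis ).then( () => { /* all dapi lookups have settled */ } );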
@@ -1,417 +0,0 @@
/**
 * Manages DataAPI requests and return data
 *
 * Copyright (C) 2010-2019 R-T Specialty, LLC.
 *
 * This file is part of the Liza Data Collection Framework.
 *
 * liza is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

'use strict';

const { Class } = require( 'easejs' );
const { expect } = require( 'chai' );
const Sut = require( '../../../' ).server.request.DataProcessor;
describe( 'DataProcessor', () =>
{
    [
        {
            label: "strips internal field data when not internal",
            data: {
                internal: [ "foo", "bar" ],
                foo: [ "bar", "baz" ],
            },
            internals: { internal: true },
            internal: false,
            expected: {
                foo: [ "bar", "baz" ],
            },
        },
        {
            label: "keeps internal field data when internal",
            data: {
                internal: [ "foo", "bar" ],
                foo: [ "bar", "baz" ],
            },
            internals: { internal: true },
            internal: true,
            expected: {
                internal: [ "foo", "bar" ],
                foo: [ "bar", "baz" ],
            },
        },
    ].forEach( ( { label, internal, data, internals = {}, expected } ) =>
    {
        const { request, program, sut } =
            createSutFromStubs( internal, internals );

        it( label, () =>
        {
            expect(
                sut.processDiff( data, request, program ).filtered
            ).to.deep.equal( expected );
        } );
    } );
    it( "passes data to bucket filter", () =>
    {
        const { request, program, meta_source } = createStubs();
        const data = {};
        const types = {};

        program.meta.qtypes = types;

        const filter = {
            filter( given_data, given_types, given_ignore, given_null )
            {
                expect( given_data ).to.equal( data );
                expect( given_types ).to.equal( types );
                expect( given_null ).to.equal( true );

                // not used
                expect( given_ignore ).to.deep.equal( {} );

                data.filtered = true;
            }
        };

        Sut( filter, () => {}, meta_source, createStubStagingBucket )
            .processDiff( data, request, program );

        expect( data.filtered ).to.equal( true );
    } );


    it( "instantiates dapi manager using program and session", done =>
    {
        const { filter, request, program } = createStubs();

        const dapi_factory = ( given_apis, given_request ) =>
        {
            expect( given_apis ).to.equal( program.apis );
            expect( given_request ).to.equal( request );

            done();
        };

        Sut( filter, dapi_factory, null, createStubStagingBucket )
            .processDiff( {}, request, program );
    } );
    it( "invokes dapi manager when monitored bucket value changes", () =>
    {
        const triggered = {};

        // g prefix = "given"
        const getFieldData = function( gfield, gindex, gdapim, gdapi, gdata)
        {
            triggered[ gdapi.name ] = triggered[ gdapi.name ] || [];
            triggered[ gdapi.name ][ gindex ] = arguments;

            return Promise.resolve( true );
        };

        const dapi_manager = {};

        const {
            request,
            program,
            filter,
            meta_source,
        } = createStubs( false, {}, getFieldData );

        const sut = Sut(
            filter,
            () => dapi_manager,
            meta_source,
            createStubStagingBucket
        );

        program.meta.fields = {
            foo: {
                dapi: {
                    name: 'dapi_foo',
                    mapsrc: { ina: 'src', inb: 'src1' },
                },
            },
            bar: {
                dapi: {
                    name: 'dapi_bar',
                    mapsrc: { ina: 'src1' },
                },
            },
            baz: {
                dapi: {
                    name: 'dapi_no_call',
                    mapsrc: {},
                },
            },
        };
        program.mapis = {
            src: [ 'foo', 'bar' ], // change
            src1: [ 'foo' ], // change
            src2: [ 'baz' ], // do not change
        };

        // data changed
        const data = {
            src: [ 'src0', 'src1' ],
            src1: [ undefined, 'src11' ],
        };

        const bucket = createStubBucket( {
            src: [ 'bsrc0', 'bsrc1' ],
            src1: [ 'bsrc10', 'bsrc11' ],
        } );

        const { dapis, meta_clear } = sut.processDiff(
            data, request, program, bucket
        );

        const expected = {
            dapi_foo: [
                {
                    name: 'foo',
                    data: {
                        ina: data.src[ 0 ],
                        inb: bucket.data.src1[ 0 ],
                    },
                },
                {
                    name: 'foo',
                    data: {
                        ina: data.src[ 1 ],
                        inb: data.src1[ 1 ],
                    },
                },
            ],
            dapi_bar: [
                undefined,
                {
                    name: 'bar',
                    data: {
                        ina: data.src1[ 1 ],
                    },
                },
            ],
        };

        const expected_clear = {
            foo: [ "", "" ],
            bar: [ "", "" ],
        };
        for ( let dapi_name in expected )
        {
            let expected_call = expected[ dapi_name ];

            for ( let i in expected_call )
            {
                let chk = expected_call[ i ];

                if ( chk === undefined )
                {
                    continue;
                }

                let [ gfield, gindex, gdapi_manager, gdapi, gdata ] =
                    triggered[ dapi_name ][ i ];

                expect( gfield ).to.equal( chk.name );
                expect( gdapi.name ).to.equal( dapi_name );
                expect( +gindex ).to.equal( +i );
                expect( gdapi_manager ).to.equal( dapi_manager );

                // see mapsrc
                expect( gdata ).to.deep.equal( chk.data );
            }
        }

        expect( triggered.dapi_no_call ).to.equal( undefined );

        expect( meta_clear ).to.deep.equal( expected_clear );

        return Promise.all( dapis );
    } );
    it( "check _mapDapiData default values", () =>
    {
        const triggered = {};

        // g prefix = "given"
        const getFieldData = function( gfield, gindex, gdapim, gdapi, gdata)
        {
            triggered[ gdapi.name ] = triggered[ gdapi.name ] || [];
            triggered[ gdapi.name ][ gindex ] = arguments;

            expect( gdata ).to.deep.equal( { ina: '', inb: [] } );

            return Promise.resolve( true );
        };

        const dapi_manager = {};

        const {
            request,
            program,
            filter,
            meta_source,
        } = createStubs( false, {}, getFieldData );

        const sut = Sut(
            filter,
            () => dapi_manager,
            meta_source,
            createStubStagingBucket
        );

        program.meta.fields = {
            foo: {
                dapi: {
                    name: 'dapi_foo',
                    mapsrc: { ina: 'src', inb: 'src1' },
                },
            },
        };
        program.mapis = {
            src1: [ 'foo' ], // change
        };

        // data changed
        const data = {
            src: [ 'src0', '' ],
            src1: [ undefined, '' ],
        };

        const bucket = createStubBucket( {
            src: [ 'bsrc0', '' ],
            src1: [ 'bsrc10', undefined],
        } );

        const { dapis } = sut.processDiff(
            data, request, program, bucket
        );

        return Promise.all( dapis );
    } );
} );
function createSutFromStubs( /* see createStubs */ )
{
    const { request, program, filter, meta_source } =
        createStubs.apply( null, arguments );

    return {
        request: request,
        program: program,
        filter: filter,
        meta_source: meta_source,

        sut: Sut(
            filter,
            () => {},
            meta_source,
            createStubStagingBucket
        ),
    };
}


function createStubs( internal, internals, getFieldData )
{
    return {
        request: createStubUserRequest( internal || false ),
        program: createStubProgram( internals || {} ),
        filter: { filter: _ => _ },
        meta_source: createStubDapiMetaSource( getFieldData ),
    };
}


function createStubUserRequest( internal )
{
    return {
        getSession: () => ( {
            isInternal: () => internal
        } )
    };
}
function createStubProgram( internals )
{
    return {
        internal: internals,
        meta: { qtypes: {}, fields: {} },
        apis: {},

        initQuote() {},
    };
}


function createStubDapiMetaSource( getFieldData )
{
    return {
        getFieldData: getFieldData ||
            function( field, index, dapi_manager, dapi, data ){},
    };
}


function createStubBucket( data )
{
    return {
        data: data,

        getDataByName( name )
        {
            return data[ name ];
        },
    };
}


function createStubStagingBucket( bucket )
{
    let data = {};

    return {
        getDataByName( name )
        {
            return bucket.getDataByName( name );
        },

        setValues( values )
        {
            data = values;
        },

        forbidBypass() {},
        getDiff()
        {
            return data;
        },
        commit() {},
    };
}
@@ -0,0 +1,804 @@
/**
 * Manages DataAPI requests and return data
 *
 * Copyright (C) 2010-2019 R-T Specialty, LLC.
 *
 * This file is part of the Liza Data Collection Framework.
 *
 * liza is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
import { DataProcessor as Sut } from "../../../src/server/request/DataProcessor";

import { expect, use as chai_use } from 'chai';
import { DocumentId } from "../../../src/document/Document";
import { PositiveInteger } from "../../../src/numeric";
import { UserRequest } from "../../../src/server/request/UserRequest";
import { ServerSideQuote } from "../../../src/server/quote/ServerSideQuote";

chai_use( require( 'chai-as-promised' ) );
describe( 'DataProcessor', () =>
{
    [
        {
            label: "strips internal field data when not internal",
            data: {
                internal: [ "foo", "bar" ],
                foo: [ "bar", "baz" ],
            },
            internals: { internal: true },
            internal: false,
            expected: {
                foo: [ "bar", "baz" ],
            },
        },
        {
            label: "keeps internal field data when internal",
            data: {
                internal: [ "foo", "bar" ],
                foo: [ "bar", "baz" ],
            },
            internals: { internal: true },
            internal: true,
            expected: {
                internal: [ "foo", "bar" ],
                foo: [ "bar", "baz" ],
            },
        },
    ].forEach( ( { label, internal, data, internals, expected } ) =>
    {
        const { request, program, sut, quote } =
            createSutFromStubs( internal, internals );

        const bucket = createStubBucket( data );

        it( label, () =>
        {
            expect(
                sut.processDiff( data, request, program, bucket, quote ).filtered
            ).to.deep.equal( expected );
        } );
    } );
    [
        {
            label: "Original data is saved to the delta, not new data",
            old_data: {
                foo: [ "bar_old", "baz" ],
            },
            new_data: {
                foo: [ "bar_new", "baz" ],
            },
            expected_data: {
                foo: [ "bar_old", "baz" ],
            },
        },
    ].forEach( ( { label, old_data, new_data, expected_data } ) =>
    {
        const {
            request,
            program,
            quote,
            filter,
            dapi_constructor,
            meta_source
        } = createStubs();

        const sut = new Sut(
            filter,
            dapi_constructor,
            meta_source,
            createStubStagingBucket
        );

        const bucket = createStubBucket( old_data );

        it( label, () =>
        {
            const actual = sut.processDiff(
                new_data,
                request,
                program,
                bucket,
                quote,
            );

            expect( actual.rdiff ).to.deep.equal( expected_data );
        } );
    } );
    it( "#processDiff.rdelta_data is undefined with empty staging diff", () =>
    {
        const {
            request,
            program,
            quote,
            filter,
            dapi_constructor,
            meta_source
        } = createStubs();

        const sut = new Sut(
            filter,
            dapi_constructor,
            meta_source,
            createStubStagingBucket
        );

        const data = {
            foo: [ "bar", "baz" ],
        };

        const diff = {};

        const bucket = createStubBucket( data );
        const actual = sut.processDiff( diff, request, program, bucket, quote );

        expect( actual.rdelta_data ).to.deep.equal( undefined );

    } );
    it( "passes data to bucket filter", () =>
    {
        const {
            request,
            program,
            meta_source,
            dapi_constructor,
            quote,
        } = createStubs();

        const data: { filtered?: boolean } = {};
        const types = {};

        program.meta.qtypes = types;

        const filter = {
            filter(
                given_data: Record<string, any>,
                given_types: Record<string, any>,
                given_ignore: any,
                given_null: boolean,
            ) {
                expect( given_data ).to.equal( data );
                expect( given_types ).to.equal( types );
                expect( given_null ).to.equal( true );

                // not used
                expect( given_ignore ).to.deep.equal( {} );

                data.filtered = true;

                return data;
            },

            filterValues(
                values: string[],
                _filter: string,
                _permit_null: boolean,
            ) {
                return values;
            }
        };

        const bucket = createStubBucket( data );

        new Sut(
            filter,
            dapi_constructor,
            meta_source,
            createStubStagingBucket,
        ).processDiff( data, request, program, bucket, quote );

        expect( data.filtered ).to.equal( true );
    } );
    it( "instantiates dapi manager using program and session", done =>
    {
        const { filter, request, program, meta_source, quote } = createStubs();

        let dapi_constructor = (
            given_apis: any,
            given_request: UserRequest,
            _quote: ServerSideQuote
        ) => {
            expect( given_apis ).to.equal( program.apis );
            expect( given_request ).to.equal( request );

            done();

            return createStubDataApiManager();
        };

        const bucket = createStubBucket( {} );

        new Sut(
            filter,
            dapi_constructor,
            meta_source,
            createStubStagingBucket
        ).processDiff( {}, request, program, bucket, quote );

    } );
    it( "invokes dapi manager when monitored bucket value changes", () =>
    {
        const triggered: { [key: string]: any[] } = {};

        // g prefix = "given"
        const meta_source = {
            getFieldData(
                _gfield: any,
                gindex: PositiveInteger,
                _gdapim: any,
                gdapi: { name: string },
                _gdata: any,
            )
            {
                triggered[ gdapi.name ] = triggered[ gdapi.name ] || [];
                triggered[ gdapi.name ][ gindex ] = arguments;

                return Promise.resolve( true );
            }
        }

        const dapi_manager = createStubDataApiManager();

        const {
            request,
            program,
            filter,
            quote,
        } = createStubs( false );

        const sut = new Sut(
            filter,
            () => dapi_manager,
            meta_source,
            createStubStagingBucket,
        );

        program.meta.fields = {
            foo: {
                dapi: {
                    name: 'dapi_foo',
                    mapsrc: { ina: 'src', inb: 'src1' },
                },
            },
            bar: {
                dapi: {
                    name: 'dapi_bar',
                    mapsrc: { ina: 'src1' },
                },
            },
            baz: {
                dapi: {
                    name: 'dapi_no_call',
                    mapsrc: {},
                },
            },
        };
        program.mapis = {
            src: [ 'foo', 'bar' ], // change
            src1: [ 'foo' ], // change
            src2: [ 'baz' ], // do not change
        };

        // data changed
        const data = {
            src: [ 'src0', 'src1' ],
            src1: [ undefined, 'src11' ],
        };

        const bucket = createStubBucket( {
            src: [ 'bsrc0', 'bsrc1' ],
            src1: [ 'bsrc10', 'bsrc11' ],
        } );

        const { dapis, meta_clear } = sut.processDiff(
            data, request, program, bucket, quote
        );

        const expected: { [key: string]: any[] } = {
            dapi_foo: [
                {
                    name: 'foo',
                    data: {
                        ina: data.src[ 0 ],
                        inb: bucket.data.src1[ 0 ],
                    },
                },
                {
                    name: 'foo',
                    data: {
                        ina: data.src[ 1 ],
                        inb: data.src1[ 1 ],
                    },
                },
            ],
            dapi_bar: [
                undefined,
                {
                    name: 'bar',
                    data: {
                        ina: data.src1[ 1 ],
                    },
                },
            ],
        };
        const expected_clear = {
            foo: [ "", "" ],
            bar: [ "", "" ],
        };

        for ( let dapi_name in expected )
        {
            let expected_call = expected[ dapi_name ];

            for ( let i in expected_call )
            {
                let chk = expected_call[ i ];

                if ( chk === undefined )
                {
                    continue;
                }

                let [ gfield, gindex, gdapi_manager, gdapi, gdata ] =
                    triggered[ dapi_name ][ i ];

                expect( gfield ).to.equal( chk.name );
                expect( gdapi.name ).to.equal( dapi_name );
                expect( +gindex ).to.equal( +i );
                expect( gdapi_manager ).to.equal( dapi_manager );

                // see mapsrc
                expect( gdata ).to.deep.equal( chk.data );
            }
        }

        expect( triggered.dapi_no_call ).to.equal( undefined );
        expect( meta_clear ).to.deep.equal( expected_clear );

        return Promise.all( dapis );
    } );
    it( "check _mapDapiData default values", () =>
    {
        const triggered: { [key: string]: any[] }= {};

        // g prefix = "given"
        const meta_source = {
            getFieldData(
                _gfield: any,
                gindex: any,
                _gdapim: any,
                gdapi: any,
                gdata: any,
            )
            {
                triggered[ gdapi.name ] = triggered[ gdapi.name ] || [];
                triggered[ gdapi.name ][ gindex ] = arguments;

                expect( gdata ).to.deep.equal( { ina: '', inb: [] } );

                return Promise.resolve( true );
            }
        }

        const {
            request,
            program,
            filter,
            quote,
        } = createStubs( false );

        const sut = new Sut(
            filter,
            createStubDataApiManager,
            meta_source,
            createStubStagingBucket
        );

        program.meta.fields = {
            foo: {
                dapi: {
                    name: 'dapi_foo',
                    mapsrc: { ina: 'src', inb: 'src1' },
                },
            },
        };

        program.mapis = {
            src1: [ 'foo' ], // change
        };

        // data changed
        const data = {
            src: [ 'src0', '' ],
            src1: [ undefined, '' ],
        };

        const bucket = createStubBucket( {
            src: [ 'bsrc0', '' ],
            src1: [ 'bsrc10', undefined],
        } );

        const { dapis } = sut.processDiff(
            data, request, program, bucket, quote
        );

        return Promise.all( dapis );
    } );
} );
function createSutFromStubs(
    internal: boolean = false,
    internals: { internal: boolean } = { internal: false },
)
{
    const {
        request,
        program,
        filter,
        meta_source,
        dapi_constructor,
        quote
    } = createStubs(internal, internals);

    return {
        request: request,
        program: program,
        filter: filter,
        meta_source: meta_source,
        quote: quote,

        sut: new Sut(
            filter,
            dapi_constructor,
            meta_source,
            createStubStagingBucket
        ),
    };
}


function createStubs(
    internal: boolean = false,
    internals: { internal: boolean } = { internal: false },
)
{
    return {
        request: createStubUserRequest( internal ),
        program: createStubProgram( internals ),
        filter: createStubFilter(),
        dapi_constructor: createStubDataApiContructor(),
        meta_source: createStubDapiMetaSource(),
        quote: createStubQuote(),
    };
}
function createStubUserRequest( internal: boolean )
{
    return {
        getSession: () => ( {
            isInternal: () => internal
        } )
    };
}


function createStubProgram( internals: { internal: boolean } )
{
    return {
        ineligibleLockCount: 0,
        internal: internals,
        meta: {
            arefs: {},
            fields: {},
            groups: {},
            qdata: {},
            qtypes: {},
        },
        mapis: {},
        apis: {},

        getId(){ return 'Foo'; },

        initQuote() {},
    };
}
function createStubFilter()
{
    return {
        filter(
            data: Record<string, any>,
            _key_types: Record<string, any>,
            _ignore_types: Record<string, boolean>,
            _permit_null: boolean,
        ) {
            return data;
        },

        filterValues(
            values: string[],
            _filter: string,
            _permit_null: boolean,
        ) {
            return values;
        }
    }
}


function createStubDataApiContructor()
{
    return (
        _apis: any,
        _request: UserRequest,
        _quote: ServerSideQuote
    ) => { return createStubDataApiManager(); };
}
function createStubDataApiManager()
{
    return {
        setApis( _apis: any ) { return this; },

        getApiData(
            _api: string,
            _data: any,
            _callback: any,
            _name: string,
            _index: PositiveInteger,
            _bucket: any,
            _fc: any,
        ){ return this; },

        getPendingApiCalls() { return {}; },

        fieldStale( _field: string, _index: PositiveInteger, _stale?: boolean )
        {
            return this;
        },

        fieldNotReady( _id: any, _i: PositiveInteger, _bucket: any )
        {
            return;
        },

        processFieldApiCalls() { return this; },

        setFieldData(
            _name: string,
            _index: PositiveInteger,
            _data: Record<string, any>,
            _value: string,
            _label: string,
            _unchanged: boolean,
        ) { return this; },

        triggerFieldUpdate(
            _name: string,
            _index: PositiveInteger,
            _value: string,
            _label: string,
            _unchanged: boolean,
        ) { return false; },

        hasFieldData( _name: string, _index: PositiveInteger ) { return true; },

        clearFieldData(
            _name: string,
            _index: PositiveInteger,
            _trigger_event: boolean,
        ) { return this; },

        clearPendingApiCall( _id: string ) { return this; },

        expandFieldData(
            _name: string,
            _index: PositiveInteger,
            _bucket: any,
            _map: any,
            _predictive: boolean,
            _diff: any,
        ) { return this; },

        getDataExpansion(
            _name: string,
            _index: PositiveInteger,
            bucket: any,
            _map: any,
            _predictive: boolean,
            _diff: any,
        ) { return bucket; },
    };
}
function createStubQuote()
{
    let quote_data: Record<string, any> = {};

    return {
        getRatedDate()
        {
            return 1572292453;
        },

        setRatedDate( _timestamp: any )
        {
            return this;
        },

        getProgram()
        {
            return createStubProgram( { internal: false } );
        },

        getProgramId()
        {
            return 'Bar';
        },

        getId()
        {
            return <DocumentId>123;
        },

        getCurrentStepId()
        {
            return 1;
        },

        setExplicitLock( _reason: string, _step: number )
        {
            return this;
        },

        setLastPremiumDate( _timestamp: any )
        {
            return this;
        },

        getLastPremiumDate()
        {
            return 1572292453;
        },

        setRateBucket( _bucket: any )
        {
            return this;
        },

        setRatingData( data: Record<string, any> )
        {
            quote_data = data;

            return this;
        },

        getRatingData()
        {
            return quote_data;
        },

        getBucket()
        {
            return;
        }
    };
}
function createStubDapiMetaSource()
{
    return {
        getFieldData(
            _field: string,
            _index: PositiveInteger,
            _dapi_manager: any,
            _dapi: any,
            _data: Record<string, any>,
        )
        {
            return new Promise<any>( () => {} );
        },
    };
}


function createStubBucket( data: Record<string, any> )
{
    return {
        data: data,

        getDataByName( name: string )
        {
            return data[ name ];
        },
    };
}
function createStubStagingBucket( bucket: any )
{
    let bucket_data = {};

    return {
        setCommittedValues( _data: Record<string, any> ) { return this; },

        forbidBypass() { return this; },

        setValues( values: Record<string, any> )
        {
            bucket_data = values; return this;
        },

        overwriteValues( _data: Record<string, any> ) { return this; },

        getDiff() { return bucket_data; },

        getFilledDiff() { return bucket.data || { foo: 'Bar' }; },

        revert( _evented?: boolean ) { return this; },

        commit( _store?: { old: Record<string, any> } ) { return this; },

        clear() { return this; },

        each( _callback: ( value: any, name: string ) => void )
        {
            return this;
        },

        getDataByName( name: string ) { return bucket.getDataByName( name ); },

        getOriginalDataByName( name: string )
        {
            return bucket.getDataByName( name );
        },

        getDataJson() { return 'Foo'; },

        getData() { return [ ( _Foo123: string ) => 'Bar']; },

        filter(
            _pred: ( name: string ) => boolean,
            _c: ( value: any, name: string ) => void
        )
        {
            return this;
        },

        hasIndex( _name: string, _i: PositiveInteger ) { return true; },

        isDirty() { return false; },
    };
}
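The staging-bucket stub above is closure-based: whatever `processDiff()` hands to `setValues()` is exactly what `getDiff()` later reports, while `getOriginalDataByName()` always reads from the wrapped bucket, which is what the "Original data is saved to the delta" case relies on. A small standalone illustration of that stub behaviour (not part of the suite; assumes the stub factories above are in scope):

// Wrap a bucket whose original value for "foo" is [ "orig" ].
const staging = createStubStagingBucket(
    createStubBucket( { foo: [ "orig" ] } )
);

// Simulate processDiff() writing the filtered diff into staging.
staging.setValues( { foo: [ "new" ] } );

staging.getDiff();                      // => { foo: [ "new" ] }
staging.getOriginalDataByName( "foo" ); // => [ "orig" ] (from the wrapped bucket)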