diff --git a/src/bucket/QuoteDataBucket.d.ts b/src/bucket/QuoteDataBucket.d.ts
new file mode 100644
index 0000000..7f1c8e6
--- /dev/null
+++ b/src/bucket/QuoteDataBucket.d.ts
@@ -0,0 +1,123 @@
+/**
+ * Key/value store
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of liza.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+
+import { PositiveInteger } from "../numeric";
+
+
+/**
+ * General key/value store for document
+ *
+ * The term "Quote" here is an artifact from the initial design of the
+ * system used for insurance quoting. It will be renamed.
+ *
+ * @todo Rename to DocumentDataBucket
+ */
+export declare class QuoteDataBucket
+{
+ /**
+ * Triggered when data in the bucket is updated, before it's committed
+ */
+ static readonly EVENT_UPDATE: string;
+
+
+ /**
+ * Explicitly sets the contents of the bucket
+ *
+ * @param data - associative array of the data
+ */
+ setValues( data: Record<string, any> ): this;
+
+
+ /**
+ * Alias of setValues
+ */
+ setCommittedValues(): this;
+
+
+ /**
+ * Clears all data from the bucket
+ */
+ clear(): this;
+
+
+ /**
+ * Overwrites values in the original bucket
+ *
+ * For this bucket, overwriteValues() is an alias for setValues() without
+ * index merging. However, other Bucket implementations may handle it
+ * differently.
+ *
+ * @param data - associative array of the data
+ */
+ overwriteValues( data: Record<string, any> ): this;
+
+
+ /**
+ * Calls a function for each of the values in the bucket
+ *
+ * Note: This format is intended to be consistent with Array.forEach()
+ *
+ * @param callback - function to call for each value in the bucket
+ */
+ each( callback: ( val: any, key: string) => {} ): this;
+
+
+ /**
+ * Calls a function for each of the values in the bucket matching the
+ * given predicate
+ *
+ * @param pred - predicate
+ * @param c - function to call for each value in the bucket
+ */
+ filter(
+ pred: ( key: string ) => {},
+ _c: ( val: any, key: string) => {}
+ ): this;
+
+
+ /**
+ * Returns the data for the requested field
+ *
+ * @param name - name of the field (with or without trailing brackets)
+ *
+ * @return data for the field, or empty array if none
+ */
+ getDataByName( name: string ): any;
+
+
+ /**
+ * Returns the data as a JSON string
+ *
+ * @return data represented as JSON
+ */
+ getDataJson(): string;
+
+
+ /**
+ * Return raw bucket data
+ *
+ * TODO: remove; breaks encapsulation
+ *
+ * @return raw bucket data
+ */
+ getData(): Record<string, any>;
+}
diff --git a/src/bucket/StagingBucket.d.ts b/src/bucket/StagingBucket.d.ts
new file mode 100644
index 0000000..a96341f
--- /dev/null
+++ b/src/bucket/StagingBucket.d.ts
@@ -0,0 +1,206 @@
+/**
+ * StagingBucket class
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import { PositiveInteger } from "../numeric";
+
+
+export type StagingBucketConstructor = (
+ bucket: StagingBucket
+) => StagingBucket;
+
+
+/**
+ * Stages and merges values into underlying key/value store
+ */
+export declare class StagingBucket
+{
+ /**
+ * Analogous to setValues(), but immediately commits the changes
+ *
+ * This still calls setValues() to ensure all events are properly kicked
+ * off.
+ *
+ * @param data - data to set and commit
+ */
+ setCommittedValues( data: Record<string, any> ): this;
+
+
+ /**
+ * Prevent #setCommittedValues from bypassing staging
+ *
+ * When set, #setCommittedValues will act as an alias of #setValues.
+ */
+ forbidBypass(): this;
+
+
+ /**
+ * Explicitly sets the contents of the bucket
+ *
+ * Because JSON serializes all undefined values to `null`, only the
+ * final null in a diff is considered terminating; the rest are
+ * converted into `undefined`. Therefore, it is important that all
+ * truncations include no elements in the vector after the truncating null.
+ *
+ * @param given_data - associative array of the data
+ */
+ setValues( given_data: Record<string, any> ): this;
+
+
+ /**
+ * Overwrites values in the original bucket
+ *
+ * @param data - associative array of the data
+ */
+ overwriteValues( data: Record<string, any> ): this;
+
+
+ /**
+ * Returns staged data
+ *
+ * @return staged data
+ */
+ getDiff(): Record<string, any>;
+
+
+ /**
+ * Returns a field-oriented diff filled with all values rather than a
+ * value-oriented diff
+ *
+ * Only the fields that have changed are returned. Each field contains its
+ * actual value---not the diff representation of what portions of the field
+ * have changed.
+ *
+ * @return filled diff
+ */
+ getFilledDiff(): Record<string, any>;
+
+
+ /**
+ * Reverts staged changes, preventing them from being committed
+ *
+ * This will also generate a diff and raise the same events that would be
+ * raised by setting values in the conventional manner, allowing reverts to
+ * transparently integrate with the remainder of the system.
+ *
+ * @param evented - whether to emit events as part of the revert
+ */
+ revert( evented?: boolean ): this;
+
+
+ /**
+ * Commits staged changes, merging them with the bucket
+ *
+ * @param store - object to save old staged values to
+ */
+ commit( store?: { old: Record<string, any> } ): this
+
+
+ /**
+ * Clears all data from the bucket
+ */
+ clear(): this;
+
+
+ /**
+ * Calls a function for each of the values in the bucket
+ *
+ * @param callback - function to call for each value in the bucket
+ */
+ each( callback: ( value: any, name: string ) => void ): this;
+
+
+ /**
+ * Returns the data for the requested field
+ *
+ * WARNING: This can be a potentially expensive operation if there is a
+ * great deal of staged data. The staged data is merged with the bucket data
+ * on each call. Do not make frequent calls to retrieve the same data. Cache
+ * it instead.
+ *
+ * @param name - field name (with or without trailing brackets)
+ *
+ * @return data for the field, or empty array if none
+ */
+ getDataByName( name: string ): Record<string, any>;
+
+
+ /**
+ * Returns original bucket data by name, even if there is data staged atop
+ * of it
+ *
+ * There is no additional overhead of this operation versus getDataByName()
+ *
+ * @param name - field name (with or without trailing brackets)
+ *
+ * @return data for the field, or empty array if none
+ */
+ getOriginalDataByName( name: string ): Record<string, any>;
+
+
+ /**
+ * Returns the data as a JSON string
+ *
+ * @return data represented as JSON
+ */
+ getDataJson(): string;
+
+
+ /**
+ * Return raw bucket data
+ *
+ * todo: remove; breaks encapsulation
+ *
+ * @return raw bucket data
+ */
+ getData(): Record<string, any>;
+
+
+ /**
+ * Calls a function for each of the values in the bucket matching the
+ * given predicate
+ *
+ * @param pred - predicate
+ * @param c - function to call for each value in the bucket
+ */
+ filter(
+ pred: ( name: string ) => boolean,
+ c: ( value: any, name: string ) => void
+ ): this;
+
+
+ /**
+ * Returns true if the index for the given key exists
+ *
+ * @param name - the data key
+ * @param i - the index
+ *
+ * @return whether the key exists
+ */
+ hasIndex( name: string, i: PositiveInteger ): boolean;
+
+
+ /**
+ * Returns true if the bucket has been changed and not saved
+ *
+ * @return true if the bucket has been changed and not saved
+ */
+ isDirty(): boolean;
+}
diff --git a/src/bucket/bucket_filter.d.ts b/src/bucket/bucket_filter.d.ts
new file mode 100644
index 0000000..f731956
--- /dev/null
+++ b/src/bucket/bucket_filter.d.ts
@@ -0,0 +1,65 @@
+/**
+ * Filters bucket data
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+
+export declare module bucket_filter {
+
+ export type filter = {
+ /**
+ * Filters bucket data based on the provided types
+ *
+ * If a type is not provided, the data is considered to be unwanted and is
+ * removed entirely. Otherwise, the filter is applied to every element in the
+ * array.
+ *
+ * The data is modified in place.
+ *
+ * @param data - data to filter
+ * @param key_types - filter types
+ * @param ignore_types - types to ignore
+ * @param permit_null - Allow nulls in results
+ *
+ * @return Object modified data
+ */
+ filter(
+ data: Record<string, any>,
+ key_types: Record<string, any>,
+ ignore_types: Record<string, boolean>,
+ permit_null: boolean,
+ ): Record<string, any>
+
+
+ /**
+ * Filter bucket data based on values
+ *
+ * @param values - The values to filter
+ * @param filter - The filter to apply
+ * @param permit_null - Allow nulls in results
+ *
+ * @return the filtered values
+ */
+ filterValues(
+ values: string[],
+ filter: string,
+ permit_null: boolean,
+ ): string[];
+ };
+}
diff --git a/src/bucket/delta.ts b/src/bucket/delta.ts
new file mode 100644
index 0000000..58c75ec
--- /dev/null
+++ b/src/bucket/delta.ts
@@ -0,0 +1,168 @@
+/**
+ * A delta
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+/** The data structure expected for a document's internal key/value store */
+export type Kv<T = any> = Record<string, T[]>;
+
+/** Possible delta values for Kv array indexes */
+export type DeltaDatum<T> = T | null | undefined;
+
+
+/**
+ * The constructor type for a delta generating function
+ *
+ * @param src - the source data set
+ * @param dest - the destination data set
+ *
+ * @return the delta which transforms src to dest
+ */
+export type DeltaConstructor<T = any, U extends Kv<T> = Kv<T>, V extends Kv<T> = U> = (
+ src: U,
+ dest: V,
+) => DeltaResult<U & V>;
+
+
+/** Transform type T to hold possible delta values */
+export type DeltaResult<T> = { [K in keyof T]: DeltaDatum<T[K]> | null };
+
+
+ /**
+ * Create delta to transform from src into dest
+ *
+ * @param src - the source data set
+ * @param dest - the destination data set
+ *
+ * @return the delta
+ */
+export function createDelta<T, U extends Kv<T>, V extends Kv<T>>(
+ src: U,
+ dest: V,
+): DeltaResult<U & V>
+{
+ const delta: DeltaResult<any> = {};
+
+ // Loop through all keys
+ const key_set = new Set(
+ Object.keys( src ).concat( Object.keys( dest ) ) );
+
+ key_set.forEach( key =>
+ {
+ const src_data = src[ key ];
+ const dest_data = dest[ key ];
+
+ // If source does not contain the key, use entire dest data
+ if ( !src_data || !src_data.length )
+ {
+ delta[ key ] = dest_data;
+
+ return;
+ }
+
+ // If the key no longer exists in dest then nullify this key
+ if ( !dest_data || !dest_data.length )
+ {
+ delta[ key ] = null;
+
+ return;
+ }
+
+ // If neither condition above is true then create the key iteratively
+ const delta_key = _createDeltaKey( src_data, dest_data );
+
+ if ( delta_key.changed )
+ {
+ delta[ key ] = delta_key.data;
+ }
+ } );
+
+ return <DeltaResult<U & V>>delta;
+}
+
+
+/**
+ * Build the delta key iteratively
+ *
+ * @param src - the source data array
+ * @param dest - the destination data array
+ *
+ * @return an object with an identical flag and a data array
+ */
+function _createDeltaKey<T>(
+ src: T[],
+ dest: T[],
+): { changed: boolean, data: DeltaDatum<T>[] }
+{
+ const data = [];
+ const max_size = Math.max( dest.length, src.length );
+
+ let changed: boolean = false;
+
+ for ( let i = 0; i < max_size; i++ )
+ {
+ const dest_datum = dest[ i ];
+ const src_datum = src[ i ];
+
+ // terminate the key if we don't have a dest value
+ if ( dest_datum === undefined )
+ {
+ changed = true;
+ data[ i ] = null;
+
+ break;
+ }
+ else if ( _deepEqual( dest_datum, src_datum ) )
+ {
+ data[ i ] = undefined;
+ }
+ else
+ {
+ changed = true;
+ data[ i ] = dest_datum;
+ }
+ }
+
+ return {
+ changed: changed,
+ data: data,
+ };
+}
+
+
+/**
+ * Compare two arrays by index
+ *
+ * @param a - the first array to compare
+ * @param b - the second array to compare
+ */
+function _deepEqual( a: any, b: any ): boolean
+{
+ if ( Array.isArray( a ) )
+ {
+ if ( !Array.isArray( b ) || ( a.length !== b.length ) )
+ {
+ return false;
+ }
+
+ return a.every( ( item, i ) => _deepEqual( item, b[ i ] ) );
+ }
+
+ return a === b;
+}
diff --git a/src/dapi/DataApiManager.d.ts b/src/dapi/DataApiManager.d.ts
new file mode 100644
index 0000000..78d70a9
--- /dev/null
+++ b/src/dapi/DataApiManager.d.ts
@@ -0,0 +1,253 @@
+/**
+ * Manages DataAPI requests and return data
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import { PositiveInteger } from "../numeric";
+import { DataApiDefinitions } from "../program/Program";
+import { UserRequest } from "../server/request/UserRequest";
+import { ServerSideQuote } from "../server/quote/ServerSideQuote";
+
+
+export type DataApiConstructor = (
+ apis: DataApiDefinitions,
+ request: UserRequest,
+ quote: ServerSideQuote,
+) => DataApiManager;
+
+
+/**
+ * Pends and manages API calls and return data
+ *
+ * TODO: Extracted pretty much verbatim from Program; needs refactoring
+ */
+export declare class DataApiManager
+{
+
+ /**
+ * Set available APIs
+ *
+ * TODO: Remove me; pass via ctor
+ * TODO: Document API definition format
+ *
+ * @param apis - API definitions
+ */
+ setApis( apis: DataApiDefinitions ): this
+
+
+ /**
+ * Retrieve data from the API identified by the given id
+ *
+ * The optional request id permits cancelling requests if necessary.
+ *
+ * Once a field has finished loading, a `fieldLoaded` event will be
+ * emitted with `name` and `index`.
+ *
+ * TODO: refactor argument list; it's just been built upon too much and
+ * needs reordering
+ *
+ * @param api - API id
+ * @param data - API arguments (key-value)
+ * @param callback - callback to contain response
+ * @param name - element name for tracking
+ * @param index - index for tracking
+ * @param bucket - optional bucket to use as data source
+ * @param fc - failure continuation
+ */
+ getApiData(
+ api: string,
+ data: any,
+ callback: any,
+ name: string,
+ index: PositiveInteger,
+ bucket: any,
+ fc: any,
+ ): this
+
+
+
+ /**
+ * Get pending API calls
+ *
+ * TODO: Added to support a progressive refactoring; this breaks
+ * encapsulation and should be removed, or formalized.
+ *
+ * Returned object contains uid, name, and index fields.
+ *
+ * @return pending API calls
+ */
+ getPendingApiCalls(): any
+
+
+ /**
+ * Marks field for re-loading
+ *
+ * Stale fields will not be considered to have data, but the data
+ * will remain in memory until the next request.
+ *
+ * @param field - field name
+ * @param index - field index
+ * @param stale - whether field is stale
+ */
+ fieldStale( field: string, index: PositiveInteger, stale?: boolean ): this
+
+
+ /**
+ * If the field has data, clear the data here and in the bucket
+ *
+ * @param id - field id
+ * @param i - index to set
+ * @param bucket - bucket to set values in
+ */
+ fieldNotReady( id: string, i: PositiveInteger, bucket: any ): void
+
+
+ /**
+ * perform the API calls
+ */
+ processFieldApiCalls(): this
+
+
+ /**
+ * Set API return data for a given field
+ *
+ * @param name - field name
+ * @param index - field index
+ * @param data - return data set
+ * @param value - param to map to value
+ * @param label - param to map to label
+ * @param unchanged - whether the value has changed
+ */
+ setFieldData(
+ name: string,
+ index: PositiveInteger,
+ data: Record<string, any>,
+ value: string,
+ label: string,
+ unchanged: boolean,
+ ): this
+
+
+ /**
+ * Update the field data and emit the relevant events
+ *
+ * @param name - field name
+ * @param index - field index
+ * @param value - field value
+ * @param label - field label
+ * @param unchanged - whether the field has changed
+ *
+ * @return true if the field has changed
+ */
+ triggerFieldUpdate(
+ name: string,
+ index: PositiveInteger,
+ value: string,
+ label: string,
+ unchanged: boolean,
+ ): boolean
+
+
+ /**
+ * Returns whether the given field has any result data associated with it
+ *
+ * @param name - field name
+ * @param index - field index
+ *
+ * @return true if result data exists for field, otherwise false
+ */
+ hasFieldData( name: string, index: PositiveInteger ): boolean
+
+
+ /**
+ * Clear all API response data associated with a given field
+ *
+ * @param name - field name
+ * @param index - field index
+ * @param trigger_event - trigger clear event
+ */
+ clearFieldData(
+ name: string,
+ index: PositiveInteger,
+ trigger_event: boolean,
+ ): this
+
+
+ /**
+ * Clear API Pending status
+ * Preventing the result for the associated request from taking effect
+ * This eliminates side-effects of race conditions (e.g. clearing a field
+ * while a request is still pending), but does not actually cancel the API
+ * call itself.
+ *
+ * @param id - tracking identifier
+ */
+ clearPendingApiCall( id: string ): this
+
+
+ /**
+ * Expand the mapped field data for the given field into the bucket
+ *
+ * It is expected that the callers are intelligent enough to not call this
+ * method if it would result in nonsense. That is, an error will be raised
+ * in the event that field data cannot be found; this will help to point out
+ * logic errors that set crap values.
+ *
+ * The predictive parameter allows data for the field to be set when the
+ * caller knows that the data for the value may soon become available (e.g.
+ * setting the value to pre-populate the value of a pending API call).
+ *
+ * @param name - field name
+ * @param index - field index
+ * @param bucket - bucket to expand into
+ * @param map - param mapping to bucket fields
+ * @param predictive - allow value to be set even if its data does not exist
+ * @param diff - changeset
+ */
+ expandFieldData(
+ name: string,
+ index: PositiveInteger,
+ bucket: any,
+ map: any,
+ predictive: boolean,
+ diff: any,
+ ): this
+
+
+ /**
+ * expandFieldData without setting values in the bucket
+ *
+ * @param name - field name
+ * @param index - index
+ * @param bucket - bucket to get data from
+ * @param map - mapping of fields
+ * @param predictive - allow value to be set even if its data does not exist
+ * @param diff - changeset
+ *
+ * @return data
+ */
+ getDataExpansion(
+ name: string,
+ index: PositiveInteger,
+ bucket: any,
+ map: any,
+ predictive: boolean,
+ diff: any,
+ ): Record<string, any>
+}
diff --git a/src/program/Program.d.ts b/src/program/Program.d.ts
index de3f45a..48b9873 100644
--- a/src/program/Program.d.ts
+++ b/src/program/Program.d.ts
@@ -19,9 +19,30 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+import { StagingBucket } from "../bucket/StagingBucket";
+import { PositiveInteger } from "../numeric";
+
+export type DataApiDefinitions = any
+
export declare abstract class Program
{
readonly ineligibleLockCount: number;
+ apis: DataApiDefinitions;
+
+ internal: Record<string, boolean>;
+
+ meta: {
+ arefs: Record<string, string>,
+ fields: Record<string, any>,
+ groups: Record<string, any>,
+ qdata: Record<string, Record<string, string>>,
+ qtypes: Record<string, { type: string, dim: PositiveInteger }>,
+ };
+
+ mapis: Record<string, string[]>;
+
getId(): string;
+
+ initQuote( bucket: StagingBucket, store_only: boolean ): void
}
diff --git a/src/quote/BaseQuote.d.ts b/src/quote/BaseQuote.d.ts
index 9af47ee..f2271f4 100644
--- a/src/quote/BaseQuote.d.ts
+++ b/src/quote/BaseQuote.d.ts
@@ -23,6 +23,7 @@
import { Program } from "../program/Program";
import { Quote, QuoteId } from "./Quote";
+import { QuoteDataBucket } from "../bucket/QuoteDataBucket";
export declare class BaseQuote implements Quote
@@ -90,4 +91,12 @@ export declare class BaseQuote implements Quote
* @return last calculated time or 0
*/
getLastPremiumDate(): UnixTimestamp;
+
+
+ /**
+ * Returns the bucket used to store the quote form data
+ *
+ * @return the data bucket
+ */
+ getBucket(): QuoteDataBucket
}
diff --git a/src/server/DocumentServer.js b/src/server/DocumentServer.js
index c8abb4a..436fff5 100644
--- a/src/server/DocumentServer.js
+++ b/src/server/DocumentServer.js
@@ -44,7 +44,7 @@ const {
},
request: {
- DataProcessor,
+ DataProcessor: { DataProcessor },
JsonServerResponse,
ServerDataApiFactory,
},
@@ -89,7 +89,7 @@ module.exports = Class( 'DocumentServer',
logger,
enc_service,
- DataProcessor(
+ new DataProcessor(
bucket_filter,
( apis, request, quote ) => this._createDapiManager(
apis, request, origin_url, dapi_conf, quote, collection
diff --git a/src/server/Server.js b/src/server/Server.js
index 8a42ab0..3993640 100644
--- a/src/server/Server.js
+++ b/src/server/Server.js
@@ -49,7 +49,7 @@ const {
server: {
request: {
- DataProcessor,
+ DataProcessor: { DataProcessor },
},
encsvc: {
QuoteDataBucketCipher,
@@ -366,6 +366,7 @@ module.exports = Class( 'Server' )
.setCreditScoreRef( quote_data.creditScoreRef || 0 )
.setLastPremiumDate( quote_data.lastPremDate || 0 )
.setRatedDate( quote_data.initialRatedDate || 0 )
+ .setRatingData( quote_data.ratedata || {} )
.on( 'stepChange', function( step_id )
{
// save the quote state (we don't care if it succeeds or
@@ -1143,14 +1144,28 @@ module.exports = Class( 'Server' )
{
try
{
+ var rdelta_data;
var parsed_data = JSON.parse( post_data.data );
var bucket = quote.getBucket();
- const { filtered, dapis, meta_clear } =
+ const { filtered, dapis, meta_clear, rdiff } =
server._dataProcessor.processDiff(
parsed_data, request, program, bucket, quote
);
+ // Leave rdelta_data undefined if rdiff is an empty object
+ if ( Object.keys( rdiff ).length > 0 )
+ {
+ rdelta_data = {
+ "rdelta.data": {
+ data: rdiff,
+ timestamp: Math.round(
+ new Date().getTime() / 1000
+ ),
+ }
+ };
+ }
+
server._monitorMetadataPromise( quote, dapis, meta_clear );
}
catch ( err )
@@ -1172,7 +1187,7 @@ module.exports = Class( 'Server' )
}
// save the quote
- server._doQuoteSave( step_id, request, quote, program );
+ server._doQuoteSave( step_id, request, quote, program, rdelta_data);
});
return this;
@@ -1204,7 +1219,14 @@ module.exports = Class( 'Server' )
},
- 'private _doQuoteSave': function( step_id, request, quote, program, c )
+ 'private _doQuoteSave': function(
+ step_id,
+ request,
+ quote,
+ program,
+ rdelta_data,
+ c
+ )
{
var server = this;
@@ -1257,7 +1279,9 @@ module.exports = Class( 'Server' )
);
c && c( false );
- }
+ },
+ undefined,
+ rdelta_data
);
} );
},
diff --git a/src/server/daemon/controller.js b/src/server/daemon/controller.js
index 0f9cfa3..06b7a5e 100644
--- a/src/server/daemon/controller.js
+++ b/src/server/daemon/controller.js
@@ -43,6 +43,7 @@ var rating_service = null;
const {
bucket: {
QuoteDataBucket,
+ delta
},
dapi: {
@@ -141,7 +142,7 @@ exports.init = function( logger, enc_service, conf )
rating_service = easejs( RatingService ).use(
RatingServicePublish( amqplib, exports.post_rate_publish, logger )
)(
- logger, dao, server, exports.rater
+ logger, dao, server, exports.rater, delta.createDelta
);
// TODO: exports.init needs to support callbacks; this will work, but
@@ -652,9 +653,11 @@ function createQuote( quote_id, program, request, callback, error_callback )
var bucket = QuoteDataBucket(),
metabucket = QuoteDataBucket(),
+ ratebucket = QuoteDataBucket(),
quote = Quote( quote_id, bucket );
quote.setMetabucket( metabucket );
+ quote.setRateBucket( ratebucket );
var controller = this;
return server.initQuote( quote, program, request,
diff --git a/src/server/db/MongoServerDao.d.ts b/src/server/db/MongoServerDao.d.ts
index 18492cc..1481b9b 100644
--- a/src/server/db/MongoServerDao.d.ts
+++ b/src/server/db/MongoServerDao.d.ts
@@ -41,12 +41,14 @@ export declare class MongoServerDao implements ServerDao
* @param success - function to call on success
* @param failure - function to call if save fails
* @param save_data - quote data to save (optional)
+ * @param push_data - quote data to push (optional)
*/
saveQuote(
quote: ServerSideQuote,
success: Callback,
failure: Callback,
save_data: Record<string, any>,
+ push_data: Record<string, any>,
): this;
diff --git a/src/server/db/MongoServerDao.js b/src/server/db/MongoServerDao.js
index 2cc2001..c90422b 100644
--- a/src/server/db/MongoServerDao.js
+++ b/src/server/db/MongoServerDao.js
@@ -280,11 +280,12 @@ module.exports = Class( 'MongoServerDao' )
* @param Function success_callback function to call on success
* @param Function failure_callback function to call if save fails
* @param Object save_data quote data to save (optional)
+ * @param Object push_data quote data to push (optional)
*
* @return MongoServerDao self to allow for method chaining
*/
'public saveQuote': function(
- quote, success_callback, failure_callback, save_data
+ quote, success_callback, failure_callback, save_data, push_data
)
{
var dao = this;
@@ -341,10 +342,15 @@ module.exports = Class( 'MongoServerDao' )
key => save_data[ 'meta.' + key ] = meta[ key ]
);
+ // do not push empty objects
+ const document = ( !push_data || !Object.keys( push_data ).length )
+ ? { '$set': save_data }
+ : { '$set': save_data, '$push': push_data };
+
// update the quote data if it already exists (same id), otherwise
// insert it
this._collection.update( { id: id },
- { '$set': save_data },
+ document,
// create record if it does not yet exist
{ upsert: true },
diff --git a/src/server/db/ServerDao.d.ts b/src/server/db/ServerDao.d.ts
index 9c8ce32..6cc8025 100644
--- a/src/server/db/ServerDao.d.ts
+++ b/src/server/db/ServerDao.d.ts
@@ -45,12 +45,14 @@ export interface ServerDao
* @param success - function to call on success
* @param failure - function to call if save fails
* @param save_data - quote data to save (optional)
+ * @param push_data - quote data to push (optional)
*/
saveQuote(
quote: ServerSideQuote,
success: Callback,
failure: Callback,
save_data: Record<string, any>,
+ push_data: Record<string, any>,
): this;
diff --git a/src/server/meta/DapiMetaSource.d.ts b/src/server/meta/DapiMetaSource.d.ts
new file mode 100644
index 0000000..f80ddde
--- /dev/null
+++ b/src/server/meta/DapiMetaSource.d.ts
@@ -0,0 +1,51 @@
+/**
+ * Data-API-based metadata population
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import { DataApiManager } from "../../dapi/DataApiManager";
+import { PositiveInteger } from "../../numeric";
+
+/**
+ * Retrieve data for meta field using Data API
+ *
+ * TODO: The reason this class exists at all is to encapsulate the horrid
+ * API. Once refactored, perhaps this class will no longer be necessary.
+ */
+export declare class DapiMetaSource
+{
+ /**
+ * Retrieve field data
+ *
+ * @param field - field name
+ * @param index - field index
+ * @param dapi_manager - manager for dapi calls
+ * @param dapi - dapi descriptor
+ * @param data - dapi input data
+ *
+ * @return object containing `field`, `index`, and return data
+ */
+ getFieldData(
+ field: string,
+ index: PositiveInteger,
+ dapi_manager: DataApiManager,
+ dapi: any,
+ data: Record<string, any>,
+ ): Promise<any>;
+}
diff --git a/src/server/quote/ServerSideQuote.d.ts b/src/server/quote/ServerSideQuote.d.ts
index 98eff98..c8bdf2f 100644
--- a/src/server/quote/ServerSideQuote.d.ts
+++ b/src/server/quote/ServerSideQuote.d.ts
@@ -23,6 +23,7 @@
import { Program } from "../../program/Program";
import { BaseQuote } from "../../quote/BaseQuote";
+import { QuoteDataBucket } from "../../bucket/QuoteDataBucket";
export declare class ServerSideQuote extends BaseQuote
@@ -43,4 +44,28 @@ export declare class ServerSideQuote extends BaseQuote
* @return self
*/
setRatedDate( timestamp: UnixTimestamp ): this;
+
+
+ /**
+ * Set rating bucket
+ *
+ * @param bucket - the data bucket
+ */
+ setRateBucket( bucket: QuoteDataBucket ): this;
+
+
+ /**
+ * Set rating data
+ *
+ * @param data - rating data
+ */
+ setRatingData( data: Record<string, any> ): this;
+
+
+ /**
+ * Get rating data
+ *
+ * @return rating data
+ */
+ getRatingData(): Record<string, any>;
}
diff --git a/src/server/quote/ServerSideQuote.js b/src/server/quote/ServerSideQuote.js
index b1040c3..3d5f7c0 100644
--- a/src/server/quote/ServerSideQuote.js
+++ b/src/server/quote/ServerSideQuote.js
@@ -40,7 +40,6 @@ module.exports = Class( 'ServerSideQuote' )
*/
'private _creditScoreRef': 0,
-
/**
* Unix timestamp containing date of first premium calculation
* @type {number}
@@ -53,6 +52,12 @@ module.exports = Class( 'ServerSideQuote' )
*/
'private _metabucket': null,
+ /**
+ * Rating Data Bucket
+ * @type {Bucket}
+ */
+ 'private _rate_bucket': null,
+
'public setProgramVersion': function( version )
{
@@ -148,6 +153,64 @@ module.exports = Class( 'ServerSideQuote' )
this._metabucket.setValues( data );
return this;
- }
+ },
+
+
+ /**
+ * Set rating bucket
+ *
+ * @param {Bucket} bucket the rate bucket to set
+ */
+ 'public setRateBucket': function( bucket )
+ {
+ this._rate_bucket = bucket;
+
+ return this;
+ },
+
+
+ /**
+ * Get rating bucket
+ *
+ * @return {Bucket}
+ */
+ 'public getRateBucket': function()
+ {
+ return this._rate_bucket;
+ },
+
+
+ /**
+ * Set rating data
+ *
+ * @param {Object.<string,Array>} data rating data
+ */
+ 'public setRatingData': function( data )
+ {
+ if ( !this._rate_bucket )
+ {
+ throw Error( "No rating bucket available for #setRatingData" );
+ }
+
+ this._rate_bucket.setValues( data );
+
+ return this;
+ },
+
+
+ /**
+ * Get rating data
+ *
+ * @return {Object.<string,Array>} rating data
+ */
+ 'public getRatingData': function()
+ {
+ if ( !this._rate_bucket )
+ {
+ throw Error( "No rating bucket available for #setRatingData" );
+ }
+
+ return this._rate_bucket.getData();
+ },
} );
diff --git a/src/server/request/DataProcessor.js b/src/server/request/DataProcessor.ts
similarity index 60%
rename from src/server/request/DataProcessor.js
rename to src/server/request/DataProcessor.ts
index 3d9bbf6..4f3b6aa 100644
--- a/src/server/request/DataProcessor.js
+++ b/src/server/request/DataProcessor.ts
@@ -18,13 +18,12 @@
* You should have received a copy of the GNU General Public License
* along with this program. If not, see .
*/
-
-'use strict';
-
-const { Class } = require( 'easejs' );
-
-const { QuoteDataBucket, StagingBucket } = require( '../../' ).bucket;
-
+import { StagingBucket, StagingBucketConstructor } from "../../bucket/StagingBucket";
+import { DapiMetaSource } from "../meta/DapiMetaSource";
+import { PositiveInteger } from "../../numeric";
+import { bucket_filter } from "../../bucket/bucket_filter";
+import { UserRequest } from "./UserRequest";
+import { DataApiManager, DataApiConstructor } from "../../dapi/DataApiManager";
/**
* Process data provided by the client
@@ -32,45 +31,25 @@ const { QuoteDataBucket, StagingBucket } = require( '../../' ).bucket;
* TOOD: This contains Data API and bucket merging logic that is better done
* elsewhere.
*/
-module.exports = Class( 'DataProcessor',
+export class DataProcessor
{
- /**
- * Bucket filter
- * @type {Object}
- */
- 'private _filter': null,
-
- /**
- * Construct Data API manager
- * @type {function()}
- */
- 'private _dapif': null,
-
- /**
- * Metadata source
- * @type {DapiMetaSource}
- */
- 'private _metaSource': null,
-
-
/**
* Initialize processor
*
* The staging bucket constructor will be used to wrap the bucket for
* diff-related operations.
*
- * @param {Object} filter bucket filter
- * @param {function()} dapif data API constructor
- * @param {DapiMetaSource} meta_source metadata source
- * @param {function(Bucket)} staging_ctor staging bucket constructor
+ * @param filter - bucket filter
+ * @param dapif - data API constructor
+ * @param meta_source - metadata source
+ * @param staging_ctor - staging bucket constructor
*/
- constructor( filter, dapif, meta_source, staging_ctor )
- {
- this._filter = filter;
- this._dapif = dapif;
- this._metaSource = meta_source;
- this._stagingCtor = staging_ctor;
- },
+ constructor(
+ private readonly _filter: bucket_filter.filter,
+ private readonly _dapif: DataApiConstructor,
+ private readonly _meta_source: DapiMetaSource,
+ private readonly _stagingCtor: StagingBucketConstructor,
+ ) {}
/**
@@ -81,37 +60,52 @@ module.exports = Class( 'DataProcessor',
* bucket values, preventing users from using us as their own personal
* database.
*
- * @param {Object} data bucket diff data
- * @param {UserRequest} request submitting request
- * @param {Program} program active program
+ * @param data - bucket diff data
+ * @param request - submitting request
+ * @param program - active program
*
- * @return {Object} processed diff
+ * @return processed diff
*/
- 'public processDiff'( data, request, program, bucket, quote )
+ processDiff(
+ data: Record<string, any>,
+ request: UserRequest,
+ program: any,
+ bucket: any,
+ quote: any,
+ ): Record<string, any>
{
const filtered = this.sanitizeDiff( data, request, program );
const dapi_manager = this._dapif( program.apis, request, quote );
const staging = this._stagingCtor( bucket );
// forbidBypass will force diff generation on initQuote
- staging.setValues( filtered, true );
+ staging.setValues( filtered );
staging.forbidBypass();
program.initQuote( staging, true );
+ const diff = staging.getDiff();
+ const rdiff: Record<string, any> = {};
+
// array of promises for any dapi requests
const [ dapis, meta_clear ] = this._triggerDapis(
- dapi_manager, program, staging.getDiff(), staging
+ dapi_manager, program, diff, staging
);
+ for( let diff_key in diff )
+ {
+ rdiff[ diff_key ] = staging.getOriginalDataByName( diff_key );
+ }
+
staging.commit();
return {
filtered: filtered,
dapis: dapis,
meta_clear: meta_clear,
+ rdiff: rdiff,
};
- },
+ }
/**
@@ -124,22 +118,25 @@ module.exports = Class( 'DataProcessor',
* `permit_null` should be used only in the case of bucket diffs, which
* contain nulls as terminators.
*
- * @param {Object} data client-provided data
- * @param {UserRequest} request client request
- * @param {Program} program active program
+ * @param data - client-provided data
+ * @param request - client request
+ * @param program - active program
*
- * @return {Object} filtered data
+ * @return filtered data
*/
- 'public sanitizeDiff'( data, request, program )
+ sanitizeDiff(
+ data: Record<string, any>,
+ request: UserRequest,
+ program: any,
+ ): Record<string, any>
{
if ( !request.getSession().isInternal() )
{
this._cleanInternals( data, program );
}
- const types = program.meta.qtypes;
- return this._filter.filter( data, types, {}, true );
- },
+ return this._filter.filter( data, program.meta.qtypes, {}, true );
+ }
/**
@@ -147,31 +144,37 @@ module.exports = Class( 'DataProcessor',
*
* Internal fields are defined by the program `program`.
*
- * @param {Object} data bucket diff data
- * @param {Program} program active program
- *
- * @return {undefined}
+ * @param data - bucket diff data
+ * @param program - active program
*/
- 'private _cleanInternals'( data, program )
+ private _cleanInternals(
+ data: Record<string, any>,
+ program: any,
+ ): void
{
for ( let id in program.internal )
{
delete data[ id ];
}
- },
+ }
/**
* Trigger metadata Data API requests
*
- * @param {DataApiManager} dapi_manager dapi manager
- * @param {Program} program active program
- * @param {Object} data client-provided data
- * @param {Bucket} bucket active bucket
+ * @param dapi_manager - dapi manager
+ * @param program - active program
+ * @param data - client-provided data
+ * @param bucket - active bucket
*
- * @return {undefined}
+ * @return an array containing the dapis and cleared meta values
*/
- 'private _triggerDapis'( dapi_manager, program, data, bucket )
+ private _triggerDapis(
+ dapi_manager: DataApiManager,
+ program: any,
+ data: Record<string, any>,
+ bucket: StagingBucket,
+ ): [ any, Record<string, any> ]
{
const {
mapis = {},
@@ -188,8 +191,8 @@ module.exports = Class( 'DataProcessor',
const { dapi } = fields[ field ];
const indexes = dapi_fields[ field ];
- return indexes.map( i =>
- this._metaSource.getFieldData(
+ return indexes.map( ( i: PositiveInteger ) =>
+ this._meta_source.getFieldData(
field,
i,
dapi_manager,
@@ -200,7 +203,7 @@ module.exports = Class( 'DataProcessor',
} ).reduce( ( result, x ) => result.concat( x ), [] );
return [ dapis, clear ];
- },
+ }
/**
@@ -211,15 +214,18 @@ module.exports = Class( 'DataProcessor',
* lookup, it wouldn't be desirable to use an old rate even though data
* used to retrieve it has since changed.
*
- * @param {Object.} fields field names and array of indexes
+ * @param fields - field names and array of indexes
*
- * @return {undefined}
+ * @return cleared values
*/
- 'private _genClearMetaValues'( fields )
+ private _genClearMetaValues(
+ fields: Record<string, any>
+ ): Record<string, any>
{
- return Object.keys( fields ).reduce( ( result, field ) =>
+ return Object.keys( fields ).reduce(
+ ( result: Record<string, any>, field: string ) =>
{
- result[ field ] = fields[ field ].reduce( ( values, i ) =>
+ result[ field ] = fields[ field ].reduce( ( values: any, i: any ) =>
{
values[ i ] = "";
return values;
@@ -227,21 +233,24 @@ module.exports = Class( 'DataProcessor',
return result;
}, {} );
- },
+ }
/**
* Determine which fields require a Data API to be triggered
*
- * @param {Object} mapis metadata dapi descriptors
- * @param {Object} data client-provided data
+ * @param mapis - metadata dapi descriptors
+ * @param data - client-provided data
*
- * @return {Object} fields with indexes in need of dapi calls
+ * @return fields with indexes in need of dapi calls
*/
- 'private _determineDapiFields'( mapis, data )
+ private _determineDapiFields(
+ mapis: Record<string, string[]>,
+ data: Record<string, any>
+ ): Record<string, any>
{
return Object.keys( mapis ).reduce(
- ( result, src_field ) =>
+ ( result: any, src_field: string ) =>
{
const fdata = data[ src_field ];
@@ -253,7 +262,7 @@ module.exports = Class( 'DataProcessor',
const fields = mapis[ src_field ];
// get each index that changed
- fields.forEach( field =>
+ fields.forEach( (field: string) =>
{
result[ field ] = result[ field ] || [];
@@ -272,25 +281,30 @@ module.exports = Class( 'DataProcessor',
},
{}
);
- },
+ }
/**
* Map data from bucket to dapi inputs
*
- * @param {Object} dapi Data API descriptor
- * @param {Bucket} bucket active (source) bucket
- * @param {number} index field index
- * @param {Object} diff_data client-provided data
+ * @param dapi - Data API descriptor
+ * @param bucket - active (source) bucket
+ * @param index - field index
+ * @param diff_data - client-provided data
*
- * @return {Object} key/value dapi input data
+ * @return key/value dapi input data
*/
- 'private _mapDapiData'( dapi, bucket, index, diff_data )
+ private _mapDapiData(
+ dapi: any,
+ bucket: StagingBucket,
+ index: PositiveInteger,
+ diff_data: Record<string, any>,
+ ): Record<string, any>
{
const { mapsrc } = dapi;
return Object.keys( mapsrc ).reduce(
- ( result, srcid ) =>
+ ( result: any, srcid: any ) =>
{
const bucketid = mapsrc[ srcid ];
@@ -314,5 +328,5 @@ module.exports = Class( 'DataProcessor',
},
{}
);
- },
-} );
+ }
+};
diff --git a/src/server/service/RatingService.ts b/src/server/service/RatingService.ts
index f95f4ea..ea4667e 100644
--- a/src/server/service/RatingService.ts
+++ b/src/server/service/RatingService.ts
@@ -31,6 +31,7 @@ import { ServerDao } from "../db/ServerDao";
import { ServerSideQuote } from "../quote/ServerSideQuote";
import { UserRequest } from "../request/UserRequest";
import { UserResponse } from "../request/UserResponse";
+import { DeltaConstructor } from "../../bucket/delta";
type RequestCallback = () => void;
@@ -59,12 +60,14 @@ export class RatingService
* @param _dao - database connection
* @param _server - server actions
* @param _rater_manager - rating manager
+ * @param _createDelta - delta constructor
*/
constructor(
private readonly _logger: PriorityLog,
private readonly _dao: ServerDao,
private readonly _server: Server,
private readonly _rater_manager: ProcessManager,
+ private readonly _createDelta: DeltaConstructor,
) {}
@@ -271,12 +274,19 @@ export class RatingService
quote.setLastPremiumDate( cur_date );
quote.setRatedDate( cur_date );
+ const quote_data = quote.getRatingData();
+ const save_data = { ratedata: data };
+ const rdelta_data = {
+ "rdelta.ratedata": {
+ data: this._createDelta( data, quote_data ),
+ timestamp: cur_date
+ },
+ };
+
// save the last prem status (we pass an empty object as the save
// data argument to ensure that we do not save the actual bucket
// data, which may cause a race condition with the below merge call)
- this._dao.saveQuote( quote, c, c, {
- ratedata: data,
- } );
+ this._dao.saveQuote( quote, c, c, save_data, rdelta_data );
}
else
{
diff --git a/test/bucket/delta.ts b/test/bucket/delta.ts
new file mode 100644
index 0000000..ba1d192
--- /dev/null
+++ b/test/bucket/delta.ts
@@ -0,0 +1,99 @@
+/**
+ * Test the delta generated from two key/value stores
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of liza.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ */
+import { createDelta as sut, Kv, DeltaResult } from "../../src/bucket/delta";
+
+import { expect, use as chai_use } from 'chai';
+chai_use( require( 'chai-as-promised' ) );
+
+interface SutTestCase<T extends Kv>
+{
+ label: string;
+ src_data: T;
+ dest_data: T;
+ expected: DeltaResult<T>;
+}
+
+describe( 'Delta', () =>
+{
+ ( <SutTestCase<Kv>[]>[
+ {
+ label: "No changes are made, key is dropped",
+ src_data: { foo: [ 'bar', 'baz' ] },
+ dest_data: { foo: [ 'bar', 'baz' ] },
+ expected: {},
+ },
+ {
+ label: "Only the unchanged key is dropped",
+ src_data: { foo: [ 'bar', 'baz' ], bar: [ 'qwe' ] },
+ dest_data: { foo: [ 'bar', 'baz' ], bar: [ 'asd' ] },
+ expected: { bar: [ 'asd' ] },
+ },
+ {
+ label: "Changed values are updated by index with old value",
+ src_data: { foo: [ "bar", "baz", "quux" ] },
+ dest_data: { foo: [ "bar", "quuux" ], moo: [ "cow" ] },
+ expected: { foo: [ undefined, "quuux", null ], moo: [ "cow" ] },
+ },
+ {
+ label: "The keys are null when they don't exist in first set",
+ src_data: {},
+ dest_data: { foo: [ "bar", "quuux" ], moo: [ "cow" ] },
+ expected: { foo: [ "bar", "quuux" ], moo: [ "cow" ] },
+ },
+ {
+ label: "Removed keys in new set show up",
+ src_data: { foo: [ "bar" ] },
+ dest_data: {},
+ expected: { foo: null },
+ },
+ {
+ label: "Indexes after a null terminator aren't included",
+ src_data: { foo: [ "one", "two", "three", "four" ] },
+ dest_data: { foo: [ "one", "done" ] },
+ expected: { foo: [ undefined, "done", null ] },
+ },
+ {
+ label: "Consider nested arrays to be scalar values",
+ src_data: { foo: [ [ "one" ], [ "two", "three" ] ] },
+ dest_data: { foo: [ [ "one" ], [ "two" ] ] },
+ expected: { foo: [ undefined, [ "two" ] ] },
+ },
+ {
+ label: "Don't evaluate zeros as falsy",
+ src_data: { foo: [ 0 ] },
+ dest_data: { foo: [ 0 ] },
+ expected: {},
+ },
+ {
+ label: "Don't evaluate empty strings as falsy",
+ src_data: { foo: [ '' ] },
+ dest_data: { foo: [ '' ] },
+ expected: {},
+ },
+ ] ).forEach( ( { label, src_data, dest_data, expected } ) =>
+ {
+ it( label, () =>
+ {
+ expect( sut( src_data, dest_data ) ).to.deep.equal( expected );
+ } );
+ } );
+} );
diff --git a/test/server/db/MongoServerDaoTest.js b/test/server/db/MongoServerDaoTest.js
index 0f42f2b..0fa5d30 100644
--- a/test/server/db/MongoServerDaoTest.js
+++ b/test/server/db/MongoServerDaoTest.js
@@ -30,7 +30,7 @@ describe( 'MongoServerDao', () =>
{
describe( '#saveQuote', () =>
{
- describe( "with no save data", () =>
+ describe( "with no save data or push data", () =>
{
it( "saves entire metabucket record individually", done =>
{
@@ -51,6 +51,9 @@ describe( 'MongoServerDao', () =>
expect( data.$set[ 'meta.bar' ] )
.to.deep.equal( metadata.bar );
+
+ expect( data.$push ).to.equal( undefined );
+
done();
}
) );
@@ -60,6 +63,70 @@ describe( 'MongoServerDao', () =>
);
} );
} );
+
+ describe( "with push data", () =>
+ {
+ it( "adds push data to the collection", done =>
+ {
+ const push_data = {
+ foo: [ 'bar', 'baz' ],
+ bar: [ { quux: 'quuux' } ],
+ };
+
+ const quote = createStubQuote( {} );
+
+ const sut = Sut( createMockDb(
+ // update
+ ( selector, data ) =>
+ {
+ expect( data.$push[ 'foo' ] )
+ .to.deep.equal( push_data.foo );
+
+ expect( data.$push[ 'bar' ] )
+ .to.deep.equal( push_data.bar );
+
+ done();
+ }
+ ) );
+
+ sut.init( () =>
+ sut.saveQuote(
+ quote,
+ () => {},
+ () => {},
+ undefined,
+ push_data
+ )
+ );
+ } );
+
+ it( "skips push data when it is an empty object", done =>
+ {
+ const push_data = {};
+
+ const quote = createStubQuote( {} );
+
+ const sut = Sut( createMockDb(
+ // update
+ ( selector, data ) =>
+ {
+ expect( data.$push ).to.equal( undefined );
+
+ done();
+ }
+ ) );
+
+ sut.init( () =>
+ sut.saveQuote(
+ quote,
+ () => {},
+ () => {},
+ undefined,
+ push_data
+ )
+ );
+ } );
+ } );
} );
} );
diff --git a/test/server/quote/ServerSideQuoteTest.js b/test/server/quote/ServerSideQuoteTest.js
index aea8cc8..8e92a67 100644
--- a/test/server/quote/ServerSideQuoteTest.js
+++ b/test/server/quote/ServerSideQuoteTest.js
@@ -21,8 +21,14 @@
'use strict';
-const { expect } = require( 'chai' );
-const Sut = require( '../../..' ).server.quote.ServerSideQuote;
+const root = require( '../../..' );
+const Sut = require( '../../..' ).server.quote.ServerSideQuote;
+const expect = require( 'chai' ).expect;
+const sinon = require( 'sinon' );
+
+const {
+ QuoteDataBucket,
+} = root.bucket;
describe( 'ServerSideQuote', () =>
{
@@ -31,80 +37,85 @@ describe( 'ServerSideQuote', () =>
[
{
property: 'startDate',
- default: 0,
- value: 946684800
+ default: 0,
+ value: 946684800
},
{
property: 'initialRatedDate',
- default: 0,
- value: 946684800
+ default: 0,
+ value: 946684800
},
{
property: 'agentId',
- default: 0,
- value: 12345678
+ default: 0,
+ value: 12345678
},
{
property: 'agentEntityId',
- default: 0,
- value: 12345678
+ default: 0,
+ value: 12345678
},
{
property: 'agentName',
- default: '',
- value: 'name'
+ default: '',
+ value: 'name'
},
{
property: 'imported',
- default: false,
- value: true,
+ default: false,
+ value: true,
accessor: 'is'
},
{
property: 'bound',
- default: false,
- value: true,
+ default: false,
+ value: true,
accessor: 'is'
},
{
property: 'currentStepId',
- default: 1,
- value: 2
+ default: 1,
+ value: 2
},
{
property: 'topVisitedStepId',
- default: 1,
- value: 2
+ default: 1,
+ value: 2
},
{
property: 'topSavedStepId',
- value: 1
+ value: 1
},
{
property: 'error',
- default: '',
- value: 'ERROR'
+ default: '',
+ value: 'ERROR'
},
{
property: 'programVersion',
- default: '',
- value: '1.0.0'
+ default: '',
+ value: '1.0.0'
},
{
property: 'creditScoreRef',
- default: 0,
- value: 800
+ default: 0,
+ value: 800
},
{
property: 'lastPremiumDate',
- default: 0,
- value: 946684800
+ default: 0,
+ value: 946684800
},
{
property: 'ratedDate',
- default: 0,
- value: 946684800
- }
+ default: 0,
+ value: 946684800
+ },
+ {
+ property: 'rateBucket',
+ default: null,
+ value: QuoteDataBucket()
+ },
].forEach( testCase =>
{
@@ -122,4 +133,44 @@ describe( 'ServerSideQuote', () =>
} );
} );
} );
-} );
\ No newline at end of file
+
+ describe( 'rating data', () =>
+ {
+ it( `#setRatingData throws an error if no bucket is set`, () =>
+ {
+ const data = { foo: 'bar' };
+ const sut = Sut();
+
+ expect( function() { sut.setRatingData( data ); } )
+ .to.throw( Error );
+ } );
+
+ it( `Bucket values setters/getters work correctly`, () =>
+ {
+ const data = { foo: 'bar' };
+ let bucket_data = null;
+ const sut = Sut();
+ var called = false;
+
+ const bucket = {
+ setValues( gdata )
+ {
+ expect( gdata ).to.deep.equal( data );
+
+ bucket_data = gdata;
+ called = true;
+ },
+
+ getData()
+ {
+ return bucket_data;
+ },
+ };
+
+ sut.setRateBucket( bucket );
+ sut.setRatingData( data );
+
+ expect( called ).to.equal( true );
+ } );
+ } );
+} );
diff --git a/test/server/request/DataProcessorTest.js b/test/server/request/DataProcessorTest.js
deleted file mode 100644
index 70db17e..0000000
--- a/test/server/request/DataProcessorTest.js
+++ /dev/null
@@ -1,417 +0,0 @@
-/**
- * Manages DataAPI requests and return data
- *
- * Copyright (C) 2010-2019 R-T Specialty, LLC.
- *
- * This file is part of the Liza Data Collection Framework.
- *
- * liza is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as
- * published by the Free Software Foundation, either version 3 of the
- * License, or (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see .
- */
-
-'use strict';
-
-const { Class } = require( 'easejs' );
-const { expect } = require( 'chai' );
-const Sut = require( '../../../' ).server.request.DataProcessor;
-
-
-describe( 'DataProcessor', () =>
-{
- [
- {
- label: "strips internal field data when not internal",
- data: {
- internal: [ "foo", "bar" ],
- foo: [ "bar", "baz" ],
- },
- internals: { internal: true },
- internal: false,
- expected: {
- foo: [ "bar", "baz" ],
- },
- },
- {
- label: "keeps internal field data when internal",
- data: {
- internal: [ "foo", "bar" ],
- foo: [ "bar", "baz" ],
- },
- internals: { internal: true },
- internal: true,
- expected: {
- internal: [ "foo", "bar" ],
- foo: [ "bar", "baz" ],
- },
- },
- ].forEach( ( { label, internal, data, internals = {}, expected } ) =>
- {
- const { request, program, sut } =
- createSutFromStubs( internal, internals );
-
- it( label, () =>
- {
- expect(
- sut.processDiff( data, request, program ).filtered
- ).to.deep.equal( expected );
- } );
- } );
-
-
- it( "passes data to bucket filter", () =>
- {
- const { request, program, meta_source } = createStubs();
- const data = {};
- const types = {};
-
- program.meta.qtypes = types;
-
- const filter = {
- filter( given_data, given_types, given_ignore, given_null )
- {
- expect( given_data ).to.equal( data );
- expect( given_types ).to.equal( types );
- expect( given_null ).to.equal( true );
-
- // not used
- expect( given_ignore ).to.deep.equal( {} );
-
- data.filtered = true;
- }
- };
-
- Sut( filter, () => {}, meta_source, createStubStagingBucket )
- .processDiff( data, request, program );
-
- expect( data.filtered ).to.equal( true );
- } );
-
-
- it( "instantiates dapi manager using program and session", done =>
- {
- const { filter, request, program } = createStubs();
-
- const dapi_factory = ( given_apis, given_request ) =>
- {
- expect( given_apis ).to.equal( program.apis );
- expect( given_request ).to.equal( request );
-
- done();
- };
-
- Sut( filter, dapi_factory, null, createStubStagingBucket )
- .processDiff( {}, request, program );
- } );
-
-
- it( "invokes dapi manager when monitored bucket value changes", () =>
- {
- const triggered = {};
-
- // g prefix = "given"
- const getFieldData = function( gfield, gindex, gdapim, gdapi, gdata)
- {
- triggered[ gdapi.name ] = triggered[ gdapi.name ] || [];
- triggered[ gdapi.name ][ gindex ] = arguments;
-
- return Promise.resolve( true );
- };
-
- const dapi_manager = {};
-
- const {
- request,
- program,
- filter,
- meta_source,
- } = createStubs( false, {}, getFieldData );
-
- const sut = Sut(
- filter,
- () => dapi_manager,
- meta_source,
- createStubStagingBucket
- );
-
- program.meta.fields = {
- foo: {
- dapi: {
- name: 'dapi_foo',
- mapsrc: { ina: 'src', inb: 'src1' },
- },
- },
- bar: {
- dapi: {
- name: 'dapi_bar',
- mapsrc: { ina: 'src1' },
- },
- },
- baz: {
- dapi: {
- name: 'dapi_no_call',
- mapsrc: {},
- },
- },
- };
-
- program.mapis = {
- src: [ 'foo', 'bar' ], // change
- src1: [ 'foo' ], // change
- src2: [ 'baz' ], // do not change
- };
-
- // data changed
- const data = {
- src: [ 'src0', 'src1' ],
- src1: [ undefined, 'src11' ],
- };
-
- const bucket = createStubBucket( {
- src: [ 'bsrc0', 'bsrc1' ],
- src1: [ 'bsrc10', 'bsrc11' ],
- } );
-
- const { dapis, meta_clear } = sut.processDiff(
- data, request, program, bucket
- );
-
- const expected = {
- dapi_foo: [
- {
- name: 'foo',
- data: {
- ina: data.src[ 0 ],
- inb: bucket.data.src1[ 0 ],
- },
- },
- {
- name: 'foo',
- data: {
- ina: data.src[ 1 ],
- inb: data.src1[ 1 ],
- },
- },
- ],
- dapi_bar: [
- undefined,
- {
- name: 'bar',
- data: {
- ina: data.src1[ 1 ],
- },
- },
- ],
- };
-
- const expected_clear = {
- foo: [ "", "" ],
- bar: [ "", "" ],
- };
-
- for ( let dapi_name in expected )
- {
- let expected_call = expected[ dapi_name ];
-
- for ( let i in expected_call )
- {
- let chk = expected_call[ i ];
-
- if ( chk === undefined )
- {
- continue;
- }
-
- let [ gfield, gindex, gdapi_manager, gdapi, gdata ] =
- triggered[ dapi_name ][ i ];
-
- expect( gfield ).to.equal( chk.name );
- expect( gdapi.name ).to.equal( dapi_name );
- expect( +gindex ).to.equal( +i );
- expect( gdapi_manager ).to.equal( dapi_manager );
-
- // see mapsrc
- expect( gdata ).to.deep.equal( chk.data );
- }
- }
-
- expect( triggered.dapi_no_call ).to.equal( undefined );
-
- expect( meta_clear ).to.deep.equal( expected_clear );
-
- return Promise.all( dapis );
- } );
-
-
- it( "check _mapDapiData default values", () =>
- {
- const triggered = {};
-
- // g prefix = "given"
- const getFieldData = function( gfield, gindex, gdapim, gdapi, gdata)
- {
- triggered[ gdapi.name ] = triggered[ gdapi.name ] || [];
- triggered[ gdapi.name ][ gindex ] = arguments;
-
- expect( gdata ).to.deep.equal( { ina: '', inb: [] } );
-
- return Promise.resolve( true );
- };
-
- const dapi_manager = {};
-
- const {
- request,
- program,
- filter,
- meta_source,
- } = createStubs( false, {}, getFieldData );
-
- const sut = Sut(
- filter,
- () => dapi_manager,
- meta_source,
- createStubStagingBucket
- );
-
- program.meta.fields = {
- foo: {
- dapi: {
- name: 'dapi_foo',
- mapsrc: { ina: 'src', inb: 'src1' },
- },
- },
- };
-
- program.mapis = {
- src1: [ 'foo' ], // change
- };
-
- // data changed
- const data = {
- src: [ 'src0', '' ],
- src1: [ undefined, '' ],
- };
-
- const bucket = createStubBucket( {
- src: [ 'bsrc0', '' ],
- src1: [ 'bsrc10', undefined],
- } );
-
- const { dapis } = sut.processDiff(
- data, request, program, bucket
- );
-
- return Promise.all( dapis );
- } );
-} );
-
-
-function createSutFromStubs( /* see createStubs */ )
-{
- const { request, program, filter, meta_source } =
- createStubs.apply( null, arguments );
-
- return {
- request: request,
- program: program,
- filter: filter,
- meta_source: meta_source,
-
- sut: Sut(
- filter,
- () => {},
- meta_source,
- createStubStagingBucket
- ),
- };
-}
-
-
-function createStubs( internal, internals, getFieldData )
-{
- return {
- request: createStubUserRequest( internal || false ),
- program: createStubProgram( internals || {} ),
- filter: { filter: _ => _ },
- meta_source: createStubDapiMetaSource( getFieldData ),
- };
-}
-
-
-function createStubUserRequest( internal )
-{
- return {
- getSession: () => ( {
- isInternal: () => internal
- } )
- };
-}
-
-
-function createStubProgram( internals )
-{
- return {
- internal: internals,
- meta: { qtypes: {}, fields: {} },
- apis: {},
-
- initQuote() {},
- };
-}
-
-
-function createStubDapiMetaSource( getFieldData )
-{
- return {
- getFieldData: getFieldData ||
- function( field, index, dapi_manager, dapi, data ){},
- };
-}
-
-
-function createStubBucket( data )
-{
- return {
- data: data,
-
- getDataByName( name )
- {
- return data[ name ];
- },
- };
-}
-
-
-function createStubStagingBucket( bucket )
-{
- let data = {};
-
- return {
- getDataByName( name )
- {
- return bucket.getDataByName( name );
- },
-
- setValues( values )
- {
- data = values;
- },
-
- forbidBypass() {},
- getDiff()
- {
- return data;
- },
- commit() {},
- };
-}
diff --git a/test/server/request/DataProcessorTest.ts b/test/server/request/DataProcessorTest.ts
new file mode 100644
index 0000000..d0e52d8
--- /dev/null
+++ b/test/server/request/DataProcessorTest.ts
@@ -0,0 +1,805 @@
+/**
+ * Tests the DataProcessor
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+import { DataProcessor as Sut } from "../../../src/server/request/DataProcessor";
+
+import { expect, use as chai_use } from 'chai';
+import { DocumentId } from "../../../src/document/Document";
+import { PositiveInteger } from "../../../src/numeric";
+import { UserRequest } from "../../../src/server/request/UserRequest";
+import { ServerSideQuote } from "../../../src/server/quote/ServerSideQuote";
+import { QuoteDataBucket } from "../../../src/bucket/QuoteDataBucket";
+
+chai_use( require( 'chai-as-promised' ) );
+
+
+describe( 'DataProcessor', () =>
+{
+ [
+ {
+ label: "strips internal field data when not internal",
+ data: {
+ internal: [ "foo", "bar" ],
+ foo: [ "bar", "baz" ],
+ },
+ internals: { internal: true },
+ internal: false,
+ expected: {
+ foo: [ "bar", "baz" ],
+ },
+ },
+ {
+ label: "keeps internal field data when internal",
+ data: {
+ internal: [ "foo", "bar" ],
+ foo: [ "bar", "baz" ],
+ },
+ internals: { internal: true },
+ internal: true,
+ expected: {
+ internal: [ "foo", "bar" ],
+ foo: [ "bar", "baz" ],
+ },
+ },
+ ].forEach( ( { label, internal, data, internals, expected } ) =>
+ {
+ const { request, program, sut, quote } =
+ createSutFromStubs( internal, internals );
+
+ const bucket = createStubBucket( data );
+
+ it( label, () =>
+ {
+ expect(
+ sut.processDiff( data, request, program, bucket, quote ).filtered
+ ).to.deep.equal( expected );
+ } );
+ } );
+
+
+ [
+ {
+ label: "Original data is saved to the delta, not new data",
+ old_data: {
+ foo: [ "bar_old", "baz" ],
+ },
+ new_data: {
+ foo: [ "bar_new", "baz" ],
+ },
+ expected_data: {
+ foo: [ "bar_old", "baz" ],
+ },
+ },
+ ].forEach( ( { label, old_data, new_data, expected_data } ) =>
+ {
+ const {
+ request,
+ program,
+ quote,
+ filter,
+ dapi_constructor,
+ meta_source
+ } = createStubs();
+
+ const sut = new Sut(
+ filter,
+ dapi_constructor,
+ meta_source,
+ createStubStagingBucket
+ );
+
+ const bucket = createStubBucket( old_data );
+
+ it( label, () =>
+ {
+ const actual = sut.processDiff(
+ new_data,
+ request,
+ program,
+ bucket,
+ quote,
+ );
+
+ expect( actual.rdiff ).to.deep.equal( expected_data );
+ } );
+ } );
+
+
+ it( "#processDiff.rdelta_data is undefined with empty staging diff", () =>
+ {
+ const {
+ request,
+ program,
+ quote,
+ filter,
+ dapi_constructor,
+ meta_source
+ } = createStubs();
+
+ const sut = new Sut(
+ filter,
+ dapi_constructor,
+ meta_source,
+ createStubStagingBucket
+ );
+
+ const data = {
+ foo: [ "bar", "baz" ],
+ };
+
+ const diff = {};
+
+ const bucket = createStubBucket( data );
+ const actual = sut.processDiff( diff, request, program, bucket, quote );
+
+ expect( actual.rdelta_data ).to.deep.equal( undefined );
+
+ } );
+
+
+ it( "passes data to bucket filter", () =>
+ {
+ const {
+ request,
+ program,
+ meta_source,
+ dapi_constructor,
+ quote,
+ } = createStubs();
+
+ const data: { filtered?: boolean } = {};
+ const types = {};
+
+ program.meta.qtypes = types;
+
+ const filter = {
+ filter(
+ given_data: Record<string, any>,
+ given_types: Record<string, any>,
+ given_ignore: any,
+ given_null: boolean,
+ ) {
+ expect( given_data ).to.equal( data );
+ expect( given_types ).to.equal( types );
+ expect( given_null ).to.equal( true );
+
+ // not used
+ expect( given_ignore ).to.deep.equal( {} );
+
+ data.filtered = true;
+
+ return data;
+ },
+
+ filterValues(
+ values: string[],
+ _filter: string,
+ _permit_null: boolean,
+ ) {
+ return values;
+ }
+ };
+
+ const bucket = createStubBucket( data );
+
+ new Sut(
+ filter,
+ dapi_constructor,
+ meta_source,
+ createStubStagingBucket,
+ ).processDiff( data, request, program, bucket, quote );
+
+ expect( data.filtered ).to.equal( true );
+ } );
+
+
+ it( "instantiates dapi manager using program and session", done =>
+ {
+ const { filter, request, program, meta_source, quote } = createStubs();
+
+ let dapi_constructor = (
+ given_apis: any,
+ given_request: UserRequest,
+ _quote: ServerSideQuote
+ ) => {
+ expect( given_apis ).to.equal( program.apis );
+ expect( given_request ).to.equal( request );
+
+ done();
+
+ return createStubDataApiManager();
+ };
+
+ const bucket = createStubBucket( {} );
+
+ new Sut(
+ filter,
+ dapi_constructor,
+ meta_source,
+ createStubStagingBucket
+ ).processDiff( {}, request, program, bucket, quote );
+
+ } );
+
+
+ it( "invokes dapi manager when monitored bucket value changes", () =>
+ {
+ const triggered: { [key: string]: any[] } = {};
+
+ // g prefix = "given"
+ const meta_source = {
+ getFieldData(
+ _gfield: any,
+ gindex: PositiveInteger,
+ _gdapim: any,
+ gdapi: { name: string },
+ _gdata: any,
+ )
+ {
+ triggered[ gdapi.name ] = triggered[ gdapi.name ] || [];
+ triggered[ gdapi.name ][ gindex ] = arguments;
+
+ return Promise.resolve( true );
+ }
+ }
+
+ const dapi_manager = createStubDataApiManager();
+
+ const {
+ request,
+ program,
+ filter,
+ quote,
+ } = createStubs( false );
+
+ const sut = new Sut(
+ filter,
+ () => dapi_manager,
+ meta_source,
+ createStubStagingBucket,
+ );
+
+ program.meta.fields = {
+ foo: {
+ dapi: {
+ name: 'dapi_foo',
+ mapsrc: { ina: 'src', inb: 'src1' },
+ },
+ },
+ bar: {
+ dapi: {
+ name: 'dapi_bar',
+ mapsrc: { ina: 'src1' },
+ },
+ },
+ baz: {
+ dapi: {
+ name: 'dapi_no_call',
+ mapsrc: {},
+ },
+ },
+ };
+
+ program.mapis = {
+ src: [ 'foo', 'bar' ], // change
+ src1: [ 'foo' ], // change
+ src2: [ 'baz' ], // do not change
+ };
+
+ // data changed
+ const data = {
+ src: [ 'src0', 'src1' ],
+ src1: [ undefined, 'src11' ],
+ };
+
+ const bucket = createStubBucket( {
+ src: [ 'bsrc0', 'bsrc1' ],
+ src1: [ 'bsrc10', 'bsrc11' ],
+ } );
+
+ const { dapis, meta_clear } = sut.processDiff(
+ data, request, program, bucket, quote
+ );
+
+ const expected: { [key: string]: any[] } = {
+ dapi_foo: [
+ {
+ name: 'foo',
+ data: {
+ ina: data.src[ 0 ],
+ inb: bucket.data.src1[ 0 ],
+ },
+ },
+ {
+ name: 'foo',
+ data: {
+ ina: data.src[ 1 ],
+ inb: data.src1[ 1 ],
+ },
+ },
+ ],
+ dapi_bar: [
+ undefined,
+ {
+ name: 'bar',
+ data: {
+ ina: data.src1[ 1 ],
+ },
+ },
+ ],
+ };
+
+ const expected_clear = {
+ foo: [ "", "" ],
+ bar: [ "", "" ],
+ };
+
+ for ( let dapi_name in expected )
+ {
+ let expected_call = expected[ dapi_name ];
+
+ for ( let i in expected_call )
+ {
+ let chk = expected_call[ i ];
+
+ if ( chk === undefined )
+ {
+ continue;
+ }
+
+ let [ gfield, gindex, gdapi_manager, gdapi, gdata ] =
+ triggered[ dapi_name ][ i ];
+
+ expect( gfield ).to.equal( chk.name );
+ expect( gdapi.name ).to.equal( dapi_name );
+ expect( +gindex ).to.equal( +i );
+ expect( gdapi_manager ).to.equal( dapi_manager );
+
+ // see mapsrc
+ expect( gdata ).to.deep.equal( chk.data );
+ }
+ }
+
+ expect( triggered.dapi_no_call ).to.equal( undefined );
+ expect( meta_clear ).to.deep.equal( expected_clear );
+
+ return Promise.all( dapis );
+ } );
+
+
+ it( "check _mapDapiData default values", () =>
+ {
+ const triggered: { [key: string]: any[] }= {};
+
+ // g prefix = "given"
+ const meta_source = {
+ getFieldData(
+ _gfield: any,
+ gindex: any,
+ _gdapim: any,
+ gdapi: any,
+ gdata: any,
+ )
+ {
+ triggered[ gdapi.name ] = triggered[ gdapi.name ] || [];
+ triggered[ gdapi.name ][ gindex ] = arguments;
+
+ expect( gdata ).to.deep.equal( { ina: '', inb: [] } );
+
+ return Promise.resolve( true );
+ }
+ }
+
+ const {
+ request,
+ program,
+ filter,
+ quote,
+ } = createStubs( false );
+
+ const sut = new Sut(
+ filter,
+ createStubDataApiManager,
+ meta_source,
+ createStubStagingBucket
+ );
+
+ program.meta.fields = {
+ foo: {
+ dapi: {
+ name: 'dapi_foo',
+ mapsrc: { ina: 'src', inb: 'src1' },
+ },
+ },
+ };
+
+ program.mapis = {
+ src1: [ 'foo' ], // change
+ };
+
+ // data changed
+ const data = {
+ src: [ 'src0', '' ],
+ src1: [ undefined, '' ],
+ };
+
+ const bucket = createStubBucket( {
+ src: [ 'bsrc0', '' ],
+ src1: [ 'bsrc10', undefined],
+ } );
+
+ const { dapis } = sut.processDiff(
+ data, request, program, bucket, quote
+ );
+
+ return Promise.all( dapis );
+ } );
+} );
+
+
+function createSutFromStubs(
+ internal: boolean = false,
+ internals: { internal: boolean } = { internal: false },
+)
+{
+ const {
+ request,
+ program,
+ filter,
+ meta_source,
+ dapi_constructor,
+ quote
+ } = createStubs(internal, internals);
+
+ return {
+ request: request,
+ program: program,
+ filter: filter,
+ meta_source: meta_source,
+ quote: quote,
+
+ sut: new Sut(
+ filter,
+ dapi_constructor,
+ meta_source,
+ createStubStagingBucket
+ ),
+ };
+}
+
+
+function createStubs(
+ internal: boolean = false,
+ internals: { internal: boolean } = { internal: false },
+)
+{
+ return {
+ request: createStubUserRequest( internal ),
+ program: createStubProgram( internals ),
+ filter: createStubFilter(),
+ dapi_constructor: createStubDataApiContructor(),
+ meta_source: createStubDapiMetaSource(),
+ quote: createStubQuote(),
+ };
+}
+
+
+function createStubUserRequest( internal: boolean )
+{
+ return {
+ getSession: () => ( {
+ isInternal: () => internal
+ } )
+ };
+}
+
+
+function createStubProgram( internals: { internal: boolean } )
+{
+ return {
+ ineligibleLockCount: 0,
+ internal: internals,
+ meta: {
+ arefs: {},
+ fields: {},
+ groups: {},
+ qdata: {},
+ qtypes: {},
+ },
+ mapis: {},
+ apis: {},
+
+ getId(){ return 'Foo'; },
+
+ initQuote() {},
+ };
+}
+
+
+function createStubFilter()
+{
+ return {
+ filter(
+ data: Record<string, any>,
+ _key_types: Record<string, any>,
+ _ignore_types: Record<string, any>,
+ _permit_null: boolean,
+ ) {
+ return data;
+ },
+
+ filterValues(
+ values: string[],
+ _filter: string,
+ _permit_null: boolean,
+ ) {
+ return values;
+ }
+ }
+}
+
+
+function createStubDataApiContructor()
+{
+ return (
+ _apis: any,
+ _request: UserRequest,
+ _quote: ServerSideQuote
+ ) => { return createStubDataApiManager(); };
+}
+
+
+function createStubDataApiManager()
+{
+ return {
+ setApis( _apis: any ) { return this; },
+
+ getApiData(
+ _api: string,
+ _data: any,
+ _callback: any,
+ _name: string,
+ _index: PositiveInteger,
+ _bucket: any,
+ _fc: any,
+ ){ return this; },
+
+ getPendingApiCalls() { return {}; },
+
+ fieldStale( _field: string, _index: PositiveInteger, _stale?: boolean )
+ {
+ return this;
+ },
+
+ fieldNotReady( _id: any, _i: PositiveInteger, _bucket: any )
+ {
+ return;
+ },
+
+ processFieldApiCalls() { return this; },
+
+ setFieldData(
+ _name: string,
+ _index: PositiveInteger,
+ _data: Record<string, any>,
+ _value: string,
+ _label: string,
+ _unchanged: boolean,
+ ) { return this; },
+
+ triggerFieldUpdate(
+ _name: string,
+ _index: PositiveInteger,
+ _value: string,
+ _label: string,
+ _unchanged: boolean,
+ ) { return false; },
+
+ hasFieldData( _name: string, _index: PositiveInteger ) { return true; },
+
+ clearFieldData(
+ _name: string,
+ _index: PositiveInteger,
+ _trigger_event: boolean,
+ ) { return this; },
+
+ clearPendingApiCall( _id: string ) { return this; },
+
+ expandFieldData(
+ _name: string,
+ _index: PositiveInteger,
+ _bucket: any,
+ _map: any,
+ _predictive: boolean,
+ _diff: any,
+ ) { return this; },
+
+ getDataExpansion(
+ _name: string,
+ _index: PositiveInteger,
+ bucket: any,
+ _map: any,
+ _predictive: boolean,
+ _diff: any,
+ ) { return bucket; },
+ };
+}
+
+
+function createStubQuote()
+{
+ let quote_data: Record<string, any> = {};
+
+ return {
+ getRatedDate()
+ {
+ return 1572292453;
+ },
+
+ setRatedDate( _timestamp: UnixTimestamp )
+ {
+ return this;
+ },
+
+ getProgram()
+ {
+ return createStubProgram( { internal: false } );
+ },
+
+ getProgramId()
+ {
+ return 'Bar';
+ },
+
+ getId()
+ {
+ return 123;
+ },
+
+ getCurrentStepId()
+ {
+ return 1;
+ },
+
+ setExplicitLock( _reason: string, _step: number )
+ {
+ return this;
+ },
+
+ setLastPremiumDate( _timestamp: UnixTimestamp )
+ {
+ return this;
+ },
+
+ getLastPremiumDate()
+ {
+ return 1572292453;
+ },
+
+ setRateBucket( _bucket: any )
+ {
+ return this;
+ },
+
+ setRatingData( data: Record<string, any> )
+ {
+ quote_data = data;
+
+ return this;
+ },
+
+ getRatingData()
+ {
+ return quote_data;
+ },
+
+ getBucket()
+ {
+ return new QuoteDataBucket();
+ }
+ };
+}
+
+
+function createStubDapiMetaSource()
+{
+ return {
+ getFieldData(
+ _field: string,
+ _index: PositiveInteger,
+ _dapi_manager: any,
+ _dapi: any,
+ _data: Record<string, any>,
+ )
+ {
+ return new Promise( () => {} );
+ },
+ };
+}
+
+
+function createStubBucket( data: Record )
+{
+ return {
+ data: data,
+
+ getDataByName( name: string )
+ {
+ return data[ name ];
+ },
+ };
+}
+
+
+function createStubStagingBucket( bucket: any )
+{
+ let bucket_data = {};
+
+ return {
+ setCommittedValues( _data: Record<string, any> ) { return this; },
+
+ forbidBypass() { return this; },
+
+ setValues( values: Record<string, any> )
+ {
+ bucket_data = values; return this;
+ },
+
+ overwriteValues( _data: Record<string, any> ) { return this; },
+
+ getDiff() { return bucket_data; },
+
+ getFilledDiff() { return bucket.data || { foo: 'Bar' }; },
+
+ revert( _evented?: boolean ) { return this; },
+
+ commit( _store?: { old: Record<string, any> } ) { return this; },
+
+ clear() { return this; },
+
+ each( _callback: ( value: any, name: string ) => void )
+ {
+ return this;
+ },
+
+ getDataByName( name: string ) { return bucket.getDataByName( name ); },
+
+ getOriginalDataByName( name: string )
+ {
+ return bucket.getDataByName( name );
+ },
+
+ getDataJson() { return 'Foo'; },
+
+ getData() { return [ ( _Foo123: string ) => 'Bar']; },
+
+ filter(
+ _pred: ( name: string ) => boolean,
+ _c: ( value: any, name: string ) => void
+ )
+ {
+ return this;
+ },
+
+ hasIndex( _name: string, _i: PositiveInteger ) { return true; },
+
+ isDirty() { return false; },
+ };
+}
diff --git a/test/server/service/RatingServiceTest.ts b/test/server/service/RatingServiceTest.ts
index 1b7f7d7..8896c49 100644
--- a/test/server/service/RatingServiceTest.ts
+++ b/test/server/service/RatingServiceTest.ts
@@ -35,6 +35,8 @@ import { ServerSideQuote } from "../../../src/server/quote/ServerSideQuote";
import { UserRequest } from "../../../src/server/request/UserRequest";
import { UserResponse } from "../../../src/server/request/UserResponse";
import { UserSession } from "../../../src/server/request/UserSession";
+import { QuoteDataBucket } from "../../../src/bucket/QuoteDataBucket";
+import { Kv } from "../../../src/bucket/delta";
import {
ServerDao,
@@ -58,9 +60,10 @@ describe( 'RatingService', () =>
response,
quote,
stub_rate_data,
+ createDelta,
} = getStubs();
- const sut = new Sut( logger, dao, server, raters );
+ const sut = new Sut( logger, dao, server, raters, createDelta);
const expected = {
data: stub_rate_data,
@@ -84,9 +87,10 @@ describe( 'RatingService', () =>
response,
quote,
stub_rate_data,
+ createDelta,
} = getStubs();
- const sut = new Sut( logger, dao, server, raters );
+ const sut = new Sut( logger, dao, server, raters, createDelta );
let last_prem_called = false;
let rated_date_called = false;
@@ -140,7 +144,7 @@ describe( 'RatingService', () =>
.then( () => expect( sent ).to.be.true );
} );
- it( "saves rate data to own field", () =>
+ it( "saves rate data to it's own field", () =>
{
const {
logger,
@@ -151,20 +155,20 @@ describe( 'RatingService', () =>
response,
quote,
stub_rate_data,
+ createDelta,
} = getStubs();
let saved_rates = false;
dao.saveQuote = (
- quote: ServerSideQuote,
- success: ServerDaoCallback,
- _failure: ServerDaoCallback,
- save_data: Record<string, any>,
+ quote: ServerSideQuote,
+ success: ServerDaoCallback,
+ _failure: ServerDaoCallback,
+ save_data: Record<string, any>,
+ _push_data: Record<string, any>,
) =>
{
- expect( save_data ).to.deep.equal( {
- ratedata: stub_rate_data,
- } );
+ expect( save_data.ratedata ).to.deep.equal( stub_rate_data );
saved_rates = true;
success( quote );
@@ -172,7 +176,7 @@ describe( 'RatingService', () =>
return dao;
};
- const sut = new Sut( logger, dao, server, raters );
+ const sut = new Sut( logger, dao, server, raters, createDelta );
return sut.request( request, response, quote, "" )
.then( () =>
@@ -182,6 +186,55 @@ describe( 'RatingService', () =>
} );
+ it( "saves delta to it's own field", () =>
+ {
+ const {
+ logger,
+ server,
+ raters,
+ dao,
+ request,
+ response,
+ quote,
+ stub_rate_delta,
+ createDelta,
+ } = getStubs();
+
+ let saved_quote = false;
+
+ let timestamp = 0;
+
+ quote.setLastPremiumDate = ( ts: UnixTimestamp ) =>
+ {
+ timestamp = ts;
+ return quote;
+ };
+
+ dao.saveQuote = (
+ quote: ServerSideQuote,
+ success: ServerDaoCallback,
+ _failure: ServerDaoCallback,
+ _save_data: Record<string, any>,
+ push_data: Record<string, any>,
+ ) =>
+ {
+ stub_rate_delta[ "rdelta.ratedata" ].timestamp = timestamp;
+ saved_quote = true;
+
+ expect( push_data ).to.deep.equal( stub_rate_delta );
+ success( quote );
+
+ return dao;
+ };
+
+ const sut = new Sut( logger, dao, server, raters, createDelta );
+
+ return sut.request( request, response, quote, "" )
+ .then( () => { expect( saved_quote ).to.be.true; } );
+ } );
+
+
+
it( "rejects and responds with error", () =>
{
const {
@@ -194,13 +247,14 @@ describe( 'RatingService', () =>
request,
response,
server,
+ createDelta,
} = getStubs();
const expected_error = new Error( "expected error" );
rater.rate = () => { throw expected_error; };
- const sut = new Sut( logger, dao, server, raters );
+ const sut = new Sut( logger, dao, server, raters, createDelta );
let logged = false;
@@ -242,11 +296,12 @@ describe( 'RatingService', () =>
request,
response,
server,
+ createDelta,
} = getStubs();
const expected_message = 'expected foo';
- const sut = new Sut( logger, dao, server, raters );
+ const sut = new Sut( logger, dao, server, raters, createDelta );
rater.rate = (
_quote: ServerSideQuote,
@@ -279,6 +334,7 @@ describe( 'RatingService', () =>
response,
server,
stub_rate_data,
+ createDelta,
} = getStubs();
let sent = false;
@@ -304,7 +360,7 @@ describe( 'RatingService', () =>
return server;
};
- const sut = new Sut( logger, dao, server, raters );
+ const sut = new Sut( logger, dao, server, raters, createDelta );
return sut.request( request, response, quote, "" )
.then( () => expect( sent ).to.be.true );
@@ -325,6 +381,7 @@ describe( 'RatingService', () =>
request,
response,
quote,
+ createDelta,
} = getStubs();
dao.mergeBucket = () =>
@@ -341,7 +398,7 @@ describe( 'RatingService', () =>
{
processed = true;
}
- }( logger, dao, server, raters );
+ }( logger, dao, server, raters, createDelta );
sut.request( request, response, quote, 'something' );
} );
@@ -361,6 +418,7 @@ describe( 'RatingService', () =>
request,
response,
quote,
+ createDelta,
} = getStubs();
quote.getLastPremiumDate = () =>
@@ -371,7 +429,7 @@ describe( 'RatingService', () =>
quote.getRatedDate = () => initial_date;
- const sut = new Sut( logger, dao, server, raters );
+ const sut = new Sut( logger, dao, server, raters, createDelta );
server.sendResponse = ( _request: any, _quote: any, resp: any, _actions: any ) =>
{
@@ -404,6 +462,19 @@ function getStubs()
_unavailable_all: '0',
};
+ const stub_rate_delta: any = {
+ "rdelta.ratedata": {
+ data: {
+ _unavailable_all: [ undefined ]
+ },
+ timestamp: 123
+ }
+ };
+
+ const createDelta = ( _src: Kv, _dest: Kv ) => {
+ return stub_rate_delta[ "rdelta.ratedata" ][ "data" ];
+ };
+
const rater = new class implements Rater
{
rate(
@@ -452,6 +523,7 @@ function getStubs()
success: ServerDaoCallback,
_failure: ServerDaoCallback,
_save_data: Record<string, any>,
+ _push_data: Record<string, any>,
): this
{
success( quote );
@@ -510,19 +582,25 @@ function getStubs()
getLastPremiumDate: () => 0,
getCurrentStepId: () => 0,
setExplicitLock: () => quote,
+ setRateBucket: () => quote,
+ setRatingData: () => quote,
+ getRatingData: () => stub_rate_data,
+ getBucket: () => new QuoteDataBucket(),
};
return {
- program: program,
- stub_rate_data: stub_rate_data,
- rater: rater,
- raters: raters,
- logger: logger,
- server: server,
- dao: dao,
- session: session,
- request: request,
- response: response,
- quote: quote,
+ program: program,
+ stub_rate_data: stub_rate_data,
+ stub_rate_delta: stub_rate_delta,
+ createDelta: createDelta,
+ rater: rater,
+ raters: raters,
+ logger: logger,
+ server: server,
+ dao: dao,
+ session: session,
+ request: request,
+ response: response,
+ quote: quote,
};
};