[DEV-2692] [BC-BREAK] Bucket stability and consistency fixes and non-term nulls

This mixes in support for non-terminating nulls.  It would have been
nice to handle that in a separate commit for clarity, but the
refactoring came as a consequence of trying to provide a working
implementation.

This also fixes various inconsistencies and subtle bugs in unlikely
situations, including the mutation of objects passed as arguments to
various methods and inconsistent handling of diff data.

Changes are more consistently recognized.  Perhaps the most noticeable
consequence is that moving between steps no longer prompts to discard
changes---previously, calculated values would trigger the dirty flag on
steps even if the user didn't actually change anything.  I (and
others) have wanted this fixed for many years.

This is a very dense commit that touches a core part of the
system.  Hopefully the Changelog below helps.
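
As a concrete illustration of the new semantics (a standalone sketch,
not code from this commit; `parseVector' is a hypothetical helper):
only the final `null' in a vector is treated as terminating, and any
earlier `null' is interpreted as `undefined', since JSON serializes
undefined array elements to `null'.

    function parseVector( values )
    {
        // clone to avoid mutating the caller's array
        const result = Array.prototype.slice.call( values, 0 );

        // a trailing null truncates the vector
        const terminated = ( result[ result.length - 1 ] === null );
        const len        = ( terminated )
            ? result.length - 1
            : result.length;

        // every null before the end is really an undefined that JSON
        // mangled ( JSON.stringify( [ undefined ] ) === "[null]" )
        for ( let i = 0; i < len; i++ )
        {
            if ( result[ i ] === null )
            {
                result[ i ] = undefined;
            }
        }

        return { values: result.slice( 0, len ), terminated: terminated };
    }

    // [ null, null, 'quux' ]: indexes 0 and 1 untouched, index 2 set
    parseVector( [ null, null, 'quux' ] );
    // => { values: [ undefined, undefined, 'quux' ], terminated: false }

    // [ 'bar', null ]: set index 0, truncate everything after it
    parseVector( [ 'bar', null ] );
    // => { values: [ 'bar' ], terminated: true }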

* src/bucket/Bucket.js
  (setValues): [BC-BREAK] Remove `merge_index' and `merge_null'
    parameters.  (See the migration sketch following this Changelog.)
* src/bucket/DelayedStagingBucket.js
  (setValues): [BC-BREAK] Remove `merge_index' and `merge_null'
    parameters.  Remove distinction between `merge_index' and
    non-`merge_index' behavior.
* src/bucket/QuoteDataBucket.js
  (setValues): [BC-BREAK] Remove `merge_index' and `merge_null'
    parameters.  Remove respective arguments from `_mergeData' call.
  (_mergeData): Remove same parameters.  Remove handling of
    `merge_index' and `merge_null'.
  (overwriteValues): Append `null' to each vector.
* src/bucket/StagingBucket.js
  (_initState): Use `Object.create' instead of explicit prototype
    instantiation (functionally equivalent).
  (merge): Minor comment correction.
  (_hasChanged): Rename to `_parseChanges'.
  (_parseChanges): Rename from `_hasChanged'.  Remove `merge_index'
    parameter.  Generate a new object rather than mutating the original
    data (prevents dangerous and subtle bugs from side-effects).  Clone
    each vector rather than modifying/referencing directly (this was
    previously done during merge).  Remove `merge_index'
    distinction.  Handle non-terminating `null' values.
  (setValues): [BC-BREAK] Remove `merge_index' and `merge_null'
    parameters.  Use new object generated by `_parseChanges'.  Remove
    cloning of each vector (`_parseChanges' now does that).  Remove
    `merge_index' distinction.
  (overwriteValues): Remove argument to `setValues' call.
  (getFilledDiff): [BC-BREAK] Use `_staged' rather than `_curdata'.
  (commit): Remove second and third arguments of call to `setValues'
    of underlying bucket.
* src/client/Client.js
  (_initStepUi): Remove second argument of calls to quote `setData'.
* src/client/quote/ClientQuote.js
  (setData): [BC-BREAK] Remove `merge_nulls' parameter.  Remove second
    and third arguments of call to staging bucket `setValues'.  Add
    comment indicating a long-standing problem with committing the
    staging bucket contents before save has succeeded.
* src/server/request/DataProcessor.js
  (processDiff): Remove `permit_null' argument of `sanitizeDiff'
    call.
  (sanitizeDiff): Remove `permit_null' parameter.  Hard-code filter
    call's `permit_null' argument to `true'.
  (_determineDapiFields): Properly handle `null's (ignore) rather than
    inadvertently converting them into the string "null".
* test/bucket/StagingBucketTest.js: Modify test cases
    accordingly.  Add tests to verify that updates and diffs operate
    as expected, especially support for non-terminating `null's.
  (createStubBucket): Use `QuoteDataBucket'.  Ideally remove this
    coupling in the future, but this is a more realistic test case for
    the time being.
* test/server/request/DataProcessorTest.js: Update test to account for
    hard-coded `given_null' argument.
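
Since the `setValues'/`setData' signatures above are [BC-BREAK]s, call
sites must drop the extra arguments.  A hedged before/after sketch (the
bucket and data here are hypothetical; the calls mirror the changes in
the diff below):

    // before: flags selected between index merge and replacement
    bucket.setValues( { foo: [ 'bar' ] }, true, false );

    // after: setValues() always merges index-by-index...
    bucket.setValues( { foo: [ 'bar' ] } );

    // ...and truncation is expressed by a terminating null, which is
    // what overwriteValues() now appends on the caller's behalf
    bucket.setValues( { foo: [ 'bar', null ] } );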
master
Mike Gerwitz 2017-09-05 16:33:46 -04:00
parent 8a1ac4781c
commit 68649dfd9b
9 changed files with 284 additions and 265 deletions

src/bucket/Bucket.js

@@ -35,13 +35,9 @@ module.exports = Interface( 'Bucket',
*
* @param {Object.<string,Array>} data associative array of the data
*
* @param {boolean} merge_index whether to merge indexes individually
* @param {boolean} merge_null whether to merge undefined values (vs
* ignore)
*
* @return {Bucket} self
*/
'public setValues': [ 'data', 'merge_index', 'merge_null' ],
'public setValues': [ 'data' ],
/**

src/bucket/DelayedStagingBucket.js

@@ -45,31 +45,17 @@ module.exports = Class( 'DelayedStagingBucket' )
'private _timer': 0,
'public override setValues': function( data, merge_index, merge_null )
'public override setValues': function( data )
{
for ( var name in data )
{
if ( merge_index )
if ( this._queued[ name ] === undefined )
{
if ( this._queued[ name ] === undefined )
{
this._queued[ name ] = [];
}
// merge individual indexes
this.merge( data[ name ], this._queued[ name ] );
this._queued[ name ] = [];
}
else
{
// no index merge; replace any existing data
this._queued[ name ] = Array.prototype.slice.call(
data[ name ], 0
);
// this will ensure that no data will follow what we were
// provided
this._queued[ name ].push( null );
}
// merge individual indexes
this.merge( data[ name ], this._queued[ name ] );
}
this._setTimer();

src/bucket/QuoteDataBucket.js

@@ -75,15 +75,11 @@ module.exports = Class( 'QuoteDataBucket' )
*
* @param {Object.<string,Array>} data associative array of the data
*
* @param {boolean} merge_index whether to merge indexes individually
* @param {boolean} merge_null whether to merge undefined values (vs
* ignore)
*
* @return {QuoteDataBucket} self to allow for method chaining
*/
'public setValues': function( data, merge_index, merge_null )
'public setValues': function( data )
{
this._mergeData( data, merge_index, merge_null );
this._mergeData( data );
return this;
},
@@ -118,11 +114,8 @@ module.exports = Class( 'QuoteDataBucket' )
*
* @return undefined
*/
'private _mergeData': function( data, merge_index, merge_null )
'private _mergeData': function( data )
{
merge_index = !!merge_index; // default false
merge_null = !!merge_null; // default false
var ignore = {};
// remove any data that has not been updated (the hooks do processing on
@@ -171,14 +164,6 @@ module.exports = Class( 'QuoteDataBucket' )
var data_set = data[ name ];
// if we're not supposed to merge the indexes one by one, just set
// it
if ( merge_index === false )
{
this._data[name] = data_set;
continue;
}
// initialize it if its undefined in the bucket
if ( this._data[name] === undefined )
{
@@ -197,7 +182,7 @@ module.exports = Class( 'QuoteDataBucket' )
}
// ignore if set to null (implying the index was removed)
if ( !merge_null && data_set[i] === null )
if ( data_set[i] === null )
{
// this marks the end of the array as far as we're concerned
this._data[ name ].length = i;
@@ -223,7 +208,16 @@ module.exports = Class( 'QuoteDataBucket' )
*/
'public overwriteValues': function( data )
{
this.setValues( data, false );
this.setValues(
Object.keys( data ).reduce(
( vals, key ) => (
vals[ key ] = data[ key ].concat( [ null ] ),
vals
),
{}
)
);
return this;
},
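
To make the `reduce' above concrete, here is what it produces for
hypothetical input (illustrative data, not from the commit): each
vector gains a terminating `null' so that `setValues' truncates
anything beyond the provided values.

    const data = { foo: [ 'a', 'b' ], bar: [ 'c' ] };

    Object.keys( data ).reduce(
        ( vals, key ) => ( vals[ key ] = data[ key ].concat( [ null ] ), vals ),
        {}
    );
    // => { foo: [ 'a', 'b', null ], bar: [ 'c', null ] }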

src/bucket/StagingBucket.js

@@ -113,13 +113,10 @@ module.exports = Class( 'StagingBucket' )
'private _initState': function()
{
const data = this._bucket.getData();
const retdata = function() {};
// ensure that we don't modify the original data
retdata.prototype = data;
const retdata = Object.create( this._bucket.getData() );
this._curdata = new retdata();
this._curdata = retdata;
this._dirty = false;
},
@@ -142,7 +139,7 @@ module.exports = Class( 'StagingBucket' )
}
else if ( nonull && ( data === null ) )
{
// nulls mark the end of the set
// nulls mark the end of the vector
dest.length = i;
break;
}
@@ -193,45 +190,63 @@ module.exports = Class( 'StagingBucket' )
/**
* Determine whether values have changed
*
* If all values are identical to the current bucket values (relative to
* `merge_index`), returns `false`. Otherwise, this stops at the first
* recognized change and returns `true`.
* If all values are identical to the current bucket values, returns
* `false`.  Otherwise, returns `true`.
*
* Because JSON serializes all undefined values to `null`, only the
* final null in a diff is considered terminating; the rest are
* converted into `undefined`.
*
* This creates a fresh copy of the data to prevent unexpected
* side-effects.
*
* @param {Object.<string,Array>} data key/value data or diff
* @param {boolean} merge_index compare indexes individually
*
* @return {boolean} whether a change was recognized
*/
'private _hasChanged': function( data, merge_index )
'private _parseChanges': function( data )
{
// generate a new object so that we don't mutate the original, which
// may be used elsewhere
const result = {};
let changed = false;
for ( let name in data )
{
let values = data[ name ];
let values = Array.prototype.slice.call( data[ name ], 0 );
let cur = this._curdata[ name ] || [];
let len = this._length( values );
let has_null = ( len !== values.length );
result[ name ] = values;
let merge_len_change = (
merge_index && has_null && ( len < cur.length )
has_null && ( len < cur.length )
);
let replace_len_change = (
!merge_index && ( len !== cur.length )
);
// quick change check (index removal if merge_index, or index
// count change if not merge_index)
if ( merge_len_change || replace_len_change )
// quick change check; but we still have to keep processing the
// data to generate a proper diff and process non-terminating
// nulls
if ( merge_len_change )
{
changed = true;
continue;
}
for ( let index = 0; index < len; index++ )
{
if ( merge_index && ( values[ index ] === undefined ) )
// see #_length; if ending in a null, this is offset by -1
let last_index = ( index === len );
// only the last null is terminating; the rest are
// interpreted as `undefined' (because JSON serializes
// undefined values to `null')
if ( ( values[ index ] === null ) && !last_index )
{
values[ index ] = undefined;
}
if ( values[ index ] === undefined )
{
continue;
}
@@ -249,11 +264,11 @@ module.exports = Class( 'StagingBucket' )
// if nothing is left, remove entirely
if ( !values.some( x => x !== undefined ) )
{
delete data[ name ];
delete result[ name ];
}
}
return changed;
return [ changed, result ];
},
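
The JSON caveat in the docblock above is standard JSON behavior and
easy to verify in a REPL (illustrative only):

    // undefined array elements survive a JSON round trip as null, so a
    // client posting [ 'bar', undefined, null ] actually delivers
    // [ 'bar', null, null ]; only the final null can safely be treated
    // as a terminator
    JSON.stringify( [ 'bar', undefined, null ] );
    // => '["bar",null,null]'

    JSON.parse( JSON.stringify( [ 'bar', undefined, null ] ) );
    // => [ 'bar', null, null ]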
@@ -312,17 +327,20 @@ module.exports = Class( 'StagingBucket' )
/**
* Explicitly sets the contents of the bucket
*
* @param {Object.<string,Array>} data associative array of the data
* Because JSON serializes all undefined values to `null`, only the
* final null in a diff is considered terminating; the rest are
* converted into `undefined`. Therefore, it is important that all
* truncations include no elements in the vector after the truncating null.
*
* @param {boolean} merge_index whether to merge indexes individually
* @param {boolean} merge_null whether to merge undefined values (vs
* ignore)
* @param {Object.<string,Array>} given_data associative array of the data
*
* @return {Bucket} self
*/
'virtual public setValues': function( data, merge_index, merge_null )
'virtual public setValues': function( given_data )
{
if ( !this._hasChanged( data, merge_index ) )
const [ changed, data ] = this._parseChanges( given_data );
if ( !changed )
{
return;
}
@@ -331,7 +349,7 @@ module.exports = Class( 'StagingBucket' )
for ( let name in data )
{
let item = Array.prototype.slice.call( data[ name ], 0 );
let item = data[ name ];
// initialize as array if necessary
if ( this._staged[ name ] === undefined )
@@ -356,21 +374,12 @@ module.exports = Class( 'StagingBucket' )
}
}
if ( merge_index )
{
// merge with previous values
this.merge( item, this._staged[ name ] );
// merge with previous values
this.merge( item, this._staged[ name ] );
// we do not want nulls in our current representation of the
// data
this.merge( item, this._curdata[ name ], true );
}
else
{
// overwrite
this._staged[ name ] = item;
this._curdata[ name ] = item;
}
// we do not want nulls in our current representation of the
// data
this.merge( item, this._curdata[ name ], true );
}
this._dirty = true;
@@ -400,7 +409,7 @@ module.exports = Class( 'StagingBucket' )
new_data[ name ].push( null );
}
return this.setValues( new_data, false );
return this.setValues( new_data );
},
@@ -432,9 +441,9 @@ module.exports = Class( 'StagingBucket' )
// return each staged field
for ( let field in this._staged )
{
// retrieve the current value for this field
// retrieve the diff for this field
ret[ field ] = Array.prototype.slice.call(
this._curdata[ field ], 0
this._staged[ field ], 0
);
}
@@ -521,7 +530,7 @@ module.exports = Class( 'StagingBucket' )
this.emit( this.__self.$('EVENT_PRE_COMMIT') );
this._bucket.setValues( this._staged, true, false );
this._bucket.setValues( this._staged );
this._staged = {};
this.emit( this.__self.$('EVENT_COMMIT') );

src/client/Client.js

@@ -1873,7 +1873,7 @@ module.exports = Class( 'Client' )
}
// set data all at once to avoid extraneous calls
quote.setData( values, true );
quote.setData( values );
} ).on( 'indexReset', function( index, groupui )
{
var fields = groupui.getGroup().getFieldNames(),
@@ -1894,7 +1894,7 @@ module.exports = Class( 'Client' )
}
// set data all at once to avoid extraneous calls
quote.setData( values, true );
quote.setData( values );
} );
// when the step is rendered, run the onchange events

src/client/quote/ClientQuote.js

@@ -293,11 +293,9 @@ module.exports = Class( 'ClientQuote' )
*
* @return {ClientQuote} self
*/
'public setData': function( data, merge_nulls )
'public setData': function( data )
{
merge_nulls = !!merge_nulls;
this._staging.setValues( data, true, merge_nulls );
this._staging.setValues( data );
return this;
},
@@ -443,6 +441,12 @@ module.exports = Class( 'ClientQuote' )
callback.apply( null, arguments );
} );
// XXX: we need to commit after a _successful_ save, otherwise the
// client will never post again! But we don't want to commit
// everything that is staged, because that will possibly include
// data the user is filling out on the next step. So, we need to
// store the diff separately to be committed.
// commit staged quote data to the data bucket (important: do this
// *after* save); will make the staged values available as old_store.old
this._staging.commit( old_store );

src/server/request/DataProcessor.js

@@ -89,7 +89,7 @@ module.exports = Class( 'DataProcessor',
*/
'public processDiff'( data, request, program, bucket )
{
const filtered = this.sanitizeDiff( data, request, program, false );
const filtered = this.sanitizeDiff( data, request, program );
const dapi_manager = this._dapif( program.apis, request );
const staging = this._stagingCtor( bucket );
@@ -127,21 +127,18 @@ module.exports = Class( 'DataProcessor',
* @param {Object} data client-provided data
* @param {UserRequest} request client request
* @param {Program} program active program
* @param {boolean} permit_null whether null values should be retained
*
* @return {Object} filtered data
*/
'public sanitizeDiff'( data, request, program, permit_null )
'public sanitizeDiff'( data, request, program )
{
permit_null = ( permit_null === undefined ) ? false : permit_null;
if ( !request.getSession().isInternal() )
{
this._cleanInternals( data, program );
}
const types = program.meta.qtypes;
return this._filter.filter( data, types, {}, permit_null );
return this._filter.filter( data, types, {}, true );
},
@@ -246,7 +243,9 @@ module.exports = Class( 'DataProcessor',
return Object.keys( mapis ).reduce(
( result, src_field ) =>
{
if ( data[ src_field ] === undefined )
const fdata = data[ src_field ];
if ( fdata === undefined )
{
return result;
}
@@ -258,12 +257,14 @@ module.exports = Class( 'DataProcessor',
{
result[ field ] = result[ field ] || [];
Object.keys( data[ src_field ] ).forEach( i =>
Object.keys( fdata ).forEach( i =>
{
if ( data[ src_field ][ i ] !== undefined )
if ( fdata[ i ] === undefined || fdata[ i ] === null )
{
result[ field ][ i ] = i;
return;
}
result[ field ][ i ] = i;
} );
} );
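
To see why the old guard leaked `null's (an illustrative sketch, not
code from the commit): `null !== undefined' is true, so a null field
value passed the old check and its index was mapped, later being
coerced into the string "null" downstream; the new guard skips both.

    const fdata = [ 'abc', null, undefined ];

    // old guard: only undefined was skipped, so index 1 (null) leaked
    Object.keys( fdata ).filter( i => fdata[ i ] !== undefined );
    // => [ '0', '1' ]

    // new guard: both null and undefined are ignored
    Object.keys( fdata ).filter(
        i => !( ( fdata[ i ] === undefined ) || ( fdata[ i ] === null ) )
    );
    // => [ '0' ]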

test/bucket/StagingBucketTest.js

@@ -30,160 +30,204 @@ const sinon = require( 'sinon' );
const {
Bucket,
StagingBucket: Sut
StagingBucket: Sut,
// TODO: decouple test from this
QuoteDataBucket,
} = root.bucket;
describe( 'StagingBucket', () =>
{
describe( 'pre-update event', () =>
it( 'pre-update event allows updating data before set', () =>
{
it( 'allows updating data before set', () =>
const sut = Sut( createStubBucket() );
const data = {
foo: [ 'bar', 'baz' ],
};
sut.on( 'preStagingUpdate', data =>
{
const sut = Sut( createStubBucket() );
data.foo[ 1 ] = 'quux';
} );
const data = {
foo: [ 'bar', 'baz' ],
};
// triggers setValues
sut.setValues( data );
sut.on( 'preStagingUpdate', data =>
expect( sut.getDataByName( 'foo' ) )
.to.deep.equal( [ 'bar', 'quux' ] );
} );
[
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz' ] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
},
// actual changes
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'change', 'baz' ] },
is_change: true,
expected: {
data: { foo: [ 'change', 'baz' ] },
diff: { foo: [ 'change' ] },
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'change' ] },
is_change: true,
expected: {
data: { foo: [ 'bar', 'change' ] },
diff: { foo: [ , 'change' ] },
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ undefined, 'change' ] },
is_change: true,
expected: {
data: { foo: [ 'bar', 'change' ] },
diff: { foo: [ , 'change' ] },
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ undefined, 'baz' ] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', undefined ] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', null ] },
is_change: true,
expected: {
data: { foo: [ 'bar' ] },
diff: { foo: [ , null ] },
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', null ] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', 'quux' ] },
is_change: true,
expected: {
data: { foo: [ 'bar', 'baz', 'quux' ] },
diff: { foo: [ , , 'quux' ] },
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
},
// null not at end of set means unchanged
{
initial: { foo: [ 'bar', 'baz', 'quux' ] },
update: { foo: [ null, null, 'quux' ] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz', 'quux' ] },
diff: {},
},
},
// but the last one is
{
initial: { foo: [ 'bar', 'baz', 'quux' ] },
update: { foo: [ null, 'baz', null ] },
is_change: true,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: { foo: [ , , null ] },
},
},
// given a string of nulls, only the last one is terminating; the
// rest are interpreted as undefined (because JSON serializes
// undefined values to `null' -_-)
{
initial: { foo: [ 'bar', 'baz', 'quux' ] },
update: { foo: [ null, null, null ] },
is_change: true,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: { foo: [ , , null ] },
},
},
].forEach( ( { initial, update, is_change, expected }, i ) =>
{
it( `pre-commit, properly processes diff and change (${i})`, () =>
{
const sut = Sut( createStubBucket() );
let called = false;
sut.setValues( initial );
expect( sut.getDiff() ).to.deep.equal( initial );
sut.on( 'preStagingUpdate', () => called = true );
sut.setValues( update );
expect( called ).to.equal( is_change );
if ( expected )
{
data.foo[ 1 ] = 'quux';
} );
// triggers setValues
sut.setValues( data );
expect( sut.getDataByName( 'foo' ) )
.to.deep.equal( [ 'bar', 'quux' ] );
expect( sut.getData() ).to.deep.equal( expected.data );
}
} );
[
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz' ] },
merge_index: true,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz' ] },
merge_index: false,
is_change: false,
},
// actual changes
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'change', 'baz' ] },
merge_index: true,
is_change: true,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'change' ] },
merge_index: true,
is_change: true,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ undefined, 'change' ] },
merge_index: true,
is_change: true,
},
// single-index changes make sense only if merge_index
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ undefined, 'baz' ] },
merge_index: true,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', undefined ] },
merge_index: true,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', null ] },
merge_index: true,
is_change: true,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', null ] },
merge_index: true,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', null ] },
merge_index: false,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', 'quux' ] },
merge_index: true,
is_change: true,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', 'quux' ] },
merge_index: false,
is_change: true,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [] },
merge_index: true,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [] },
merge_index: false,
is_change: true,
},
{
initial: { foo: [ 'bar' ] },
update: { foo: [ 'bar', undefined ] },
merge_index: false,
is_change: true,
},
// only interpreted as a diff if merge_index
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', undefined ] },
merge_index: false,
is_change: true,
},
// no index at all
{
initial: { foo: [ 'bar', 'baz' ] },
update: {},
merge_index: true,
is_change: false,
},
].forEach( ( { initial, update, merge_index, is_change }, i ) =>
it( `post-commit, properly processes diff and change (${i})`, () =>
{
it( `is emitted only when data is changed (${i})`, () =>
const sut = Sut( createStubBucket() );
let called = false;
sut.setValues( initial );
sut.commit();
sut.on( 'preStagingUpdate', () => called = true );
sut.setValues( update );
expect( called ).to.equal( is_change );
if ( expected )
{
const sut = Sut( createStubBucket() );
let called = false;
sut.setValues( initial, merge_index );
sut.on( 'preStagingUpdate', () => called = true );
sut.setValues( update, merge_index );
expect( called ).to.equal( is_change );
} );
expect( sut.getData() ).to.deep.equal( expected.data );
expect( sut.getDiff() ).to.deep.equal( expected.diff );
}
} );
} );
@@ -233,20 +277,5 @@ describe( 'StagingBucket', () =>
function createStubBucket( bucket_obj )
{
return Class.implement( Bucket ).extend(
{
'public getData'()
{
return bucket_obj;
},
'public setValues'( data, merge_index, merge_null ) {},
'public overwriteValues'( data ) {},
'public clear'() {},
'public each'( callback ) {},
'public getDataByName'( name ) {},
'public getDataJson'() {},
'public filter'( pred, callback) {},
'on'() {},
} )();
return QuoteDataBucket();
}

test/server/request/DataProcessorTest.js

@@ -81,7 +81,7 @@ describe( 'DataProcessor', () =>
{
expect( given_data ).to.equal( data );
expect( given_types ).to.equal( types );
expect( given_null ).to.equal( false );
expect( given_null ).to.equal( true );
// not used
expect( given_ignore ).to.deep.equal( {} );