From 8a1ac4781c07f48b7217c794a70b8aafbb2e11f3 Mon Sep 17 00:00:00 2001 From: Mike Gerwitz Date: Tue, 5 Sep 2017 16:26:59 -0400 Subject: [PATCH 1/3] [DEV-2692] Store cleaned rate response data separately I'm not sure if this has ever caused real problems, but this was dangerous. * src/server/service/RatingService.js (_performRating): Store cleaned rating data in separate variable to ensure cleaned data are not saved in place of the actual complete response. --- src/server/service/RatingService.js | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/server/service/RatingService.js b/src/server/service/RatingService.js index 063b0d1..4df89fb 100644 --- a/src/server/service/RatingService.js +++ b/src/server/service/RatingService.js @@ -158,9 +158,9 @@ module.exports = Class( 'RatingService', rate_data, actions, program, quote ); - var class_dest = {}; + const class_dest = {}; - rate_data = _self._cleanRateData( + const cleaned = _self._cleanRateData( rate_data, class_dest ); @@ -168,7 +168,6 @@ module.exports = Class( 'RatingService', // TODO: move me during refactoring _self._dao.saveQuoteClasses( quote, class_dest ); - // save all data server-side (important: do after // post-processing); async _self._saveRatingData( quote, rate_data, indv, function() @@ -179,7 +178,7 @@ module.exports = Class( 'RatingService', // no need to wait for the save; send the response _self._server.sendResponse( request, quote, { - data: rate_data + data: cleaned }, actions ); }, function( message ) From 68649dfd9b07f664cd2600d5206d758dac3682c5 Mon Sep 17 00:00:00 2001 From: Mike Gerwitz Date: Tue, 5 Sep 2017 16:33:46 -0400 Subject: [PATCH 2/3] [DEV-2692] [BC-BREAK] Bucket stability and consistency fixes and non-term nulls This mixes in support for non-terminating nulls. It would have been nice to handle that in a separate commit for clarity, but the refactoring came as a consequence of trying to provide a working implementation. 
Various inconsistencies and subtle bugs in unlikely situations have been fixed by this, including modifying objects passed as arguments to various methods, and inconsistent handling of diff data. Changes are more consistently recognized. Perhaps the most noticeable consequence is that moving between steps no longer prompts to discard changes---previously calculated values would trigger the dirty flag on steps even if the user didn't actually change anything. I (and others) have wanted this fixed for many years. This is a very dense commit that touches a core part of the system. Hopefully the Changelog below helps. * src/bucket/Bucket.js (setValues): [BC-BREAK] Remove `merge_index' and `merge_null' parameters. * src/bucket/DelayedStagingBucket.js (setValues): [BC-BREAK] Remove `merge_index' and `merge_null' parameters. Remove distinction between `merge_index' and non-. * src/bucket/QuoteDataBucket.js (setValues): [BC-BREAK] Remove `merge_index' and `merge_null' parameters. Remove respective arguments from `_mergeData' call. (_mergeData): Remove same parameters. Remove handling of `merge_index' and `merge_null'. (overwriteValues): Append `null' to each vector. * src/bucket/StagingBucket.js (_initState): Use `Object.create' instead of explicit prototype instantiation (functionally equivalent). (merge): Minor comment correction. (_hasChanged): Rename to `_parseChanges'. (_parseChanges): Rename from `_hasChanged'. Remove `merge_index' parameter. Generate new object rather than mutating original data (prevent dangerous and subtle bugs from side-effects). Clone each vector rather than modifying/referencing directly (this was previously done during merge). Remove `merge_index' distinction. Handle non-terminating `null' values. (setValues): [BC-BREAK] Remove `merge_index' and `merge_null' parameters. Use new object generated by `_parseChanges'. Remove cloning of each vector (`_parseChanges' now does that). Remove `merge_index' distinction. 
(overwriteValues): Remove argument to `setValues' call. (getFilledDiff): [BC-BREAK] Use `_staged' rather than `_curdata'. (commit): Remove second and third arguments of call to `setValues' of underlying bucket. * src/client/Client.js (_initStepUi): Remove second argument of calls to quote `setData'. * src/client/quote/ClientQuote.js (setData): [BC-BREAK] Remove `merge_nulls' parameter. Remove second and third arguments of call to staging bucket `setValues'. Add comment indicating a long-standing problem with committing the staging bucket contents before save has succeeded. * src/server/request/DataProcessor.js (processDiff): Remove `permit_null' argument of `sanitizeDiff' call. (sanitizeDiff): Remove `permit_null' parameter. Hard-code filter call's `permit_null' argument to `true'. (_determineDapiFields): Properly handle `null's (ignore) rather than inadvertently converting them into the string "null". * test/bucket/StagingBucketTest.js: Modify test cases accordingly. Add tests to verify that updates and diffs operate as expected, especially support for non-terminating `null's. (createStubBucket): Use `QuoteDataBucket'. Ideally remove this coupling in the future, but this is a more realistic test case for the time being. * test/server/request/DataProcessorTest.js: Update test to account for hard-coded `given_null' argument. 
--- src/bucket/Bucket.js | 6 +- src/bucket/DelayedStagingBucket.js | 24 +- src/bucket/QuoteDataBucket.js | 34 +-- src/bucket/StagingBucket.js | 107 +++---- src/client/Client.js | 4 +- src/client/quote/ClientQuote.js | 12 +- src/server/request/DataProcessor.js | 21 +- test/bucket/StagingBucketTest.js | 339 ++++++++++++----------- test/server/request/DataProcessorTest.js | 2 +- 9 files changed, 284 insertions(+), 265 deletions(-) diff --git a/src/bucket/Bucket.js b/src/bucket/Bucket.js index 024248f..650fbe2 100644 --- a/src/bucket/Bucket.js +++ b/src/bucket/Bucket.js @@ -35,13 +35,9 @@ module.exports = Interface( 'Bucket', * * @param {Object.} data associative array of the data * - * @param {boolean} merge_index whether to merge indexes individually - * @param {boolean} merge_null whether to merge undefined values (vs - * ignore) - * * @return {Bucket} self */ - 'public setValues': [ 'data', 'merge_index', 'merge_null' ], + 'public setValues': [ 'data' ], /** diff --git a/src/bucket/DelayedStagingBucket.js b/src/bucket/DelayedStagingBucket.js index 54e76c2..bfc8299 100644 --- a/src/bucket/DelayedStagingBucket.js +++ b/src/bucket/DelayedStagingBucket.js @@ -45,31 +45,17 @@ module.exports = Class( 'DelayedStagingBucket' ) 'private _timer': 0, - 'public override setValues': function( data, merge_index, merge_null ) + 'public override setValues': function( data ) { for ( var name in data ) { - if ( merge_index ) + if ( this._queued[ name ] === undefined ) { - if ( this._queued[ name ] === undefined ) - { - this._queued[ name ] = []; - } - - // merge individual indexes - this.merge( data[ name ], this._queued[ name ] ); + this._queued[ name ] = []; } - else - { - // no index merge; replace any existing data - this._queued[ name ] = Array.prototype.slice.call( - data[ name ], 0 - ); - // this will ensure that no data will follow what we were - // provided - this._queued[ name ].push( null ); - } + // merge individual indexes + this.merge( data[ name ], this._queued[ name 
] ); } this._setTimer(); diff --git a/src/bucket/QuoteDataBucket.js b/src/bucket/QuoteDataBucket.js index 952c490..37fc9d4 100644 --- a/src/bucket/QuoteDataBucket.js +++ b/src/bucket/QuoteDataBucket.js @@ -75,15 +75,11 @@ module.exports = Class( 'QuoteDataBucket' ) * * @param {Object.} data associative array of the data * - * @param {boolean} merge_index whether to merge indexes individually - * @param {boolean} merge_null whether to merge undefined values (vs - * ignore) - * * @return {QuoteDataBucket} self to allow for method chaining */ - 'public setValues': function( data, merge_index, merge_null ) + 'public setValues': function( data ) { - this._mergeData( data, merge_index, merge_null ); + this._mergeData( data ); return this; }, @@ -118,11 +114,8 @@ module.exports = Class( 'QuoteDataBucket' ) * * @return undefined */ - 'private _mergeData': function( data, merge_index, merge_null ) + 'private _mergeData': function( data ) { - merge_index = !!merge_index; // default false - merge_null = !!merge_null; // default false - var ignore = {}; // remove any data that has not been updated (the hooks do processing on @@ -171,14 +164,6 @@ module.exports = Class( 'QuoteDataBucket' ) var data_set = data[ name ]; - // if we're not supposed to merge the indexes one by one, just set - // it - if ( merge_index === false ) - { - this._data[name] = data_set; - continue; - } - // initialize it if its undefined in the bucket if ( this._data[name] === undefined ) { @@ -197,7 +182,7 @@ module.exports = Class( 'QuoteDataBucket' ) } // ignore if set to null (implying the index was removed) - if ( !merge_null && data_set[i] === null ) + if ( data_set[i] === null ) { // this marks the end of the array as far as we're concerned this._data[ name ].length = i; @@ -223,7 +208,16 @@ module.exports = Class( 'QuoteDataBucket' ) */ 'public overwriteValues': function( data ) { - this.setValues( data, false ); + this.setValues( + Object.keys( data ).reduce( + ( vals, key ) => ( + vals[ key ] = 
data[ key ].concat( [ null ] ), + vals + ), + {} + ) + ); + return this; }, diff --git a/src/bucket/StagingBucket.js b/src/bucket/StagingBucket.js index e69aec6..ad89a3b 100644 --- a/src/bucket/StagingBucket.js +++ b/src/bucket/StagingBucket.js @@ -113,13 +113,10 @@ module.exports = Class( 'StagingBucket' ) 'private _initState': function() { - const data = this._bucket.getData(); - const retdata = function() {}; - // ensure that we don't modify the original data - retdata.prototype = data; + const retdata = Object.create( this._bucket.getData() ); - this._curdata = new retdata(); + this._curdata = retdata; this._dirty = false; }, @@ -142,7 +139,7 @@ module.exports = Class( 'StagingBucket' ) } else if ( nonull && ( data === null ) ) { - // nulls mark the end of the set + // nulls mark the end of the vector dest.length = i; break; } @@ -193,45 +190,63 @@ module.exports = Class( 'StagingBucket' ) /** * Determine whether values have changed * - * If all values are identical to the current bucket values (relative to - * `merge_index`), returns `false`. Otherwise, this stops at the first - * recognized change and returns `true`. + * If all values are identical to the current bucket values returns + * `false`. Otherwise, `true`. + * + * Because JSON serializes all undefined values to `null`, only the + * final null in a diff is considered terminating; the rest are + * converted into `undefined`. + * + * This creates a fresh copy of the data to prevent unexpected + * side-effects. 
* * @param {Object.} data key/value data or diff - * @param {boolean} merge_index compare indexes individually * * @return {boolean} whether a change was recognized */ - 'private _hasChanged': function( data, merge_index ) + 'private _parseChanges': function( data ) { + // generate a new object so that we don't mutate the original, which + // may be used elsewhere + const result = {}; + let changed = false; for ( let name in data ) { - let values = data[ name ]; + let values = Array.prototype.slice.call( data[ name ], 0 ); let cur = this._curdata[ name ] || []; let len = this._length( values ); let has_null = ( len !== values.length ); + result[ name ] = values; + let merge_len_change = ( - merge_index && has_null && ( len < cur.length ) + has_null && ( len < cur.length ) ); - let replace_len_change = ( - !merge_index && ( len !== cur.length ) - ); - - // quick change check (index removal if merge_index, or index - // count change if not merge_index) - if ( merge_len_change || replace_len_change ) + // quick change check; but we still have to keep processing the + // data to generate a proper diff and process non-terminating + // nulls + if ( merge_len_change ) { changed = true; - continue; } for ( let index = 0; index < len; index++ ) { - if ( merge_index && ( values[ index ] === undefined ) ) + // see #_length; if ending in a null, this is offset by -1 + let last_index = ( index === len ); + + // only the last null is terminating; the rest are + // interpreted as `undefined' (because JSON serializes + // undefined values to `null') + if ( ( values[ index ] === null ) && !last_index ) + { + values[ index ] = undefined; + } + + if ( values[ index ] === undefined ) { continue; } @@ -249,11 +264,11 @@ module.exports = Class( 'StagingBucket' ) // if nothing is left, remove entirely if ( !values.some( x => x !== undefined ) ) { - delete data[ name ]; + delete result[ name ]; } } - return changed; + return [ changed, result ]; }, @@ -312,17 +327,20 @@ module.exports = 
Class( 'StagingBucket' ) /** * Explicitly sets the contents of the bucket * - * @param {Object.} data associative array of the data + * Because JSON serializes all undefined values to `null`, only the + * final null in a diff is considered terminating; the rest are + * converted into `undefined`. Therefore, it is important that all + * truncations include no elements in the vector after the truncating null. * - * @param {boolean} merge_index whether to merge indexes individually - * @param {boolean} merge_null whether to merge undefined values (vs - * ignore) + * @param {Object.} given_data associative array of the data * * @return {Bucket} self */ - 'virtual public setValues': function( data, merge_index, merge_null ) + 'virtual public setValues': function( given_data ) { - if ( !this._hasChanged( data, merge_index ) ) + const [ changed, data ] = this._parseChanges( given_data ); + + if ( !changed ) { return; } @@ -331,7 +349,7 @@ module.exports = Class( 'StagingBucket' ) for ( let name in data ) { - let item = Array.prototype.slice.call( data[ name ], 0 ); + let item = data[ name ]; // initialize as array if necessary if ( this._staged[ name ] === undefined ) @@ -356,21 +374,12 @@ module.exports = Class( 'StagingBucket' ) } } - if ( merge_index ) - { - // merge with previous values - this.merge( item, this._staged[ name ] ); + // merge with previous values + this.merge( item, this._staged[ name ] ); - // we do not want nulls in our current representation of the - // data - this.merge( item, this._curdata[ name ], true ); - } - else - { - // overwrite - this._staged[ name ] = item; - this._curdata[ name ] = item; - } + // we do not want nulls in our current representation of the + // data + this.merge( item, this._curdata[ name ], true ); } this._dirty = true; @@ -400,7 +409,7 @@ module.exports = Class( 'StagingBucket' ) new_data[ name ].push( null ); } - return this.setValues( new_data, false ); + return this.setValues( new_data ); }, @@ -432,9 +441,9 @@ 
module.exports = Class( 'StagingBucket' ) // return each staged field for ( let field in this._staged ) { - // retrieve the current value for this field + // retrieve the diff for this field ret[ field ] = Array.prototype.slice.call( - this._curdata[ field ], 0 + this._staged[ field ], 0 ); } @@ -521,7 +530,7 @@ module.exports = Class( 'StagingBucket' ) this.emit( this.__self.$('EVENT_PRE_COMMIT') ); - this._bucket.setValues( this._staged, true, false ); + this._bucket.setValues( this._staged ); this._staged = {}; this.emit( this.__self.$('EVENT_COMMIT') ); diff --git a/src/client/Client.js b/src/client/Client.js index 02bb32f..c8553c1 100644 --- a/src/client/Client.js +++ b/src/client/Client.js @@ -1873,7 +1873,7 @@ module.exports = Class( 'Client' ) } // set data all at once to avoid extraneous calls - quote.setData( values, true ); + quote.setData( values ); } ).on( 'indexReset', function( index, groupui ) { var fields = groupui.getGroup().getFieldNames(), @@ -1894,7 +1894,7 @@ module.exports = Class( 'Client' ) } // set data all at once to avoid extraneous calls - quote.setData( values, true ); + quote.setData( values ); } ); // when the step is rendered, run the onchange events diff --git a/src/client/quote/ClientQuote.js b/src/client/quote/ClientQuote.js index f6f1bd0..cc44889 100644 --- a/src/client/quote/ClientQuote.js +++ b/src/client/quote/ClientQuote.js @@ -293,11 +293,9 @@ module.exports = Class( 'ClientQuote' ) * * @return {ClientQuote} self */ - 'public setData': function( data, merge_nulls ) + 'public setData': function( data ) { - merge_nulls = !!merge_nulls; - - this._staging.setValues( data, true, merge_nulls ); + this._staging.setValues( data ); return this; }, @@ -443,6 +441,12 @@ module.exports = Class( 'ClientQuote' ) callback.apply( null, arguments ); } ); + // XXX: we need to commit after a _successful_ save, otherwise the + // client will never post again! 
But we don't want to commit + // everything that is staged, because that will possibly include + // data the user is filling out on the next step. So, we need to + // store the diff separately to be committed. + // commit staged quote data to the data bucket (important: do this // *after* save); will make the staged values available as old_store.old this._staging.commit( old_store ); diff --git a/src/server/request/DataProcessor.js b/src/server/request/DataProcessor.js index 4cf47a0..ead31d3 100644 --- a/src/server/request/DataProcessor.js +++ b/src/server/request/DataProcessor.js @@ -89,7 +89,7 @@ module.exports = Class( 'DataProcessor', */ 'public processDiff'( data, request, program, bucket ) { - const filtered = this.sanitizeDiff( data, request, program, false ); + const filtered = this.sanitizeDiff( data, request, program ); const dapi_manager = this._dapif( program.apis, request ); const staging = this._stagingCtor( bucket ); @@ -127,21 +127,18 @@ module.exports = Class( 'DataProcessor', * @param {Object} data client-provided data * @param {UserRequest} request client request * @param {Program} program active program - * @param {boolean} permit_null whether null values should be retained * * @return {Object} filtered data */ - 'public sanitizeDiff'( data, request, program, permit_null ) + 'public sanitizeDiff'( data, request, program ) { - permit_null = ( permit_null === undefined ) ? 
false : permit_null; - if ( !request.getSession().isInternal() ) { this._cleanInternals( data, program ); } const types = program.meta.qtypes; - return this._filter.filter( data, types, {}, permit_null ); + return this._filter.filter( data, types, {}, true ); }, @@ -246,7 +243,9 @@ module.exports = Class( 'DataProcessor', return Object.keys( mapis ).reduce( ( result, src_field ) => { - if ( data[ src_field ] === undefined ) + const fdata = data[ src_field ]; + + if ( fdata === undefined ) { return result; } @@ -258,12 +257,14 @@ module.exports = Class( 'DataProcessor', { result[ field ] = result[ field ] || []; - Object.keys( data[ src_field ] ).forEach( i => + Object.keys( fdata ).forEach( i => { - if ( data[ src_field ][ i ] !== undefined ) + if ( fdata[ i ] === undefined || fdata[ i ] === null ) { - result[ field ][ i ] = i; + return; } + + result[ field ][ i ] = i; } ); } ); diff --git a/test/bucket/StagingBucketTest.js b/test/bucket/StagingBucketTest.js index 6f01e06..3a56a60 100644 --- a/test/bucket/StagingBucketTest.js +++ b/test/bucket/StagingBucketTest.js @@ -30,160 +30,204 @@ const sinon = require( 'sinon' ); const { Bucket, - StagingBucket: Sut + StagingBucket: Sut, + + // TODO: decouple test from this + QuoteDataBucket, } = root.bucket; describe( 'StagingBucket', () => { - describe( 'pre-update event', () => + it( 'pre-update event allows updating data before set', () => { - it( 'allows updating data before set', () => + const sut = Sut( createStubBucket() ); + + const data = { + foo: [ 'bar', 'baz' ], + }; + + sut.on( 'preStagingUpdate', data => { - const sut = Sut( createStubBucket() ); + data.foo[ 1 ] = 'quux'; + } ); - const data = { - foo: [ 'bar', 'baz' ], - }; + // triggers setValues + sut.setValues( data ); - sut.on( 'preStagingUpdate', data => + expect( sut.getDataByName( 'foo' ) ) + .to.deep.equal( [ 'bar', 'quux' ] ); + } ); + + + [ + { + initial: { foo: [ 'bar', 'baz' ] }, + update: { foo: [ 'bar', 'baz' ] }, + is_change: false, + expected: 
{ + data: { foo: [ 'bar', 'baz' ] }, + diff: {}, + }, + }, + + // actual changes + { + initial: { foo: [ 'bar', 'baz' ] }, + update: { foo: [ 'change', 'baz' ] }, + is_change: true, + expected: { + data: { foo: [ 'change', 'baz' ] }, + diff: { foo: [ 'change' ] }, + }, + }, + { + initial: { foo: [ 'bar', 'baz' ] }, + update: { foo: [ 'bar', 'change' ] }, + is_change: true, + expected: { + data: { foo: [ 'bar', 'change' ] }, + diff: { foo: [ , 'change' ] }, + }, + }, + { + initial: { foo: [ 'bar', 'baz' ] }, + update: { foo: [ undefined, 'change' ] }, + is_change: true, + expected: { + data: { foo: [ 'bar', 'change' ] }, + diff: { foo: [ , 'change' ] }, + }, + }, + + { + initial: { foo: [ 'bar', 'baz' ] }, + update: { foo: [ undefined, 'baz' ] }, + is_change: false, + expected: { + data: { foo: [ 'bar', 'baz' ] }, + diff: {}, + }, + }, + { + initial: { foo: [ 'bar', 'baz' ] }, + update: { foo: [ 'bar', undefined ] }, + is_change: false, + expected: { + data: { foo: [ 'bar', 'baz' ] }, + diff: {}, + }, + }, + { + initial: { foo: [ 'bar', 'baz' ] }, + update: { foo: [ 'bar', null ] }, + is_change: true, + expected: { + data: { foo: [ 'bar' ] }, + diff: { foo: [ , null ] }, + }, + }, + { + initial: { foo: [ 'bar', 'baz' ] }, + update: { foo: [ 'bar', 'baz', null ] }, + is_change: false, + expected: { + data: { foo: [ 'bar', 'baz' ] }, + diff: {}, + }, + }, + { + initial: { foo: [ 'bar', 'baz' ] }, + update: { foo: [ 'bar', 'baz', 'quux' ] }, + is_change: true, + expected: { + data: { foo: [ 'bar', 'baz', 'quux' ] }, + diff: { foo: [ , , 'quux' ]}, + }, + }, + { + initial: { foo: [ 'bar', 'baz' ] }, + update: { foo: [] }, + is_change: false, + expected: { + data: { foo: [ 'bar', 'baz' ] }, + diff: {}, + }, + }, + + // null not at end of set means unchanged + { + initial: { foo: [ 'bar', 'baz', 'quux' ] }, + update: { foo: [ null, null, 'quux' ] }, + is_change: false, + expected: { + data: { foo: [ 'bar', 'baz', 'quux' ] }, + diff: {}, + }, + }, + // but the last one is 
+ { + initial: { foo: [ 'bar', 'baz', 'quux' ] }, + update: { foo: [ null, 'baz', null ] }, + is_change: true, + expected: { + data: { foo: [ 'bar', 'baz' ] }, + diff: { foo: [ , , null ] }, + }, + }, + // given a string of nulls, only the last one is terminating; the + // rest are interpreted as undefined (because JSON serializes + // undefined values to `null' -_-) + { + initial: { foo: [ 'bar', 'baz', 'quux' ] }, + update: { foo: [ null, null, null ] }, + is_change: true, + expected: { + data: { foo: [ 'bar', 'baz' ] }, + diff: { foo: [ , , null ] }, + }, + }, + ].forEach( ( { initial, update, is_change, expected }, i ) => + { + it( `pre-commit, properly processes diff and change (${i})`, () => + { + const sut = Sut( createStubBucket() ); + let called = false; + + sut.setValues( initial ); + + expect( sut.getDiff() ).to.deep.equal( initial ); + + sut.on( 'preStagingUpdate', () => called = true ); + sut.setValues( update ); + + expect( called ).to.equal( is_change ); + + if ( expected ) { - data.foo[ 1 ] = 'quux'; - } ); - - // triggers setValues - sut.setValues( data ); - - expect( sut.getDataByName( 'foo' ) ) - .to.deep.equal( [ 'bar', 'quux' ] ); + expect( sut.getData() ).to.deep.equal( expected.data ); + } } ); - [ - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'bar', 'baz' ] }, - merge_index: true, - is_change: false, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'bar', 'baz' ] }, - merge_index: false, - is_change: false, - }, - - // actual changes - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'change', 'baz' ] }, - merge_index: true, - is_change: true, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'bar', 'change' ] }, - merge_index: true, - is_change: true, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ undefined, 'change' ] }, - merge_index: true, - is_change: true, - }, - - // single-index changes make sense only if merge_index - { - initial: { foo: [ 'bar', 'baz' ] }, 
- update: { foo: [ undefined, 'baz' ] }, - merge_index: true, - is_change: false, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'bar', undefined ] }, - merge_index: true, - is_change: false, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'bar', null ] }, - merge_index: true, - is_change: true, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'bar', 'baz', null ] }, - merge_index: true, - is_change: false, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'bar', 'baz', null ] }, - merge_index: false, - is_change: false, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'bar', 'baz', 'quux' ] }, - merge_index: true, - is_change: true, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'bar', 'baz', 'quux' ] }, - merge_index: false, - is_change: true, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [] }, - merge_index: true, - is_change: false, - }, - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [] }, - merge_index: false, - is_change: true, - }, - { - initial: { foo: [ 'bar' ] }, - update: { foo: [ 'bar', undefined ] }, - merge_index: false, - is_change: true, - }, - - // only interpreted as a diff if merge_index - { - initial: { foo: [ 'bar', 'baz' ] }, - update: { foo: [ 'bar', undefined ] }, - merge_index: false, - is_change: true, - }, - - // no index at all - { - initial: { foo: [ 'bar', 'baz' ] }, - update: {}, - merge_index: true, - is_change: false, - }, - ].forEach( ( { initial, update, merge_index, is_change }, i ) => + it( `post-commit, properly processes diff and change (${i})`, () => { - it( `is emitted only when data is changed (${i})`, () => + const sut = Sut( createStubBucket() ); + let called = false; + + sut.setValues( initial ); + sut.commit(); + + sut.on( 'preStagingUpdate', () => called = true ); + sut.setValues( update ); + + expect( called ).to.equal( is_change ); + + if ( expected ) { - const sut = Sut( 
createStubBucket() ); - let called = false; - - sut.setValues( initial, merge_index ); - - sut.on( 'preStagingUpdate', () => called = true ); - sut.setValues( update, merge_index ); - - expect( called ).to.equal( is_change ); - } ); + expect( sut.getData() ).to.deep.equal( expected.data ); + expect( sut.getDiff() ).to.deep.equal( expected.diff ); + } } ); } ); @@ -233,20 +277,5 @@ describe( 'StagingBucket', () => function createStubBucket( bucket_obj ) { - return Class.implement( Bucket ).extend( - { - 'public getData'() - { - return bucket_obj; - }, - - 'public setValues'( data, merge_index, merge_null ) {}, - 'public overwriteValues'( data ) {}, - 'public clear'() {}, - 'public each'( callback ) {}, - 'public getDataByName'( name ) {}, - 'public getDataJson'() {}, - 'public filter'( pred, callback) {}, - 'on'() {}, - } )(); + return QuoteDataBucket(); } diff --git a/test/server/request/DataProcessorTest.js b/test/server/request/DataProcessorTest.js index 8e27e06..89d2aa7 100644 --- a/test/server/request/DataProcessorTest.js +++ b/test/server/request/DataProcessorTest.js @@ -81,7 +81,7 @@ describe( 'DataProcessor', () => { expect( given_data ).to.equal( data ); expect( given_types ).to.equal( types ); - expect( given_null ).to.equal( false ); + expect( given_null ).to.equal( true ); // not used expect( given_ignore ).to.deep.equal( {} ); From d9aa7ef4abc3b0fb893146dd866f7983d1cfd8a2 Mon Sep 17 00:00:00 2001 From: Mike Gerwitz Date: Wed, 6 Sep 2017 09:04:27 -0400 Subject: [PATCH 3/3] Basic documentation for bucket diff * doc/bucket.texi (Bucket Diff): Add documentation. 
--- doc/bucket.texi | 84 ++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 83 insertions(+), 1 deletion(-) diff --git a/doc/bucket.texi b/doc/bucket.texi index 0b4a810..9a581bb 100644 --- a/doc/bucket.texi +++ b/doc/bucket.texi @@ -28,7 +28,89 @@ @node Bucket Diff @section Bucket Diff @cindex Bucket diff -@helpwanted +Changes to the bucket are represented by an array with certain conventions: + +@enumerate +@item A change to some index@tie{}@math{k} is represented by the same + index@tie{}@math{k} in the diff. +@item A value of @code{undefined} indicates that the respective index + has not changed. + Holes in the array (indexes not assigned any value) are treated in + the same manner as @code{undefined}. +@item A @code{null} in the last index of the vector marks a + truncation@tie{}point; + it is used to delete one or more indexes. + The vector will be truncated at that point. + Any preceding @code{null} values are treated as if they were + @code{undefined}.@footnote{ + The reason for this seemingly redundant (and inconvenient) choice + is that JSON encodes @code{undefined} values as @code{null}. + Consequently, when serializing a diff, @code{undefined}s are lost. + To address this, + any @code{null} that is not in the tail position is treated as + @code{undefined}. + We cannot truncate at the first null, + because @samp{[null,null,null]} may actually represent + @samp{[undefined,undefined,null]}.} +@end enumerate + +Diffs are only tracked at the vector (array of scalars) level@mdash{ + }if there is a change to a nested structure assigned to an index, + that index of the outer vector will be tracked as modified. +It will, however, recursively compare nested changes to determine if a + modification @emph{has taken place}.@footnote{ + See @srcrefjs{bucket,StagingBucket} method @code{#_parseChanges}.} +Examples appear in @ref{f:diff-ex}. 
+ +@float Figure, f:diff-ex +@multitable @columnfractions 0.30 0.30 0.40 +@headitem Original @tab Diff @tab Interpretation +@item @samp{["foo", "bar"]} + @tab @samp{["baz", "quux"]} + @tab Index@tie{}0 changed to @samp{baz}. + Index@tie{}1 changed to @samp{quux}. + +@item @samp{["foo", "bar"]} + @tab @samp{[undefined, "quux"]} + @tab Index@tie{}0 did not change. + Index@tie{}1 changed to @samp{quux}. + +@item @samp{["foo", "bar"]} + @tab @samp{[, "quux"]} + @tab Index@tie{}0 did not change. + Index@tie{}1 changed to @samp{quux}. + +@item @samp{["foo", "bar"]} + @tab @samp{["baz", null]} + @tab Index@tie{}0 changed to @samp{baz}. + Index@tie{}1 was removed. + +@item @samp{["foo", "bar", "baz"]} + @tab @samp{[undefined, null]} + @tab Index@tie{}0 was not changed. + Index@tie{}1 was removed. + Index@tie{}2 was removed. + +@item @samp{["foo", "bar", "baz"]} + @tab @samp{[null, undefined, null]} + @tab Index@tie{}0 was not changed. + Index@tie{}1 was not changed. + Index@tie{}2 was removed. + +@item @samp{["foo", "bar", "baz"]} + @tab @samp{[null, null, null]} + @tab Index@tie{}0 was not changed. + Index@tie{}1 was not changed. + Index@tie{}2 was removed. +@end multitable +@caption{Bucket diff examples.} +@end float + +Diffs are generated by @srcrefjs{bucket,StagingBucket}. +@code{null} truncation is understood both by @code{StagingBucket} + and by @srcrefjs{bucket,QuoteDataBucket}. +A diff is applied to the underlying bucket by invoking + @code{StagingBucket#commit}. @node Calculated Values