[BC-BREAK] Bucket consistency fixes and non-terminating nulls

master
Mike Gerwitz 2017-09-06 11:31:35 -04:00
commit a786605635
11 changed files with 370 additions and 270 deletions

View File

@ -28,7 +28,89 @@
@node Bucket Diff
@section Bucket Diff
@cindex Bucket diff
@helpwanted
Changes to the bucket are represented by an array with certain conventions:
@enumerate
@item A change to some index@tie{}@math{k} is represented by the same
index@tie{}@math{k} in the diff.
@item A value of @code{undefined} indicates that the respective index
has not changed.
Holes in the array (indexes not assigned any value) are treated in
the same manner as @code{undefined}.
@item A @code{null} in the last index of the vector marks a
truncation@tie{}point;
it is used to delete one or more indexes.
The vector will be truncated at that point.
Any preceding @code{null} values are treated as if they were
@code{undefined}.@footnote{
The reason for this seemingly redundant (and inconvenient) choice
is that JSON encodes @code{undefined} values as @code{null}.
Consequently, when serializing a diff, @code{undefined}s are lost.
To address this,
any @code{null} that is not in the tail position is treated as
@code{undefined}.
We cannot truncate at the first null,
because @samp{[null,null,null]} may actually represent
@samp{[undefined,undefined,null]}.}
@end enumerate
Diffs are only tracked at the vector (array of scalars) level@mdash{
}if there is a change to a nested structure assigned to an index,
that index of the outer vector will be tracked as modified.
It will, however, recursively compare nested changes to determine if a
modification @emph{has} taken place.@footnote{
See @srcrefjs{bucket,StagingBucket} method @code{#_parseChanges}.}
Examples appear in @ref{f:diff-ex}.
@float Figure, f:diff-ex
@multitable @columnfractions 0.30 0.30 0.40
@headitem Original @tab Diff @tab Interpretation
@item @samp{["foo", "bar"]}
@tab @samp{["baz", "quux"]}
@tab Index@tie{}0 changed to @samp{baz}.
Index@tie{}1 changed to @samp{quux}.
@item @samp{["foo", "bar"]}
@tab @samp{[undefined, "quux"]}
@tab Index@tie{}0 did not change.
Index@tie{}1 changed to @samp{quux}.
@item @samp{["foo", "bar"]}
@tab @samp{[, "quux"]}
@tab Index@tie{}0 did not change.
Index@tie{}1 changed to @samp{quux}.
@item @samp{["foo", "bar"]}
@tab @samp{["baz", null]}
@tab Index@tie{}0 changed to @samp{baz}.
Index@tie{}1 was removed.
@item @samp{["foo", "bar", "baz"]}
@tab @samp{[undefined, null]}
@tab Index@tie{}0 was not changed.
Index@tie{}1 was removed.
Index@tie{}2 was removed.
@item @samp{["foo", "bar", "baz"]}
@tab @samp{[null, undefined, null]}
@tab Index@tie{}0 was not changed.
Index@tie{}1 was not changed.
Index@tie{}2 was removed.
@item @samp{["foo", "bar", "baz"]}
@tab @samp{[null, null, null]}
@tab Index@tie{}0 was not changed.
Index@tie{}1 was not changed.
Index@tie{}2 was removed.
@end multitable
@caption{Bucket diff examples.}
@end float
Diffs are generated by @srcrefjs{bucket,StagingBucket}.
@code{null} truncation is understood both by @code{StagingBucket}
and by @srcrefjs{bucket,QuoteDataBucket}.
A diff is applied to the underlying bucket by invoking
@code{StagingBucket#commit}.
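As a hypothetical illustration of the conventions above
  (@code{applyDiff} is an invented name,
  not part of the source),
  the rules reduce to a small function:

@example
// apply a bucket diff to an original vector per the rules above
function applyDiff( orig, diff )
{
    // a null in the tail position marks a truncation point
    const truncate = ( diff[ diff.length - 1 ] === null );
    const len      = ( truncate ) ? ( diff.length - 1 ) : diff.length;

    const result = orig.slice();

    for ( let i = 0; i < len; i++ )
    {
        // holes, undefined, and non-terminating nulls mean ``unchanged''
        if ( ( diff[ i ] === undefined ) || ( diff[ i ] === null ) )
        {
            continue;
        }

        result[ i ] = diff[ i ];
    }

    if ( truncate )
    {
        result.length = len;
    }

    return result;
}

applyDiff( [ 'foo', 'bar', 'baz' ], [ null, null, null ] );
// => [ 'foo', 'bar' ]
@end example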
@node Calculated Values

View File

@ -35,13 +35,9 @@ module.exports = Interface( 'Bucket',
*
* @param {Object.<string,Array>} data associative array of the data
*
* @param {boolean} merge_index whether to merge indexes individually
* @param {boolean} merge_null whether to merge undefined values (vs
* ignore)
*
* @return {Bucket} self
*/
'public setValues': [ 'data', 'merge_index', 'merge_null' ],
'public setValues': [ 'data' ],
/**

View File

@ -45,31 +45,17 @@ module.exports = Class( 'DelayedStagingBucket' )
'private _timer': 0,
'public override setValues': function( data, merge_index, merge_null )
'public override setValues': function( data )
{
for ( var name in data )
{
if ( merge_index )
if ( this._queued[ name ] === undefined )
{
if ( this._queued[ name ] === undefined )
{
this._queued[ name ] = [];
}
// merge individual indexes
this.merge( data[ name ], this._queued[ name ] );
this._queued[ name ] = [];
}
else
{
// no index merge; replace any existing data
this._queued[ name ] = Array.prototype.slice.call(
data[ name ], 0
);
// this will ensure that no data will follow what we were
// provided
this._queued[ name ].push( null );
}
// merge individual indexes
this.merge( data[ name ], this._queued[ name ] );
}
this._setTimer();
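To illustrate the simplified queueing above, a hedged sketch with a stand-in for the inherited #merge (simplified; not the real implementation):

// stand-in for the inherited #merge: copy only defined indexes, so
// queued writes accumulate index-by-index between timer flushes
function merge( src, dest )
{
    for ( var i = 0; i < src.length; i++ )
    {
        if ( src[ i ] !== undefined )
        {
            dest[ i ] = src[ i ];
        }
    }
}

var queued = { foo: [ 'a' ] };

merge( [ undefined, 'b' ], queued.foo );
// queued.foo => [ 'a', 'b' ]

// truncation is now carried by the data itself (a trailing null),
// not by the removed merge_index / merge_null flags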

View File

@ -75,15 +75,11 @@ module.exports = Class( 'QuoteDataBucket' )
*
* @param {Object.<string,Array>} data associative array of the data
*
* @param {boolean} merge_index whether to merge indexes individually
* @param {boolean} merge_null whether to merge undefined values (vs
* ignore)
*
* @return {QuoteDataBucket} self to allow for method chaining
*/
'public setValues': function( data, merge_index, merge_null )
'public setValues': function( data )
{
this._mergeData( data, merge_index, merge_null );
this._mergeData( data );
return this;
},
@ -118,11 +114,8 @@ module.exports = Class( 'QuoteDataBucket' )
*
* @return undefined
*/
'private _mergeData': function( data, merge_index, merge_null )
'private _mergeData': function( data )
{
merge_index = !!merge_index; // default false
merge_null = !!merge_null; // default false
var ignore = {};
// remove any data that has not been updated (the hooks do processing on
@ -171,14 +164,6 @@ module.exports = Class( 'QuoteDataBucket' )
var data_set = data[ name ];
// if we're not supposed to merge the indexes one by one, just set
// it
if ( merge_index === false )
{
this._data[name] = data_set;
continue;
}
// initialize it if it's undefined in the bucket
if ( this._data[name] === undefined )
{
@ -197,7 +182,7 @@ module.exports = Class( 'QuoteDataBucket' )
}
// ignore if set to null (implying the index was removed)
if ( !merge_null && data_set[i] === null )
if ( data_set[i] === null )
{
// this marks the end of the array as far as we're concerned
this._data[ name ].length = i;
@ -223,7 +208,16 @@ module.exports = Class( 'QuoteDataBucket' )
*/
'public overwriteValues': function( data )
{
this.setValues( data, false );
this.setValues(
Object.keys( data ).reduce(
( vals, key ) => (
vals[ key ] = data[ key ].concat( [ null ] ),
vals
),
{}
)
);
return this;
},
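The rewrite above reduces an overwrite to a plain #setValues call. Extracting just the data transformation (as_overwrite is an invented name):

// append a terminating null to each vector so that setValues
// truncates anything beyond the provided data
const as_overwrite = ( data ) => Object.keys( data ).reduce(
    ( vals, key ) => (
        vals[ key ] = data[ key ].concat( [ null ] ),
        vals
    ),
    {}
);

as_overwrite( { foo: [ 'new' ] } );
// => { foo: [ 'new', null ] }; applied against [ 'old0', 'old1' ],
// this yields [ 'new' ]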

View File

@ -113,13 +113,10 @@ module.exports = Class( 'StagingBucket' )
'private _initState': function()
{
const data = this._bucket.getData();
const retdata = function() {};
// ensure that we don't modify the original data
retdata.prototype = data;
const retdata = Object.create( this._bucket.getData() );
this._curdata = new retdata();
this._curdata = retdata;
this._dirty = false;
},
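The Object.create refactoring above preserves the copy-on-write intent of the old prototype trick. A brief sketch of the semantics it relies on:

const data    = { foo: [ 'bar' ] };
const curdata = Object.create( data );

curdata.foo;             // => [ 'bar' ]  (read falls through to data)

curdata.foo = [ 'baz' ]; // assignment shadows the prototype
data.foo;                // => [ 'bar' ]  (underlying data untouched)

// caveat: mutating a shared array in place (e.g. via push) before
// shadowing it would still modify the underlying data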
@ -142,7 +139,7 @@ module.exports = Class( 'StagingBucket' )
}
else if ( nonull && ( data === null ) )
{
// nulls mark the end of the set
// nulls mark the end of the vector
dest.length = i;
break;
}
@ -193,45 +190,63 @@ module.exports = Class( 'StagingBucket' )
/**
* Determine whether values have changed
*
* If all values are identical to the current bucket values (relative to
* `merge_index`), returns `false`. Otherwise, this stops at the first
* recognized change and returns `true`.
* If all values are identical to the current bucket values, returns
* `false`. Otherwise, `true`.
*
* Because JSON serializes all undefined values to `null`, only the
* final null in a diff is considered terminating; the rest are
* converted into `undefined`.
*
* This creates a fresh copy of the data to prevent unexpected
* side-effects.
*
* @param {Object.<string,Array>} data key/value data or diff
* @param {boolean} merge_index compare indexes individually
*
* @return {boolean} whether a change was recognized
*/
'private _hasChanged': function( data, merge_index )
'private _parseChanges': function( data )
{
// generate a new object so that we don't mutate the original, which
// may be used elsewhere
const result = {};
let changed = false;
for ( let name in data )
{
let values = data[ name ];
let values = Array.prototype.slice.call( data[ name ], 0 );
let cur = this._curdata[ name ] || [];
let len = this._length( values );
let has_null = ( len !== values.length );
result[ name ] = values;
let merge_len_change = (
merge_index && has_null && ( len < cur.length )
has_null && ( len < cur.length )
);
let replace_len_change = (
!merge_index && ( len !== cur.length )
);
// quick change check (index removal if merge_index, or index
// count change if not merge_index)
if ( merge_len_change || replace_len_change )
// quick change check; but we still have to keep processing the
// data to generate a proper diff and process non-terminating
// nulls
if ( merge_len_change )
{
changed = true;
continue;
}
for ( let index = 0; index < len; index++ )
{
if ( merge_index && ( values[ index ] === undefined ) )
// see #_length; if ending in a null, this is offset by -1
let last_index = ( index === len );
// only the last null is terminating; the rest are
// interpreted as `undefined' (because JSON serializes
// undefined values to `null')
if ( ( values[ index ] === null ) && !last_index )
{
values[ index ] = undefined;
}
if ( values[ index ] === undefined )
{
continue;
}
@ -249,11 +264,11 @@ module.exports = Class( 'StagingBucket' )
// if nothing is left, remove entirely
if ( !values.some( x => x !== undefined ) )
{
delete data[ name ];
delete result[ name ];
}
}
return changed;
return [ changed, result ];
},
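To make the null handling above concrete, a minimal standalone sketch of the per-vector normalization (normalizeDiff is an invented name):

// only a null in the tail position terminates; any other null is an
// `undefined' that was lost to JSON serialization
function normalizeDiff( values )
{
    const last = ( values[ values.length - 1 ] === null )
        ? ( values.length - 1 )
        : -1;

    return values.map( ( v, i ) =>
        ( ( v === null ) && ( i !== last ) ) ? undefined : v
    );
}

normalizeDiff( [ null, null, null ] );
// => [ undefined, undefined, null ]  (truncates after index 1)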
@ -312,17 +327,20 @@ module.exports = Class( 'StagingBucket' )
/**
* Explicitly sets the contents of the bucket
*
* @param {Object.<string,Array>} data associative array of the data
* Because JSON serializes all undefined values to `null`, only the
* final null in a diff is considered terminating; the rest are
* converted into `undefined`. Therefore, it is important that no
* elements in the vector follow the truncating null.
*
* @param {boolean} merge_index whether to merge indexes individually
* @param {boolean} merge_null whether to merge undefined values (vs
* ignore)
* @param {Object.<string,Array>} given_data associative array of the data
*
* @return {Bucket} self
*/
'virtual public setValues': function( data, merge_index, merge_null )
'virtual public setValues': function( given_data )
{
if ( !this._hasChanged( data, merge_index ) )
const [ changed, data ] = this._parseChanges( given_data );
if ( !changed )
{
return;
}
@ -331,7 +349,7 @@ module.exports = Class( 'StagingBucket' )
for ( let name in data )
{
let item = Array.prototype.slice.call( data[ name ], 0 );
let item = data[ name ];
// initialize as array if necessary
if ( this._staged[ name ] === undefined )
@ -356,21 +374,12 @@ module.exports = Class( 'StagingBucket' )
}
}
if ( merge_index )
{
// merge with previous values
this.merge( item, this._staged[ name ] );
// merge with previous values
this.merge( item, this._staged[ name ] );
// we do not want nulls in our current representation of the
// data
this.merge( item, this._curdata[ name ], true );
}
else
{
// overwrite
this._staged[ name ] = item;
this._curdata[ name ] = item;
}
// we do not want nulls in our current representation of the
// data
this.merge( item, this._curdata[ name ], true );
}
this._dirty = true;
@ -400,7 +409,7 @@ module.exports = Class( 'StagingBucket' )
new_data[ name ].push( null );
}
return this.setValues( new_data, false );
return this.setValues( new_data );
},
@ -432,9 +441,9 @@ module.exports = Class( 'StagingBucket' )
// return each staged field
for ( let field in this._staged )
{
// retrieve the current value for this field
// retrieve the diff for this field
ret[ field ] = Array.prototype.slice.call(
this._curdata[ field ], 0
this._staged[ field ], 0
);
}
@ -521,7 +530,7 @@ module.exports = Class( 'StagingBucket' )
this.emit( this.__self.$('EVENT_PRE_COMMIT') );
this._bucket.setValues( this._staged, true, false );
this._bucket.setValues( this._staged );
this._staged = {};
this.emit( this.__self.$('EVENT_COMMIT') );
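Putting #setValues, #getDiff, and #commit together, the staging lifecycle behaves roughly as follows (a hedged sketch using the constructors that appear in the test changes below):

const bucket  = QuoteDataBucket();
const staging = StagingBucket( bucket );

staging.setValues( { foo: [ 'bar' ] } );

staging.getDiff();  // => { foo: [ 'bar' ] }  (staged only)
bucket.getData();   // => {}                  (not yet committed)

staging.commit();

bucket.getData();   // => { foo: [ 'bar' ] }
staging.getDiff();  // => {}                  (staged data cleared)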

View File

@ -1873,7 +1873,7 @@ module.exports = Class( 'Client' )
}
// set data all at once to avoid extraneous calls
quote.setData( values, true );
quote.setData( values );
} ).on( 'indexReset', function( index, groupui )
{
var fields = groupui.getGroup().getFieldNames(),
@ -1894,7 +1894,7 @@ module.exports = Class( 'Client' )
}
// set data all at once to avoid extraneous calls
quote.setData( values, true );
quote.setData( values );
} );
// when the step is rendered, run the onchange events

View File

@ -293,11 +293,9 @@ module.exports = Class( 'ClientQuote' )
*
* @return {ClientQuote} self
*/
'public setData': function( data, merge_nulls )
'public setData': function( data )
{
merge_nulls = !!merge_nulls;
this._staging.setValues( data, true, merge_nulls );
this._staging.setValues( data );
return this;
},
@ -443,6 +441,12 @@ module.exports = Class( 'ClientQuote' )
callback.apply( null, arguments );
} );
// XXX: we need to commit after a _successful_ save, otherwise the
// client will never post again! But we don't want to commit
// everything that is staged, because that will possibly include
// data the user is filling out on the next step. So, we need to
// store the diff separately to be committed.
// commit staged quote data to the data bucket (important: do this
// *after* save); will make the staged values available as old_store.old
this._staging.commit( old_store );

View File

@ -89,7 +89,7 @@ module.exports = Class( 'DataProcessor',
*/
'public processDiff'( data, request, program, bucket )
{
const filtered = this.sanitizeDiff( data, request, program, false );
const filtered = this.sanitizeDiff( data, request, program );
const dapi_manager = this._dapif( program.apis, request );
const staging = this._stagingCtor( bucket );
@ -127,21 +127,18 @@ module.exports = Class( 'DataProcessor',
* @param {Object} data client-provided data
* @param {UserRequest} request client request
* @param {Program} program active program
* @param {boolean} permit_null whether null values should be retained
*
* @return {Object} filtered data
*/
'public sanitizeDiff'( data, request, program, permit_null )
'public sanitizeDiff'( data, request, program )
{
permit_null = ( permit_null === undefined ) ? false : permit_null;
if ( !request.getSession().isInternal() )
{
this._cleanInternals( data, program );
}
const types = program.meta.qtypes;
return this._filter.filter( data, types, {}, permit_null );
return this._filter.filter( data, types, {}, true );
},
@ -246,7 +243,9 @@ module.exports = Class( 'DataProcessor',
return Object.keys( mapis ).reduce(
( result, src_field ) =>
{
if ( data[ src_field ] === undefined )
const fdata = data[ src_field ];
if ( fdata === undefined )
{
return result;
}
@ -258,12 +257,14 @@ module.exports = Class( 'DataProcessor',
{
result[ field ] = result[ field ] || [];
Object.keys( data[ src_field ] ).forEach( i =>
Object.keys( fdata ).forEach( i =>
{
if ( data[ src_field ][ i ] !== undefined )
if ( fdata[ i ] === undefined || fdata[ i ] === null )
{
result[ field ][ i ] = i;
return;
}
result[ field ][ i ] = i;
} );
} );
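A hedged illustration of the mapping loop above: both undefined and (now) null source indexes are skipped, so removed indexes no longer trigger a mapped API call (field names invented):

const fdata  = [ 'a', undefined, null, 'd' ];
const result = { dest_field: [] };

Object.keys( fdata ).forEach( i =>
{
    if ( fdata[ i ] === undefined || fdata[ i ] === null )
    {
        return;
    }

    result.dest_field[ i ] = i;
} );

// result.dest_field => [ '0', <2 empty items>, '3' ]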

View File

@ -158,9 +158,9 @@ module.exports = Class( 'RatingService',
rate_data, actions, program, quote
);
var class_dest = {};
const class_dest = {};
rate_data = _self._cleanRateData(
const cleaned = _self._cleanRateData(
rate_data,
class_dest
);
@ -168,7 +168,6 @@ module.exports = Class( 'RatingService',
// TODO: move me during refactoring
_self._dao.saveQuoteClasses( quote, class_dest );
// save all data server-side (important: do after
// post-processing); async
_self._saveRatingData( quote, rate_data, indv, function()
@ -179,7 +178,7 @@ module.exports = Class( 'RatingService',
// no need to wait for the save; send the response
_self._server.sendResponse( request, quote, {
data: rate_data
data: cleaned
}, actions );
},
function( message )

View File

@ -30,160 +30,204 @@ const sinon = require( 'sinon' );
const {
Bucket,
StagingBucket: Sut
StagingBucket: Sut,
// TODO: decouple test from this
QuoteDataBucket,
} = root.bucket;
describe( 'StagingBucket', () =>
{
describe( 'pre-update event', () =>
it( 'pre-update event allows updating data before set', () =>
{
it( 'allows updating data before set', () =>
const sut = Sut( createStubBucket() );
const data = {
foo: [ 'bar', 'baz' ],
};
sut.on( 'preStagingUpdate', data =>
{
const sut = Sut( createStubBucket() );
data.foo[ 1 ] = 'quux';
} );
const data = {
foo: [ 'bar', 'baz' ],
};
// triggers setValues
sut.setValues( data );
sut.on( 'preStagingUpdate', data =>
expect( sut.getDataByName( 'foo' ) )
.to.deep.equal( [ 'bar', 'quux' ] );
} );
[
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz' ] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
},
// actual changes
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'change', 'baz' ] },
is_change: true,
expected: {
data: { foo: [ 'change', 'baz' ] },
diff: { foo: [ 'change' ] },
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'change' ] },
is_change: true,
expected: {
data: { foo: [ 'bar', 'change' ] },
diff: { foo: [ , 'change' ] },
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ undefined, 'change' ] },
is_change: true,
expected: {
data: { foo: [ 'bar', 'change' ] },
diff: { foo: [ , 'change' ] },
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ undefined, 'baz' ] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', undefined ] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', null ] },
is_change: true,
expected: {
data: { foo: [ 'bar' ] },
diff: { foo: [ , null ] },
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', null ] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', 'quux' ] },
is_change: true,
expected: {
data: { foo: [ 'bar', 'baz', 'quux' ] },
diff: { foo: [ , , 'quux' ] },
},
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
},
// null not at end of set means unchanged
{
initial: { foo: [ 'bar', 'baz', 'quux' ] },
update: { foo: [ null, null, 'quux' ] },
is_change: false,
expected: {
data: { foo: [ 'bar', 'baz', 'quux' ] },
diff: {},
},
},
// but the last one is
{
initial: { foo: [ 'bar', 'baz', 'quux' ] },
update: { foo: [ null, 'baz', null ] },
is_change: true,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: { foo: [ , , null ] },
},
},
// given a string of nulls, only the last one is terminating; the
// rest are interpreted as undefined (because JSON serializes
// undefined values to `null' -_-)
{
initial: { foo: [ 'bar', 'baz', 'quux' ] },
update: { foo: [ null, null, null ] },
is_change: true,
expected: {
data: { foo: [ 'bar', 'baz' ] },
diff: { foo: [ , , null ] },
},
},
].forEach( ( { initial, update, is_change, expected }, i ) =>
{
it( `pre-commit, properly processes diff and change (${i})`, () =>
{
const sut = Sut( createStubBucket() );
let called = false;
sut.setValues( initial );
expect( sut.getDiff() ).to.deep.equal( initial );
sut.on( 'preStagingUpdate', () => called = true );
sut.setValues( update );
expect( called ).to.equal( is_change );
if ( expected )
{
data.foo[ 1 ] = 'quux';
} );
// triggers setValues
sut.setValues( data );
expect( sut.getDataByName( 'foo' ) )
.to.deep.equal( [ 'bar', 'quux' ] );
expect( sut.getData() ).to.deep.equal( expected.data );
}
} );
[
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz' ] },
merge_index: true,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz' ] },
merge_index: false,
is_change: false,
},
// actual changes
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'change', 'baz' ] },
merge_index: true,
is_change: true,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'change' ] },
merge_index: true,
is_change: true,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ undefined, 'change' ] },
merge_index: true,
is_change: true,
},
// single-index changes make sense only if merge_index
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ undefined, 'baz' ] },
merge_index: true,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', undefined ] },
merge_index: true,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', null ] },
merge_index: true,
is_change: true,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', null ] },
merge_index: true,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', null ] },
merge_index: false,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', 'quux' ] },
merge_index: true,
is_change: true,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', 'baz', 'quux' ] },
merge_index: false,
is_change: true,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [] },
merge_index: true,
is_change: false,
},
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [] },
merge_index: false,
is_change: true,
},
{
initial: { foo: [ 'bar' ] },
update: { foo: [ 'bar', undefined ] },
merge_index: false,
is_change: true,
},
// only interpreted as a diff if merge_index
{
initial: { foo: [ 'bar', 'baz' ] },
update: { foo: [ 'bar', undefined ] },
merge_index: false,
is_change: true,
},
// no index at all
{
initial: { foo: [ 'bar', 'baz' ] },
update: {},
merge_index: true,
is_change: false,
},
].forEach( ( { initial, update, merge_index, is_change }, i ) =>
it( `post-commit, properly processes diff and change (${i})`, () =>
{
it( `is emitted only when data is changed (${i})`, () =>
const sut = Sut( createStubBucket() );
let called = false;
sut.setValues( initial );
sut.commit();
sut.on( 'preStagingUpdate', () => called = true );
sut.setValues( update );
expect( called ).to.equal( is_change );
if ( expected )
{
const sut = Sut( createStubBucket() );
let called = false;
sut.setValues( initial, merge_index );
sut.on( 'preStagingUpdate', () => called = true );
sut.setValues( update, merge_index );
expect( called ).to.equal( is_change );
} );
expect( sut.getData() ).to.deep.equal( expected.data );
expect( sut.getDiff() ).to.deep.equal( expected.diff );
}
} );
} );
@ -233,20 +277,5 @@ describe( 'StagingBucket', () =>
function createStubBucket( bucket_obj )
{
return Class.implement( Bucket ).extend(
{
'public getData'()
{
return bucket_obj;
},
'public setValues'( data, merge_index, merge_null ) {},
'public overwriteValues'( data ) {},
'public clear'() {},
'public each'( callback ) {},
'public getDataByName'( name ) {},
'public getDataJson'() {},
'public filter'( pred, callback) {},
'on'() {},
} )();
return QuoteDataBucket();
}

View File

@ -81,7 +81,7 @@ describe( 'DataProcessor', () =>
{
expect( given_data ).to.equal( data );
expect( given_types ).to.equal( types );
expect( given_null ).to.equal( false );
expect( given_null ).to.equal( true );
// not used
expect( given_ignore ).to.deep.equal( {} );