
Consider classification changes on failed fields

The individual commits contain more useful detail, but since there's a
lot of _new_ code, you might be okay with looking at the full diff.

This adds support to Liza for considering classification changes when
detecting field fixes.  It can be boiled down to: abstract the bucket
diff array into a `Store`, which can also handle classification diffs,
and have `ValidStateMonitor` check that instead of the original
array.  The diff data format is unchanged.

- The diff portion of it comes together in `system/client.js` where it
  constructs the Store.

- `ValidStateMonitor` is what performs the validations, and existed
  previously; it was modified to support Promises and be
  async.  `DataValidator` extracts some logic (much less than I had
  hoped) from `Client` in the `rating-fw` repository and glues it
  together; a rough wiring sketch follows this list.  If it doesn't
  look coherent, that's because the process is a bit of a mess; the
  overall architecture _did not_ change with this.
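
Roughly, the glue looks like this (a sketch only: the `require` path,
`bucket_validator`, `dep_factory`, `diff`, and `classes` stand in for
pieces of the existing client and are not part of this commit):

    const liza = require( 'liza' );

    const DataValidator     = liza.validate.DataValidator;
    const ValidStateMonitor = liza.validate.ValidStateMonitor;
    const client            = liza.system.client;

    const monitor   = ValidStateMonitor();
    const validator = DataValidator(
        bucket_validator,       // existing BucketDataValidator
        monitor,
        dep_factory,            // ClientDependencyFactory (to be removed)
        client.data.diffStore   // factory yielding { store, cstore, bstore }
    );

    monitor
        .on( 'failure', failures => { /* react to new failures */ } )
        .on( 'fix',     fixes    => { /* react to fixed fields  */ } );

    // on each data change, hand the bucket diff and classification
    // results to the validator; the promise resolves once the monitor
    // has finished its failure/fix detection
    validator.validate( diff, classes )
        .then( () => { /* e.g. update the UI */ } );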

A lot of the project design changed or was abandoned up front, before
code was written: I committed my design notes in the new `liza-notes`
repo.  They record many of my thoughts and design decisions, and show
much of what was abandoned or simplified.
master
Mike Gerwitz 2017-01-30 12:10:39 -05:00
commit 08e0d20cad
18 changed files with 2096 additions and 316 deletions

View File

@ -41,6 +41,9 @@ modindex: $(nsindex)
test: check
check:
@PATH="$(PATH):$(CURDIR)/node_modules/mocha/bin" \
mocha --require $(path_test)/pre.js --recursive $(TESTARGS)
mocha @NODE_DESTRUCTURE@ \
--require $(path_test)/pre.js \
--recursive \
$(TESTARGS)
FORCE:

View File

@ -29,6 +29,18 @@ AC_SUBST(MINOR, m4_argn(2, ver_split))
AC_SUBST(REV, m4_argn(3, ver_split))
AC_SUBST(SUFFIX, m4_argn(4, ver_split))
AC_ARG_VAR([NODE], [The node.js interpreter])
AC_CHECK_PROGS(NODE, [node nodejs])
test -n "$NODE" || AC_MSG_ERROR([missing Node.js])
# only needed for older versions of Node
AC_MSG_CHECKING([node --harmony_destructuring])
AS_IF([node --harmony_destructuring >/dev/null 2>/dev/null],
[AC_MSG_RESULT(available)
AC_SUBST([NODE_DESTRUCTURE], [--harmony-destructuring])],
[AC_MSG_RESULT(no)])
# generate files from their *.in counterparts
AC_CONFIG_FILES([Makefile doc/Makefile package.json src/version.js])
AC_OUTPUT
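
For context, the flag gates the ES2015 destructuring syntax that the new
code below relies on, for example (quoted from `PatternProxy` and the
functional client test later in this diff):

    const [ match, skey = key ] = key.match( pattern ) || [];
    const { store, cstore, bstore } = sut.data.diffStore();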

View File

@ -72,7 +72,8 @@ module.exports = Trait( 'Cascading' )
* @param {string} key store key
* @param {Store} value Store to attach
*
* @return {Promise} promise to add item to store
* @return {Promise.<Store>} promise to add item to store, resolving to
* self (for chaining)
*/
'virtual abstract override public add': function( key, value )
{

View File

@ -0,0 +1,293 @@
/**
* Store that lazily computes diffs since last change
*
* Copyright (C) 2017 LoVullo Associates, Inc.
*
* This file is part of the Liza Data Collection Framework
*
* Liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
"use strict";
const Class = require( 'easejs' ).Class;
const Store = require( './Store' );
const StoreMissError = require( './StoreMissError' );
/**
* Lazily compute diffs since last change
*
* This store recursively calculates the diff of scalars and
* objects. Unlike many other stores, you don't always get out what you put
* in.
*
* There are three operations:
* - `#add` stages a change to a key;
* - `#get` calculates the diff of a key against its committed value; and
* - `#clear` commits staged values, clearing all diffs.
*
* Values are recursively compared until a scalar is found. If the scalar
* matches the committed value, it is recognized as unchanged and
* represented as `undefined`. Otherwise, the staged value takes its
* place.
*
* @example
* // Promise resolving to [ undefined, "quux" ]
* DiffStore()
* .add( 'foo', [ "bar", "baz" ] )
* .then( store => store.clear() )
* .add( 'foo', [ "bar", "quux" ] )
* .then( store => store.get( 'foo' ) )
*
* // Promise resolving to { foo: undefined, baz: [ undefined, 'c' ] }
* DiffStore()
* .add( 'foo', { foo: 'bar', baz: [ 'a', 'b', ] } )
* .then( store => store.clear() )
* .add( 'foo', { baz: [ 'a', 'c' ] } )
* .then( store => store.get( 'foo' ) )
*
* The union of all keys of all objects is included in the diff:
*
* @example
* // Promise resolving to { foo: undefined, baz: 'quux' }
* DiffStore()
* .add( 'foo', { foo: 'bar' } )
* .then( store => store.clear() )
* .add( 'foo', { baz: 'quux' } )
* .then( store => store.get( 'foo' ) )
*
* Values are diff'd since the last `#clear`, so adding a value multiple
* times will compare only the last one:
*
* @example
* // Promise resolving to undefined
* DiffStore()
* .add( 'foo', 'foo' )
* .then( store => store.clear() )
* .add( 'foo', 'bar' )
* .add( 'foo', 'baz' )
* .add( 'foo', 'foo' )
* .then( store => store.get( 'foo' ) )
*
* // Promise resolving to undefined
* DiffStore()
* .add( 'foo', 'bar' )
* .then( store => store.clear() )
* .then( store => store.get( 'foo' ) )
*
* One caveat: since the diff represents the absence of changes as
* `undefined`, there is no way to distinguish between an actual undefined
* value and a non-change. If this is important to you, you can subtype
* this class and override `#diff`.
*
* For more examples, see the `DiffStoreTest` test case.
*/
module.exports = Class( 'DiffStore' )
.implement( Store )
.extend(
{
/**
* New data staged for committing
* @type {Object}
*/
'private _staged': {},
/**
* Previous values
* @type {Object}
*/
'private _commit': {},
/**
* Stage value `value` for key `key`
*
* The change is not committed until `#clear` is called; `#get` will
* diff the staged value against the last committed value.
*
* @param {string} key store key
* @param {*} value value for key
*
* @return {Promise.<Store>} promise to add item to store, resolving to
* self (for chaining)
*/
'virtual public add'( key, value )
{
this._staged[ key ] = value;
return Promise.resolve( this.__inst );
},
/**
* Retrieve diff of `key`
*
* This performs a lazy diff of the data `D` behind `key`. For each
* scalar value in `D`, recursively, the value will be `undefined` if
* there is no change and will be the staged value if changed. A change
* occurs when the data `D` differs from the value of `key` before the
* last `#clear`. A value is staged when it has been added since the
* last `#clear`.
*
* @param {string} key store key
*
* @return {Promise} promise for the key value
*/
'virtual public get'( key )
{
if ( ( this._staged[ key ] || this._commit[ key ] ) === undefined )
{
return Promise.reject(
StoreMissError( `Key '${key}' does not exist` )
);
}
return Promise.resolve(
this.diff( this._staged[ key ], this._commit[ key ] )
);
},
/**
* Commit staged data and clear diffs
*
* All staged data will be committed. Until some committed key `k` has
* its data modified via `#add`, `k` will not be considered to have
* changed.
*
* @return {Promise.<Store>} promise to add item to store, resolving to
* self (for chaining)
*/
'virtual public clear'()
{
Object.keys( this._staged ).forEach(
key => this._commit[ key ] = this._staged[ key ]
);
this._staged = {};
return Promise.resolve( this.__inst );
},
/**
* Recursively diff two objects or scalars `data` and `orig`
*
* A datum in `data` is considered to be changed when it is not equal to
* the corresponding datum in `orig`. If the datum is an object, it is
* processed recursively until a scalar is reached for comparison.
*
* The algorithm processes the union of the keys of both `data` and
* `orig`.
*
* One caveat: since the diff represents the absence of changes as
* `undefined`, there is no way to distinguish between an actual
* undefined value and a non-change. If this is important to you, you
* can override this method.
*
* An example of the output of the algorithm is given in the class-level
* documentation.
*
* @param {*} data new data
* @param {*} orig original data to diff against
*
* @return {*} diff
*/
'virtual protected diff'( data, orig )
{
if ( orig === undefined )
{
// no previous, then data must be new, and so _is_ the diff
return data;
}
else if ( typeof data !== 'object' )
{
// only compare scalars (we'll recurse on objects)
return ( data === orig )
? undefined
: data;
}
const keys = this._getKeyUnion( data, orig );
let diff = ( Array.isArray( data ) ) ? [] : {};
for ( let key of keys )
{
diff[ key ] = this.diff( data[ key ], orig[ key ] );
}
return diff;
},
/**
* Calculate the union of the keys of `first` and `second`
*
* `first` and `second` must both be of type `object`.
*
* @param {*} first some object
* @param {*} second some object
*
* @return {Set} Object.keys(first) ∪ Object.keys(second)
*/
'private _getKeyUnion'( first, second )
{
const keys = new Set( Object.keys( first ) );
Object.keys( second )
.forEach( key => keys.add( key ) );
return keys;
},
/**
* Fold (reduce) all staged values
*
* A value is staged when it has been set but `#clear` has not yet
* been called---these are the only values that might be
* different. Since the purpose of this Store is to produce diffs,
* there is no way to iterate over all values previously encountered.
*
* The order of folding is undefined.
*
* The ternary function `callback` is of the same form as
* {@link Array#reduce}: the first argument is the value of the
* accumulator (initialized to the value of `initial`); the second
* is the stored item; and the third is the key of that item.
*
* @param {function(*,*,string=)} callback folding function
* @param {*} initial initial value for accumulator
*
* @return {Promise} promise of a folded value (final accumulator value)
*/
'public reduce'( callback, initial )
{
return Promise.resolve(
Object.keys( this._staged).reduce(
( accum, key ) => {
const value = this.diff(
this._staged[ key ],
this._commit[ key ]
);
return callback( accum, value, key );
},
initial
)
);
},
} );
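
As the caveat above suggests, a subtype can override `#diff` if the
`undefined`-as-no-change convention is a problem; a minimal
(hypothetical) sketch, assuming GNU ease.js `extend`/`__super`
semantics:

    const DiffStore = require( 'liza' ).store.DiffStore;

    // sentinel distinguishing "no change" from a literal undefined
    const NO_CHANGE = { no_change: true };

    const SentinelDiffStore = DiffStore.extend(
    {
        'override protected diff'( data, orig )
        {
            const result = this.__super( data, orig );

            // unchanged scalars (and unchanged nested values, since the
            // parent recurses through this override) become NO_CHANGE
            return ( result === undefined )
                ? NO_CHANGE
                : result;
        },
    } );

    // SentinelDiffStore() can then be used anywhere a DiffStore is expected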

View File

@ -75,23 +75,17 @@ module.exports = Class( 'MemoryStore' )
/**
* Add item to store under `key` with value `value`
*
* The promise will be fulfilled with an object containing the
* `key` and `value` added to the store; this is convenient for
* promises.
*
* @param {string} key store key
* @param {*} value value for key
*
* @return {Promise} promise to add item to store
* @return {Promise.<Store>} promise to add item to store, resolving to
* self (for chaining)
*/
'virtual public add': function( key, value )
'virtual public add'( key, value )
{
this._store[ key ] = value;
return Promise.resolve( {
key: key,
value: value,
} );
return Promise.resolve( this.__inst );
},
@ -104,7 +98,7 @@ module.exports = Class( 'MemoryStore' )
*
* @return {Promise} promise for the key value
*/
'virtual public get': function( key )
'virtual public get'( key )
{
return ( this._store[ key ] !== undefined )
? Promise.resolve( this._store[ key ] )
@ -117,13 +111,14 @@ module.exports = Class( 'MemoryStore' )
/**
* Clear all items in store
*
* @return {Promise} promise to clear store
* @return {Promise<Store>} promise to clear store, resolving to self
* (for chaining)
*/
'virtual public clear': function()
'virtual public clear'()
{
this._store = {};
return Promise.resolve( true );
return Promise.resolve( this.__inst );
},
@ -148,20 +143,15 @@ module.exports = Class( 'MemoryStore' )
*
* @return {Promise} promise of a folded value (final accumulator value)
*/
'public reduce': function( callback, initial )
'public reduce'( callback, initial )
{
var store = this._store;
const store = this._store;
return Promise.resolve(
Object.keys( store )
.map( function( key )
{
return [ key, store[ key ] ];
} )
.reduce( function( accum, values )
{
return callback( accum, values[ 1 ], values[ 0 ] );
}, initial )
Object.keys( store ).reduce(
( accum, key ) => callback( accum, store[ key ], key ),
initial
)
);
}
} );
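
The practical effect of resolving to `this.__inst` is that operations
chain without re-capturing the instance (mirroring the updated tests
below); a small sketch:

    const MemoryStore = require( 'liza' ).store.MemoryStore;

    MemoryStore()
        .add( 'foo', 'bar' )
        .then( store => store.add( 'baz', 'quux' ) )
        .then( store => store.get( 'foo' ) )
        .then( value => { /* "bar" */ } );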

View File

@ -0,0 +1,222 @@
/**
* Store proxy to sub-stores based on key patterns
*
* Copyright (C) 2017 LoVullo Associates, Inc.
*
* This file is part of the Liza Data Collection Framework
*
* Liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
"use strict";
const Trait = require( 'easejs' ).Trait;
const Class = require( 'easejs' ).Class;
const Store = require( './Store' );
const StorePatternError = require( './StorePatternError' );
/**
* Proxy to sub-stores based on key patterns
*
* Patterns are an array of the form `[pattern, store]`. If a key matches
* `pattern`, then the request is proxied to `store`. If the pattern
* contains a match group, then group 1 will be used as the key for `store`.
*
* @example
* const store1 = Store();
* const store2 = Store();
*
* const patterns = [
* [ /^foo:/, store1 ],
* [ /^bar:(.*)$/, store2 ],
* ];
*
* const proxy = Store.use( PatternProxy( patterns ) )();
*
* // Promise resolving to "baz"
* proxy.add( 'foo:bar', 'baz' ).then( () => store1.get( 'foo:bar' ) );
*
* // Promise resolving to "quux"
* proxy.add( 'bar:baz', 'quux' ).then( () => store2.get( 'baz' ) );
*
* // Promise rejecting with StorePatternError
* proxy.add( 'unknown', 'nope' );
*
* // Promise resolving to "quuux"
* store2.add( 'quux', 'quuux' )
* .then( () => proxy.get( 'bar:quux' ) );
*
* Note that this will perform a linear search on each of the patterns. You
* can optimize this by putting the patterns in order of most frequently
* encountered, descending.
*
* If a key fails to match any pattern, a `StorePatternError` is thrown. To
* provide a default pattern, create a regular expression that matches on
* any input (e.g. `/./`).
*/
module.exports = Trait( 'PatternProxy' )
.implement( Store )
.extend(
{
/**
* Pattern mapping to internal store
* @type {Array.<Array.<RegExp,Store>>}
*/
'private _patterns': [],
/**
* Define pattern map
*
* `patterns` should be an array of arrays, of this form:
*
* @example
* [ [ /a/, storea ], [ /^b:(.*)$/, storeb ] ]
*
* That is: a regular expression that, when matched, maps to the
* associated store. If the regular expression contains a match group,
* group 1 will be used as the key name in the destination store.
*
* @param {Array.<Array.<RegExp,Store>>} patterns pattern map
*/
__mixin( patterns )
{
this._patterns = this._validatePatternMap( patterns );
},
/**
* Verify that pattern map contains valid mappings
*
* @param {Array.<Array.<RegExp,Store>>} patterns pattern map
*
* @return {Array} `patterns` argument
*/
'private _validatePatternMap'( patterns )
{
if ( !Array.isArray( patterns ) )
{
throw TypeError( "Pattern map must be an array" );
}
patterns.forEach( ( [ pattern, store ], i ) =>
{
if ( !( pattern instanceof RegExp ) )
{
throw TypeError(
`Pattern must be a RegExp at index ${i}`
);
}
if ( !Class.isA( Store, store ) )
{
throw TypeError(
`Pattern must map to Store at index ${i}`
);
}
} );
return patterns;
},
/**
* Proxy item with value `value` to internal store matching against `key`
*
* Note that the key stored may be different than `key`. This
* information is important only if the internal stores are not
* encapsulated.
*
* @param {string} key store key to match against
* @param {*} value value for key
*
* @return {Promise.<Store>} promise to add item to store, resolving to
* self (for chaining)
*/
'virtual public abstract override add'( key, value )
{
return this.matchKeyToStore( key )
.then( ( { store, key:skey } ) => store.add( skey, value ) );
},
/**
* Retrieve item from an internal store matching against `key`
*
* Note that the key stored may be different than `key`. This
* information is important only if the internal stores are not
* encapsulated.
*
* The promise will be rejected if the key is unavailable.
*
* @param {string} key store key to pattern match
*
* @return {Promise} promise for the key value
*/
'virtual public abstract override get'( key )
{
// XXX
return this.matchKeyToStore( key )
.then( ( { store, key:skey } ) => store.get( skey ) );
},
/**
* Attempt to map `key` to a Store
*
* If no patterns match against `key`, the Promise will be rejected.
*
* @param {string} key key to match against
*
* @return {Promise.<Object>} {store,key} on success,
* StorePatternError on failure
*/
'protected matchKeyToStore'( key )
{
for ( let [ pattern, store ] of this._patterns )
{
const [ match, skey=key ] = key.match( pattern ) || [];
if ( match !== undefined )
{
return Promise.resolve( {
store: store,
key: skey
} );
}
}
return Promise.reject( StorePatternError(
`Key '${key}' does not match any pattern`
) );
},
/**
* Clear all pattern stores
*
* This simply calls `#clear` on all stores associated with all
* patterns.
*
* @return {Promise.<Store>} promise to add item to store, resolving to
* self (for chaining)
*/
'virtual public abstract override clear'()
{
return Promise.all(
this._patterns.map( ( [ , store ] ) => store.clear() )
);
},
} );
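
The catch-all advice in the docblock above is exactly what
`system/client.js` later in this diff relies on; a minimal sketch
(store names illustrative):

    const store = require( 'liza' ).store;

    const class_store   = store.DiffStore();
    const default_store = store.MemoryStore();

    // keys that match no earlier pattern fall through to /./ rather
    // than rejecting with StorePatternError
    const proxy = store.MemoryStore.use(
        store.PatternProxy( [
            [ /^c:(.*)$/, class_store   ],
            [ /./,        default_store ],
        ] )
    )();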

View File

@ -44,7 +44,8 @@ module.exports = Interface( 'Store',
* @param {string} key store key
* @param {*} value value for key
*
* @return {Promise} promise to add item to store
* @return {Promise.<Store>} promise to add item to store, resolving to
* self (for chaining)
*/
'public add': [ 'key', 'value' ],
@ -64,7 +65,8 @@ module.exports = Interface( 'Store',
/**
* Clear all items in store
*
* @return {Promise} promise to clear store
* @return {Promise<Store>} promise to clear store, resolving to self
* (for chaining)
*/
'public clear': [],

View File

@ -0,0 +1,31 @@
/**
* Error when Store pattern matching fails
*
* Copyright (C) 2017 LoVullo Associates, Inc.
*
* This file is part of the Liza Data Collection Framework
*
* Liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
var Class = require( 'easejs' ).Class;
/**
* Store pattern matching failure
*
* A key request did not match any patterns known to the Store.
*/
module.exports = Class( 'StorePatternError' )
.extend( ReferenceError, {} );
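
Since this is an ease.js class extending `ReferenceError`, a rejected
promise from a `proxy` like the one sketched above can be recognized by
type (illustrative only, assuming `Class.isA` type checks):

    const Class             = require( 'easejs' ).Class;
    const StorePatternError = require( 'liza' ).store.StorePatternError;

    proxy.get( 'no-pattern-matches-this' )
        .catch( e =>
        {
            if ( !Class.isA( StorePatternError, e ) )
            {
                throw e;   // not a pattern miss; re-throw
            }

            // fall back to some default lookup
        } );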

View File

@ -0,0 +1,67 @@
/**
* Client system
*
* Copyright (C) 2017 LoVullo Associates, Inc.
*
* This file is part of liza.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
"use strict";
const store = require( '../store' );
/**
* Typical client system
*
* This serves as a factory of sorts for the user-facing client that runs in
* the web browser.
*
* This is incomplete; it will be added to as code is ported to liza.
*/
module.exports = {
data: {
/**
* Create a store suitable for comparing diffs
*
* This relies very much on assumptions about how the rest of the
* system works:
* - bstore expects the diff format to be provided directly to it;
* - cstore expects a full classification result set with which
* _it_ will compute the diff; and
* - the outer store proxies to cstore for 'c:*'.
*/
diffStore: () => {
const cstore = store.DiffStore();
const bstore = store.MemoryStore();
const proxy = store.MemoryStore.use(
store.PatternProxy( [
[ /^c:(.*)$/, cstore ],
[ /./, bstore ],
] )
)();
// TODO: breaking encapsulation should not be necessary in the
// future
return {
store: proxy,
cstore: cstore,
bstore: bstore,
};
},
},
};
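
A sketch of what each layer ends up seeing (require path, key names,
and values illustrative; the functional test near the end of this diff
exercises the same routing):

    const client = require( 'liza' ).system.client;

    const { store, cstore, bstore } = client.data.diffStore();

    // classification results (keyed `c:*') are routed to the DiffStore,
    // which computes its own diff between successive result sets
    store.add( 'c:loc_included', [ 1, 1, 0 ] )
        // everything else is assumed to already be in bucket-diff
        // format and is stored verbatim in the MemoryStore
        .then( () => store.add( 'location_city', [ undefined, 'Buffalo' ] ) )
        .then( () => Promise.all( [
            cstore.get( 'loc_included' ),   // note: `c:' prefix stripped
            bstore.get( 'location_city' ),
        ] ) );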

View File

@ -78,7 +78,7 @@ module.exports = Class( 'BucketDataValidator',
*
* @return {Object.<Array.<string>>} formatted data
*/
'public validate': function( data, err, inplace )
'virtual public validate': function( data, err, inplace )
{
err = err || function() {};
inplace = !!inplace;
@ -125,7 +125,7 @@ module.exports = Class( 'BucketDataValidator',
*
* @return {Array.<string>} formatted data
*/
'public format': function( data, inplace )
'virtual public format': function( data, inplace )
{
inplace = !!inplace;

View File

@ -0,0 +1,188 @@
/**
* Data validator
*
* Copyright (C) 2017 LoVullo Associates, Inc.
*
* This file is part of liza.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
"use strict";
const Class = require( 'easejs' ).Class;
/**
* Check data update for failures
*
* This validator glues together various parts of the system that contribute
* to a validation on data change.
*
* TODO: Remove reliance on ClientDependencyFactory
*/
module.exports = Class( 'DataValidator',
{
/**
* Bucket data validator
* @type {BucketDataValidator}
*/
'private _bucket_validator': null,
/**
* Bucket field monitor
* @type {ValidStateMonitor}
*/
'private _field_monitor': null,
/**
* Dependency factory
*
* TODO: remove dependency on this class
*
* @type {ClientDependencyFactory}
*/
'private _factory': null,
/**
* Various layers of the diff store
* @type {Object}
*/
'private _stores': {},
/**
* Initialize validator
*
* @param {BucketDataValidator} bucket_validator data validator
* @param {ValidStateMonitor} field_monitor field state monitor
* @param {ClientDependencyFactory} dep_factory REMOVE ME
* @param {function()} store_factory factory for diff store
*/
__construct(
bucket_validator, field_monitor, dep_factory, store_factory
)
{
if ( typeof store_factory !== 'function' )
{
throw TypeError( "Expected function for parameter store_factory" );
}
this._bucket_validator = bucket_validator;
this._field_monitor = field_monitor;
this._factory = dep_factory;
this._createStores( store_factory );
},
/**
* Create internal diff stores
*
* @param {function()} store_factory function to produce stores
*
* @return {undefined}
*/
'private _createStores': function( store_factory )
{
this._stores = store_factory();
},
/**
* Validate diff and update field monitor
*
* The external validator `validatef` is a kluge while the system
* undergoes refactoring.
*
* @param {Object} diff bucket diff
* @param {Object=} classes classification change data
* @param {function(Object,Object)=} validatef external validator
*
* @return {Promise} accepts with unspecified value once field monitor
* has completed its update
*/
'public validate'( diff, classes, validatef )
{
const _self = this;
let failures = {};
if ( diff !== undefined )
{
_self._bucket_validator.validate( diff, ( name, value, i ) =>
{
diff[ name ][ i ] = undefined;
( failures[ name ] = failures[ name ] || {} )[ i ] =
_self._factory.createFieldFailure( name, i, value );
}, true );
validatef && validatef( diff, failures );
}
// XXX: this assumes that the above is synchronous
return this._populateStore( classes, this._stores.cstore, 'indexes' )
.then( () => this.updateFailures( diff, failures ) );
},
/**
* Update failures from external validation
*
* TODO: This is a transitional API---we should handle all validations,
* not allow external systems to meddle in our affairs.
*
* @param {Object} diff bucket diff
* @param {Object} failures failures per field name and index
*
* @return {Promise} promise to populate internal store
*/
'public updateFailures'( diff, failures )
{
return this._populateStore( diff, this._stores.bstore ).then( () =>
this._field_monitor.update(
this._stores.store, failures
)
);
},
/**
* Populate store with data
*
* This effectively converts a basic array into a `Store`. This is
* surprisingly performant on v8. If the stores mix in traits, there
* may be a slight performance hit for trait-overridden methods.
*
* @param {Object} data data to map onto store
* @param {Store} store destination store
* @param {string=} subkey optional subkey of each item to add
*
* @return {Promise} when all items have been added to the store
*/
'private _populateStore'( data, store, subkey )
{
if ( data === undefined )
{
return Promise.resolve( [] );
}
const mapf = ( subkey !== undefined )
? key => store.add( key, data[ key ][ subkey ] )
: key => store.add( key, data[ key ] );
return store.clear()
.then( () => Promise.all(
Object.keys( data ).map( mapf )
) );
},
} );
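
Assuming a `validator` wired as in the sketch near the top of this
commit message, the observable effect of a failing index is as follows
(per the first DataValidator test further down; field name and values
illustrative):

    const diff = { foo: [ 'a', 'bad value', 'c' ] };

    validator.validate( diff, {} )
        .then( () =>
        {
            // if index 1 of `foo' failed the bucket validator:
            //   - diff.foo is now [ 'a', undefined, 'c' ] (bad datum blanked)
            //   - the field monitor received { foo: { 1: <Failure> } }
            //   - bstore holds the blanked diff for later fix detection
        } );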

View File

@ -1,7 +1,7 @@
/**
* Field validity monitor
*
* Copyright (C) 2016 LoVullo Associates, Inc.
* Copyright (C) 2016, 2017 LoVullo Associates, Inc.
*
* This file is part of liza.
*
@ -19,9 +19,12 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
var Class = require( 'easejs' ).Class,
EventEmitter = require( 'events' ).EventEmitter,
Failure = require( './Failure' );
"use strict";
const Class = require( 'easejs' ).Class;
const EventEmitter = require( 'events' ).EventEmitter;
const Failure = require( './Failure' );
const Store = require( '../store/Store' );
/**
@ -40,36 +43,51 @@ module.exports = Class( 'ValidStateMonitor' )
/**
* Mark fields as updated and detect failures and fixes
*
* The field data DATA should be a key-value store with an array as the
* value for each key. If the data are not present, then it is assumed
* to have been left unchanged, and will not contribute to a
* fix. Otherwise, any field in FAILURES but not in DATA will count as
* a fix.
* The field data `data` should be a key-value store with an array as
* the value for each key. If the data are not present, then it is
* assumed to have been left unchanged, and will not contribute to a
* fix. Otherwise, any field in `failures` but not in `data` will count
* as a fix.
*
* FAILURES should follow the same structure as DATA. Indexes should
* omitted from the value if they are not failures.
* `failures` should follow the same structure as `data`. Indexes
* should be omitted from the value if they are not failures.
*
* The return value is a promise that is accepted once all fix checks
* have been performed. The `failure` event is always emitted _before_
* the fix event.
*
* @param {Object} data key-value field data
* @param {Object} failures key-value field errors
*
* @return {ValidStateMonitor} self
* @return {Promise.<ValidStateMonitor>} self after fix checks
*/
'public update': function( data, failures )
'public update'( data, failures )
{
var fixed = this.detectFixes( data, this._failures, failures ),
count_new = this.mergeFailures( this._failures, failures );
if ( this.hasFailures() && ( count_new > 0 ) )
if ( !Class.isA( Store, data ) )
{
this.emit( 'failure', this._failures );
throw TypeError(
'Bucket diff data must be a Store; given ' + data
);
}
if ( fixed !== null )
{
this.emit( 'fix', fixed );
}
const fixed = this.detectFixes( data, this._failures, failures );
return this;
return fixed.then( fixes =>
{
const count_new = this.mergeFailures( this._failures, failures );
if ( this.hasFailures() && ( count_new > 0 ) )
{
this.emit( 'failure', this._failures );
}
if ( fixes !== null )
{
this.emit( 'fix', fixes );
}
return this.__inst;
} );
},
@ -80,7 +98,7 @@ module.exports = Class( 'ValidStateMonitor' )
* value is an array with each failure index and
* the value that caused the failure
*/
'public getFailures': function()
'public getFailures'()
{
return this._failures;
},
@ -108,22 +126,22 @@ module.exports = Class( 'ValidStateMonitor' )
*
* @return {boolean} true if errors exist, otherwise false
*/
'virtual public hasFailures': function()
'virtual public hasFailures'()
{
var past = this._failures;
let past = this._failures;
for ( var field in past )
return Object.keys( past ).some( field =>
{
for ( var i in past[ field ] )
for ( let i in past[ field ] )
{
return true;
}
// clean up as we go
delete past[ field ];
}
return false;
return false;
} );
},
@ -138,15 +156,15 @@ module.exports = Class( 'ValidStateMonitor' )
*
* @return {number} number of new failures
*/
'virtual protected mergeFailures': function( past, failures )
'virtual protected mergeFailures'( past, failures )
{
var count_new = 0;
let count_new = 0;
for ( var name in failures )
{
past[ name ] = past[ name ] || [];
var cur_past = past[ name ];
const cur_past = past[ name ];
// copy each failure into the past failures table
for ( var i in failures[ name ] )
@ -182,26 +200,28 @@ module.exports = Class( 'ValidStateMonitor' )
* @param {Object} data validated data
* @param {Object} failures new failures
*
* @return {!Object} fixed list of fixed indexes for each fixed field
* @return {Promise.<!Object>} fixed list of fixed indexes for each fixed field
*/
'virtual protected detectFixes': function( data, past, failures )
'virtual protected detectFixes'( data, past, failures )
{
var fixed = {},
has_fixed = false;
let fixed = {};
for ( var name in past )
{
var past_fail = past[ name ],
fail = failures[ name ];
return Promise.all(
Object.keys( past ).map( name =>
{
const past_fail = past[ name ];
const fail = failures[ name ];
has_fixed = has_fixed || this._checkFailureFix(
name, fail, past_fail, data, fixed
return this._checkFailureFix(
name, fail, past_fail, data, fixed
);
} )
)
.then( fixes =>
fixes.some( fix => fix === true )
? fixed
: null
);
}
return ( has_fixed )
? fixed
: null;
},
@ -214,53 +234,83 @@ module.exports = Class( 'ValidStateMonitor' )
* @param {Object} data validated data
* @param {Object} fixed destination for fixed field data
*
* @return {boolean} whether a field was fixed
* @return {Promise.<boolean>} whether a field was fixed
*/
'private _checkFailureFix': function( name, fail, past_fail, data, fixed )
'private _checkFailureFix'( name, fail, past_fail, data, fixed )
{
var has_fixed = false;
// we must check each individual index because it is possible that
// not every index was modified or fixed (we must loop through like
// this because this is treated as a hash table, not an array)
for ( var i in past_fail )
return Promise.all( past_fail.map( ( failure, fail_i ) =>
{
var causes = past_fail[ i ] && past_fail[ i ].getCauses();
const causes = failure && failure.getCauses() || [];
for ( var cause_i in causes )
{
var cause = causes[ cause_i ],
cause_name = cause.getName(),
cause_index = cause.getIndex(),
field = data[ cause_name ];
// if datum is unchanged, ignore it
if ( field === undefined )
{
continue;
}
// to be marked as fixed, there must both me no failure and
// there must be data for this index for the field in question
// (if the field wasn't touched, then of course there's no
// failure!)
if ( ( fail === undefined )
|| ( !( fail[ cause_index ] )
&& ( field[ cause_index ] !== undefined ) )
// to short-circuit checks, the promise will be _rejected_ once
// a match is found (see catch block)
return causes
.reduce(
this._checkCauseFix.bind( this, data, fail ),
Promise.resolve( true )
)
.then( () => false )
.catch( result =>
{
if ( result instanceof Error )
{
throw result;
}
// looks like it has been resolved
( fixed[ name ] = fixed[ name ] || [] )[ i ] =
field[ cause_index ]
( fixed[ name ] = fixed[ name ] || [] )[ fail_i ] = result;
has_fixed = true;
delete past_fail[ fail_i ];
return true;
} );
} ) ).then( fixes => fixes.some( fix => fix === true ) );
},
delete past_fail[ i ];
break;
}
}
}
return has_fixed;
}
/**
* Check past failure causes
*
* Each past failure in `fail` will be checked against the data in
* `data` to determine whether it should be considered a possible
* fix. If so, the promise is fulfilled with the fix data. It is the
* responsibility of the caller to handle removing past failures.
*
* @param {Object} data validated data
* @param {Object} fail failure records
* @param {Promise} causep cause promise to chain onto
* @param {Field} cause field that caused the error
*
* @return {Promise} whether a field should be fixed
*/
'private _checkCauseFix'( data, fail, causep, cause )
{
const cause_name = cause.getName();
const cause_index = cause.getIndex();
return causep.then( () =>
new Promise( ( keepgoing, found ) =>
data.get( cause_name ).then( field =>
{
// to be marked as fixed, there must both be no failure
// and there must be data for this index for the field
// in question (if the field wasn't touched, then of
// course there's no failure!)
if ( ( ( fail === undefined ) || !( fail[ cause_index ] ) )
&& ( field[ cause_index ] !== undefined )
)
{
found( field[ cause_index ] );
return;
}
// keep searching
keepgoing( true );
} )
.catch( e => keepgoing( true ) )
)
);
},
} );
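
The shape of the new API, as a minimal sketch (any `Store`
implementation will do for the field data; the updated tests below use
`MemoryStore`):

    const liza = require( 'liza' );

    const monitor = liza.validate.ValidStateMonitor();
    const store   = liza.store.MemoryStore();

    monitor
        .on( 'failure', failures => { /* { name: [ Failure, ... ] } */ } )
        .on( 'fix',     fixes    => { /* { name: [ fixed_value, ... ] } */ } );

    // field data must now be a Store, and #update returns a promise
    // that resolves (to the monitor) once failure/fix detection is done
    store.add( 'foo', [ 'new value' ] )
        .then( s => monitor.update( s, {} ) )
        .then( m => { /* all fix checks completed */ } );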

View File

@ -0,0 +1,235 @@
/**
* Test case for DiffStore
*
* Copyright (C) 2017 LoVullo Associates, Inc.
*
* This file is part of the Liza Data Collection Framework
*
* Liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
"use strict";
const store = require( '../../' ).store;
const chai = require( 'chai' );
const expect = chai.expect;
const Class = require( 'easejs' ).Class;
const Sut = store.DiffStore;
const StoreMissError = store.StoreMissError;
chai.use( require( 'chai-as-promised' ) );
describe( 'store.DiffStore', () =>
{
it( 'considers first add call to be diffable', () =>
{
return expect(
Sut()
.add( 'foo', 'bar' )
.then( sut => sut.get( 'foo' ) )
).to.eventually.equal( 'bar' );
} );
it( 'does not clear diff on add of new key', () =>
{
return expect(
Sut()
.add( 'foo', 'bar' )
.then( sut => sut.add( 'baz', 'quux' ) )
.then( sut => Promise.all( [
sut.get( 'foo' ),
sut.get( 'baz' ),
] ) )
).to.eventually.deep.equal( [ 'bar', 'quux'] );
} );
it( 'updates diff when key modified before clear', () =>
{
return expect(
Sut()
.add( 'foo', 'bar' )
.then( sut => sut.add( 'foo', 'baz' ) )
.then( sut => sut.get( 'foo' ) )
).to.eventually.equal( 'baz' );
} );
it( 'considers key unchanged in diff immediately after clear', () =>
{
debugger;
return expect(
Sut()
.add( 'foo', 'bar' )
.then( sut => sut.clear() )
.then( sut => sut.get( 'foo' ) )
).to.eventually.equal( undefined );
} );
// distinction between unknown key and no change (compare to above test)
it( 'distinguishes between unchanged and unknown keys', () =>
{
debugger;
return expect(
Sut()
.add( 'foo', 'bar' )
.then( sut => sut.clear() )
.then( sut => sut.get( 'unknown' ) )
).to.eventually.be.rejectedWith( StoreMissError );
} );
[
// scalar
{
orig: 'bar',
next: 'baz',
expected: 'baz',
},
{
orig: [ 'bar', 'baz' ],
next: 'baz',
expected: 'baz',
},
// returns new value if entire array changed
{
orig: [ 'bar', 'baz' ],
next: [ 'quux', 'quuux' ],
expected: [ 'quux', 'quuux' ],
},
// sets unchanged indexes to undefined
{
orig: [ 'bar', 'baz', 'quux' ],
next: [ 'bar', 'quux' ],
expected: [ undefined, 'quux', undefined ],
},
// next size > original
{
orig: [ 'bar', 'baz' ],
next: [ 'quux', 'baz', 'quuux' ],
expected: [ 'quux', undefined, 'quuux' ],
},
// 5 ^
// same
{
orig: [ 'bar', 'baz' ],
next: [ 'bar', 'baz' ],
expected: [ undefined, undefined ],
},
// no longer an array
{
orig: [ 'bar', [ 'baz', 'quux' ] ],
next: [ 'bar', 'quux' ],
expected: [ undefined, 'quux'],
},
// nested change
{
orig: [ 'bar', [ 'baz', 'quux' ] ],
next: [ 'bar', [ 'foo', 'quux' ] ],
expected: [ undefined, [ 'foo', undefined ] ],
},
// note that it always recurses to set undefined, even if all of
// them are undefined
{
orig: [ [ 'bar' ], [ [ 'baz', 'quux' ] ] ],
next: [ [ 'bar' ], [ [ 'baz', 'foo' ] ] ],
expected: [ [ undefined ], [ [ undefined, 'foo' ] ] ],
},
// there's not a distinction in the algorithm between numeric
// indexes and object keys
{
orig: { foo: 'bar' },
next: { foo: 'baz' },
expected: { foo: 'baz' },
},
// 10 ^
{
orig: { foo: 'bar' },
next: { foo: 'bar' },
expected: { foo: undefined },
},
{
orig: { foo: 'bar', baz: 'quux' },
next: { foo: 'foo', baz: 'quux' },
expected: { foo: 'foo', baz: undefined },
},
{
orig: { foo: 'bar', baz: 'quux' },
next: { baz: 'change' },
expected: { foo: undefined, baz: 'change' },
},
{
orig: { foo: 'bar', baz: [ 'a', 'b', ] },
next: { baz: [ 'a', 'c' ] },
expected: { foo: undefined, baz: [ undefined, 'c' ] },
},
{
orig: { foo: { bar: [ 'baz' ] } },
next: { foo: { bar: [ 'baz', 'quux' ] } },
expected: { foo: { bar: [ undefined, 'quux' ] } },
},
].forEach( ( { orig, next, expected }, i ) =>
{
it( `properly diffs (${i})`, () =>
{
return expect(
Sut()
.add( 'foo', orig )
.then( sut => sut.clear() )
.then( sut => sut.add( 'foo', next ) )
.then( sut => sut.get( 'foo' ) )
).to.eventually.deep.equal( expected );
} );
} );
describe( '#reduce', () =>
{
it( 'iterates though each diff', () =>
{
return expect(
Sut()
.add( 'foo', [ 'a', 'foo' ] )
.then( sut => sut.add( 'bar', 'b' ) )
.then( sut => sut.add( 'baz', 'c' ) )
.then( sut => sut.clear() )
.then( sut => sut.add( 'foo', [ 'a2', 'foo' ] ) )
.then( sut => sut.add( 'baz', 'c2' ) )
.then( sut => sut.reduce( ( accum, value, key ) =>
{
accum[ key ] = value;
return accum;
}, {} ) )
).to.eventually.deep.equal( {
foo: [ 'a2', undefined ],
baz: 'c2',
} );
} );
} );
} );

View File

@ -60,14 +60,13 @@ describe( 'store.MemoryStore', () =>
} );
it( 'provides the key and value of the added item', () =>
it( 'returns self with promise', () =>
{
const key = 'key';
const value = 'val';
const sut = Sut();
return expect(
Sut().add( key, value )
).to.eventually.deep.equal( { key: key, value: value } );
sut.add( 'foo', 'bar' )
).to.eventually.equal( sut );
} );
} );
@ -104,6 +103,16 @@ describe( 'store.MemoryStore', () =>
);
} );
} );
it( 'returns self with promise', () =>
{
const sut = Sut();
return expect(
sut.clear()
).to.eventually.equal( sut );
} );
} );

View File

@ -0,0 +1,164 @@
/**
* Test case for PatternProxy trait
*
* Copyright (C) 2017 LoVullo Associates, Inc.
*
* This file is part of the Liza Data Collection Framework
*
* Liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
"use strict";
const store = require( '../../' ).store;
const chai = require( 'chai' );
const expect = chai.expect;
const Store = store.MemoryStore;
const Sut = store.PatternProxy;
const sinon = require( 'sinon' );
chai.use( require( 'chai-as-promised' ) );
describe( 'store.PatternProxy', () =>
{
describe( 'fails on invalid pattern map', () =>
{
[
// not a pattern
[ {}, Store() ],
// not a Store
[ /^./, {} ],
// missing Store
[ /^./ ],
// missing all
[],
].forEach( ( patterns, i ) =>
it( `(${i})`, () =>
{
expect( () => Store.use( Sut( [ patterns ] ) )() )
.to.throw( TypeError );
} )
);
} );
it( 'proxies #add by pattern', () =>
{
const store1 = Store();
const store2 = Store();
// the second pattern strips the prefix via its match group
const patterns = [
[ /^foo:/, store1 ],
[ /^bar:(.*)$/, store2 ],
];
return Promise.all( [
expect(
Store.use( Sut( patterns ) )()
.add( 'foo:moo', 'moo' )
.then( store => store1.get( 'foo:moo' ) )
).to.eventually.equal( 'moo' ),
expect(
Store.use( Sut( patterns ) )()
.add( 'bar:quux', 'quuxval' )
.then( store => store2.get( 'quux' ) )
).to.eventually.equal( 'quuxval' ),
] );
} );
it( 'proxies #get by pattern', () =>
{
const store1 = Store();
const store2 = Store();
// the second pattern strips the prefix via its match group
const patterns = [
[ /^foo:/, store1 ],
[ /^bar:(.*)$/, store2 ],
];
const sut = Store.use( Sut( patterns ) )();
return Promise.all( [
expect(
store1.add( 'foo:bar', 'moo' )
.then( () => sut.get( 'foo:bar' ) )
).to.eventually.equal( 'moo' ),
expect(
store2.add( 'quux', 'quuxval' )
.then( () => sut.get( 'bar:quux' ) )
).to.eventually.equal( 'quuxval' ),
] );
} );
// if no matches, error (like traditional functional pattern matching)
it( 'fails on #add or #get when match fails', () =>
{
const patterns = [ [ /moo/, Store() ] ];
return Promise.all( [
expect(
Store.use( Sut( patterns ) )()
.add( 'uh', 'no' )
).to.eventually.be.rejectedWith( store.StorePatternError ),
expect(
Store.use( Sut( patterns ) )()
.get( 'sorry', 'sir' )
).to.eventually.be.rejectedWith( store.StorePatternError ),
] );
} );
describe( '#clear', () =>
{
it( 'invokes #clear on all contained stores', () =>
{
const store1 = Store();
const store2 = Store();
const mocks = [ store1, store2 ].map( store =>
{
const mock = sinon.mock( store );
mock.expects( 'clear' ).once();
return mock;
} );
const patterns = [
[ /^a/, store1 ],
[ /^b/, store2 ],
];
const sut = Store.use( Sut( patterns ) )();
return sut.clear()
.then( given_sut => {
// TODO: uncomment once `this.__inst' in Traits is fixed
// in GNU ease.js
// expect( given_sut ).to.equal( sut );
mocks.forEach( mock => mock.verify() );
} );
} );
} );
} );

View File

@ -0,0 +1,75 @@
/**
* Tests instantiation of portions of the client system
*
* Copyright (C) 2017 LoVullo Associates, Inc.
*
* This file is part of liza.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* This is a functional test of the client system at large; these are _not_
* unit tests.
*/
"use strict";
const root = require( '../../' );
const sut = root.system.client;
const expect = require( 'chai' ).expect;
const Class = require( 'easejs' ).Class;
const { Store, DiffStore } = root.store;
describe( 'client', () =>
{
describe( 'data.diffStore', () =>
{
it( 'produces proper Stores', () =>
{
const { store, cstore, bstore } = sut.data.diffStore();
// we don't care what type of store these two are
expect( Class.isA( Store, store ) )
.to.be.true;
expect( Class.isA( Store, bstore ) )
.to.be.true;
// but it's essential that this is a DiffStore
expect( Class.isA( DiffStore, cstore ) )
.to.be.true;
} );
it( 'proxies c:* to cstore, others to bstore', () =>
{
const { store, cstore, bstore } = sut.data.diffStore();
const cname = 'c:foo'; // Master Shifu
const cval = 'panda';
const bname = 'henry';
const bval = 'liza';
return expect(
store.add( cname, cval )
.then( () => store.add( bname, bval ) )
.then( () => Promise.all( [
cstore.get( cname.replace( /^c:/, '' ) ),
bstore.get( bname )
] ) )
).to.eventually.deep.equal( [ cval, bval ] );
} );
} );
} );

View File

@ -0,0 +1,250 @@
/**
* Test data validator
*
* Copyright (C) 2017 LoVullo Associates, Inc.
*
* This file is part of liza.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
"use strict";
const root = require( '../../' );
const validate = root.validate;
const Sut = validate.DataValidator;
const MemoryStore = root.store.MemoryStore;
const chai = require( 'chai' );
const expect = chai.expect;
const sinon = require( 'sinon' );
const BucketDataValidator = validate.BucketDataValidator,
ValidStateMonitor = validate.ValidStateMonitor;
chai.use( require( 'chai-as-promised' ) );
describe( 'DataValidator', () =>
{
describe( '#validate', () =>
{
it( 'validates against bucket validator', () =>
{
const bvalidator = createMockBucketValidator(
function( data, err, inplace )
{
expect( data ).to.equal( diff );
expect( inplace ).to.be.true;
// since we're mocking #validate, the callback will not
// be called; we'll have to do so ourselves (eventually
// this will be a promise)
err( 'foo', expected_value, 1 );
}
);
const vmonitor = ValidStateMonitor();
const dep_factory = createMockDependencyFactory();
const getStore = createStubStore();
const { bstore } = getStore();
const mock_bstore = sinon.mock( bstore );
const mock_vmonitor = sinon.mock( vmonitor );
const mock_dep_factory = sinon.mock( dep_factory );
const diff = { foo: [ 'a', 'b', 'c' ] };
const expected_failure = {};
const expected_value = 'errmsg';
const expected_failures = {
foo: { 1: expected_failure }
};
// call to actual validator
mock_vmonitor.expects( 'update' )
.once()
.withExactArgs( getStore().store, expected_failures )
.returns( Promise.resolve( undefined ) );
mock_dep_factory.expects( 'createFieldFailure' )
.once()
.withExactArgs( 'foo', 1, expected_value )
.returns( expected_failure );
// clears previous diffs
mock_bstore.expects( 'clear' )
.once()
.returns( Promise.resolve( bstore ) );
return Sut( bvalidator, vmonitor, dep_factory, getStore )
.validate( diff )
.then( () =>
{
mock_vmonitor.verify();
mock_dep_factory.verify();
// cleared on call to err in above mock validator
return expect( getStore().bstore.get( 'foo' ) )
.to.eventually.deep.equal( [ 'a', undefined, 'c' ] );
} );
} );
it( 'merges classification changes with diff', () =>
{
// SUT will only care about the indexes
const classes = {
first: { indexes: [], is: false },
second: { indexes: [ 0, 1 ], is: true },
};
const bvalidator = createMockBucketValidator();
const vmonitor = ValidStateMonitor();
const dep_factory = createMockDependencyFactory();
const getStore = createStubStore();
const { cstore } = getStore();
const mock_cstore = sinon.mock( cstore );
// clears previous diffs
mock_cstore.expects( 'clear' )
.once()
.returns( Promise.resolve( cstore ) );
return Sut( bvalidator, vmonitor, dep_factory, getStore )
.validate( {}, classes )
.then( () =>
{
// clear should have been called
mock_cstore.verify();
// keep in mind that we are using MemoryStore for this
// test (whereas a real implementation would probably be
// using a DiffStore)
return Promise.all(
Object.keys( classes ).map( key =>
expect( cstore.get( key ) )
.to.eventually.deep.equal( classes[ key ].indexes )
)
);
} );
} );
it( 'considers failures from external validator', () =>
{
const expected_failure = {};
const bvalidator = createMockBucketValidator(
function( data, err, _ )
{
// see `failures` below
err( 'foo', 'moo', 2 );
}
);
const vmonitor = ValidStateMonitor();
const dep_factory = createMockDependencyFactory();
const getStore = createStubStore();
const diff = { foo: [ 'a', 'b', 'c' ] };
const expected_failures = {
foo: {
0: expected_failure,
2: expected_failure,
},
};
const validatef = ( given_diff, given_failures ) =>
{
expect( given_diff ).to.equal( diff );
expect( given_failures.foo[ 2 ] )
.to.equal( expected_failure );
given_failures.foo[ 0 ] = expected_failure;
};
// TODO: this is an implementation detail left over from the
// good ol' days; remove it
sinon.mock( vmonitor )
.expects( 'update' )
.once()
.withExactArgs( getStore().store, expected_failures )
.returns( Promise.resolve( undefined ) );
sinon.mock( dep_factory )
.expects( 'createFieldFailure' )
.returns( expected_failure );
return Sut( bvalidator, vmonitor, dep_factory, getStore )
.validate( diff, {}, validatef );
} );
it( 'rejects if field monitor update rejects', () =>
{
const bvalidator = createMockBucketValidator();
const vmonitor = ValidStateMonitor();
const dep_factory = createMockDependencyFactory();
const expected_e = Error();
sinon.mock( vmonitor )
.expects( 'update' )
.once()
.returns( Promise.reject( expected_e ) );
return expect(
Sut( bvalidator, vmonitor, dep_factory, createStubStore() )
.validate( {} )
).to.eventually.be.rejectedWith( expected_e );
} );
} );
} );
function createMockBucketValidator( validatef )
{
validatef = validatef || ( ( x, y, z ) => {} );
return BucketDataValidator.extend(
{
'override public validate': validatef,
} )();
}
// This isn't yet moved into liza (at least at the time of writing this)
function createMockDependencyFactory( map )
{
// alternative to mocking since the ClientDependencyFactory is not going
// to be used in the future
return {
createFieldFailure: () => {},
};
}
function createStubStore()
{
const stores = {
store: MemoryStore(),
bstore: MemoryStore(),
cstore: MemoryStore(),
};
return () => stores;
}

View File

@ -1,7 +1,7 @@
/**
* Test field validity monitor
*
* Copyright (C) 2016 LoVullo Associates, Inc.
* Copyright (C) 2016, 2017 LoVullo Associates, Inc.
*
* This file is part of liza.
*
@ -19,14 +19,20 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
var root = require( '../../' ),
Sut = root.validate.ValidStateMonitor,
expect = require( 'chai' ).expect,
Failure = root.validate.Failure,
Field = root.field.BucketField;
"use strict";
const root = require( '../../' );
const Sut = root.validate.ValidStateMonitor;
const chai = require( 'chai' );
const expect = chai.expect;
const Failure = root.validate.Failure;
const Field = root.field.BucketField;
const MemoryStore = root.store.MemoryStore;
chai.use( require( 'chai-as-promised' ) );
var nocall = function( type )
const nocall = function( type )
{
return function()
{
@ -34,7 +40,7 @@ var nocall = function( type )
};
};
var mkfail = function( name, arr )
const mkfail = function( name, arr )
{
return arr.map( function( value, i )
{
@ -51,19 +57,25 @@ describe( 'ValidStateMonitor', function()
{
it( 'does nothing with no data or failures', function()
{
Sut()
.on( 'failure', nocall( 'failure' ) )
.on( 'fix', nocall( 'fix' ) )
.update( {}, {} );
return mkstore( {} ).then( empty =>
{
return Sut()
.on( 'failure', nocall( 'failure' ) )
.on( 'fix', nocall( 'fix' ) )
.update( empty, {} );
} );
} );
it( 'does nothing with data but no failures', function()
{
Sut()
.on( 'failure', nocall( 'failure' ) )
.on( 'fix', nocall( 'fix' ) )
.update( { foo: mkfail( 'foo', [ 'bar' ] ) }, {} );
return mkstore( { foo: mkfail( 'foo', [ 'bar' ] ) } ).then( store =>
{
return Sut()
.on( 'failure', nocall( 'failure' ) )
.on( 'fix', nocall( 'fix' ) )
.update( store, {} );
} );
} );
@ -71,85 +83,118 @@ describe( 'ValidStateMonitor', function()
// need the data
describe( 'given failures', function()
{
it( 'marks failures even when given no data', function( done )
it( 'marks failures even when given no data', function()
{
var fail = mkfail( 'foo', [ 'bar', 'baz' ] );
Sut()
.on( 'failure', function( failures )
return mkstore( {} ).then( empty =>
{
return new Promise( accept =>
{
expect( failures )
.to.deep.equal( { foo: [ fail[ 0 ], fail[ 1 ] ] } );
done();
} )
.on( 'fix', nocall( 'fix' ) )
.update( {}, { foo: fail } );
return Sut()
.on( 'failure', function( failures )
{
expect( failures )
.to.deep.equal(
{ foo: [ fail[ 0 ], fail[ 1 ] ] }
);
accept();
} )
.on( 'fix', nocall( 'fix' ) )
.update( empty, { foo: fail } );
} );
} );
} );
it( 'marks failures with index gaps', function( done )
it( 'marks failures with index gaps', function()
{
var fail = mkfail( 'foo', [ undefined, 'baz' ] );
Sut()
.on( 'failure', function( failures )
return mkstore( {} ).then( empty =>
{
return new Promise( accept =>
{
expect( failures )
.to.deep.equal( { foo: [ undefined, fail[ 1 ] ] } );
done();
} )
.on( 'fix', nocall( 'fix' ) )
.update( {}, { foo: fail } );
Sut()
.on( 'failure', function( failures )
{
expect( failures )
.to.deep.equal(
{ foo: [ undefined, fail[ 1 ] ] }
);
accept();
} )
.on( 'fix', nocall( 'fix' ) )
.update( empty, { foo: fail } );
} );
} );
} );
it( 'retains past failures when setting new', function( done )
it( 'retains past failures when setting new', function()
{
var sut = Sut(),
fail = mkfail( 'foo', [ 'bar', 'baz' ] );
var test_first = function( failures )
return new Promise( ( accept, reject ) =>
{
expect( failures )
.to.deep.equal( { foo: [ undefined, fail[ 1 ] ] } );
var test_first = function( failures )
{
expect( failures )
.to.deep.equal( { foo: [ undefined, fail[ 1 ] ] } );
sut.once( 'failure', test_second );
};
sut.once( 'failure', test_second );
};
var test_second = function( failures )
{
expect( failures )
.to.deep.equal( { foo: [ fail[ 0 ], fail[ 1 ] ] } );
var test_second = function( failures )
{
expect( failures )
.to.deep.equal( { foo: [ fail[ 0 ], fail[ 1 ] ] } );
done();
};
accept();
};
sut
.once( 'failure', test_first )
.on( 'fix', nocall( 'fix' ) )
.update( {}, { foo: [ undefined, fail[ 1 ] ] } )
.update( {}, { foo: [ fail[ 0 ] ] } );
mkstore( {} ).then( empty =>
{
return sut
.once( 'failure', test_first )
.on( 'fix', nocall( 'fix' ) )
.update( empty, { foo: [ undefined, fail[ 1 ] ] } )
.then( () =>
{
return sut.update( empty, { foo: [ fail[ 0 ] ] } );
} );
} ).catch( e => reject( e ) );
} );
} );
// deprecated
it( 'accepts failures as string for BC', function( done )
it( 'accepts failures as string for BC', function()
{
var fail = [ 'foo', 'bar' ];
Sut()
.on( 'failure', function( failures )
return new Promise( ( accept, reject ) =>
{
return mkstore( {} ).then( empty =>
{
expect( failures )
.to.deep.equal( { foo: fail } );
done();
return Sut()
.on( 'failure', function( failures )
{
expect( failures )
.to.deep.equal( { foo: fail } );
accept();
} )
.on( 'fix', nocall( 'fix' ) )
.update( empty, { foo: fail } );
} )
.on( 'fix', nocall( 'fix' ) )
.update( {}, { foo: fail } );
.catch( e => reject( e ) );
} );
} );
it( 'does not discard existing failures', function( done )
it( 'does not discard existing failures', function()
{
var sut = Sut();
@ -170,67 +215,125 @@ describe( 'ValidStateMonitor', function()
// the second failure has fewer causes than the first;
// we need to make sure that it doesn't overwrite,
// leading to fewer causes
sut
.update( {}, { foo: [ fail1 ] } )
.update( {}, { foo: [ fail2 ] } );
// if cause1 wasn't removed, then this will fix it
sut
.once( 'fix', function( fixed )
return new Promise( ( accept, reject ) =>
{
return mkstore( {} ).then( empty =>
{
expect( fixed )
.to.deep.equal( { foo: [ 'causefix1' ] } );
return sut
.update( empty, { foo: [ fail1 ] } )
.then( () =>
{
return sut.update( empty, { foo: [ fail2 ] } );
} )
.then( () =>
{
const update = {
foo: [ 'moo' ],
cause1: [ 'causefix1' ]
};
// and then we should have no failures
expect( sut.hasFailures() ).to.be.false;
return mkstore( update ).then( store =>
{
return sut
.once( 'fix', function( fixed )
{
expect( fixed ).to.deep.equal(
{ foo: [ 'causefix1' ] }
);
done();
// and then we should have no failures
expect( sut.hasFailures() )
.to.be.false;
accept( true );
} )
.update( store, {} );
} );
} );
} )
.update(
{ foo: [ 'moo' ], cause1: [ 'causefix1' ] },
{}
);
.catch( e => reject( e ) );
} );
} );
} );
describe( 'given data with absence of failure', function()
{
it( 'removes non-failures if field is present', function( done )
it( 'removes non-failures if field is present', function()
{
var data = { foo: [ 'bardata', 'baz' ] },
fail = mkfail( 'foo', [ 'bar', 'baz' ] );
const fail = mkfail( 'foo', [ 'bar', 'baz' ] );
const sut = Sut();
Sut()
.on( 'fix', function( fixed )
return new Promise( ( accept, reject ) =>
{
return mkstore( { foo: [ 'bardata', 'baz' ] } ).then( data =>
{
expect( fixed )
.to.deep.equal( { foo: [ 'bardata' ] } );
done();
return sut
.on( 'fix', function( fixed )
{
expect( fixed )
.to.deep.equal( { foo: [ 'bardata' ] } );
accept();
} )
.update( data, { foo: [ fail[ 0 ], fail[ 1 ] ] } )
.then( () =>
{
return sut.update( data, {
foo: [ undefined, fail[ 1 ] ]
} );
} );
} )
.update( data, { foo: [ fail[ 0 ], fail[ 1 ] ] } )
.update( data, { foo: [ undefined, fail[ 1 ] ] } );
.catch( e => reject( e ) );
} );
} );
it( 'keeps failures if field is missing', function( done )
it( 'keeps failures if field is missing', function()
{
var data = { bar: [ 'baz', 'quux' ] },
fail_foo = mkfail( 'foo', [ 'bar', 'baz' ] ),
fail_bar = mkfail( 'bar', [ 'moo', 'cow' ] );
const fail_foo = mkfail( 'foo', [ 'bar', 'baz' ] );
const fail_bar = mkfail( 'bar', [ 'moo', 'cow' ] );
Sut()
.on( 'fix', function( fixed )
return new Promise( ( accept, reject ) =>
{
return mkstore( { bar: [ 'baz', 'quux' ] } ).then( data =>
{
expect( fixed )
.to.deep.equal( { bar: [ 'baz', 'quux' ] } );
done();
return Sut()
.on( 'fix', function( fixed )
{
expect( fixed )
.to.deep.equal( { bar: [ 'baz', 'quux' ] } );
accept();
} )
.update( data, {
foo: fail_foo, // does not exist in data
bar: fail_bar,
} )
.then( sut =>
{
return sut.update( data, {} );
} );
} )
.update( data, {
foo: fail_foo, // does not exist in data
bar: fail_bar,
} )
.update( data, {} );
.catch( e => reject( e ) );
} );
} );
// if a diff is present for a previously failed key (e.g. foo),
// but contains no changes (e.g. [ undefined ]), and doesn't
// include the failure on the second call, then it should not be
// considered to be a fix (this is a bugfix)
it( 'keeps past failures on key if failure does not reoccur', () =>
{
const fail_past = mkfail( 'foo', [ 'bar', 'baz' ] );
return mkstore( { foo: [ undefined, undefined ] } )
.then( data =>
Sut()
.update( data, { foo: fail_past } )
// no failure or fix (foo has no updates)
.then( sut => sut.update( data, {} ) )
.then( sut => expect( sut.hasFailures() ).to.be.true )
);
} );
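// A note on the diff format these tests rely on: an index that holds
// `undefined` appears to mean "no change at that index".  A store built
// from { foo: [ undefined, undefined ] } therefore carries no updates for
// `foo` at all, which is why the bugfix test above must yield neither a
// failure nor a fix for that field.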
@@ -238,136 +341,199 @@ describe( 'ValidStateMonitor', function()
{
var called = 0;
Sut()
.on( 'failure', function()
{
called++;
} )
.update( {}, { foo: mkfail( 'foo', [ 'bar' ] ) } )
.update( {}, {} ); // do not trigger failure event
expect( called ).to.equal( 1 );
return mkstore( {} ).then( empty =>
{
return Sut()
.on( 'failure', function()
{
called++;
} )
.update( empty, { foo: mkfail( 'foo', [ 'bar' ] ) } )
.then( sut =>
{
return sut.update( empty, {} ); // do not trigger failure event
} )
.then( sut =>
{
expect( called ).to.equal( 1 );
} );
} );
} );
describe( 'given a cause', function()
{
it( 'considers when recognizing fix', function( done )
it( 'considers when recognizing fix', function()
{
// same index
var data = { cause: [ 'bar' ] },
field = Field( 'foo', 0 ),
cause = Field( 'cause', 0 ),
fail = Failure( field, 'reason', [ cause ] );
const field = Field( 'foo', 0 );
const cause = Field( 'cause', 0 );
const fail = Failure( field, 'reason', [ cause ] );
Sut()
.on( 'fix', function( fixed )
return new Promise( ( accept, reject ) =>
{
return mkstore( { cause: [ 'bar' ] } ).then( data =>
{
expect( fixed )
.to.deep.equal( { foo: [ 'bar' ] } );
return Sut()
.on( 'fix', function( fixed )
{
expect( fixed )
.to.deep.equal( { foo: [ 'bar' ] } );
done();
accept();
} )
.update( data, { foo: [ fail ] } )
.then( sut =>
{
return sut.update( data, {} );
} );
} )
.update( data, { foo: [ fail ] } )
.update( data, {} );
.catch( e => reject( e ) );
} );
} );
it( 'considers different cause index', function( done )
it( 'considers different cause index', function()
{
// different index
var data = { cause: [ undefined, 'bar' ] },
field = Field( 'foo', 0 ),
cause = Field( 'cause', 1 ),
fail = Failure( field, 'reason', [ cause ] );
const update_data = { cause: [ undefined, 'bar' ] };
const field = Field( 'foo', 0 );
const cause = Field( 'cause', 1 );
const fail = Failure( field, 'reason', [ cause ] );
Sut()
.on( 'fix', function( fixed )
return new Promise( ( accept, reject ) =>
{
return mkstore( update_data ).then( data =>
{
expect( fixed )
.to.deep.equal( { foo: [ 'bar' ] } );
return Sut()
.on( 'fix', function( fixed )
{
expect( fixed )
.to.deep.equal( { foo: [ 'bar' ] } );
done();
accept();
} )
.update( data, { foo: [ fail ] } )
.then( sut =>
{
return sut.update( data, {} );
} );
} )
.update( data, { foo: [ fail ] } )
.update( data, {} );
.catch( e => reject( e ) );
} );
} );
it( 'considers any number of causes', function( done )
it( 'considers any number of causes', function()
{
// multiple causes; only one receives new data
var data = { cause_fix: [ undefined, 'bar' ] },
field = Field( 'foo', 0 ),
cause1 = Field( 'cause_no', 1 ),
cause2 = Field( 'cause_fix', 1 ),
fail = Failure(
field,
'reason',
[ cause1, cause2 ]
);
const update_data = { cause_fix: [ undefined, 'bar' ] };
const field = Field( 'foo', 0 );
const cause1 = Field( 'cause_no', 1 );
const cause2 = Field( 'cause_fix', 1 );
Sut()
.on( 'fix', function( fixed )
const fail = Failure(
field,
'reason',
[ cause1, cause2 ]
);
return new Promise( ( accept, reject ) =>
{
return mkstore( update_data ).then( data =>
{
expect( fixed )
.to.deep.equal( { foo: [ 'bar' ] } );
return Sut()
.on( 'fix', function( fixed )
{
expect( fixed )
.to.deep.equal( { foo: [ 'bar' ] } );
done();
accept();
} )
.update( data, { foo: [ fail ] } )
.then( sut =>
{
return sut.update( data, {} );
} );
} )
.update( data, { foo: [ fail ] } )
.update( data, {} );
.catch( e => reject( e ) );
} );
} );
it( 'recognizes non-fix', function()
{
// no cause data
var data = { noncause: [ undefined, 'bar' ] },
field = Field( 'foo', 0 ),
cause1 = Field( 'cause', 1 ),
cause2 = Field( 'cause', 2 ),
fail = Failure(
field,
'reason',
[ cause1, cause2 ]
);
const update_data = { noncause: [ undefined, 'bar' ] };
const field = Field( 'foo', 0 );
const cause1 = Field( 'cause', 1 );
const cause2 = Field( 'cause', 2 );
Sut()
.on( 'fix', nocall )
.update( data, { foo: [ fail ] } )
.update( data, {} );
const fail = Failure(
field,
'reason',
[ cause1, cause2 ]
);
return mkstore( update_data ).then( data =>
{
return Sut()
.on( 'fix', nocall )
.update( data, { foo: [ fail ] } )
.then( sut =>
{
return sut.update( data, {} );
} );
} );
} );
} );
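// To summarize the cause handling exercised above (the field names here
// are placeholders): a Failure may name any number of cause fields, e.g.
//
//   var fail = Failure( Field( 'foo', 0 ), 'reason', [
//       Field( 'cause_a', 1 ),
//       Field( 'cause_b', 1 ),
//   ] );
//
// and, per these tests, new data at any one cause's index is enough for
// the monitor to emit a fix for `foo`, while data on unrelated fields (or
// at other indexes) is not.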
} );
it( 'can emit both failure and fix', function( done )
it( 'can emit both failure and fix', function()
{
var data = { bar: [ 'baz', 'quux' ] },
fail_foo = mkfail( 'foo', [ 'bar' ] );
var fail_foo = mkfail( 'foo', [ 'bar' ] );
Sut()
.update( data, {
bar: mkfail( 'bar', [ 'moo', 'cow' ] ) // fail
} )
.on( 'failure', function( failed )
{
expect( failed )
.to.deep.equal( {
foo: fail_foo,
return mkstore( { bar: [ 'baz', 'quux' ] } ).then( data =>
{
return Sut()
.update( data, {
bar: mkfail( 'bar', [ 'moo', 'cow' ] ) // fail
} )
.then( sut =>
{
return new Promise( ( accept, reject ) =>
{
sut.on( 'failure', function( failed )
{
expect( failed )
.to.deep.equal( {
foo: fail_foo,
} );
} )
.on( 'fix', function( fixed )
{
expect( fixed )
.to.deep.equal(
{ bar: [ 'baz', 'quux' ] }
);
// note that the documentation for #update
// states that failure will always be
// emitted before fix
accept( true );
} )
.update( data, {
foo: fail_foo, // fail
// fixes bar
} )
.catch( e =>
{
reject( e );
} );
} );
} )
.on( 'fix', function( fixed )
{
expect( fixed )
.to.deep.equal( { bar: [ 'baz', 'quux' ] } );
done();
} )
.update( data, {
foo: fail_foo, // fail
// fixes bar
} );
} );
} );
} );
} );
@@ -387,11 +553,17 @@ describe( 'ValidStateMonitor', function()
{
var fail = mkfail( 'foo', [ 'fail' ] );
expect(
Sut()
.update( {}, { foo: fail } )
.getFailures()
).to.deep.equal( { foo: fail } );
return mkstore( {} ).then( empty =>
{
return expect(
Sut()
.update( empty, { foo: fail } )
.then( sut =>
{
return sut.getFailures();
} )
).to.eventually.deep.equal( { foo: fail } );
} );
} );
} );
@@ -407,11 +579,27 @@ describe( 'ValidStateMonitor', function()
it( 'is true when failures exist', function()
{
expect(
Sut()
.update( {}, { foo: mkfail( 'foo', [ 'bar' ] ) } )
.hasFailures()
).to.be.true;
return mkstore( {} ).then( empty =>
{
return expect(
Sut()
.update( empty, { foo: mkfail( 'foo', [ 'bar' ] ) } )
.then( sut =>
{
return sut.hasFailures();
} )
).to.eventually.be.true;
} );
} );
} );
} );
function mkstore( data )
{
let store = MemoryStore();
return Promise.all(
Object.keys( data ).map( key => store.add( key, data[ key ] ) )
).then( () => store );
}
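A minimal usage sketch for the `mkstore` helper above; the key `foo` and its
value are placeholders, and `Sut` and `expect` refer to the same identifiers
used throughout these tests:

    // Build a diff Store from plain data, then hand it to the monitor.
    // An empty failure set means nothing should fail or be fixed.
    mkstore( { foo: [ 'value' ] } )
        .then( diff => Sut().update( diff, {} ) )
        .then( sut => expect( sut.hasFailures() ).to.be.false );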