
Server configuration initial implementation

This doesn't fully replace the use of environment variables, but it is
a start.  I ran out of development time.
master
Mike Gerwitz 2017-08-30 11:12:16 -04:00
commit 8d4439f16d
26 changed files with 1447 additions and 161 deletions

1
.gitignore vendored

@ -7,6 +7,7 @@ configure
Makefile.in
# generated by configure
bin/server
src/version.js
/config.*
Makefile

32
bin/server.in 100644

@ -0,0 +1,32 @@
#!/bin/sh
# Start Liza Server using Node.js executable determined at configure-time
#
# Copyright (C) 2017 R-T Specialty, LLC.
#
# This file is part of liza.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# In addition to the configure-time NODE_FLAGS, the NODE_FLAGS environment
# variable can be used to add additional arguments to this script.
# WARNING: NODE_FLAGS arguments provided via environment variables are _not_
# escaped, so be mindful of word expansion!
#
# @AUTOGENERATED@
##
cd "$( dirname $( readlink -f "$0" ) )"
"@NODE@" @NODE_FLAGS@ $NODE_FLAGS server.js "$@"

66
bin/server.js 100644

@ -0,0 +1,66 @@
/**
* Start the Liza Server
*
* Copyright (C) 2017 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
const fs = require( 'fs' );
const {
conf: {
ConfLoader,
ConfStore,
},
server,
version,
} = require( '../' );
// kluge for now
const conf_path = (
( process.argv[ 2 ] === '-c' )
? process.argv[ 3 ]
: ''
) || __dirname + '/../conf/vanilla-server.json';
ConfLoader( fs, ConfStore )
.fromFile( conf_path )
.then( conf => Promise.all( [
conf.get( 'name' ),
conf.get( 'daemon' ),
Promise.resolve( conf ),
] ) )
.then( ([ name, daemon, conf ]) =>
{
greet( name, daemon );
return server.daemon[ daemon ]( conf ).start();
} )
.catch( e => {
console.error( e.stack );
process.exit( 1 );
} );
function greet( name, daemon )
{
console.log( `${name} (liza-${version})`);
console.log( `Server configuration: ${conf_path}` );
console.log( `Starting with ${daemon}, pid ${process.pid}` );
}


@ -0,0 +1,55 @@
{
"name": "Liza Server",
"daemon": "DevDaemon",
"http": {
"port": 8822
},
"log": {
"priority": 10,
"access": {
"path": "/var/log/node/access.log"
},
"debug": {
"path": "/var/log/node/debug.log"
}
},
"user": {
"session": {
"handler": {
"type": "php",
"cookie": "PHPSESSID"
},
"store": {
"type": "memcache",
"host": "localhost",
"port": 11211
}
}
},
"documentStore": {
"store": "mongodb",
"host": "localhost",
"port": 27017
},
"services": {
"rating": {
"process": {
"port": 5859,
"argv": "inherit"
},
"remote": {
"host": "localhost",
"domain": ""
}
},
"c1export": {
"host": "localhost",
"domain": ""
}
}
}
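
The server reads these values through the configuration Store rather than through environment variables. Below is a minimal sketch of that access pattern, not part of this commit, assuming liza is installed as a dependency; it mirrors what bin/server.js and the Daemon do with the keys above.

const fs = require( 'fs' );
const { ConfLoader, ConfStore } = require( 'liza' ).conf;

ConfLoader( fs, ConfStore )
    .fromFile( 'conf/vanilla-server.json' )
    .then( conf => Promise.all( [
        conf.get( 'http.port' ),        // 8822
        conf.get( 'log.priority' ),     // 10
        conf.get( 'log.access.path' ),  // "/var/log/node/access.log"
    ] ) )
    .then( ( [ port, priority, access_path ] ) =>
        console.log( port, priority, access_path ) )
    .catch( e => console.error( e ) );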


@ -32,7 +32,8 @@ AC_SUBST(MINOR, m4_argn(2, ver_split))
AC_SUBST(REV, m4_argn(3, ver_split))
AC_SUBST(SUFFIX, m4_argn(4, ver_split))
AC_ARG_VAR([NODE], [The node.js interpreter])
AC_ARG_VAR([NODE], [The Node.js interpreter])
AC_ARG_VAR([NODE_FLAGS], [Arguments to Node.js for Liza Server (bin/server)])
AC_CHECK_PROGS(NODE, [node nodejs])
test -n "$NODE" || AC_MSG_ERROR([missing Node.js])
@ -43,12 +44,15 @@ AC_CHECK_PROGS(TWOPI, [twopi])
AM_CONDITIONAL(HAS_TWOPI, [test "$TWOPI"])
# only needed for older versions of Node
AC_MSG_CHECKING([node --harmony_destructuring])
AS_IF([node --harmony_destructuring >/dev/null 2>/dev/null],
AC_MSG_CHECKING([$NODE --harmony_destructuring])
AS_IF(["$NODE" --harmony_destructuring >/dev/null 2>/dev/null],
[AC_MSG_RESULT(available)
AC_SUBST([NODE_DESTRUCTURE], [--harmony_destructuring])],
[AC_MSG_RESULT(no)])
# include above check(s) in NODE_FLAGS
NODE_FLAGS="$NODE_FLAGS $NODE_DESTRUCTURE"
set_devnotes='@set DEVNOTES'
AC_ARG_ENABLE(
[devnotes],
@ -68,8 +72,13 @@ AC_ARG_WITH(
AC_SUBST([SET_SRCURI], [$set_srcuri])
AC_SUBST([AUTOGENERATED],
["THIS FILE IS AUTOGENERATED! DO NOT MODIFY! See *.in."])
# generate files from their *.in counterparts
AC_CONFIG_FILES([Makefile package.json
doc/Makefile doc/config.texi
src/version.js])
src/version.js
bin/server],
[chmod +x bin/server])
AC_OUTPUT


@ -39,31 +39,63 @@ An example script to start the server is shown in @ref{f:server-start}.
use @srcrefjs{server/daemon,DevDaemon},
which uses a dummy encryption service.}
To start the server,
invoke @srcref{bin/server}.
You may also invoke @srcref{bin/server.js} directly using Node.js,
but the use of @srcref{bin/server} is recommended,
as it uses the Node.js executable determined at configure-time,
along with any command-line options required for Liza@tie{}Server
to function correctly.
Additional options can be provided to Node.js using the
@var{NODE_FLAGS} environment variable,
which will be @emph{appended} to the configure-time flags.
This environment variable is @emph{not} escaped or quoted,
so be mindful of word expansion.
@float Figure, f:server-start
@example
const @{ Daemon @} = require( 'liza' ).server.daemon;
const port = 8080;
const log_priority = 10;
$ bin/server -c path/to/config.json
Daemon( port, log_priority ).start();
# providing additional options to node
$ NODE_FLAGS=--debug bin/server -c path/to/config.json
@end example
@caption{Starting the server daemon}
@caption{Starting the Liza Server}
@end float
@cindex HTTP Server
The HTTP server is managed by
@srcrefjs{server/daemon,http_server}.
@menu
* Requests:: Handling HTTP requests.
* Posting Data:: Handling step saves and other posts.
* Server-Side Data API Calls:: Accessing external resources on the server.
* Encryption Service:: Managing sensitive data.
* Configuration:Server Configuration. Server configuration.
* Requests:: Handling HTTP requests.
* Posting Data:: Handling step saves and other posts.
* Server-Side Data API Calls:: Accessing external resources on the server.
* Encryption Service:: Managing sensitive data.
@end menu
@node Server Configuration
@section Configuration
@helpwanted{}
@cindex Configuration
Liza is migrating to an actual configuration file in place of environment
variables.
If no configuration is explicitly specified,
it uses @srcrefraw{conf/vanilla-server.json}.
Configuration loading is handled by @srcrefjs{conf,ConfLoader}.
The configuration store @srcrefjs{conf,ConfStore} is asynchronous,
so loading configuration from any external system is supported.@footnote{
Provided that you write the code to load from that system,
that is.}
@node Requests
@section HTTP Requests
@helpwanted{}
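
Since ConfLoader exposes parseConfData as a virtual method, supporting another configuration format (or pre-processing before JSON parsing) only requires overriding it. The following is an illustrative sketch, not part of this commit: a hypothetical loader that strips // line comments before handing the data to JSON.parse.

const fs = require( 'fs' );
const { ConfLoader, ConfStore } = require( 'liza' ).conf;

// hypothetical loader tolerating "//" line comments in its input
const CommentedJsonLoader = ConfLoader.extend(
{
    'override parseConfData'( data )
    {
        return Promise.resolve(
            JSON.parse( data.replace( /^\s*\/\/.*$/mg, '' ) )
        );
    },
} );

CommentedJsonLoader( fs, ConfStore )
    .fromFile( 'conf/vanilla-server.json' )
    .then( conf => conf.get( 'name' ) )
    .then( name => console.log( name ) );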


@ -0,0 +1,123 @@
/**
* Configuration loader
*
* Copyright (C) 2017 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
const { Class } = require( 'easejs' );
/**
* Load system configuration from JSON
*
* @example
* ConfLoader( require( 'fs' ), SomeStore )
* .fromFile( 'conf/vanilla-server.json' )
* .then( conf => conf.get( 'foo' ) );
*
* TODO: Merging multiple configuration files would be convenient for
* modular configuration.
*/
module.exports = Class( 'ConfLoader',
{
/**
* Filesystem module
* @type {fs}
*/
'private _fs': null,
/**
* Store object constructor
* @type {function():Store}
*/
'private _storeCtor': null,
/**
* Initialize with provided filesystem module and Store constructor
*
* The module should implement `#readFile` compatible with
* Node.js'. The Store constructor `store_ctor` is used to instantiate
* new stores to be populated with configuration data.
*
* @param {fs} fs filesystem module
* @param {function():Store} store_ctor Store object constructor
*/
constructor( fs, store_ctor )
{
this._fs = fs;
this._storeCtor = store_ctor;
},
/**
* Produce configuration from file
*
* A Store will be produced, populated with the configuration data.
*
* @param {string} filename path to configuration JSON
*
* @return {Promise.<Store>} a promise of a populated Store
*/
'public fromFile'( filename )
{
return new Promise( ( resolve, reject ) =>
{
this._fs.readFile( filename, 'utf8', ( err, data ) =>
{
if ( err )
{
reject( err );
return;
}
try
{
const store = this._storeCtor();
resolve(
this.parseConfData( data )
.then( parsed => store.populate( parsed ) )
.then( _ => store )
);
}
catch ( e )
{
reject( e );
}
} );
} );
},
/**
* Parse raw configuration string
*
* Parses configuration string as JSON.
*
* @param {string} data raw configuration data
*
* @return {Promise.<Object>} `data` parsed as JSON
*/
'virtual protected parseConfData'( data )
{
return Promise.resolve( JSON.parse( data ) );
},
} );


@ -0,0 +1,44 @@
/**
* Ideal Store for system configuration
*
* Copyright (C) 2017 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
const {
AutoObjectStore,
DelimitedKey,
MemoryStore,
} = require( '../' ).store;
/**
* A store that recursively instantiates itself
*
* This store is ideal for nested configurations, and handles cases where
* configuration might be asynchronously retrieved. Nested values may be
* retrieved by delimiting the key with `.` (e.g. `foo.bar.baz`); see
* trait `DelimitedKey` for more information and examples.
*/
module.exports = function ConfStore()
{
return MemoryStore
.use( AutoObjectStore( ConfStore ) )
.use( DelimitedKey( '.' ) )();
};
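
The combined effect of AutoObjectStore and DelimitedKey is that nested configuration objects become nested stores reachable through dot-delimited keys. A small illustrative sketch, not part of this commit:

const { ConfStore } = require( 'liza' ).conf;

const conf = ConfStore();

conf.populate( { http: { port: 8822 } } )
    .then( () => conf.get( 'http' ) )       // object converted to a sub-Store
    .then( http => http.get( 'port' ) )     // 8822
    .then( () => conf.get( 'http.port' ) )  // same value via a delimited key
    .then( port => console.log( port ) );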


@ -43,23 +43,44 @@ module.exports = Class( 'DataApiFactory',
* The source and method have type-specific meaning; that is, "source"
* may be a URL and "method" may be get/post for a RESTful service.
*
* @param {string} type service type (e.g. "rest")
* @param {Object} desc API description
* @param {string} type service type (e.g. "rest")
* @param {Object} desc API description
* @param {Bucket} bucket active bucket
* @param {string} api_name dapi name
*
* @return {DataApi} appropriate DataApi instance
*/
'public fromType': function( type, desc, bucket )
'public fromType': function( type, desc, bucket, api_name )
{
const static_data = ( desc['static'] || [] );
const nonempty = !!desc.static_nonempty;
const multiple = !!desc.static_multiple;
return this.descLookup( api_name, desc ).then( descl =>
{
const static_data = ( descl['static'] || [] );
const nonempty = !!descl.static_nonempty;
const multiple = !!descl.static_multiple;
const api = this._createDataApi( type, desc, bucket );
const api = this._createDataApi( type, descl, bucket );
return RestrictedDataApi(
StaticAdditionDataApi( api, nonempty, multiple, static_data ),
desc
);
return RestrictedDataApi(
StaticAdditionDataApi( api, nonempty, multiple, static_data ),
descl
);
} );
},
/**
* Look up dapi descriptor from configuration
*
* The default implementation just echoes back the given descriptor.
*
* @param {string} api_name dapi identifier
* @param {Object} desc given descriptor
*
* @return {Object} looked up descriptor
*/
'virtual protected descLookup'( api_name, desc )
{
return Promise.resolve( desc );
},


@ -38,7 +38,7 @@ module.exports = Class( 'DataApiManager' )
'private _dataApiFactory': null,
/**
* DataApi instances, indexed by API id
* DataApi instance promises, indexed by API id
* @type {Object}
*/
'private _dataApis': {},
@ -157,18 +157,18 @@ module.exports = Class( 'DataApiManager' )
}
// create the API if necessary (lazy-load); otherwise, use the existing
// instance
var api = this._dataApis[ api ] || ( function()
// instance (well, a promise for one)
var apip = this._dataApis[ api ] || ( function()
{
var apidesc = _self._apis[ api ];
// create a new instance of the API
return _self._dataApis[ api ] = _self._dataApiFactory.fromType(
apidesc.type, apidesc, bucket
).on( 'error', function( e )
{
_self.emit( 'error', e );
} );
apidesc.type, apidesc, bucket, api
)
.then( api =>
api.on( 'error', e => _self.emit( 'error', e ) )
);
} )();
// this has the effect of wiping out previous requests of the same id,
@ -187,28 +187,22 @@ module.exports = Class( 'DataApiManager' )
};
// process the request; we'll let them know when it comes back
try
apip.then( api => api.request( data, function()
{
api.request( data, function()
// we only wish to populate the field if the request should
// still be considered pending
var curuid = ( _self._pendingApiCall[ id ] || {} ).uid;
if ( curuid === uid )
{
// we only wish to populate the field if the request should
// still be considered pending
var curuid = ( _self._pendingApiCall[ id ] || {} ).uid;
if ( curuid === uid )
{
// forward to the caller
callback.apply( this, arguments );
// forward to the caller
callback.apply( this, arguments );
// clear the pending flag
_self._pendingApiCall[ id ] = undefined;
_self.emit( 'fieldLoaded', name, +index );
}
} );
}
catch ( e )
{
fc( e );
}
// clear the pending flag
_self._pendingApiCall[ id ] = undefined;
_self.emit( 'fieldLoaded', name, +index );
}
} ) )
.catch( e => fc( e ) );
};
// field is about to be re-loaded
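
Note that the lazy-load now caches a promise rather than a constructed instance, so concurrent requests for the same API id share a single factory call, and factory failures surface through the same failure callback instead of try/catch. A generic sketch of that promise-caching pattern, with purely illustrative names:

const cache = {};

// create `id` at most once; concurrent callers share the same promise
function getApi( id, create )
{
    return cache[ id ] || ( cache[ id ] = create( id ) );
}

getApi( 'rate', id => Promise.resolve( { id: id } ) );
getApi( 'rate', id => Promise.resolve( { id: id } ) )
    .then( api => console.log( api.id ) )  // "rate"
    .catch( e => console.error( e ) );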


@ -53,23 +53,27 @@ const {
*/
module.exports = Class( 'DocumentServer',
{
'public create': ( dao, logger, enc_service, origin_url ) => Server(
new JsonServerResponse.create(),
dao,
logger,
enc_service,
'public create': ( dao, logger, enc_service, origin_url, conf ) =>
Promise.all( [
conf.get( 'dapi' ),
] ).then( ([ dapi_conf ]) => Server(
new JsonServerResponse.create(),
dao,
logger,
enc_service,
DataProcessor(
bucket_filter,
( apis, request ) => DataApiManager(
ServerDataApiFactory(
origin_url || request.getOrigin(),
request
DataProcessor(
bucket_filter,
( apis, request ) => DataApiManager(
ServerDataApiFactory(
origin_url || request.getOrigin(),
request,
dapi_conf
),
apis
),
apis
),
DapiMetaSource( QuoteDataBucket ),
StagingBucket
)
),
DapiMetaSource( QuoteDataBucket ),
StagingBucket
)
) )
} );


@ -34,10 +34,10 @@ var AbstractClass = require( 'easejs' ).AbstractClass,
module.exports = AbstractClass( 'Daemon',
{
/**
* Quote server port
* @type {number}
* System configuration
* @type {Store}
*/
'private _httpPort': 0,
'private _conf': null,
/**
* Server to accept HTTP requests
@ -96,17 +96,9 @@ module.exports = AbstractClass( 'Daemon',
'private _rater': null,
'public __construct': function( http_port, log_priority )
'public __construct': function( conf )
{
this._httpPort = http_port;
this._rater = liza.server.rater.ProcessManager();
this._httpServer = this.getHttpServer();
this._accessLog = this._createAccessLog();
this._debugLog = this._createDebugLog( log_priority );
this._encService = this.getEncryptionService();
this._memcache = this.getMemcacheClient();
this._routers = this.getRouters();
this._conf = conf;
},
@ -115,10 +107,28 @@ module.exports = AbstractClass( 'Daemon',
*
* @return {undefined}
*/
'public start': function()
'public start'()
{
var _self = this;
return Promise.all( [
this._createDebugLog(),
this._createAccessLog(),
] ).then( ([ debug_log, access_log ]) =>
{
this._debugLog = debug_log;
this._accessLog = access_log;
this._httpServer = this.getHttpServer();
this._rater = liza.server.rater.ProcessManager();
this._encService = this.getEncryptionService();
this._memcache = this.getMemcacheClient();
this._routers = this.getRouters();
} )
.then( () => this._startDaemon() );
},
'private _startDaemon'()
{
this._debugLog.log( this._debugLog.PRIORITY_IMPORTANT,
"Access log path: %s", this._accessLogPath
);
@ -128,18 +138,18 @@ module.exports = AbstractClass( 'Daemon',
);
this._initSignalHandlers();
this._testEncryptionService( function()
this._testEncryptionService( () =>
{
_self._memcacheConnect();
_self._initMemoryLogger();
this._memcacheConnect();
this._initMemoryLogger();
_self._initRouters();
_self._initHttpServer( function()
this._initRouters();
this._initHttpServer( () =>
{
_self._initUncaughtExceptionHandler();
this._initUncaughtExceptionHandler();
// ready to roll
_self._debugLog.log( _self._debugLog.PRIORITY_INFO,
this._debugLog.log( this._debugLog.PRIORITY_INFO,
"Daemon initialization complete."
);
} );
@ -299,22 +309,30 @@ module.exports = AbstractClass( 'Daemon',
'private _createAccessLog': function()
{
this._accessLogPath =
( process.env.LOG_PATH_ACCESS || '/var/log/node/access.log' );
return this.getAccessLog()( this._accessLogPath );
return this._conf.get( 'log.access.path' )
.then( log_path =>
{
this._accessLogPath = log_path;
return this.getAccessLog()( this._accessLogPath );
} );
},
'private _createDebugLog': function( log_priority )
'private _createDebugLog': function()
{
this._debugLogPath =
( process.env.LOG_PATH_DEBUG || '/var/log/node/debug.log' );
return Promise.all( [
this._conf.get( 'log.priority' ),
this._conf.get( 'log.debug.path' ),
] )
.then( ([ priority, debug_log_path ]) =>
{
this._debugLogPath = debug_log_path;
return this.getPriorityLog()(
this._debugLogPath,
( process.env.LOG_PRIORITY || log_priority )
);
return this.getPriorityLog()(
debug_log_path,
priority
)
} );
},
@ -473,7 +491,7 @@ module.exports = AbstractClass( 'Daemon',
{
if ( router.init instanceof Function )
{
router.init( _self._debugLog, _self._encService );
router.init( _self._debugLog, _self._encService, _self._conf );
}
});
},
@ -514,25 +532,33 @@ module.exports = AbstractClass( 'Daemon',
this._debugLog
);
this._httpServer.listen( this._httpPort, function()
{
_self._debugLog.log(
1, "Server running on port %d", _self._httpPort
);
this._conf.get( 'http.port' )
.then( port => this._httpServer.listen( port, () =>
{
this._debugLog.log(
1, "Server running on port %d", _self._httpPort
);
callback();
} );
callback();
} ) )
.catch( e => this._httpError( e ) );
}
catch( err )
catch( e )
{
this._debugLog.log( this._debugLog.PRIORITY_ERROR,
"Unable to start HTTP server: %s",
err
);
// exit with an error
process.exit( 1 );
this._httpError( e );
}
},
'private _httpError'( e )
{
this._debugLog.log( this._debugLog.PRIORITY_ERROR,
"Unable to start HTTP server: %s",
err
);
// TODO: use daemon-level promise and reject it
process.exit( 1 );
},
} );


@ -96,7 +96,7 @@ var sflag = {};
exports.rater = {};
exports.init = function( logger, enc_service )
exports.init = function( logger, enc_service, conf )
{
var db = new MongoDb(
'program',
@ -109,46 +109,50 @@ exports.init = function( logger, enc_service )
);
const dao = MongoServerDao( db );
server = _createDocumentServer( dao, logger, enc_service );
server_cache = _createCache( server );
server.init( server_cache, exports.rater );
rating_service = RatingService( logger, dao, server, exports.rater );
// TODO: exports.init needs to support callbacks; this will work, but
// only because it's unlikely that we'll get a request within
// milliseconds of coming online
_initExportService( db, function( service )
_createDocumentServer( dao, logger, enc_service, conf ).then( srv =>
{
c1_export_service = service;
} );
server = srv;
server.on( 'quotePverUpdate', function( quote, program, event )
{
// let them know that we're going to be a moment
var c = event.wait();
server_cache = _createCache( server );
server.init( server_cache, exports.rater );
getCleaner( program ).clean( quote, function( err )
rating_service = RatingService( logger, dao, server, exports.rater );
// TODO: exports.init needs to support callbacks; this will work, but
// only because it's unlikely that we'll get a request within
// milliseconds of coming online
_initExportService( db, function( service )
{
// report on our success/failure
if ( err )
{
event.bad( err );
}
else
{
event.good();
}
c1_export_service = service;
} );
// we're done
c();
server.on( 'quotePverUpdate', function( quote, program, event )
{
// let them know that we're going to be a moment
var c = event.wait();
getCleaner( program ).clean( quote, function( err )
{
// report on our success/failure
if ( err )
{
event.bad( err );
}
else
{
event.good();
}
// we're done
c();
} );
} );
} );
}
function _createDocumentServer( dao, logger, enc_service )
function _createDocumentServer( dao, logger, enc_service, conf )
{
const origin_url = process.env.HTTP_ORIGIN_URL || '';
@ -163,7 +167,8 @@ function _createDocumentServer( dao, logger, enc_service )
);
}
return DocumentServer().create( dao, logger, enc_service, origin_url );
return DocumentServer()
.create( dao, logger, enc_service, origin_url, conf );
}


@ -51,10 +51,12 @@ var script_paths = [
( process.env.LV_ROOT_PATH || '.' ) + '/src/www/scripts/program/',
];
const legacy_path = process.env.LV_LEGACY_PATH + '/';
var script_prefix = {
liza: __dirname + '/../../',
assert: __dirname + '/../../assert/',
program: ( process.env.LV_LEGACY_PATH + '/' ) || '',
program: ( legacy_path + 'program/' ) || '',
};
/**
@ -103,7 +105,7 @@ exports.route = function( request, log )
suffix = parts[ 2 ];
var chk_paths = script_paths.slice();
chk_paths.unshift( script_prefix[ prefix ] || './' );
chk_paths.unshift( script_prefix[ prefix ] || legacy_path );
// check each of the paths for the script that was requested
( function check_path( paths )
@ -119,6 +121,7 @@ exports.route = function( request, log )
// check to see if the file exists within the path
var filename = ( cur_path + suffix );
fs.exists( filename, function( exists )
{
if ( !exists )


@ -21,12 +21,17 @@
const { Class } = require( 'easejs' );
const {
DataApiFactory,
http: {
NodeHttpImpl,
SpoofedNodeHttpImpl,
dapi: {
DataApiFactory,
http: {
NodeHttpImpl,
SpoofedNodeHttpImpl,
},
},
} = require( '../..' ).dapi;
store: {
StoreMissError,
},
} = require( '../..' );
/**
@ -47,11 +52,50 @@ module.exports = Class( 'ServerDataApiFactory' )
*/
'private _session': null,
/**
* Dapi configuration
* @type {Store}
*/
'private _conf': null,
constructor( origin, session )
constructor( origin, session, conf )
{
this._origin = ''+origin;
this._session = session;
this._conf = conf;
},
/**
* Look up dapi descriptor from configuration
*
* If no configuration is found for `api_name`, the original `desc` will
* be returned. Otherwise, they will be merged, with the lookup taking
* precedence.
*
* @param {string} api_name dapi identifier
* @param {Object} desc given descriptor
*
* @return {Object} looked up descriptor
*/
'override protected descLookup'( api_name, desc )
{
return this._conf.get( 'aliases' )
.then( aliases => aliases.get( api_name ) )
.then( desc_lookup => desc_lookup.reduce(
( ret, value, key ) =>
{
// merges the two, with lookup taking precedence
ret[ key ] = value;
return ret;
},
Object.create( desc )
) )
.catch( e => ( Class.isA( StoreMissError, e ) )
? desc
: Promise.reject( e )
);
},
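
Taken together with DocumentServer's conf.get( 'dapi' ), this lookup implies a configuration section keyed as dapi.aliases.<api_name>, merged over the program's own descriptor with the configured values winning. No such section ships in this commit's default configuration, so the fragment below is a hypothetical illustration only, with the descriptor fields (type, source) chosen purely as examples.

// hypothetical server configuration fragment (not part of this commit)
const conf_fragment = {
    dapi: {
        aliases: {
            // values here take precedence over the program's descriptor
            rate_quick: {
                type:   'rest',
                source: 'https://dapi.example.com/rate-quick',
            },
        },
    },
};

console.log( JSON.stringify( conf_fragment, null, 2 ) );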


@ -0,0 +1,177 @@
/**
* Convert objects to Stores upon retrieval
*
* Copyright (C) 2017 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
const { Trait, Class } = require( 'easejs' );
const Store = require( './Store' );
/**
* Convert objects into sub-stores containing its key/value pairs
*
* When retrieving a value that is an object, it will first be converted
* into a Store and populated with the key/value pairs of that
* object. Non-object values will remain untouched.
*
* This trait expects a constructor function to instantiate a new
* Store. Providing the same constructor as was used to instantiate the
* current object will allow for an object to be recursively converted into
* nested Stores.
*
* Sub-stores are cached until the value of the key they reference
* changes, after which point another request to `#get` will instantiate a
* _new_ store. The previous store will not be modified to reflect the new
* value.
*
* @example
* store.get( 'foo' ); // new Store (1)
* store.get( 'foo' ); // existing Store (1)
* store.add( 'foo', {} );
* store.get( 'foo' ); // new Store (2)
* store.add( 'foo', "bar" );
* store.get( 'foo' ); // "bar"
*/
module.exports = Trait( 'AutoObjectStore' )
.implement( Store )
.extend(
{
/**
* Constructor for object sub-stores
* @type {function(Object):Store}
*/
'private _ctor': null,
/**
* Store cache
* @type {Object.<Store>}
*/
'private _stores': {},
/**
* Initialize with Store constructor
*
* `ctor` will be used to instantiate Stores as needed.
*
* @param {function():Store} ctor Store constructor
*/
__mixin( ctor )
{
this._ctor = ctor;
},
/**
* Add item to store under `key` with value `value`
*
* Any cached store for `key` will be cleared so that future `#get`
* requests return up-to-date data.
*
* @param {string} key store key
* @param {*} value value for key
*
* @return {Promise.<Store>} promise to add item to store, resolving to
* self (for chaining)
*/
'virtual abstract override public add'( key, value )
{
return this.__super( key, value )
.then( ret =>
{
delete this._stores[ key ];
return ret;
} );
},
/**
* Retrieve item from store under `key`
*
* If the returned value is an object, it will automatically be
* converted into a store and populated with the object's
* values; otherwise, the value will be returned unaltered.
*
* Only vanilla objects (that is---not instances of anything but
* `Object`) will be converted into a Store.
*
* @param {string} key store key
*
* @return {Promise} promise for the key value
*/
'virtual abstract override public get'( key )
{
if ( this._stores[ key ] !== undefined )
{
return Promise.resolve( this._stores[ key ] );
}
return this.__super( key )
.then( value =>
{
if ( !this._isConvertable( value ) )
{
return value;
}
// create and cache store (we cache _before_ populating,
// otherwise another request might come in and create yet
// another store before we have a chance to complete
// populating)
const substore = this._ctor();
this._stores[ key ] = substore;
return substore.populate( value )
.then( () => substore );
} );
},
/**
* Determine whether given value should be converted into a Store
*
* Only vanilla objects (that is---not instances of anything but
* `Object`) will be converted into a Store.
*
* @param {*} value value under consideration
*
* @return {boolean} whether to convert `value`
*/
'private _isConvertable'( value )
{
if ( typeof value !== 'object' )
{
return false;
};
const ctor = value.constructor || {};
// instances of prototypes should be left alone, so we should ignore
// everything that's not a vanilla object
if ( ctor !== Object )
{
return false;
}
return true;
},
} );
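
Passing the same constructor that produced the outer store, as the trait documentation above describes, makes the conversion recursive; conf/ConfStore is built exactly this way. A self-contained sketch, not part of this commit:

const { AutoObjectStore, MemoryStore } = require( 'liza' ).store;

// each nested object becomes another store of the same kind
function NestedStore()
{
    return MemoryStore.use( AutoObjectStore( NestedStore ) )();
}

const store = NestedStore();

store.add( 'outer', { inner: { value: 1 } } )
    .then( () => store.get( 'outer' ) )     // sub-Store
    .then( sub => sub.get( 'inner' ) )      // nested sub-Store
    .then( inner => inner.get( 'value' ) )  // 1
    .then( value => console.log( value ) );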


@ -0,0 +1,134 @@
/**
* Add and retrieve nested store values using string of delimited keys
*
* Copyright (C) 2017 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
const { Trait, Class } = require( 'easejs' );
const Store = require( './Store' );
/**
* Add and retrieve items from (possibly) nested Stores
*
* This is a convenient syntax for deeply nested Stores and can greatly cut
* down on the verbosity of promises. This is best and least confusingly
* described with an example:
*
* @example
* const outer = Store.use( DelimitedKey( '.' ) )();
* const middle = Store();
* const inner = Store();
*
* // resolves to "inner value get"
* inner.add( 'foo', "inner value get" )
* .then( () => middle.add( 'inner', inner ) )
* .then( () => outer.add( 'middle', middle ) )
* .then( () => outer.get( 'middle.inner.foo' ) );
*
* // resolves to "inner value add"
* outer.add( 'middle.inner.foo', "inner value add" )
* .then( () => inner.get( 'foo' ) );
*/
module.exports = Trait( 'DelimitedKey' )
.implement( Store )
.extend(
{
/**
* Key delimiter
* @type {string}
*/
'private _delim': '',
/**
* Specify key delimiter
*
* @param {string} delim key delimiter
*/
__mixin( delim )
{
this._delim = ''+delim;
},
/**
* Add item to (possibly) nested store under `key` with value `value`
*
* The given key `key` is split on the chosen delimiter (specified at
* the time of mixin). All but the last element in `key` are retrieved
* recursively as Stores; the final Store is then assigned `value` to
* the key represented by the last value in the delimited `key`.
*
* @param {string} key delimited store key
* @param {*} value value for key
*
* @return {Promise.<Store>} promise to add item to store, resolving to
* self (for chaining)
*/
'virtual abstract override public add'( key, value )
{
if ( typeof key !== 'string' )
{
return this.__super( key );
}
const parts = key.split( this._delim );
const maxi = parts.length - 1;
const __super = this.__super;
return parts
.reduce(
( promise, part, i ) => promise.then( store =>
( i < maxi ) ? store.get( part ) : store
),
Promise.resolve( this )
)
.then( store => __super.call( this, parts[ maxi ], value ) );
},
/**
* Retrieve item from (possibly) nested store
*
* The given key `key` is split on the chosen delimiter (specified at
* the time of mixin). All but the last element in `key` are retrieved
* recursively as Stores; the final element in delimited `key` then
* identifies the key to be retrieved from the final Store.
*
* @param {string} key delimited store key
*
* @return {Promise} promise for the key value
*/
'virtual abstract override public get'( key )
{
if ( typeof key !== 'string' )
{
return this.__super( key );
}
const [ first, ...parts ] = key.split( this._delim );
return parts.reduce(
( promise, part ) => promise.then( store => store.get( part ) ),
this.__super( first )
);
},
} );


@ -132,6 +132,31 @@ module.exports = Class( 'DiffStore' )
},
/**
* Populate store with each element in object `obj`
*
* This is simply a convenient way to call `#add` for each element in an
* object. This does directly call `#add`, so overriding that method
* will also affect this one.
*
* If the intent is to change the behavior of what happens when an item
* is added to the store, override the `#add` method instead of this one
* so that it affects _all_ adds, not just calls to this method.
*
* @param {Object} obj object with which to populate store
*
* @return {Array.<Promise.<Store>>} array of #add promises
*/
'virtual public populate'( obj )
{
return Promise.all(
Object.keys( obj ).map(
key => this.add( key, obj[ key ] )
)
);
},
/**
* Retrieve diff of `key`
*


@ -89,6 +89,31 @@ module.exports = Class( 'MemoryStore' )
},
/**
* Populate store with each element in object `obj`
*
* This is simply a convenient way to call `#add` for each element in an
* object. This does directly call `#add`, so overriding that method
* will also affect this one.
*
* If the intent is to change the behavior of what happens when an item
* is added to the store, override the `#add` method instead of this one
* so that it affects _all_ adds, not just calls to this method.
*
* @param {Object} obj object with which to populate store
*
* @return {Array.<Promise.<Store>>} array of #add promises
*/
'virtual public populate'( obj )
{
return Promise.all(
Object.keys( obj ).map(
key => this.add( key, obj[ key ] )
)
);
},
/**
* Retrieve item from store under `key`
*

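Because #populate is just one #add per key, it composes with whatever a store's #add already does (including the traits above), and it resolves only after every add has resolved. A brief usage sketch, not part of this commit:

const { MemoryStore } = require( 'liza' ).store;

const store = MemoryStore();

store.populate( { foo: 1, bar: 2 } )
    .then( () => store.get( 'bar' ) )
    .then( bar => console.log( bar ) );  // 2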

@ -1,7 +1,7 @@
/**
* Generic key/value store
*
* Copyright (C) 2016 R-T Specialty, LLC.
* Copyright (C) 2016, 2017 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework
*
@ -50,6 +50,24 @@ module.exports = Interface( 'Store',
'public add': [ 'key', 'value' ],
/**
* Populate store with each element in object `obj`
*
* This is simply a convenient way to call `#add` for each element in an
* object. This does directly call `#add`, so overriding that method
* will also affect this one.
*
* If the intent is to change the behavior of what happens when an item
* is added to the store, override the `#add` method instead of this one
* so that it affects _all_ adds, not just calls to this method.
*
* @param {Object} obj object with which to populate store
*
* @return {Array.<Promise.<Store>>} array of #add promises
*/
'public populate': [ 'obj' ],
/**
* Retrieve item from store under `key`
*


@ -0,0 +1,136 @@
/**
* Tests ConfLoader
*/
'use strict';
const chai = require( 'chai' );
const expect = chai.expect;
const {
conf: {
ConfLoader: Sut,
},
store: {
MemoryStore: Store,
},
} = require( '../../' );
chai.use( require( 'chai-as-promised' ) );
describe( 'ConfLoader', () =>
{
it( "loads Store'd configuration from file", () =>
{
const expected_path = "/foo/bar/baz.json";
const expected_data = '{ "foo": "bar" }';
const fs = {
readFile( path, encoding, callback )
{
expect( path ).to.equal( expected_path );
expect( encoding ).to.equal( 'utf8' );
callback( null, expected_data );
},
};
return expect(
Sut( fs, Store )
.fromFile( expected_path )
.then( conf => conf.get( 'foo' ) )
).to.eventually.deep.equal( JSON.parse( expected_data ).foo );
} );
it( "fails on read error", () =>
{
const expected_err = Error( 'rejected' );
const fs = {
readFile( _, __, callback )
{
callback( expected_err, null );
},
};
return expect( Sut( fs ).fromFile( '' ) )
.to.eventually.be.rejectedWith( expected_err );
} );
it( "can override #parseConfData for custom parser", () =>
{
const result = { foo: {} };
const input = "foo";
const fs = {
readFile( _, __, callback )
{
callback( null, input );
},
};
const sut = Sut.extend(
{
'override parseConfData'( given_input )
{
expect( given_input ).to.equal( input );
return Promise.resolve( result );
},
} )( fs, Store );
return expect(
sut.fromFile( '' )
.then( conf => conf.get( 'foo' ) )
).to.eventually.equal( result.foo );
} );
it( 'rejects promise on parsing error', () =>
{
const expected_err = SyntaxError( 'test parsing error' );
const fs = {
readFile( _, __, callback )
{
// make async so that we clear the stack, and therefore
// try/catch
process.nextTick( () => callback( null, '' ) );
},
};
const sut = Sut.extend(
{
'override parseConfData'( given_input )
{
throw expected_err;
},
} )( fs, Store );
return expect( sut.fromFile( '' ) )
.to.eventually.be.rejectedWith( expected_err );
} );
it( "rejects promise on Store ctor error", () =>
{
const expected_err = Error( 'test Store ctor error' );
const fs = {
readFile: ( _, __, callback ) => callback( null, '' ),
};
const badstore = () => { throw expected_err };
return expect( Sut( fs, badstore ).fromFile( '' ) )
.to.eventually.be.rejectedWith( expected_err );
} );
it( "rejects promise on bad fs call", () =>
{
return expect( Sut( {}, Store ).fromFile( '' ) )
.to.eventually.be.rejected;
} );
} );


@ -116,7 +116,7 @@ function createStubDapiFactory( dapis )
return {
fromType( type )
{
return dapis[ type ];
return Promise.resolve( dapis[ type ] );
},
};
}


@ -0,0 +1,114 @@
/**
* Tests AutoObjectStore
*
* Copyright (C) 2017 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
const chai = require( 'chai' );
const expect = chai.expect;
chai.use( require( 'chai-as-promised' ) );
const {
AutoObjectStore: Sut,
MemoryStore: Store,
} = require( '../../' ).store;
describe( 'AutoObjectStore', () =>
{
describe( "given an object value", () =>
{
it( "applies given ctor to objects", () =>
{
const obj = Store();
const dummy_ctor = () => obj;
const sut = Store.use( Sut( dummy_ctor ) )();
const foo = sut
.add( 'foo', {} )
.then( _ => sut.get( 'foo' ) );
return expect( foo )
.to.eventually.deep.equal( obj );
} );
it( "adds object values to new store", () =>
{
const obj = { bar: "baz" };
const sut = Store.use( Sut( Store ) )();
const bar = sut
.add( 'foo', obj )
.then( _ => sut.get( 'foo' ) )
.then( substore => substore.get( 'bar' ) );
return expect( bar ).to.eventually.equal( obj.bar );
} );
it( "caches sub-store until key changes", () =>
{
const obj = {};
const sut = Store.use( Sut( Store ) )();
return sut
.add( 'foo', {} )
.then( _ => sut.get( 'foo' ) )
.then( store1 =>
expect( sut.get( 'foo' ) ).to.eventually.equal( store1 )
.then( _ => sut.add( 'foo', "new" ) )
.then( _ => sut.get( 'foo' ) )
.then( store2 =>
expect( store2 ).to.not.equal( store1 )
)
);
} );
} );
it( "leaves non-objects untouched", () =>
{
const expected = "bar";
const sut = Store.use( Sut( () => null ) )();
const foo = sut
.add( 'foo', expected )
.then( _ => sut.get( 'foo' ) );
return expect( foo ).to.eventually.equal( expected );
} );
// includes class instances, since easejs generates prototypes
it( "leaves prototype instances untouched", () =>
{
const expected = ( new function() {} );
const sut = Store.use( Sut( () => null ) )();
const foo = sut
.add( 'foo', expected )
.then( _ => sut.get( 'foo' ) );
return expect( foo ).to.eventually.equal( expected );
} );
} );


@ -0,0 +1,107 @@
/**
* Tests DelimitedKey
*
* Copyright (C) 2017 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
const chai = require( 'chai' );
const expect = chai.expect;
chai.use( require( 'chai-as-promised' ) );
const {
DelimitedKey: Sut,
MemoryStore: Store,
StoreMissError,
} = require( '../../' ).store;
describe( 'DelimitedKey', () =>
{
describe( '#get', () =>
{
it( "retrieves nested store keys", () =>
{
const outer = Store.use( Sut( '.' ) )();
const middle = Store();
const inner = Store();
const inner_val = {};
return expect(
inner.add( 'foo', inner_val )
.then( () => middle.add( 'inner', inner ) )
.then( () => outer.add( 'middle', middle ) )
.then( () => outer.get( 'middle.inner.foo' ) )
).to.eventually.equal( inner_val );
} );
it( "fails on unknown nested key", () =>
{
const outer = Store.use( Sut( '.' ) )();
const inner = Store();
return expect(
outer.add( 'inner', inner )
.then( () => outer.get( 'inner.foo.bar.baz' ) )
).to.eventually.be.rejectedWith( StoreMissError, /[^.]foo\b/ );
} );
// rather than blowing up attempting to split
it( "fails gracefully on non-string key", () =>
{
return expect(
Store.use( Sut( '.' ) )().get( undefined )
).to.eventually.be.rejectedWith( StoreMissError );
} );
} );
describe( '#add', () =>
{
it( "sets nested store keys", () =>
{
const outer = Store.use( Sut( '.' ) )();
const inner = Store();
const inner_val = {};
return expect(
inner.add( 'foo', inner_val )
.then( () => outer.add( 'inner', inner ) )
.then( () => outer.add( 'inner.foo', inner_val ) )
.then( () => inner.get( 'foo' ) )
).to.eventually.equal( inner_val );
} );
it( "fails on unknown nested key", () =>
{
const outer = Store.use( Sut( '.' ) )();
const inner = Store();
return expect(
outer.add( 'inner', inner )
.then( () => outer.add( 'inner.none.foo', "fail" ) )
).to.eventually.be.rejectedWith( StoreMissError, /[^.]none\b/ );
} );
} );
} );


@ -232,4 +232,39 @@ describe( 'store.DiffStore', () =>
} );
} );
} );
describe( '#populate', () =>
{
it( "#add's each element of object to store", () =>
{
const obj = { foo: {}, bar: {} };
const sut = Sut();
return sut.populate( obj )
.then( ps =>
{
// by reference
expect( sut.get( 'foo' ) )
.to.eventually.equal( obj.foo );
expect( sut.get( 'bar' ) )
.to.eventually.equal( obj.bar );
expect( ps.length )
.to.equal( Object.keys( obj ).length );
} );
} );
it( "fails if any add fails", () =>
{
const e = Error( 'ok' );
const sut = Sut.extend( {
'override add': ( k, v ) => Promise.reject( e )
} )();
return expect( sut.populate( { a: 1 } ) )
.to.eventually.be.rejectedWith( e );
} );
} );
} );


@ -71,6 +71,41 @@ describe( 'store.MemoryStore', () =>
} );
describe( '#populate', () =>
{
it( "#add's each element of object to store", () =>
{
const obj = { foo: {}, bar: {} };
const sut = Sut();
return sut.populate( obj )
.then( ps =>
{
// by reference
expect( sut.get( 'foo' ) )
.to.eventually.equal( obj.foo );
expect( sut.get( 'bar' ) )
.to.eventually.equal( obj.bar );
expect( ps.length )
.to.equal( Object.keys( obj ).length );
} );
} );
it( "fails if any add fails", () =>
{
const e = Error( 'ok' );
const sut = Sut.extend( {
'override add': ( k, v ) => Promise.reject( e )
} )();
return expect( sut.populate( { a: 1 } ) )
.to.eventually.be.rejectedWith( e );
} );
} );
// most things implicitly tested above
describe( '#get', () =>
{
@ -118,7 +153,7 @@ describe( 'store.MemoryStore', () =>
describe( 'with mixin', () =>
{
it( 'allows overriding add', done =>
it( 'allows overriding #add', done =>
{
const expected_key = 'foo';
const expected_value = {};
@ -137,7 +172,28 @@ describe( 'store.MemoryStore', () =>
} );
it( 'allows overriding get', done =>
it( "allows overriding #populate", () =>
{
const obj = {};
let called = false;
return Sut.use(
Trait.extend( Sut,
{
'override populate'( given )
{
expect( given ).to.equal( obj );
called = true;
return Promise.resolve( true );
}
} )
)().populate( obj )
.then( () => expect( called ).to.equal( true ) );
} );
it( 'allows overriding #get', done =>
{
const expected_key = 'bar';
@ -154,7 +210,7 @@ describe( 'store.MemoryStore', () =>
} );
it( 'allows overriding clear', done =>
it( 'allows overriding #clear', done =>
{
Sut.use(
Trait.extend( Sut,