
[DEV-5312] Convert MongoServerDao to TS

master
Austin Schaffer 2019-11-07 17:38:47 -05:00
parent c2458dee78
commit 015a3f5d47
19 changed files with 1721 additions and 1134 deletions

View File

@ -24,6 +24,7 @@
import { Program } from "../program/Program";
import { Quote, QuoteId } from "./Quote";
import { QuoteDataBucket } from "../bucket/QuoteDataBucket";
import { PositiveInteger } from "../numeric";
export declare class BaseQuote implements Quote
@ -33,7 +34,7 @@ export declare class BaseQuote implements Quote
*
* @return quote program
*/
getProgram(): Program;
getProgram(): Program
/**
@ -41,7 +42,7 @@ export declare class BaseQuote implements Quote
*
* @return program id
*/
getProgramId(): string;
getProgramId(): string
/**
@ -53,7 +54,7 @@ export declare class BaseQuote implements Quote
*
* @return quote id
*/
getId(): QuoteId;
getId(): QuoteId
/**
@ -61,7 +62,7 @@ export declare class BaseQuote implements Quote
*
* @return id of current step
*/
getCurrentStepId(): number;
getCurrentStepId(): number
/**
@ -72,7 +73,7 @@ export declare class BaseQuote implements Quote
*
* @return self
*/
setExplicitLock( reason: string, step: number ): this;
setExplicitLock( reason: string, step: number ): this
/**
@ -82,7 +83,7 @@ export declare class BaseQuote implements Quote
*
* @return self
*/
setLastPremiumDate( timestamp: UnixTimestamp ): this;
setLastPremiumDate( timestamp: UnixTimestamp ): this
/**
@ -90,7 +91,7 @@ export declare class BaseQuote implements Quote
*
* @return last calculated time or 0
*/
getLastPremiumDate(): UnixTimestamp;
getLastPremiumDate(): UnixTimestamp
/**
@ -99,4 +100,54 @@ export declare class BaseQuote implements Quote
* @return the data bucket
*/
getBucket(): QuoteDataBucket
/**
* Retrieves the reason for an explicit lock
*
* @return lock reason
*/
getExplicitLockReason(): string
/**
* Returns the maximum step to which the explicit lock applies
*
* If no step restriction is set, then 0 will be returned.
*
* @return {number} locked max step or 0 if not applicable
*/
getExplicitLockStep(): PositiveInteger
/**
* Returns whether the quote has been imported
*
* @return true if imported, otherwise false
*/
isImported(): boolean
/**
* Returns whether the quote has been bound
*
* @return true if bound, otherwise false
*/
isBound(): boolean
/**
* Returns the id of the highest step the quote has reached
*
* @return top visited step id
*/
getTopVisitedStepId(): PositiveInteger
/**
* Returns the id of the highest step the quote has saved
*
* @return top saved step id
*/
getTopSavedStepId(): PositiveInteger
}

View File

@ -69,7 +69,7 @@ const {
DocumentServer,
db: {
MongoServerDao,
MongoServerDao: { MongoServerDao },
},
lock: {
@ -127,7 +127,7 @@ exports.post_rate_publish = {};
exports.init = function( logger, enc_service, conf )
{
var db = _createDB( logger );
const dao = MongoServerDao( db );
const dao = new MongoServerDao( db );
db.collection( 'quotes', function( err, collection )
{

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,788 @@
/**
* Mongo DB DAO for program server
*
* Copyright (C) 2010-2019 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { ServerDao, Callback } from "./ServerDao";
import { MongoCollection, MongoUpdate } from "mongodb";
import { PositiveInteger } from "../../numeric";
import { ServerSideQuote } from "../quote/ServerSideQuote";
import { QuoteId } from "../../document/Document";
import { WorksheetData } from "../rater/Rater";
const EventEmitter = require( 'events' ).EventEmitter;
type ErrorCallback = ( err: NullableError ) => void;
/**
* Uses MongoDB as a data store
*/
export class MongoServerDao extends EventEmitter implements ServerDao
{
/** Collection used to store quotes */
readonly COLLECTION: string = 'quotes';
/** Sequence (auto-increment) collection */
readonly COLLECTION_SEQ: string = 'seq';
/** Sequence key for quote ids */
readonly SEQ_QUOTE_ID: string = 'quoteId';
/** Sequence quoteId default */
readonly SEQ_QUOTE_ID_DEFAULT: number = 200000;
/** Whether the DAO is initialized and ready to be used */
private _ready: boolean = false;
/** Collection to save data to */
private _collection?: MongoCollection | null;
/** Collection to read sequences (auto-increments) from */
private _seqCollection?: MongoCollection | null;
/**
* Initializes DAO
*
* @param {Mongo.Db} db mongo database connection
*/
constructor(
private readonly _db: any
)
{
super();
}
/**
* Initializes error events and attempts to connect to the database
*
* connectError event will be emitted on failure.
*
* @param Function callback function to call when connection is complete
* (will not be called if connection fails)
*
* @return MongoServerDao self to allow for method chaining
*/
init( callback: () => {} ): this
{
var dao = this;
// map db error event (on connection error) to our connectError event
this._db.on( 'error', function( err: any )
{
dao._ready = false;
dao._collection = null;
dao.emit( 'connectError', err );
});
this.connect( callback );
return this;
}
/**
* Attempts to connect to the database
*
* connectError event will be emitted on failure.
*
* @param Function callback function to call when connection is complete
* (will not be called if connection fails)
*
* @return MongoServerDao self to allow for method chaining
*/
connect( callback: () => {} ): this
{
var dao = this;
// attempt to connect to the database
this._db.open( function( err: any, db: any )
{
// if there was an error, don't bother with anything else
if ( err )
{
// in some circumstances, it may just be telling us that we're
// already connected (even though the connection may have been
// broken)
if ( err.errno !== undefined )
{
dao.emit( 'connectError', err );
return;
}
}
var ready_count = 0;
var check_ready = function()
{
if ( ++ready_count < 2 )
{
return;
}
// we're ready to roll!
dao._ready = true;
dao.emit( 'ready' );
// connection was successful; call the callback
if ( callback instanceof Function )
{
callback.call( dao );
}
}
// quotes collection
db.collection(
dao.COLLECTION,
function(
_err: any,
collection: MongoCollection,
) {
// for some reason this gets called more than once
if ( collection == null )
{
return;
}
// initialize indexes
collection.createIndex(
[ ['id', 1] ],
true,
function( _err: any, _index: { [P: string]: any } )
{
// mark the DAO as ready to be used
dao._collection = collection;
check_ready();
}
);
}
);
// seq collection
db.collection(
dao.COLLECTION_SEQ,
function(
err: any,
collection: MongoCollection,
) {
if ( err )
{
dao.emit( 'seqError', err );
return;
}
if ( collection == null )
{
return;
}
dao._seqCollection = collection;
// has the sequence we'll be referencing been initialized?
collection.find(
{ _id: dao.SEQ_QUOTE_ID },
{ limit: <PositiveInteger>1 },
function( err: any, cursor )
{
if ( err )
{
dao._initQuoteIdSeq( check_ready )
return;
}
cursor.toArray( function( _err: any, data: any[] )
{
if ( data.length == 0 )
{
dao._initQuoteIdSeq( check_ready );
return;
}
check_ready();
});
}
);
}
);
});
return this;
}
private _initQuoteIdSeq( callback: () => void )
{
var dao = this;
this._seqCollection!.insert(
{
_id: this.SEQ_QUOTE_ID,
val: this.SEQ_QUOTE_ID_DEFAULT,
},
function( err: any, _docs: any )
{
if ( err )
{
dao.emit( 'seqError', err );
return;
}
dao.emit( 'seqInit', dao.SEQ_QUOTE_ID );
callback.call( dao );
}
);
}
/**
* Saves a quote to the database
*
* A full save will include all metadata. This should not cause any
* problems with race conditions for pending Data API calls on meta
* fields because those results write to individual indexes and do not
* rely on existing data.
*
* @param Quote quote the quote to save
* @param Function success_callback function to call on success
* @param Function failure_callback function to call if save fails
* @param Object save_data quote data to save (optional)
* @param Object push_data quote data to push (optional)
*/
saveQuote(
quote: ServerSideQuote,
success_callback: Callback,
failure_callback: Callback,
save_data?: any,
push_data?: any,
): this
{
var dao = this;
var meta: Record<string, any> = {};
// if we're not ready, then we can't save the quote!
if ( this._ready === false )
{
this.emit( 'saveQuoteError',
{ message: 'Database server not ready' },
Error( 'Database not ready' ),
quote
);
failure_callback.call( this, quote );
return dao;
}
if ( save_data === undefined )
{
save_data = {
data: quote.getBucket().getData(),
};
// full save will include all metadata
meta = quote.getMetabucket().getData();
}
var id = quote.getId();
// some data should always be saved because the quote will be created if
// it does not yet exist
save_data.id = id;
save_data.pver = quote.getProgramVersion();
save_data.importDirty = 1;
save_data.lastPremDate = quote.getLastPremiumDate();
save_data.initialRatedDate = quote.getRatedDate();
save_data.explicitLock = quote.getExplicitLockReason();
save_data.explicitLockStepId = quote.getExplicitLockStep();
save_data.importedInd = +quote.isImported();
save_data.boundInd = +quote.isBound();
save_data.lastUpdate = Math.round(
( new Date() ).getTime() / 1000
);
// meta will eventually take over for much of the above data
meta.liza_timestamp_initial_rated = [ quote.getRatedDate() ];
// save the stack so we can track this call via the oplog
save_data._stack = ( new Error() ).stack;
// avoid wiping out other metadata (since this may not be a full set)
Object.keys( meta ).forEach(
key => save_data[ 'meta.' + key ] = meta[ key ]
);
// do not push empty objects
const document = ( !push_data || !Object.keys( push_data ).length )
? { '$set': save_data }
: { '$set': save_data, '$push': push_data };
// update the quote data if it already exists (same id), otherwise
// insert it
this._collection!.update( { id: id },
document,
// create record if it does not yet exist
{ upsert: true },
// on complete
function( err, _docs )
{
// if an error occurred, then we cannot continue
if ( err )
{
dao.emit( 'saveQuoteError', err, quote );
// let the caller handle the error
if ( failure_callback instanceof Function )
{
failure_callback.call( dao, quote );
}
return;
}
// successful
if ( success_callback instanceof Function )
{
success_callback.call( dao, quote );
}
}
);
return this;
}
/**
* Merges quote data with the existing (rather than overwriting)
*
* @param {Quote} quote quote to save
* @param {Object} data quote data
* @param {Function} scallback successful callback
* @param {Function} fcallback failure callback
*/
mergeData(
quote: ServerSideQuote,
data: MongoUpdate,
scallback: Callback,
fcallback: Callback,
): this
{
// we do not want to alter the original data; use it as a prototype
var update = data;
// save the stack so we can track this call via the oplog
var _self = this;
this._collection!.update( { id: quote.getId() },
{ '$set': update },
{},
function( err, _docs )
{
if ( err )
{
_self.emit( 'saveQuoteError', err, quote );
if ( typeof fcallback === 'function' )
{
fcallback( quote );
}
return;
}
if ( typeof scallback === 'function' )
{
scallback( quote );
}
}
);
return this;
}
/**
* Merges bucket data with the existing bucket (rather than overwriting the
* entire bucket)
*
* @param {Quote} quote quote to save
* @param {Object} data bucket data
* @param {Function} scallback successful callback
* @param {Function} fcallback failure callback
*
* @return {MongoServerDao} self
*/
mergeBucket(
quote: ServerSideQuote,
data: MongoUpdate,
success: Callback,
failure: Callback,
): this
{
var update: MongoUpdate = {};
for ( var field in data )
{
if ( !field )
{
continue;
}
update[ 'data.' + field ] = data[ field ];
}
return this.mergeData( quote, update, success, failure );
}
/**
* Saves the quote state to the database
*
* The quote state includes the current step, the top visited step and the
* explicit lock message.
*
* @param Quote quote the quote to save
* @param Function success_callback function to call on success
* @param Function failure_callback function to call if save fails
*
* @return MongoServerDao self
*/
saveQuoteState(
quote: ServerSideQuote,
success_callback: any,
failure_callback: any,
)
{
var update = {
currentStepId: quote.getCurrentStepId(),
topVisitedStepId: quote.getTopVisitedStepId(),
topSavedStepId: quote.getTopSavedStepId(),
};
return this.mergeData(
quote, update, success_callback, failure_callback
);
}
saveQuoteClasses(
quote: ServerSideQuote,
classes: any,
success: any,
failure: any,
)
{
return this.mergeData(
quote,
{ classData: classes },
success,
failure
);
}
/**
* Save document metadata (meta field on document)
*
* Only the provided indexes will be modified (that is---data will be
* merged with what is already in the database).
*
* @param {Quote} quote destination quote
* @param {Object} new_meta bucket-formatted data to write
* @param {Function} success callback on success
* @param {Function} failure callback on error
*
* @return {undefined}
*/
saveQuoteMeta(
quote: ServerSideQuote,
new_meta: any,
success: Callback,
failure: Callback,
): void
{
const update: MongoUpdate = {};
for ( var key in new_meta )
{
var meta = new_meta[ key ];
for ( var i in meta )
{
update[ 'meta.' + key + '.' + i ] = new_meta[ key ][ i ];
}
}
this.mergeData( quote, update, success, failure );
}
/**
* Saves the quote lock state to the database
*
* @param Quote quote the quote to save
* @param Function success_callback function to call on success
* @param Function failure_callback function to call if save fails
*
* @return MongoServerDao self
*/
saveQuoteLockState(
quote: ServerSideQuote,
success_callback: Callback,
failure_callback: Callback,
): this
{
// lock state is saved by default
return this.saveQuote(
quote,
success_callback,
failure_callback,
{}
);
}
/**
* Pulls quote data from the database
*
* @param Integer quote_id id of quote
* @param Function( data ) callback function to call when data is available
*
* @return MongoServerDao self to allow for method chaining
*/
pullQuote(
quote_id: PositiveInteger,
callback: ( data: Record<string, any> | null ) => void
): this
{
var dao = this;
// XXX: TODO: Do not read whole of record into memory; filter out
// revisions!
this._collection!.find( { id: quote_id }, { limit: <PositiveInteger>1 },
function( _err, cursor )
{
cursor.toArray( function( _err: NullableError, data: any[] )
{
// was the quote found?
if ( data.length == 0 )
{
callback.call( dao, null );
return;
}
// return the quote data
callback.call( dao, data[ 0 ] );
});
}
);
return this;
}
getMinQuoteId( callback: ( min_id: number ) => void ): this
{
// just in case it's asynchronous later on
callback.call( this, this.SEQ_QUOTE_ID_DEFAULT );
return this;
}
getMaxQuoteId( callback: ( min_id: number ) => void ): void
{
var dao = this;
this._seqCollection!.find(
{ _id: this.SEQ_QUOTE_ID },
{ limit: <PositiveInteger>1 },
function( _err, cursor )
{
cursor.toArray( function( _err: NullableError, data: any[] )
{
if ( data.length == 0 )
{
callback.call( dao, 0 );
return;
}
// return the max quote id
callback.call( dao, data[ 0 ].val );
});
}
);
}
getNextQuoteId( callback: ( quote_id: number ) => void ): this
{
var dao = this;
this._seqCollection!.findAndModify(
{ _id: this.SEQ_QUOTE_ID },
[ [ 'val', 'descending' ] ],
{ $inc: { val: 1 } },
{ 'new': true },
function( err, doc )
{
if ( err )
{
dao.emit( 'seqError', err );
callback.call( dao, 0 );
return;
}
// return the new id
callback.call( dao, doc.val );
}
);
return this;
}
/**
* Create a new revision with the provided quote data
*
 * The revision will contain the whole of the quote. If space is a concern, we
* can (in the future) calculate a delta instead (Mike recommends the Git
* model of storing the deltas in previous revisions and the whole of the
* bucket in the most recently created revision).
*/
createRevision(
quote: ServerSideQuote,
callback: ErrorCallback,
): void
{
var _self = this,
qid = quote.getId(),
data = quote.getBucket().getData();
this._collection!.update( { id: qid },
{ '$push': { revisions: { data: data } } },
// create record if it does not yet exist
{ upsert: true },
// on complete
function( err )
{
if ( err )
{
_self.emit( 'mkrevError', err );
}
callback( err );
return;
}
);
}
getRevision(
quote: ServerSideQuote,
revid: PositiveInteger,
callback: ErrorCallback,
): void
{
revid = <PositiveInteger>+revid;
// XXX: TODO: Filter out all but the revision we want
this._collection!.find(
{ id: quote.getId() },
{ limit: <PositiveInteger>1 },
function( _err, cursor )
{
cursor.toArray( function( _err: NullableError, data: any[] )
{
// was the quote found?
if ( ( data.length === 0 )
|| ( data[ 0 ].revisions.length < ( revid + 1 ) )
)
{
callback( null );
return;
}
// return the quote data
callback( data[ 0 ].revisions[ revid ] );
});
}
);
}
setWorksheets(
qid: QuoteId,
data: MongoUpdate,
callback: NodeCallback<void>,
): void
{
this._collection!.update( { id: qid },
{ '$set': { worksheets: { data: data } } },
// create record if it does not yet exist
{ upsert: true },
// on complete
function( err )
{
callback( err );
return;
}
);
}
getWorksheet(
qid: QuoteId,
supplier: string,
index: PositiveInteger,
callback: ( data: WorksheetData | null ) => void,
): void
{
this._collection!.find(
{ id: qid },
{ limit: <PositiveInteger>1 },
function( _err, cursor )
{
cursor.toArray( function( _err: NullableError, data: any[] )
{
// was the quote found?
if ( ( data.length === 0 )
|| ( !data[ 0 ].worksheets )
|| ( !data[ 0 ].worksheets.data )
|| ( !data[ 0 ].worksheets.data[ supplier ] )
)
{
callback( null );
return;
}
// return the quote data
callback( data[ 0 ].worksheets.data[ supplier ][ index ] );
} );
}
);
}
};
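
The conversion keeps the evented, callback-driven shape of the original easejs prototype (note the matching change in the server bootstrap above, where `MongoServerDao( db )` becomes `new MongoServerDao( db )` against a named export). Below is a minimal usage sketch, not part of this commit; import paths are assumptions and `db` stands in for the mongodb driver handle that the server's `_createDB()` provides.

// Hedged usage sketch (not part of this commit); import paths assumed.
import { MongoServerDao } from "./MongoServerDao";
import { ServerSideQuote } from "../quote/ServerSideQuote";

function persistQuote( db: any, quote: ServerSideQuote ): void
{
    const dao = new MongoServerDao( db );

    dao.on( 'connectError', ( e: Error ) => console.error( e ) );

    // init() opens the connection and fires the callback once both the
    // quotes and seq collections are ready
    dao.init( () =>
        dao.saveQuote(
            quote,
            saved  => console.log( 'saved quote', saved.getId() ),
            failed => console.error( 'save failed', failed.getId() )
        )
    );
}

Omitting the optional save_data argument, as here, triggers a full save: the quote's bucket data plus every metabucket field, each written under an individual `meta.<key>` key (the behavior the new unit test below exercises).
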

View File

@ -131,7 +131,7 @@ export interface ServerDao
qid: QuoteId,
data: WorksheetData,
callback: NodeCallback<void>,
): this;
): void;
/**
@ -147,5 +147,5 @@ export interface ServerDao
supplier: string,
index: PositiveInteger,
callback: ( data: WorksheetData | null ) => void,
): this;
): void;
}

View File

@ -68,4 +68,20 @@ export declare class ServerSideQuote extends BaseQuote
* @return rating data
*/
getRatingData(): Record<string, any>;
/**
* Metadata bucket
*
* @return the metadata bucket
*/
getMetabucket(): QuoteDataBucket;
/**
* Get the program version
*
* @return program version
*/
getProgramVersion(): string;
}

View File

@ -433,7 +433,7 @@ export class RatingService
}
}
this._dao.setWorksheets( qid, worksheets, ( err: Error | null ) =>
this._dao.setWorksheets( qid, worksheets, ( err: NullableError ) =>
{
if ( err )
{

View File

@ -1,4 +1,3 @@
"use strict";
/**
* Publishes message to queue after rating
*
@ -19,22 +18,13 @@
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var RatingService_1 = require("./RatingService");
var amqplib_1 = require("amqplib");
'use strict';
const { Interface, Trait } = require( 'easejs' );
const { RatingService } = require( './RatingService' );
/**
* Publish message to a queue after rating
*
@ -60,11 +50,35 @@ var amqplib_1 = require("amqplib");
*
* See the body of `#_sendMessage' for their values.
*/
var RatingServicePublish = /** @class */ (function (_super) {
__extends(RatingServicePublish, _super);
function RatingServicePublish() {
return _super !== null && _super.apply(this, arguments) || this;
}
module.exports = Trait( 'RatingServicePublish' )
.implement( Interface( { 'postProcessRaterData': [] } ) )
.extend(
{
/**
* AMQP library (amqplib API)
*
* @type {amqplib}
*/
'private _amqp': null,
/**
* AMQP configuration
*
* This should be the configuration expected by amqplib's #connect. It
* should additionally contain a `queueName' field.
*
* @type {Object}
*/
'private _conf': {},
/**
* Logger
*
* @type {DebugLog}
*/
'private _log': null,
/**
* Initialize trait
*
@ -72,13 +86,14 @@ var RatingServicePublish = /** @class */ (function (_super) {
* @param {Object} conf AMQP configuration
* @param {DebugLog} logger logger instance
*/
RatingServicePublish.prototype.__mixin = function (
// constructor(
// private readonly _amqp: Connection,
_conf) {
// super();
this._conf = _conf;
};
__mixin( amqp, conf, logger )
{
this._amqp = amqp;
this._conf = conf;
this._log = logger;
},
/**
* Publish quote message to exchange post-rating
*
@ -90,25 +105,44 @@ var RatingServicePublish = /** @class */ (function (_super) {
*
* @return {undefined}
*/
RatingServicePublish.prototype.postProcessRaterData = function (request, data, actions, program, quote) {
var _this = this;
'abstract override postProcessRaterData'(
request, data, actions, program, quote
)
{
// check both as we transition from one to the other
var exchange = this._conf.exchange;
amqplib_1.connect(this._conf)
.then(function (conn) {
setTimeout(function () { return conn.close(); }, 10000);
const exchange = this._conf.exchange || this._conf.queueName;
this._amqp.connect( this._conf )
.then( conn =>
{
setTimeout( () => conn.close(), 10000 );
return conn.createChannel();
})
.then(function (ch) {
ch.assertExchange(exchange, 'fanout', { durable: true });
return _this._sendMessage(ch, exchange, request.getSession(), quote);
})
.then(function () { return _this._logger.log(_this._logger.PRIORITY_INFO, "Published quote " + quote.getId() +
" to post-rate exchange '" + exchange + "'"); })
.catch(function (e) { return _this._logger.log(_this._logger.PRIORITY_ERROR, "Post-rate exchange publish failure for quote " +
quote.getId() + ": " + e.message); });
_super.prototype.postProcessRaterData.call(this, request, data, actions, program, quote);
};
} )
.then( ch => {
ch.assertExchange( exchange, 'fanout', { durable: true } );
return this._sendMessage(
ch,
exchange,
request.getSession(),
quote
);
} )
.then( () => this._log.log(
this._log.PRIORITY_INFO,
"Published quote " + quote.getId() +
" to post-rate exchange '" + exchange + "'"
) )
.catch( e => this._log.log(
this._log.PRIORITY_ERROR,
"Post-rate exchange publish failure for quote " +
quote.getId() + ": " + e.message
) );
this.__super( request, data, actions, program, quote );
},
/**
* Send message to exchange
*
@ -117,25 +151,31 @@ var RatingServicePublish = /** @class */ (function (_super) {
* @param {UserSession} session user session
* @param {Quote} quote rated quote
*
* @return whether publish was successful
* @return {Promise} whether publish was successful
*/
RatingServicePublish.prototype._sendMessage = function (channel, exchange, session, quote) {
var headers = {
'private _sendMessage'( channel, exchange, session, quote )
{
const headers = {
version: 1,
created: Date.now(),
};
var data = new Buffer(JSON.stringify({
const data = new Buffer( JSON.stringify( {
quote_id: quote.getId(),
program_id: quote.getProgramId(),
agent_id: session.agentId(),
entity_id: session.agentEntityId(),
entity_name: session.agentName(),
}));
} ) );
// we don't use a routing key; fanout exchange
var routing_key = '';
return channel.publish(exchange, routing_key, data, { headers: headers });
};
return RatingServicePublish;
}(RatingService_1.RatingService));
exports.RatingServicePublish = RatingServicePublish;
//# sourceMappingURL=data:application/json;base64,...
const routing_key = '';
return channel.publish(
exchange,
routing_key,
data,
{ headers: headers }
);
},
} );
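
As the docblock above notes, the published message carries `version` and `created` headers and a JSON body identifying the quote and agent entity; `program_id` is also included by `#_sendMessage`. The following is a hedged sketch of the payload a consumer of the fanout exchange would decode; all values are illustrative placeholders.

// Illustrative only: shape of a version-1 post-rate message as assembled
// by #_sendMessage above; values are placeholders, not real data.
const example_headers = {
    version: 1,              // message format version
    created: 1573166327000,  // Date.now() at publish time
};

const example_body = {
    quote_id:    123456,
    program_id:  "example_program",
    agent_id:    900000,
    entity_id:   12345,
    entity_name: "Example Agency",
};

// fanout exchange, so no routing key is used when publishing:
// channel.publish( exchange, '', new Buffer( JSON.stringify( example_body ) ),
//                  { headers: example_headers } );
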

View File

@ -1,191 +0,0 @@
/**
* Publishes message to queue after rating
*
* Copyright (C) 2010-2019 R-T Specialty, LLC.
*
* This file is part of liza.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { RatingService } from "./RatingService";
import { UserRequest } from "../request/UserRequest";
import { RateResult } from "../rater/Rater";
import { ClientActions } from "../../client/action/ClientAction";
import { Program } from "../../program/Program";
import { ServerSideQuote } from "../quote/ServerSideQuote";
import { UserSession } from "../request/UserSession";
import {
connect as amqpConnect,
Options,
Channel,
} from 'amqplib';
export interface AmqpConfig extends Options.Connect {
/** The name of a queue or exchange to publish to */
exchange: string;
}
/**
* Publish message to a queue after rating
*
* This is an initial proof-of-concept implementation. In particular, we
* have the following considerations:
*
* - A fresh connection is made for each message until we can ensure that
* we can auto-reconnect on failure;
* - This trait is not yet tested;
* - It does not use an exchange;
* - It does a poor job checking for and reporting errors.
*
* The message consists of a `version' header that is set to 1. Future
* changes to the message format will bump this version. There is also a
* `created' header holding a Unix timestamp of the moment that the message
* was created.
*
* Version 1 of the body consists of four fields:
* - quote_id
* - agent_id
* - entity_id
* - entity_name
*
* See the body of `#_sendMessage' for their values.
*/
export class RatingServicePublish extends RatingService
{
/**
* AMQP configuration
*
* This should be the configuration expected by amqplib's #connect. It
* should additionally contain a `queueName' field.
*
* @type {Object}
*/
private _conf: AmqpConfig;
/**
* Initialize trait
*
* @param {amqplib} AMQP library
* @param {Object} conf AMQP configuration
* @param {DebugLog} logger logger instance
*/
__mixin(
// constructor(
// private readonly _amqp: Connection,
_conf: AmqpConfig,
) {
// super();
this._conf = _conf;
}
/**
* Publish quote message to exchange post-rating
*
* @param {UserRequest} request user request
* @param {Object} data rating data returned
* @param {Array} actions actions to send to client
* @param {Program} program program used to perform rating
* @param {Quote} quote quote used for rating
*
* @return {undefined}
*/
protected postProcessRaterData(
request: UserRequest,
data: RateResult,
actions: ClientActions,
program: Program,
quote: ServerSideQuote,
): void
{
// check both as we transition from one to the other
const exchange = this._conf.exchange;
amqpConnect( this._conf )
.then( conn =>
{
setTimeout( () => conn.close(), 10000 );
return conn.createChannel();
} )
.then( ch => {
ch.assertExchange( exchange, 'fanout', { durable: true } );
return this._sendMessage(
ch,
exchange,
request.getSession(),
quote
);
} )
.then( () => this._logger.log(
this._logger.PRIORITY_INFO,
"Published quote " + quote.getId() +
" to post-rate exchange '" + exchange + "'"
) )
.catch( e => this._logger.log(
this._logger.PRIORITY_ERROR,
"Post-rate exchange publish failure for quote " +
quote.getId() + ": " + e.message
) );
super.postProcessRaterData( request, data, actions, program, quote );
}
/**
* Send message to exchange
*
* @param {Channel} channel AMQP channel
* @param {string} exchange exchange name
* @param {UserSession} session user session
* @param {Quote} quote rated quote
*
* @return whether publish was successful
*/
_sendMessage(
channel: Channel,
exchange: string,
session: UserSession,
quote: ServerSideQuote
): boolean
{
const headers = {
version: 1,
created: Date.now(),
};
const data = new Buffer( JSON.stringify( {
quote_id: quote.getId(),
program_id: quote.getProgramId(),
agent_id: session.agentId(),
entity_id: session.agentEntityId(),
entity_name: session.agentName(),
} ) );
// we don't use a routing key; fanout exchange
const routing_key = '';
return channel.publish(
exchange,
routing_key,
data,
{ headers: headers }
);
}
}

View File

@ -34,6 +34,8 @@ import { DocumentId } from "../../document/Document";
import { TokenId, TokenNamespace, TokenState } from "./Token";
import { UnknownTokenError } from "./UnknownTokenError";
import { context } from "../../error/ContextError";
import { MongoCollection } from "mongodb";
/**
@ -118,7 +120,7 @@ export class MongoTokenDao implements TokenDao
},
},
( err: Error|null, prev_data ) =>
( err: NullableError, prev_data ) =>
{
if ( err )
{
@ -250,7 +252,7 @@ export class MongoTokenDao implements TokenDao
this._collection.findOne(
{ id: +doc_id },
{ fields: fields },
( err: Error|null, data: TokenQueryResult ) =>
( err: NullableError, data: TokenQueryResult ) =>
{
if ( err || !data )
{

src/types/misc.d.ts
View File

@ -65,4 +65,7 @@ type UnixTimestampMillis = NominalType<Milliseconds, 'UnixTimestampMillis'>;
* reduce the boilerplate of these function definitions, and to clearly
* document that this pattern is something that used to be done frequently.
*/
type NodeCallback<T, R = void> = ( e: Error | null, result: T | null ) => R;
type NodeCallback<T, R = void> = ( e: NullableError, result: T | null ) => R;
/** Nullable error */
type NullableError = Error | null;
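
NullableError simply names the `Error | null` union that NodeCallback already used; the alias lets the rest of this commit (ServerDao, MongoTokenDao, the mongodb typings) share one spelling. A small hedged sketch of a callback written against these ambient aliases:

// Hedged sketch: a Node-style callback for ServerDao#setWorksheets,
// which this commit types as NodeCallback<void>.
const onWorksheetsSaved: NodeCallback<void> = ( e: NullableError ) =>
{
    if ( e )
    {
        console.error( "setWorksheets failed: " + e.message );
        return;
    }

    console.log( "worksheets saved" );
};
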

View File

@ -23,13 +23,15 @@
* front.
*/
import { PositiveInteger } from "../numeric";
declare module "mongodb";
/**
* Node-style callback for queries
*/
type MongoCallback = ( err: Error|null, data: { [P: string]: any } ) => void;
type MongoCallback = ( err: NullableError, data: { [P: string]: any } ) => void;
/**
@ -52,10 +54,24 @@ interface MongoQueryUpdateOptions
*/
interface MongoFindOneOptions
{
/** Fields to select */
fields?: MongoFieldSelector,
}
/**
* Options for `find` queries
*
* This is not at all comprehensive; it covers only the fields we actually
* make use of.
*/
interface MongoFindOptions
{
/** Limit results returned */
limit?: PositiveInteger,
}
/**
* Options for `findAndModify` queries
*
@ -76,21 +92,26 @@ interface MongoFindAndModifyOptions
/** Mongo query selector */
type MongoSelector = { [P: string]: any };
export type MongoSelector = { [P: string]: any };
/** Field selector */
type MongoFieldSelector = { [P: string]: number };
/** Mongo index specification */
type MongoIndexSpecification = Array< Array < string | number >>;
/** Mongo update clause */
type MongoUpdate = MongoSelector;
export type MongoUpdate = MongoSelector;
/** Mongo object */
type MongoObject = { [P: string]: any };
/** Mongo update clause */
type MongoInsertSpecification = MongoObject | MongoObject[];
/** Sorting clause **/
type MongoSortClause = Array<string | [ string, MongoSortDirection ]>;
/** Sort direction */
type MongoSortDirection = -1 | 1 | 'ascending' | 'descending' | 'asc' | 'desc';
@ -126,6 +147,23 @@ declare interface MongoCollection
): void;
/**
* Execute a query and return the matching results
*
* Unlike `update`, the callback return value is not propagated, and so
* the callback ought not return anything.
*
* @param selector document query
* @param fields fields to return
* @param callback continuation on completion
*/
find(
selector: MongoSelector,
fields: MongoFindOptions,
callback: MongoCallback
): void;
/**
* Execute a query and return the first result
*
@ -158,4 +196,23 @@ declare interface MongoCollection
options: MongoFindAndModifyOptions,
callback: MongoCallback,
): void;
/**
* Creates an index on the collection
*/
createIndex(
fieldOrSpec: MongoIndexSpecification,
options: boolean,
callback: MongoCallback,
): void;
/**
* Inserts documents into the collection
*/
insert(
docs: MongoInsertSpecification,
callback: MongoCallback,
): void;
}
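
These ambient declarations cover only the driver surface the DAO touches; `limit`, for example, is typed as PositiveInteger, which is why the DAO casts its literal 1. A minimal sketch written against them follows; the module paths and the `quotes` handle are assumptions mirroring the DAO code above.

// Hedged sketch: look up a single quote document using the declared
// find() signature; `quotes` is assumed to be the quotes MongoCollection.
import { MongoCollection, MongoSelector } from "mongodb";
import { PositiveInteger } from "../numeric";

function findQuoteDoc( quotes: MongoCollection, quote_id: number ): void
{
    const selector: MongoSelector = { id: quote_id };

    quotes.find(
        selector,
        { limit: <PositiveInteger>1 },
        ( _err, cursor ) =>
            cursor.toArray( ( _e: NullableError, docs: any[] ) =>
                console.log( docs[ 0 ] )
            )
    );
}
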

View File

@ -89,96 +89,6 @@ describe( 'Delta', () =>
dest_data: { foo: [ '' ] },
expected: {},
},
{
label: "",
src_data: { foo: [ 'bar', 'baz' ] },
dest_data: { foo: [ 'change', 'baz' ] },
data: { foo: [ 'change', 'baz' ] },
diff: { foo: [ 'change' ] },
},
{
label: "",
src_data: { foo: [ 'bar', 'baz' ] },
dest_data: { foo: [ 'bar', 'change' ] },
data: { foo: [ 'bar', 'change' ] },
diff: { foo: [ , 'change' ] },
},
{
label: "",
src_data: { foo: [ 'bar', 'baz' ] },
dest_data: { foo: [ undefined, 'change' ] },
data: { foo: [ 'bar', 'change' ] },
diff: { foo: [ , 'change' ] },
},
{
label: "",
src_data: { foo: [ 'bar', 'baz' ] },
dest_data: { foo: [ undefined, 'baz' ] },
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
{
label: "",
src_data: { foo: [ 'bar', 'baz' ] },
dest_data: { foo: [ 'bar', undefined ] },
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
{
label: "",
src_data: { foo: [ 'bar', 'baz' ] },
dest_data: { foo: [ 'bar', null ] },
data: { foo: [ 'bar' ] },
diff: { foo: [ , null ] },
},
{
label: "",
src_data: { foo: [ 'bar', 'baz' ] },
dest_data: { foo: [ 'bar', 'baz', null ] },
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
{
label: "",
src_data: { foo: [ 'bar', 'baz' ] },
dest_data: { foo: [ 'bar', 'baz', 'quux' ] },
data: { foo: [ 'bar', 'baz', 'quux' ] },
diff: { foo: [ , , 'quux' ]},
},
{
label: "",
src_data: { foo: [ 'bar', 'baz' ] },
dest_data: { foo: [] },
data: { foo: [ 'bar', 'baz' ] },
diff: {},
},
// null not at end of set means unchanged
{
label: "",
src_data: { foo: [ 'bar', 'baz', 'quux' ] },
dest_data: { foo: [ null, null, 'quux' ] },
data: { foo: [ 'bar', 'baz', 'quux' ] },
diff: {},
},
// but the last one is
{
label: "",
src_data: { foo: [ 'bar', 'baz', 'quux' ] },
dest_data: { foo: [ null, 'baz', null ] },
data: { foo: [ 'bar', 'baz' ] },
diff: { foo: [ , , null ] },
},
// given a string of nulls, only the last one is terminating; the
// rest are interpreted as undefined (because JSON serializes
// undefined values to `null' -_-)
{
label: "",
src_data: { foo: [ 'bar', 'baz', 'quux' ] },
dest_data: { foo: [ null, null, null ] },
data: { foo: [ 'bar', 'baz' ] },
diff: { foo: [ , , null ] },
},
] ).forEach( ( { label, src_data, dest_data, expected } ) =>
{
it( label, () =>

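The deleted cases above encode the vector-termination rules spelled out in their comments: a null only truncates the set when it is the final element of the destination vector; interior nulls (and undefined) mean "unchanged", because JSON serializes undefined to null. Below is a hedged sketch of that merge rule using stand-in names; the real logic lives in src/bucket/delta.

// Hedged illustration of the null-terminator convention; not the actual
// delta implementation.
type Vector = ( string | null | undefined )[];

function mergeVector( src: Vector, dest: Vector ): Vector
{
    // start from the source so values beyond dest's length carry over
    const out: Vector = src.slice();

    for ( let i = 0; i < dest.length; i++ )
    {
        const value = dest[ i ];

        // a null in the final position terminates (truncates) the set
        if ( value === null && i === dest.length - 1 )
        {
            out.length = i;
            break;
        }

        // null/undefined elsewhere means "unchanged"; keep the src value
        if ( value !== null && value !== undefined )
        {
            out[ i ] = value;
        }
    }

    return out;
}

// mergeVector( [ 'bar', 'baz' ],         [ 'bar', null ] )        => [ 'bar' ]
// mergeVector( [ 'bar', 'baz', 'quux' ], [ null, null, 'quux' ] ) => [ 'bar', 'baz', 'quux' ]
// mergeVector( [ 'bar', 'baz', 'quux' ], [ null, null, null ] )   => [ 'bar', 'baz' ]
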
View File

@ -40,7 +40,7 @@ describe( 'TokenedDataApi', () =>
const expected_ns = 'foo_ns';
( <[string, boolean, ( e: Error|null ) => void][]>[
( <[string, boolean, ( e: NullableError ) => void][]>[
[
"creates token and returns data if last_created",
true,

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,227 @@
/**
* Tests MongoServerDao
*
* Copyright (C) 2010-2019 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
import { MongoServerDao as Sut } from "../../../src/server/db/MongoServerDao";
import { MongoSelector, MongoUpdate } from "mongodb";
import { expect, use as chai_use } from 'chai';
import { ServerSideQuote } from "../../../src/server/quote/ServerSideQuote";
import { PositiveInteger } from "../../../src/numeric";
import { Program } from "../../../src/program/Program";
import { RateResult } from "../../../src/server/rater/Rater";
import { QuoteDataBucket } from "../../../src/bucket/QuoteDataBucket";
import { QuoteId } from "../../../src/quote/Quote";
chai_use( require( 'chai-as-promised' ) );
describe( 'MongoServerDao', () =>
{
describe( '#saveQuote', () =>
{
describe( "with no save data or push data", () =>
{
it( "saves entire metabucket record individually", done =>
{
const metadata = {
foo: [ 'bar', 'baz' ],
bar: [ { quux: 'quuux' } ],
};
const quote = createStubQuote( metadata );
const sut = new Sut( createMockDb(
// update
( _selector: MongoSelector, data: MongoUpdate ) =>
{
expect( data.$set[ 'meta.foo' ] )
.to.deep.equal( metadata.foo );
expect( data.$set[ 'meta.bar' ] )
.to.deep.equal( metadata.bar );
expect( data.$push ).to.equal( undefined );
done();
}
) );
sut.init( () =>
sut.saveQuote( quote, () => {}, () => {} )
);
} );
} );
describe( "with push data", () =>
{
it( "adds push data to the collection", done =>
{
const push_data = {
foo: [ 'bar', 'baz' ],
bar: [ { quux: 'quuux' } ],
};
const quote = createStubQuote( {} );
const sut = new Sut( createMockDb(
// update
(_selector: MongoSelector, data: MongoUpdate ) =>
{
expect( data.$push[ 'foo' ] )
.to.deep.equal( push_data.foo );
expect( data.$push[ 'bar' ] )
.to.deep.equal( push_data.bar );
done();
}
) );
sut.init( () =>
sut.saveQuote(
quote,
() => {},
() => {},
undefined,
push_data
)
);
} );
it( "skips push data when it is an empty object", done =>
{
const push_data = {};
const quote = createStubQuote( {} );
const sut = new Sut( createMockDb(
// update
( _selector: MongoSelector, data: MongoUpdate ) =>
{
expect( data.$push ).to.equal( undefined );
done();
}
) );
sut.init( () =>
sut.saveQuote(
quote,
() => {},
() => {},
undefined,
push_data
)
);
} );
} );
} );
} );
function createMockDb( on_update: any )
{
const collection_quotes = {
update: on_update,
createIndex: ( _: any, __: any, c: any ) => c(),
};
const collection_seq = {
find( _: any, __: any, c: any )
{
c( null, {
toArray: ( c: any ) => c( null, { length: 5 } ),
} );
},
};
const db = {
collection( id: any, c: any )
{
const coll = ( id === 'quotes' )
? collection_quotes
: collection_seq;
c( null, coll );
},
};
const driver = {
open: ( c: any ) => c( null, db ),
on: () => {},
};
return driver;
}
function createStubQuote( metadata: Record<string, any> )
{
const program = <Program>{
getId: () => '1',
ineligibleLockCount: 0,
apis: {},
internal: {},
meta: {
arefs: {},
fields: {},
groups: {},
qdata: {},
qtypes: {},
},
mapis: {},
initQuote: () => {},
};
const quote = <ServerSideQuote>{
getBucket: () => <QuoteDataBucket>( {
getData: () => {},
} ),
getMetabucket: () => <QuoteDataBucket>( {
getData: () => metadata,
} ),
getId: () => <QuoteId>123,
getProgramVersion: () => 'Foo',
getLastPremiumDate: () => <UnixTimestamp>0,
getRatedDate: () => <UnixTimestamp>0,
getExplicitLockReason: () => "",
getExplicitLockStep: () => <PositiveInteger>1,
isImported: () => false,
isBound: () => false,
getTopVisitedStepId: () => <PositiveInteger>1,
getTopSavedStepId: () => <PositiveInteger>1,
setRatedDate: () => quote,
setRateBucket: () => quote,
setRatingData: () => quote,
getRatingData: () => <RateResult>{ _unavailable_all: '0' },
getProgram: () => program,
setExplicitLock: () => quote,
getProgramId: () => 'Foo',
getCurrentStepId: () => 0,
setLastPremiumDate: () => quote,
};
return quote;
}

View File

@ -713,7 +713,44 @@ function createStubQuote()
getBucket()
{
return new QuoteDataBucket();
}
},
getMetabucket(){
return new QuoteDataBucket();
},
getProgramVersion(){
return 'Foo';
},
getExplicitLockReason(){
return 'Reason';
},
getExplicitLockStep()
{
return <PositiveInteger>1;
},
isImported()
{
return true;
},
isBound()
{
return true;
},
getTopVisitedStepId()
{
return <PositiveInteger>1;
},
getTopSavedStepId()
{
return <PositiveInteger>1;
},
};
}

View File

@ -36,6 +36,7 @@ import { UserRequest } from "../../../src/server/request/UserRequest";
import { UserResponse } from "../../../src/server/request/UserResponse";
import { UserSession } from "../../../src/server/request/UserSession";
import { QuoteDataBucket } from "../../../src/bucket/QuoteDataBucket";
import { PositiveInteger } from "../../../src/numeric";
import { Kv } from "../../../src/bucket/delta";
import {
@ -586,6 +587,14 @@ function getStubs()
setRatingData: () => quote,
getRatingData: () => stub_rate_data,
getBucket: () => new QuoteDataBucket(),
getMetabucket: () => new QuoteDataBucket(),
getProgramVersion: () => 'Foo',
getExplicitLockReason: () => 'Reason',
getExplicitLockStep: () => <PositiveInteger>1,
isImported: () => true,
isBound: () => true,
getTopVisitedStepId: () => <PositiveInteger>1,
getTopSavedStepId: () => <PositiveInteger>1,
};
return {

View File

@ -26,7 +26,7 @@ import {
} from "../../../src/server/token/TokenDao";
import { MongoTokenDao as Sut } from "../../../src/server/token/MongoTokenDao";
import { MongoCollection } from "mongodb";
import {
TokenId,
TokenNamespace,
@ -248,6 +248,9 @@ describe( 'server.token.TokenDao', () =>
update() {},
findOne() {},
find() {},
createIndex() {},
insert() {},
};
return expect(
@ -269,6 +272,9 @@ describe( 'server.token.TokenDao', () =>
update() {},
findOne() {},
find() {},
createIndex() {},
insert() {},
};
return expect(
@ -477,6 +483,9 @@ describe( 'server.token.TokenDao', () =>
update() {},
findAndModify() {},
find() {},
createIndex() {},
insert() {},
};
const result = new Sut( coll, field, () => <UnixTimestamp>0 )
@ -520,6 +529,9 @@ describe( 'server.token.TokenDao', () =>
update() {},
findAndModify() {},
find() {},
createIndex() {},
insert() {},
};
return expect(