TokenDao: Return previous state

This causes the DAO to return the state of the document prior to the
database operation (in the case of a retrieval, the previous state is the
same as the current state).  This makes it possible to determine whether
other tokens were modified since a previous request.
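
A minimal sketch of how a caller might use this (the helper name and the
relative import paths are assumptions for illustration; only the TokenData
and TokenId types come from the changes below):

    import { TokenId }   from "./Token";
    import { TokenData } from "./TokenDao";

    /**
     * Hypothetical helper: report whether the namespace's last-updated
     * token changed between the caller's previous request and the
     * operation that produced `result`.
     *
     * `result.prev_last` is the last-updated token as it was *before*
     * the operation, or `null` if the namespace had none.
     */
    const lastTokenChanged = (
        result:    TokenData,
        last_seen: TokenId | null,
    ): boolean =>
        ( result.prev_last === null )
            ? ( last_seen !== null )
            : ( result.prev_last.id !== last_seen );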

* src/server/token/MongoTokenDao.ts: Stop using TokenType in favor of new
    TokenState.
  (updateToken): Query for and return previous state.  Use findAndModify
    instead of update (see the call sketch following this list).
  (getToken): Return previous state.  Minor changes to account for new
    TokenQueryResult types (null=>undefined).
* src/server/token/Token.ts: Add comments for existing Token{Id,Namespace}
    nominal types.
  (TokenState): New string enum.
* src/server/token/TokenDao.ts: Import new TokenState.
  (TokenDao)[updateToken]: Use it.
  (TokenType): Remove.
  (TokenQueryResult, TokenNamespaceResults, TokenNamespaceData):
    null=>undefined for unavailable value.  null was wrong.
  (TokenStatus): Token{Type=>State}.
  (TokenData)[prev_state, prev_status]: New fields.
* test/server/token/MongoTokenDaoTest.ts: Update tests accordingly.
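
The mechanism behind the previous-state return is findAndModify with
`new: false`: Mongo hands the callback the matched document as it looked
before the modification, and the `fields` projection limits that pre-image
to what prev_status and prev_last need.  Condensed from the MongoTokenDao.ts
diff below (this is the commit's own call shape, not new API surface):

    this._collection.findAndModify(
        { id: +doc_id },                          // document selector
        [],                                       // no sort
        { $set: token_data, $push: token_log },   // the actual update
        {
            upsert: true,
            new:    false,    // return the document as it was *before* the update
            fields: {         // pre-image fields needed for the prev_* values
                [ root + 'last' ]:               1,
                [ root + 'lastStatus' ]:         1,
                [ root + token_id + '.status' ]: 1,
            },
        },
        ( err: Error|null, prev_data ) =>
        {
            // prev_data reflects the pre-operation state; prev_status and
            // prev_last are derived from it via _getPrevStatus/_getPrevLast
        },
    );
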
master
Mike Gerwitz 2019-09-23 14:11:26 -04:00
parent ddf5892c83
commit 9997da3f65
4 changed files with 368 additions and 75 deletions

src/server/token/MongoTokenDao.ts

@@ -27,11 +27,10 @@ import {
TokenNamespaceResults,
TokenQueryResult,
TokenStatus,
TokenType,
} from "./TokenDao";
import { DocumentId } from "../../document/Document";
import { TokenId, TokenNamespace } from "./Token";
import { TokenId, TokenNamespace, TokenState } from "./Token";
import { UnknownTokenError } from "./UnknownTokenError";
import { context } from "../../error/ContextError";
@@ -98,7 +97,7 @@ export class MongoTokenDao implements TokenDao
doc_id: DocumentId,
ns: TokenNamespace,
token_id: TokenId,
type: TokenType,
type: TokenState,
data: string | null,
): Promise<TokenData>
{
@@ -122,15 +121,24 @@ export class MongoTokenDao implements TokenDao
return new Promise( ( resolve, reject ) =>
{
this._collection.update(
this._collection.findAndModify(
{ id: +doc_id },
[],
{
$set: token_data,
$push: token_log
},
{ upsert: true },
{
upsert: true,
new: false,
fields: {
[ root + 'last' ]: 1,
[ root + 'lastStatus' ]: 1,
[ root + token_id + '.status' ]: 1,
},
},
function ( err: Error|null )
( err: Error|null, prev_data ) =>
{
if ( err )
{
@@ -138,9 +146,16 @@ export class MongoTokenDao implements TokenDao
return;
}
const prev_result = <TokenNamespaceResults>
prev_data[ this._rootField ] || {};
const prev_ns = prev_result[ ns ];
resolve( {
id: token_id,
status: token_entry,
id: token_id,
status: token_entry,
prev_status: this._getPrevStatus( prev_ns, token_id ),
prev_last: this._getPrevLast( prev_ns ),
} );
}
);
@@ -148,6 +163,57 @@ export class MongoTokenDao implements TokenDao
}
/**
* Determine previous token status, or produce `null`
*
* @param prev_ns previous namespace data
* @param token_id token identifier
*
* @return previous token status
*/
private _getPrevStatus(
prev_ns: TokenNamespaceData | undefined,
token_id: TokenId
): TokenStatus | null
{
if ( prev_ns === undefined )
{
return null;
}
const entry = <TokenEntry>( prev_ns[ token_id ] );
return ( entry === undefined )
? null
: entry.status;
}
/**
* Determine previous last updated token for namespace, otherwise `null`
*
* @param prev_ns previous namespace data
*
* @return previous last token data
*/
private _getPrevLast(
prev_ns: TokenNamespaceData | undefined
): TokenData | null
{
if ( prev_ns === undefined || ( prev_ns || {} ).last === undefined )
{
return null;
}
return {
id: prev_ns.last,
status: prev_ns.lastStatus,
prev_status: null,
prev_last: null,
};
}
/**
* Retrieve existing token under the namespace NS, if any, for the doc
* identified by DOC_ID
@@ -192,7 +258,9 @@ export class MongoTokenDao implements TokenDao
const field = <TokenNamespaceResults>data[ this._rootField ]
|| {};
if ( !field[ ns ] )
const ns_data = field[ ns ];
if ( !ns_data )
{
reject( context(
new UnknownTokenError(
@@ -203,11 +271,10 @@ export class MongoTokenDao implements TokenDao
ns: ns,
}
) );
return;
}
const ns_data = <TokenNamespaceData>field[ ns ];
resolve( ( token_id )
? this._getRequestedToken( doc_id, ns, token_id, ns_data )
: this._getLatestToken( doc_id, ns, ns_data )
@@ -252,8 +319,10 @@ export class MongoTokenDao implements TokenDao
}
return {
id: last,
status: ns_data.lastStatus,
id: last,
status: ns_data.lastStatus,
prev_status: ns_data.lastStatus,
prev_last: this._getPrevLast( ns_data ),
};
}
@@ -295,8 +364,10 @@ export class MongoTokenDao implements TokenDao
}
return {
id: token_id,
status: reqtok.status,
id: token_id,
status: reqtok.status,
prev_status: reqtok.status,
prev_last: this._getPrevLast( ns_data ),
};
}

src/server/token/Token.ts

@@ -20,7 +20,31 @@
*/
/** Identifier unique to token namespace */
export type TokenId = NominalType<string, 'TokenId'>;
/** Token namespace for identifiers */
export type TokenNamespace = NominalType<string, 'TokenNamespace'>;
/**
* Token states
*
* States are listed as strings for ease of {de,}serialization for storage.
*
* - `ACTIVE` - an outstanding token that has not yet been processed.
* - `DONE` - a token has finished processing and result data may be
* available.
* - `ACCEPTED` - a `DONE` token has been acknowledged by the requester.
* - `DEAD` - a token has been killed and should no longer be used.
*
* For valid state transitions, see `TokenTransition`.
*/
export enum TokenState {
ACTIVE = "ACTIVE",
DONE = "DONE",
ACCEPTED = "ACCEPTED",
DEAD = "DEAD",
};
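
Because the enum members are plain strings, they serialize exactly like the
TokenType literals they replace (see TokenDao.ts below), so token data already
stored in Mongo keeps matching.  A small illustration, not part of the commit:

    import { TokenState } from "./Token";

    // String enum members are assignable to string and serialize to the
    // same literals the removed TokenType union used.
    const stored: string = TokenState.DONE;                // "DONE"
    console.log( JSON.stringify( { type: TokenState.ACTIVE } ) );
    // => {"type":"ACTIVE"}, identical to the old TokenType representation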

src/server/token/TokenDao.ts

@@ -27,7 +27,7 @@
* compatibility with the existing data.
*/
import { TokenId, TokenNamespace } from "./Token";
import { TokenId, TokenNamespace, TokenState } from "./Token";
import { DocumentId } from "../../document/Document";
@@ -38,7 +38,7 @@ export interface TokenDao
doc_id: DocumentId,
ns: TokenNamespace,
token_id: TokenId,
type: TokenType,
type: TokenState,
data: string | null,
): Promise<TokenData>;
@@ -51,12 +51,6 @@
}
/**
* Token status types as stored in the database
*/
export type TokenType = 'ACTIVE' | 'DONE' | 'ACCEPTED' | 'DEAD';
/**
* Result of a Mongo query
*
@@ -64,7 +58,7 @@ export type TokenType = 'ACTIVE' | 'DONE' | 'ACCEPTED' | 'DEAD';
*/
export interface TokenQueryResult
{
readonly [propName: string]: TokenNamespaceResults | null,
readonly [propName: string]: TokenNamespaceResults | undefined,
}
@@ -73,7 +67,7 @@ export interface TokenQueryResult
*/
export interface TokenNamespaceResults
{
readonly [propName: string]: TokenNamespaceData | null,
readonly [propName: string]: TokenNamespaceData | undefined,
}
@@ -104,7 +98,7 @@ export interface TokenNamespaceData
* accommodate the above fields. Anything using this should cast to
* `TokenEntry`.
*/
readonly [propName: string]: TokenEntry | TokenStatus | TokenId | null,
readonly [propName: string]: TokenEntry | TokenStatus | TokenId | undefined,
}
@@ -132,16 +126,13 @@ export interface TokenEntry
/**
* Status of the token (past or present)
*
* A status is a `TokenType`, along with a timestamp of occurrence and
* optional data.
*/
export interface TokenStatus
{
/**
* State of the token
*/
readonly type: TokenType,
readonly type: TokenState,
/**
* Unix timestamp representing when the status change occurred
@@ -160,10 +151,38 @@ export interface TokenStatus
/**
* Token information
* Token information returned from database queries
*
* This attempts to provide raw data without making assumptions as to how it
* may be used. For example, rather than returning whether the token was
* the last modified, it returns the last token before the database
* operation took place (`prev_last`). Note that this interface is
* recursively defined, but will only be a maximum of two levels deep (there
* will be no `prev_last.prev_last !== null`).
*/
export interface TokenData
{
id: TokenId,
/** Token identifier */
id: TokenId,
/** Status of token after the database operation */
status: TokenStatus,
/**
* Status of token before the database operation
*
* If the operation is to retrieve a token (rather than to update it),
* then this status will be identical to `status`.
*/
prev_status: TokenStatus | null,
/**
* Token data of the last updated token for this document id and
* namespace before the last database operation
*
* This is derived from the value of `TokenNamespaceData.last` and
* `TokenNamespaceData.lastStatus` prior to the most recent operation
* (e.g. Mongo's `findAndModify` with `new` set to `false`).
*/
prev_last: TokenData | null,
}

test/server/token/MongoTokenDaoTest.ts

@@ -30,6 +30,7 @@ import { MongoTokenDao as Sut } from "../../../src/server/token/MongoTokenDao";
import {
TokenId,
TokenNamespace,
TokenState,
} from "../../../src/server/token/Token";
import { DocumentId } from "../../../src/document/Document";
@@ -45,20 +46,159 @@ describe( 'server.token.TokenDao', () =>
{
describe( '#updateToken', () =>
{
it( 'updates token with given data', () =>
const field = 'foo_field';
const did = <DocumentId>12345;
const ns = <TokenNamespace>'namespace';
const tok_id = <TokenId>'tok123';
const tok_type = TokenState.DONE;
const data = "some data";
const timestamp = <UnixTimestamp>12345;
const root = field + '.' + ns;
const last_tok_id = <TokenId>'last-tok';
const last: TokenStatus = {
type: TokenState.DEAD,
timestamp: <UnixTimestamp>4567,
data: "last token",
};
const prev: TokenStatus = {
type: TokenState.ACTIVE,
timestamp: <UnixTimestamp>11111,
data: "prev status",
};
( <{ label: string, given: TokenQueryResult, expected: TokenData }[]>[
{
label: "updates token and returns previous data",
given: {
[field]: {
[ns]: {
last: last_tok_id,
lastStatus: {
type: last.type,
timestamp: last.timestamp,
data: last.data,
},
[tok_id]: {
status: {
type: prev.type,
timestamp: prev.timestamp,
data: prev.data,
},
},
},
},
},
expected: {
id: tok_id,
status: {
type: tok_type,
timestamp: timestamp,
data: data,
},
prev_status: prev,
prev_last: {
id: last_tok_id,
status: last,
prev_status: null,
prev_last: null,
},
},
},
{
label: "returns null for prev status if missing data",
given: {
[field]: {
[ns]: {
last: last_tok_id,
lastStatus: {
type: last.type,
timestamp: last.timestamp,
data: last.data,
},
},
},
},
expected: {
id: tok_id,
status: {
type: tok_type,
timestamp: timestamp,
data: data,
},
prev_status: null,
prev_last: {
id: last_tok_id,
status: last,
prev_status: null,
prev_last: null,
},
},
},
{
label: "returns null for missing namespace data",
given: {
[field]: {
[ns]: {},
},
},
expected: {
id: tok_id,
status: {
type: tok_type,
timestamp: timestamp,
data: data,
},
prev_status: null,
prev_last: null,
},
},
{
label: "returns null for missing namespace",
given: {
[field]: {},
},
expected: {
id: tok_id,
status: {
type: tok_type,
timestamp: timestamp,
data: data,
},
prev_status: null,
prev_last: null,
},
},
{
label: "returns null for missing root field",
given: {},
expected: {
id: tok_id,
status: {
type: tok_type,
timestamp: timestamp,
data: data,
},
prev_status: null,
prev_last: null,
},
},
] ).forEach( ( { given, expected, label } ) => it( label, () =>
{
const field = 'foo_field';
const did = <DocumentId>12345;
const ns = <TokenNamespace>'namespace';
const tok_id = <TokenId>'tok123';
const tok_type = 'DONE';
const data = "some data";
const timestamp = <UnixTimestamp>12345;
const root = field + '.' + ns;
const coll: MongoCollection = {
update( selector: any, given_data: any, options, callback )
findAndModify( selector, _sort, given_data, options, callback )
{
const expected_entry: TokenStatus = {
type: tok_type,
@@ -70,36 +210,37 @@ describe( 'server.token.TokenDao', () =>
expect( given_data ).to.deep.equal( {
$set: {
[`${root}.last`]: tok_id,
[`${root}.lastStatus`]: expected_entry,
[`${root}.${tok_id}.status`]: expected_entry,
[ `${root}.last` ]: tok_id,
[ `${root}.lastStatus` ]: expected_entry,
[ `${root}.${tok_id}.status` ]: expected_entry,
},
$push: {
[`${root}.${tok_id}.statusLog`]: expected_entry,
[ `${root}.${tok_id}.statusLog` ]: expected_entry,
},
} );
expect( ( <MongoQueryUpdateOptions>options ).upsert )
.to.be.true;
expect( options ).to.deep.equal( {
upsert: true,
new: false,
fields: {
[ `${root}.last` ]: 1,
[ `${root}.lastStatus` ]: 1,
[ `${root}.${tok_id}.status` ]: 1,
},
} );
callback( null, {} );
callback( null, given );
},
update() {},
findOne() {},
};
return expect(
new Sut( coll, field, () => timestamp )
.updateToken( did, ns, tok_id, tok_type, data )
).to.eventually.deep.equal( {
id: tok_id,
status: {
type: tok_type,
timestamp: timestamp,
data: data,
},
} );
} );
).to.eventually.deep.equal( expected );
} ) );
it( 'proxies error to callback', () =>
@@ -107,11 +248,12 @@ describe( 'server.token.TokenDao', () =>
const expected_error = Error( "expected error" );
const coll: MongoCollection = {
update( _selector, _data, _options, callback )
findAndModify( _selector, _sort, _update, _options, callback )
{
callback( expected_error, {} );
},
update() {},
findOne() {},
};
@@ -120,7 +262,7 @@ describe( 'server.token.TokenDao', () =>
<DocumentId>0,
<TokenNamespace>'ns',
<TokenId>'id',
'DONE',
TokenState.DONE,
null
)
).to.eventually.be.rejectedWith( expected_error );
@@ -135,11 +277,19 @@ describe( 'server.token.TokenDao', () =>
const ns = <TokenNamespace>'get_ns';
const expected_status: TokenStatus = {
type: 'ACTIVE',
type: TokenState.ACTIVE,
timestamp: <UnixTimestamp>0,
data: "",
};
const last_tok_id = <TokenId>'last-tok';
const last: TokenStatus = {
type: TokenState.DEAD,
timestamp: <UnixTimestamp>4567,
data: "last token",
};
( <[string, TokenId, TokenQueryResult, TokenData|null, any, any][]>[
[
'retrieves token by id',
@@ -147,8 +297,8 @@ describe( 'server.token.TokenDao', () =>
{
[field]: {
[ns]: {
last: <TokenId>'tok123',
lastStatus: expected_status,
last: last_tok_id,
lastStatus: last,
tok123: {
status: expected_status,
@@ -158,8 +308,15 @@ describe( 'server.token.TokenDao', () =>
},
},
{
id: <TokenId>'tok123',
status: expected_status,
id: <TokenId>'tok123',
status: expected_status,
prev_status: expected_status,
prev_last: {
id: last_tok_id,
status: last,
prev_status: null,
prev_last: null,
}
},
null,
null,
@@ -171,8 +328,8 @@ describe( 'server.token.TokenDao', () =>
{
[field]: {
[ns]: {
last: <TokenId>'something',
lastStatus: expected_status,
last: last_tok_id,
lastStatus: last,
// just to make sure we don't grab another tok
othertok: {
@@ -211,10 +368,10 @@ describe( 'server.token.TokenDao', () =>
{
[field]: {
[ns]: {
last: <TokenId>'toklast',
lastStatus: expected_status,
last: last_tok_id,
lastStatus: last,
toklast: {
[ last_tok_id ]: {
status: expected_status,
statusLog: [ expected_status ],
},
@@ -222,8 +379,15 @@ describe( 'server.token.TokenDao', () =>
},
},
{
id: <TokenId>'toklast',
status: expected_status,
id: last_tok_id,
status: last,
prev_status: last,
prev_last: {
id: last_tok_id,
status: last,
prev_status: null,
prev_last: null,
}
},
null,
null,
@@ -262,12 +426,26 @@ describe( 'server.token.TokenDao', () =>
it( label, () =>
{
const coll: MongoCollection = {
findOne( _selector, _fields, callback )
findOne( selector, { fields }, callback )
{
const expected_fields = {
[ `${field}.${ns}.last` ]: 1,
[ `${field}.${ns}.lastStatus` ]: 1,
};
if ( tok_id )
{
expected_fields[ `${field}.${ns}.${tok_id}` ] = 1;
}
expect( fields ).to.deep.equal( expected_fields );
expect( selector ).to.deep.equal( { id: did } );
callback( null, dbresult );
},
update() {},
findAndModify() {},
};
const result = new Sut( coll, field, () => <UnixTimestamp>0 )
@@ -310,6 +488,7 @@ describe( 'server.token.TokenDao', () =>
},
update() {},
findAndModify() {},
};
return expect(