TokenDao, TokenStore: Track most recently created tokens

This is much more useful information than the last modified token.  For
example:

- Token A is created.  It becomes the last modified.
- Token B is created.  It becomes the last modified.
- Token A completes.  Mismatch.  It becomes the last modified.
- Token B completes.  Mismatch.  It becomes the last modified.

So in this case, we're unable to use the flag to determine whether we should
ignore the token.  But if we instead use the new flag to see which token was
last _created_, the problem is solved.
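
As an illustration only (not part of this commit), the caller-side check then
reduces to something like the following sketch; the import path is an
assumption, and `isStale` is a hypothetical helper:

    import { Token, TokenState } from "./src/server/token/Token";  // assumed path

    // Decide whether a completed token's reply should be ignored.
    function isStale( token: Token<TokenState.DONE> ): boolean
    {
        // Old approach: `last_mismatch` tracks the most recently _modified_
        // token, so in the scenario above both A and B see a mismatch when
        // they complete, and the flag cannot tell fresh from stale.
        //
        // New approach: `last_created` is true iff this token is the most
        // recently _created_ (ACTIVE) token, so only A reads as stale.
        return !token.last_created;
    }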

This should have been obvious the first time around.

* src/server/token/MongoTokenDao.ts (updateToken): Query
    `lastState'.  Return its value.  Update its value.
  (getToken): Query lastState.  Return its value.
* src/server/token/Token.ts (Token)[last_state]: New field.
* src/server/token/TokenDao.ts (TokenQueryResult, TokenNamespaceResults):
    Use type instead of interface.
  (TokenStateHistory): New type.
  (TokenNamespaceData)[lastState]: New optional field.
  (TokenData)[prev_state]: New field.
* src/server/token/TokenStore.ts: Return previous state data for each
    method.
* test/server/token/MongoTokenDaoTest.ts: Add last_state.
* test/server/token/TokenStoreTest.ts: Likewise.
master
Mike Gerwitz 2019-10-01 11:39:56 -04:00
parent 0a9a5fe56e
commit c8589a1c57
6 changed files with 184 additions and 32 deletions

src/server/token/MongoTokenDao.ts

@ -26,6 +26,7 @@ import {
TokenNamespaceData,
TokenNamespaceResults,
TokenQueryResult,
TokenStateHistory,
TokenStatus,
} from "./TokenDao";
@ -111,6 +112,7 @@ export class MongoTokenDao implements TokenDao
const token_data = {
[ root + 'last' ]: token_id,
[ root + 'lastState.' + type ]: token_id,
[ root + 'lastStatus' ]: token_entry,
[ root + token_id + '.status' ]: token_entry,
};
@ -133,6 +135,7 @@ export class MongoTokenDao implements TokenDao
new: false,
fields: {
[ root + 'last' ]: 1,
[ root + 'lastState' ]: 1,
[ root + 'lastStatus' ]: 1,
[ root + token_id + '.status' ]: 1,
},
@ -156,6 +159,7 @@ export class MongoTokenDao implements TokenDao
status: token_entry,
prev_status: this._getPrevStatus( prev_ns, token_id ),
prev_last: this._getPrevLast( prev_ns ),
prev_state: this._getPrevState( prev_ns ),
} );
}
);
@ -210,10 +214,31 @@ export class MongoTokenDao implements TokenDao
status: prev_ns.lastStatus,
prev_status: null,
prev_last: null,
prev_state: {},
};
}
/**
* Retrieve previous token states
*
* If token state information is missing, an empty object will be
* returned.
*
* @param prev_ns previous namespace data
*
* @return previous token states
*/
private _getPrevState(
prev_ns: TokenNamespaceData | undefined
): TokenStateHistory
{
return ( !prev_ns || prev_ns.lastState === undefined )
? {}
: prev_ns.lastState;
}
/**
* Retrieve existing token under the namespace NS, if any, for the doc
* identified by DOC_ID
@ -233,7 +258,8 @@ export class MongoTokenDao implements TokenDao
const root = this._genRoot( ns ) + '.';
const fields: any = {};
fields[ root + 'last' ] = 1;
fields[ root + 'last' ] = 1;
fields[ root + 'lastState' ] = 1;
fields[ root + 'lastStatus' ] = 1;
if ( token_id )
@ -323,6 +349,7 @@ export class MongoTokenDao implements TokenDao
status: ns_data.lastStatus,
prev_status: ns_data.lastStatus,
prev_last: this._getPrevLast( ns_data ),
prev_state: this._getPrevState( ns_data ),
};
}
@ -368,6 +395,7 @@ export class MongoTokenDao implements TokenDao
status: reqtok.status,
prev_status: reqtok.status,
prev_last: this._getPrevLast( ns_data ),
prev_state: this._getPrevState( ns_data ),
};
}

src/server/token/Token.ts

@ -103,5 +103,13 @@ export interface Token<T extends TokenState>
* that time.
*/
readonly last_mismatch: boolean;
/**
* Whether this was the most recently created token
*
* This is true iff the last token to have been in the `ACTIVE` status
* shares the same token id.
*/
readonly last_created: boolean;
}

src/server/token/TokenDao.ts

@ -56,19 +56,15 @@ export interface TokenDao
*
* The returned property depends on the actual query.
*/
export interface TokenQueryResult
{
readonly [propName: string]: TokenNamespaceResults | undefined,
}
export type TokenQueryResult = { readonly [P: string]: TokenNamespaceResults | undefined };
/**
* Token data for requested namespaces
*/
export interface TokenNamespaceResults
{
readonly [propName: string]: TokenNamespaceData | undefined,
}
/** Token data for requested namespaces */
export type TokenNamespaceResults = { readonly [P: string]: TokenNamespaceData | undefined };
/** Last token touching various states */
export type TokenStateHistory = { readonly [P in TokenState]?: TokenId };
/**
@ -84,6 +80,16 @@ export interface TokenNamespaceData
*/
readonly last: TokenId,
/**
* Last token id to have touched each state
*
* A field representing the state will only exist if there is a token
* that last touched it.
*
* This value may not exist on older documents.
*/
readonly lastState?: TokenStateHistory,
/**
* Most recent token status
*
@ -98,7 +104,8 @@ export interface TokenNamespaceData
* accommodate the above fields. Anything using this should cast to
* `TokenEntry`.
*/
readonly [propName: string]: TokenEntry | TokenStatus | TokenId | undefined,
readonly [P: string]:
TokenEntry | TokenStateHistory | TokenStatus | TokenId | undefined,
}
@ -185,4 +192,13 @@ export interface TokenData
* (e.g. Mongo's `findAndModify` with `new` set to `false`).
*/
prev_last: TokenData | null,
/**
* Last token id to have touched each state
*
* A field representing the state will only exist if there is a token
* that last touched it. If there are no previous states, the result
* will be an empty object.
*/
prev_state: { [P in TokenState]?: TokenId },
}
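
To make the new shapes concrete, an illustrative namespace fragment and the
`TokenStateHistory` derived from it might look as follows (a sketch, not part
of this commit: ids and timestamps are fabricated, the branded
`TokenId`/`UnixTimestamp` casts are omitted, and `TokenState` is assumed to be
in scope):

    // Namespace data after token "tok1" was created and then completed;
    // per-token entries (`tok1.status`, `tok1.statusLog`) are omitted.
    const ns_fragment = {
        last:       'tok1',                     // most recently modified token
        lastState:  {
            [ TokenState.ACTIVE ]: 'tok1',      // last token created
            [ TokenState.DONE ]:   'tok1',      // last token completed
        },
        lastStatus: { type: TokenState.DONE, timestamp: 1569942000, data: "" },
    };

    // `prev_state` as the DAO would report it: the stored history, or an
    // empty object for older documents that lack `lastState`
    const prev_state = ns_fragment.lastState || {};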

src/server/token/TokenStore.ts

@ -51,33 +51,38 @@ import { DocumentId } from "../../document/Document";
* The philosophy of this store is that any token within a given namespace
* can be updated at any time, but each namespace has a unique "last" token
* by document that represents the last token to have been updated within
* that context. When performing any operation on that namespace,
* information regarding that "last" token will be provided so that the
* caller can determine whether other tokens within that same context have
* been modified since a given token was last updated, which may indicate
* that a token has been superceded by another.
* that context. Also stored is a list of tokens associated with the most
* recent transition to each state. When performing any operation on that
* namespace, information regarding the last tokens will be provided so that
* the caller can determine whether other tokens within that same context
* have been modified since a given token was last updated, which may
* indicate that a token has been superseded by another.
*
* As an example, consider the following sequence of events within some
* namespace "location" for some document 1000:
*
* 1. A token `A` is created for a request to a service. `last` is updated
* to point to `A`.
* to point to `A`. The last `ACTIVE` token is `A`.
*
* 2. The user changes information about the location.
*
* 3. Another token `B` is created to request information for the new
* location data. `last` is updated to point to `B`.
* location data. `last` is updated to point to `B`. The last
* `ACTIVE` token is `B`.
*
* 4. The response for token `A` returns and `A` is updated.
* 4. The response for token `A` returns and `A` is updated. The last
* token in the `DONE` state is `A`.
*
* 5. The caller for token `A` sees that `last` no longer points to `A` (by
* observing `last_mistmatch`), and so ignores the reply, understanding
* that `A` is now stale.
* 5. The caller for token `A` sees that the last `ACTIVE` token no longer
* points to `A` (by observing `last_created`), and so ignores the
* reply, understanding that `A` is now stale.
*
* 6. The response for  `B` returns and `B` is updated.
* 6. The response for  `B` returns and `B` is updated. The last `DONE`
* token is now `B`.
*
* 7. The caller notices that `last_mistmatch` is _not_ set, and so
* proceeds to continue processing token `B`.
* 7. The caller notices that `last_created` _is_ set, and so
* proceeds to continue processing token `B`. The last token in the
* `DONE` state is now `B`.
*
* For more information on tokens, see `Token`.
*/
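
As a rough illustration of the sequence above (a sketch, not part of this
diff), assuming the `createToken`/`completeToken` methods exercised by the
tests in this commit; signatures and reply payloads are approximate:

    // Steps (1)-(7) from the doc comment above.
    async function locationRequestFlow(
        store:   TokenStore,
        reply_a: string,
        reply_b: string,
    )
    {
        const token_a = await store.createToken();   // (1) last ACTIVE token: A
        // (2) the user changes location data
        const token_b = await store.createToken();   // (3) last ACTIVE token: B

        // (4) the response for A returns
        const done_a = await store.completeToken( token_a, reply_a );
        // (5) A is no longer the last created token; its reply is stale
        const ignore_a = !done_a.last_created;

        // (6) the response for B returns
        const done_b = await store.completeToken( token_b, reply_b );
        // (7) B is still the last created token; keep processing it
        const process_b = done_b.last_created;

        return { ignore_a, process_b };
    }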
@ -150,7 +155,9 @@ export class TokenStore
return this._dao.updateToken(
this._doc_id, this._token_ns, this._idgen(), TokenState.ACTIVE, null
)
.then( data => this._tokenDataToToken( data, TokenState.ACTIVE ) );
.then( data => this._tokenDataToToken(
data, TokenState.ACTIVE, true
) );
}
@ -168,7 +175,11 @@ export class TokenStore
*
* @return new token
*/
private _tokenDataToToken<T extends TokenState>( data: TokenData, state: T ):
private _tokenDataToToken<T extends TokenState>(
data: TokenData,
state: T,
created: boolean = false
):
Token<T>
{
return {
@ -177,6 +188,7 @@ export class TokenStore
timestamp: data.status.timestamp,
data: data.status.data,
last_mismatch: this._isLastMistmatch( data ),
last_created: created || this._isLastCreated( data ),
};
}
@ -196,6 +208,20 @@ export class TokenStore
}
/**
* Whether the token represents the most recently created token
*
* @param data raw token data
*
* @return whether token was the most recently created
*/
private _isLastCreated( data: TokenData ): boolean
{
return ( data.prev_state !== undefined )
&& ( data.prev_state[ TokenState.ACTIVE ] === data.id );
}
/**
* Complete a token
*

test/server/token/MongoTokenDaoTest.ts

@ -78,6 +78,9 @@ describe( 'server.token.TokenDao', () =>
[field]: {
[ns]: {
last: last_tok_id,
lastState: {
[ prev.type ]: last_tok_id,
},
lastStatus: {
type: last.type,
timestamp: last.timestamp,
@ -100,12 +103,16 @@ describe( 'server.token.TokenDao', () =>
timestamp: timestamp,
data: data,
},
prev_state: {
[ prev.type ]: last_tok_id,
},
prev_status: prev,
prev_last: {
id: last_tok_id,
status: last,
prev_status: null,
prev_last: null,
prev_state: {},
},
},
},
@ -133,11 +140,13 @@ describe( 'server.token.TokenDao', () =>
data: data,
},
prev_status: null,
prev_state: {},
prev_last: {
id: last_tok_id,
status: last,
prev_status: null,
prev_last: null,
prev_state: {},
},
},
},
@ -158,6 +167,7 @@ describe( 'server.token.TokenDao', () =>
data: data,
},
prev_status: null,
prev_state: {},
prev_last: null,
},
},
@ -176,6 +186,7 @@ describe( 'server.token.TokenDao', () =>
data: data,
},
prev_status: null,
prev_state: {},
prev_last: null,
},
},
@ -192,6 +203,7 @@ describe( 'server.token.TokenDao', () =>
data: data,
},
prev_status: null,
prev_state: {},
prev_last: null,
},
},
@ -210,9 +222,10 @@ describe( 'server.token.TokenDao', () =>
expect( given_data ).to.deep.equal( {
$set: {
[ `${root}.last` ]: tok_id,
[ `${root}.lastStatus` ]: expected_entry,
[ `${root}.${tok_id}.status` ]: expected_entry,
[ `${root}.last` ]: tok_id,
[ `${root}.lastState.${tok_type}` ]: tok_id,
[ `${root}.lastStatus` ]: expected_entry,
[ `${root}.${tok_id}.status` ]: expected_entry,
},
$push: {
[ `${root}.${tok_id}.statusLog` ]: expected_entry,
@ -224,6 +237,7 @@ describe( 'server.token.TokenDao', () =>
new: false,
fields: {
[ `${root}.last` ]: 1,
[ `${root}.lastState` ]: 1,
[ `${root}.lastStatus` ]: 1,
[ `${root}.${tok_id}.status` ]: 1,
},
@ -298,6 +312,10 @@ describe( 'server.token.TokenDao', () =>
[field]: {
[ns]: {
last: last_tok_id,
lastState: {
[ TokenState.ACTIVE ]: last_tok_id,
[ TokenState.DONE ]: last_tok_id,
},
lastStatus: last,
tok123: {
@ -311,11 +329,16 @@ describe( 'server.token.TokenDao', () =>
id: <TokenId>'tok123',
status: expected_status,
prev_status: expected_status,
prev_state: {
[ TokenState.ACTIVE ]: last_tok_id,
[ TokenState.DONE ]: last_tok_id,
},
prev_last: {
id: last_tok_id,
status: last,
prev_status: null,
prev_last: null,
prev_state: {},
}
},
null,
@ -369,6 +392,9 @@ describe( 'server.token.TokenDao', () =>
[field]: {
[ns]: {
last: last_tok_id,
lastState: {
[ TokenState.DEAD ]: last_tok_id,
},
lastStatus: last,
[ last_tok_id ]: {
@ -382,11 +408,15 @@ describe( 'server.token.TokenDao', () =>
id: last_tok_id,
status: last,
prev_status: last,
prev_state: {
[ TokenState.DEAD ]: last_tok_id,
},
prev_last: {
id: last_tok_id,
status: last,
prev_status: null,
prev_last: null,
prev_state: {},
}
},
null,
@ -430,6 +460,7 @@ describe( 'server.token.TokenDao', () =>
{
const expected_fields = {
[ `${field}.${ns}.last` ]: 1,
[ `${field}.${ns}.lastState` ]: 1,
[ `${field}.${ns}.lastStatus` ]: 1,
};

test/server/token/TokenStoreTest.ts

@ -82,6 +82,7 @@ describe( 'TokenStore', () =>
timestamp: expected_ts,
data: expected_data,
last_mismatch: false,
last_created: false,
},
],
@ -116,6 +117,34 @@ describe( 'TokenStore', () =>
timestamp: expected_ts,
data: expected_data,
last_mismatch: true,
last_created: false,
},
],
[
"returns existing token with set last created",
{
id: token_id,
status: {
type: TokenState.DEAD,
timestamp: expected_ts,
data: expected_data,
},
prev_status: null,
prev_last: null,
prev_state: {
[ TokenState.ACTIVE ]: token_id,
},
},
{
id: token_id,
state: TokenState.DEAD,
timestamp: expected_ts,
data: expected_data,
last_mismatch: true,
last_created: true,
},
],
] ).forEach( ( [ label, dbdata, expected ] ) => it( label, () =>
@ -211,6 +240,8 @@ describe( 'TokenStore', () =>
prev_status: null,
prev_last: null,
},
prev_state: {},
},
{
id: token_id,
@ -218,6 +249,7 @@ describe( 'TokenStore', () =>
timestamp: expected_ts,
data: expected_data,
last_mismatch: true,
last_created: true,
},
],
@ -233,6 +265,7 @@ describe( 'TokenStore', () =>
prev_status: null,
prev_last: null,
prev_state: {},
},
{
id: token_id,
@ -240,6 +273,7 @@ describe( 'TokenStore', () =>
timestamp: expected_ts,
data: expected_data,
last_mismatch: true,
last_created: true,
},
],
] ).forEach( ( [ label, dbdata, expected ] ) => it( label, () =>
@ -290,6 +324,7 @@ describe( 'TokenStore', () =>
timestamp: <UnixTimestamp>0,
data: "",
last_mismatch: true,
last_created: true,
},
"complete-data",
{
@ -298,6 +333,7 @@ describe( 'TokenStore', () =>
timestamp: expected_ts,
data: "complete-data",
last_mismatch: true,
last_created: true,
},
],
@ -309,6 +345,7 @@ describe( 'TokenStore', () =>
timestamp: <UnixTimestamp>0,
data: "accept",
last_mismatch: true,
last_created: true,
},
"accept-data",
{
@ -317,6 +354,7 @@ describe( 'TokenStore', () =>
timestamp: expected_ts,
data: "accept-data",
last_mismatch: true,
last_created: true,
},
],
@ -328,6 +366,7 @@ describe( 'TokenStore', () =>
timestamp: <UnixTimestamp>0,
data: "kill",
last_mismatch: true,
last_created: true,
},
"kill-data",
{
@ -336,6 +375,7 @@ describe( 'TokenStore', () =>
timestamp: expected_ts,
data: "kill-data",
last_mismatch: true,
last_created: true,
},
],
] ).forEach( ( [ method, token, data, expected ] ) => describe( `#${method}`, () =>
@ -378,6 +418,9 @@ describe( 'TokenStore', () =>
prev_status: null,
prev_last: null,
prev_state: {
[ TokenState.ACTIVE ]: token.id,
},
} );
}
}();