diff --git a/src/server/token/Token.ts b/src/server/token/Token.ts
index 75ac6f0..1e29cff 100644
--- a/src/server/token/Token.ts
+++ b/src/server/token/Token.ts
@@ -17,6 +17,9 @@
*
* You should have received a copy of the GNU Affero General Public License
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ *
+ * A token represents a long-running asynchronous process. It was
+ * designed to handle HTTP requests.
*/
@@ -48,3 +51,57 @@ export enum TokenState {
DEAD = "DEAD",
};
+
+/** Tokens that can be killed (placed into a `DEAD` state) */
+export type TokenStateDeadable =
+ TokenState.ACTIVE | TokenState.DONE | TokenState.DEAD;
+
+/** Tokens that can be completed (placed into a `DONE` state) */
+export type TokenStateDoneable = TokenState.ACTIVE;
+
+/** Tokens that can be accepted (placed into an `ACCEPTED` state) */
+export type TokenStateAcceptable = TokenState.DONE;
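+
+// The subsets above induce a small state machine (illustrative sketch;
+// `ACCEPTED` is terminal, and a `DEAD` token may be killed again):
+//
+//   ACTIVE ---> DONE ---> ACCEPTED
+//      |          |
+//      +--> DEAD <+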
+
+
+/**
+ * Request token
+ *
+ * Tokens are basic state machines with a unique identifier, timestamp of
+ * the last state transition, and associated string data.
+ */
+export interface Token<T extends TokenState>
+{
+ /** Token identifier */
+ readonly id: TokenId;
+
+ /** Token state */
+    readonly state: T;
+
+ /** Timestamp of most recent state transition */
+ readonly timestamp: UnixTimestamp;
+
+ /** Data associated with last state transition */
+ readonly data: string | null;
+
+ /**
+     * Whether this token id differs from that of the token last modified
+     * for the given document and namespace, as of the last database operation
+     *
+     * Whether this value is significant depends on the caller. For
+     * example, when a new token is created, this value will always be
+     * `true`, because the last updated token cannot possibly match a
+     * newly generated token id. However, when updating a token, this will
+     * only be `true` if another token in the same namespace for the same
+     * document has been modified since this token was last modified.
+ *
+ * This can be used to determine whether activity on a token should be
+ * ignored. For example, a token that is not the latest may represent a
+ * stale request that should be ignored.
+ *
+     * This value can only be trusted within the context of the most recent
+ * database operation; other processes may have manipulated tokens since
+ * that time.
+ */
+ readonly last_mismatch: boolean;
+}
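+
+// Illustrative sketch (not part of the API): a token whose state is known
+// at compile time can be typed precisely; the casts assume the nominal
+// `TokenId` and `UnixTimestamp` types:
+//
+//   const token: Token<TokenState.ACTIVE> = {
+//       id:            <TokenId>"abc123",
+//       state:         TokenState.ACTIVE,
+//       timestamp:     <UnixTimestamp>1234567890,
+//       data:          null,
+//       last_mismatch: false,
+//   };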
+
diff --git a/src/server/token/TokenStore.ts b/src/server/token/TokenStore.ts
new file mode 100644
index 0000000..3de767e
--- /dev/null
+++ b/src/server/token/TokenStore.ts
@@ -0,0 +1,277 @@
+/**
+ * Token management
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+import {
+ Token,
+ TokenId,
+ TokenNamespace,
+ TokenState,
+ TokenStateAcceptable,
+ TokenStateDeadable,
+ TokenStateDoneable,
+} from "./Token";
+
+import { TokenDao, TokenData } from "./TokenDao";
+import { DocumentId } from "../../document/Document";
+
+
+/**
+ * Token storage
+ *
+ * This store is used to create, read, and modify tokens. Its API is
+ * designed to constrain state transitions at compile-time.
+ *
+ * Stores are initialized with a given namespace, and DAOs are initialized
+ * with a root field. Tokens are collected in namespaces at the document
+ * level. Consequently, a new `TokenStore` must be created for each group
+ * (namespace) of tokens that needs to be operated on.
+ *
+ * A nullary token id generator must be provided. Since it takes no
+ * arguments, it must be nondeterministic to yield unique ids: it must
+ * generate a token id that is unique at the namespace level or higher.
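+ *
+ * e.g. (sketch, assuming some unique string source such as a `uuid`
+ * function):
+ *
+ *   const idgen = () => <TokenId>uuid();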
+ *
+ * The philosophy of this store is that any token within a given namespace
+ * can be updated at any time, but each namespace has a unique "last" token
+ * by document that represents the last token to have been updated within
+ * that context. When performing any operation on that namespace,
+ * information regarding that "last" token will be provided so that the
+ * caller can determine whether other tokens within that same context have
+ * been modified since a given token was last updated, which may indicate
+ * that a token has been superseded by another.
+ *
+ * As an example, consider the following sequence of events within some
+ * namespace "location" for some document 1000:
+ *
+ * 1. A token `A` is created for a request to a service. `last` is updated
+ * to point to `A`.
+ *
+ * 2. The user changes information about the location.
+ *
+ * 3. Another token `B` is created to request information for the new
+ * location data. `last` is updated to point to `B`.
+ *
+ * 4. The response for token `A` returns and `A` is updated.
+ *
+ * 5. The caller for token `A` sees that `last` no longer points to `A` (by
+ *    observing `last_mismatch`), and so ignores the reply, understanding
+ * that `A` is now stale.
+ *
+ * 6. The response for `B` returns and `B` is updated.
+ *
+ * 7. The caller notices that `last_mismatch` is _not_ set, and so
+ * proceeds to continue processing token `B`.
+ *
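+ * An illustrative usage sketch (assuming a `TokenDao` implementation and
+ * hypothetical `dao`, `doc_id`, `response`, and `uuidgen` values, and
+ * ignoring error handling):
+ *
+ *   const store = new TokenStore( dao, <TokenNamespace>'location', uuidgen );
+ *
+ *   store.createToken( doc_id )
+ *       .then( token => store.completeToken( doc_id, token, response ) )
+ *       .then( done  => done.last_mismatch
+ *           ? null   // superseded by another token; ignore
+ *           : store.acceptToken( doc_id, done, null ) );
+ *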
+ * For more information on tokens, see `Token`.
+ */
+export class TokenStore
+{
+ /** Data access layer for underlying token data */
+ private readonly _dao: TokenDao;
+
+ /** Token namespace used for grouping per document */
+ private readonly _token_ns: TokenNamespace;
+
+ /** Token id generator (nullary, nondeterministic) */
+ private readonly _idgen: () => TokenId;
+
+
+ /**
+ * Initialize store
+ *
+ * @param dao data access layer
+ * @param token_ns token namespace
+ * @param idgen token id generator
+ */
+ constructor( dao: TokenDao, token_ns: TokenNamespace, idgen: () => TokenId )
+ {
+ this._dao = dao;
+ this._token_ns = token_ns;
+ this._idgen = idgen;
+ }
+
+
+ /**
+ * Look up an existing token by id
+ *
+ * This looks up the given token id `token_id` for the document
+ * `doc_id`, constrained to this store's namespace.
+ *
+ * The state of the returned token cannot be determined until runtime,
+ * so the caller is responsible for further constraining the type.
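+     *
+     * e.g. (sketch, given a `store` instance): check the state at
+     * runtime, then assert the narrower type:
+     *
+     *   store.lookupToken( doc_id, token_id ).then( token =>
+     *       ( token.state === TokenState.ACTIVE )
+     *           ? store.killToken( doc_id, <Token<TokenState.ACTIVE>>token, null )
+     *           : null
+     *   );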
+ *
+ * @param doc_id document id
+ * @param token_id token id
+ *
+ * @return requested token, if it exists
+ */
+    lookupToken( doc_id: DocumentId, token_id: TokenId ):
+        Promise<Token<TokenState>>
+ {
+ return this._dao.getToken( doc_id, this._token_ns, token_id )
+ .then( data => this._tokenDataToToken( data, data.status.type ) );
+ }
+
+
+ /**
+ * Create a new token for the given document within the store's
+ * namespace
+ *
+ * The returned token will always be `ACTIVE` and will always have
+     * `last_mismatch` set.
+ *
+ * @param doc_id document id
+ */
+    createToken( doc_id: DocumentId ): Promise<Token<TokenState.ACTIVE>>
+ {
+ return this._dao.updateToken(
+ doc_id, this._token_ns, this._idgen(), TokenState.ACTIVE, null
+ )
+ .then( data => this._tokenDataToToken( data, TokenState.ACTIVE ) );
+ }
+
+
+ /**
+ * Convert raw token data to a higher-level `Token`
+ *
+ * The token state must be provided in addition to the token data for
+     * compile-time type checking, where permissible.
+ *
+     * A token will have `last_mismatch` set if the last token before a
+ * database operation does not match `data.id`.
+ *
+ * @param data raw token data
+ * @param state token state
+ *
+ * @return new token
+ */
+    private _tokenDataToToken<T extends TokenState>( data: TokenData, state: T ):
+        Token<T>
+ {
+ return {
+ id: data.id,
+ state: state,
+ timestamp: data.status.timestamp,
+ data: data.status.data,
+            last_mismatch: this._isLastMismatch( data ),
+ };
+ }
+
+
+ /**
+ * Determine whether the given token data represents a mismatch on the
+ * previous last token id
+ *
+     * For more information on what this means, see `Token.last_mismatch`.
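+     *
+     * e.g. (sketch, other fields elided):
+     *
+     *   { id: "a", prev_last: null }        => mismatch (true)
+     *   { id: "a", prev_last: { id: "b" } } => mismatch (true)
+     *   { id: "a", prev_last: { id: "a" } } => match    (false)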
+ *
+ * @param data raw token data
+ */
+    private _isLastMismatch( data: TokenData ): boolean
+ {
+ return ( data.prev_last === null )
+ || ( data.id !== data.prev_last.id );
+ }
+
+
+ /**
+ * Complete a token
+ *
+ * Completing a token places it into a `DONE` state. Only certain
+ * types of tokens can be completed (`TokenStateDoneable`).
+ *
+     * A token in a `DONE` state means that processing has completed and
+     * the token is awaiting acknowledgement from the system responsible
+     * for handling the response.
+ *
+ * @param doc_id document id
+ * @param src token to complete
+ * @param data optional response data
+ *
+ * @return token in `DONE` state
+ */
+    completeToken(
+        doc_id: DocumentId,
+        src:    Token<TokenStateDoneable>,
+        data:   string | null
+    ): Promise<Token<TokenState.DONE>>
+ {
+ return this._dao.updateToken(
+ doc_id, this._token_ns, src.id, TokenState.DONE, data
+ )
+ .then( data => this._tokenDataToToken( data, TokenState.DONE ) );
+ }
+
+
+ /**
+ * Acknowledge a token as accepted
+ *
+ * Accepting a token places it into an `ACCEPTED` state. Only certain
+ * types of tokens can be accepted (`TokenStateAcceptable`).
+ *
+     * A token in an `ACCEPTED` state means that a previously completed
+     * token has been acknowledged and that all resources related to the
+     * processing of the token can be freed.
+ *
+ * @param doc_id document id
+ * @param src token to accept
+ * @param data optional accept reason
+ *
+ * @return token in `ACCEPTED` state
+ */
+    acceptToken(
+        doc_id: DocumentId,
+        src:    Token<TokenStateAcceptable>,
+        data:   string | null
+    ): Promise<Token<TokenState.ACCEPTED>>
+ {
+ return this._dao.updateToken(
+ doc_id, this._token_ns, src.id, TokenState.ACCEPTED, data
+ )
+ .then( data => this._tokenDataToToken( data, TokenState.ACCEPTED ) );
+ }
+
+
+ /**
+ * Kill a token
+ *
+ * Killing a token places it into a `DEAD` state. Only certain types of
+ * tokens can be killed (`TokenStateDeadable`).
+ *
+     * A token in a `DEAD` state means that any processing related to
+     * that token should be aborted.
+ *
+ * @param doc_id document id
+ * @param src token to kill
+ * @param data optional kill reason
+ *
+ * @return token in `DEAD` state
+ */
+    killToken(
+        doc_id: DocumentId,
+        src:    Token<TokenStateDeadable>,
+        data:   string | null
+    ): Promise<Token<TokenState.DEAD>>
+ {
+ return this._dao.updateToken(
+ doc_id, this._token_ns, src.id, TokenState.DEAD, data
+ )
+ .then( data => this._tokenDataToToken( data, TokenState.DEAD ) );
+ }
+}
diff --git a/test/server/token/TokenStoreTest.ts b/test/server/token/TokenStoreTest.ts
new file mode 100644
index 0000000..01a263c
--- /dev/null
+++ b/test/server/token/TokenStoreTest.ts
@@ -0,0 +1,395 @@
+/**
+ * Tests token management
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+import { TokenStore as Sut } from "../../../src/server/token/TokenStore";
+import { TokenDao, TokenData } from "../../../src/server/token/TokenDao";
+import { DocumentId } from "../../../src/document/Document";
+
+import {
+ Token,
+ TokenId,
+ TokenNamespace,
+ TokenState,
+} from "../../../src/server/token/Token";
+
+import { expect, use as chai_use } from 'chai';
+chai_use( require( 'chai-as-promised' ) );
+
+
+describe( 'TokenStore', () =>
+{
+    // the ctor requires an id generator; this name denotes that a
+    // particular test does not use it
+    const voidIdgen = () => <TokenId>"00";
+
+
+ describe( '#lookupToken', () =>
+ {
+        const doc_id   = <DocumentId>5;
+        const ns       = <TokenNamespace>'namespace';
+        const token_id = <TokenId>'token';
+
+        const expected_ts   = <UnixTimestamp>12345;
+        const expected_data = "token data";
+
+        ( <[string, TokenData, Token<TokenState>][]>[
+ [
+ "returns existing token with matching last",
+ {
+ id: token_id,
+
+ status: {
+ type: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: {
+ id: token_id,
+
+ status: {
+ type: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ },
+ },
+ {
+ id: token_id,
+ state: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ last_mismatch: false,
+ },
+ ],
+
+ [
+ "returns existing token with mismatched last",
+ {
+ id: token_id,
+
+ status: {
+ type: TokenState.DEAD,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: {
+                        id: <TokenId>'something-else',
+
+ status: {
+ type: TokenState.DEAD,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ },
+ },
+ {
+ id: token_id,
+ state: TokenState.DEAD,
+ timestamp: expected_ts,
+ data: expected_data,
+ last_mismatch: true,
+ },
+ ],
+ ] ).forEach( ( [ label, dbdata, expected ] ) => it( label, () =>
+ {
+ const dao = new class implements TokenDao
+ {
+ getToken(
+ given_doc_id: DocumentId,
+ given_ns: TokenNamespace,
+ given_token_id: TokenId
+ )
+ {
+ expect( given_doc_id ).to.equal( doc_id );
+ expect( given_ns ).to.equal( ns );
+ expect( given_token_id ).to.equal( token_id );
+
+ return Promise.resolve( dbdata );
+ }
+
+ updateToken()
+ {
+ return Promise.reject( "unused method" );
+ }
+ }();
+
+ return expect(
+ new Sut( dao, ns, voidIdgen )
+ .lookupToken( doc_id, token_id )
+ )
+ .to.eventually.deep.equal( expected );
+ } ) );
+
+
+ it( "propagates database errors", () =>
+ {
+            const doc_id   = <DocumentId>0;
+            const ns       = <TokenNamespace>'badns';
+            const token_id = <TokenId>'badtok';
+
+ const expected_e = new Error( "test error" );
+
+ const dao = new class implements TokenDao
+ {
+ getToken()
+ {
+ return Promise.reject( expected_e );
+ }
+
+ updateToken()
+ {
+ return Promise.reject( "unused method" );
+ }
+ }();
+
+ return expect(
+ new Sut( dao, ns, voidIdgen )
+ .lookupToken( doc_id, token_id )
+ ).to.eventually.be.rejectedWith( expected_e );
+ } );
+ } );
+
+
+ describe( '#createToken', () =>
+ {
+        const doc_id   = <DocumentId>5;
+        const ns       = <TokenNamespace>'namespace';
+        const token_id = <TokenId>'token';
+
+        const expected_ts   = <UnixTimestamp>12345;
+        const expected_data = "token data";
+
+        ( <[string, TokenData, Token<TokenState>][]>[
+ [
+ "creates token with last_mismatch given last",
+ {
+ id: token_id,
+ status: {
+ type: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+
+ prev_last: {
+                        id: <TokenId>'something-else',
+ status: {
+ type: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ },
+ },
+ {
+ id: token_id,
+ state: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ last_mismatch: true,
+ },
+ ],
+
+ [
+ "creates token with last_mismatch given null last",
+ {
+ id: token_id,
+ status: {
+ type: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ },
+ {
+ id: token_id,
+ state: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ last_mismatch: true,
+ },
+ ],
+ ] ).forEach( ( [ label, dbdata, expected ] ) => it( label, () =>
+ {
+ const dao = new class implements TokenDao
+ {
+ getToken()
+ {
+ return Promise.reject( "unused method" );
+ }
+
+ updateToken(
+ given_doc_id: DocumentId,
+ given_ns: TokenNamespace,
+ given_token_id: TokenId,
+ given_type: TokenState,
+ given_data: string | null,
+ )
+ {
+ expect( given_doc_id ).to.equal( doc_id );
+ expect( given_ns ).to.equal( ns );
+ expect( given_token_id ).to.equal( token_id );
+ expect( given_type ).to.equal( TokenState.ACTIVE );
+ expect( given_data ).to.equal( null );
+
+ return Promise.resolve( dbdata );
+ }
+ }();
+
+ return expect(
+ new Sut( dao, ns, () => token_id )
+ .createToken( doc_id )
+ ).to.eventually.deep.equal( expected );
+ } ) );
+ } );
+
+
+    // each of the state changes does the same thing, just behind a
+ // type-restrictive API
+    const expected_ts = <UnixTimestamp>123;
+
+    ( <[keyof Sut, Token<TokenState>, string, Token<TokenState>][]>[
+ [
+ 'completeToken',
+            {
+                id:            <TokenId>'complete-test',
+                state:         TokenState.ACTIVE,
+                timestamp:     <UnixTimestamp>0,
+                data:          "",
+                last_mismatch: true,
+            },
+ "complete-data",
+            {
+                id:            <TokenId>'complete-test',
+                state:         TokenState.DONE,
+                timestamp:     expected_ts,
+                data:          "complete-data",
+                last_mismatch: true,
+            },
+ ],
+
+ [
+ 'acceptToken',
+            {
+                id:            <TokenId>'accept-test',
+                state:         TokenState.DONE,
+                timestamp:     <UnixTimestamp>0,
+                data:          "accept",
+                last_mismatch: true,
+            },
+ "accept-data",
+            {
+                id:            <TokenId>'accept-test',
+                state:         TokenState.ACCEPTED,
+                timestamp:     expected_ts,
+                data:          "accept-data",
+                last_mismatch: true,
+            },
+ ],
+
+ [
+ 'killToken',
+            {
+                id:            <TokenId>'kill-test',
+                state:         TokenState.ACTIVE,
+                timestamp:     <UnixTimestamp>0,
+                data:          "kill",
+                last_mismatch: true,
+            },
+ "kill-data",
+            {
+                id:            <TokenId>'kill-test',
+                state:         TokenState.DEAD,
+                timestamp:     expected_ts,
+                data:          "kill-data",
+                last_mismatch: true,
+            },
+ ],
+ ] ).forEach( ( [ method, token, data, expected ] ) => describe( `#${method}`, () =>
+ {
+        const doc_id = <DocumentId>1234;
+        const ns     = <TokenNamespace>'update-ns';
+
+ it( "changes token state", () =>
+ {
+ const dao = new class implements TokenDao
+ {
+ getToken()
+ {
+ return Promise.reject( "unused method" );
+ }
+
+ updateToken(
+ given_doc_id: DocumentId,
+ given_ns: TokenNamespace,
+ given_token_id: TokenId,
+ given_type: TokenState,
+ given_data: string | null,
+ )
+ {
+ expect( given_doc_id ).to.equal( doc_id );
+ expect( given_ns ).to.equal( ns );
+ expect( given_token_id ).to.equal( token.id );
+ expect( given_type ).to.equal( expected.state );
+ expect( given_data ).to.equal( data );
+
+ return Promise.resolve( {
+ id: token.id,
+ status: {
+ // purposefully hard-coded, since this is ignored
+ type: TokenState.ACTIVE,
+
+ timestamp: expected_ts,
+ data: given_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ } );
+ }
+ }();
+
+ // this discards some type information for the sake of dynamic
+ // dispatch, so it's not testing the state transition
+ // restrictions that are enforced by the compiler
+ return expect(
+ new Sut( dao, ns, voidIdgen )[ method ](
+ doc_id, token, data
+ )
+ ).to.eventually.deep.equal( expected );
+ } );
+ } ) );
+} );