diff --git a/src/document/Document.ts b/src/document/Document.ts
new file mode 100644
index 0000000..0db893a
--- /dev/null
+++ b/src/document/Document.ts
@@ -0,0 +1,37 @@
+/**
+ * Document (quote) interface
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ *
+ * The term "Quote" is synonymous with "Document"; this project is moving
+ * more toward the latter as it is further generalized.
+ */
+
+/**
+ * Document identifier
+ */
+export type DocumentId = NominalType<number, 'DocumentId'>;
+
+
+/**
+ * Quote (Document) id
+ *
+ * Where the term "Quote" is still used, this will allow for type
+ * compatibility and an easy transition.
+ */
+export type QuoteId = DocumentId;
diff --git a/src/server/token/TokenDao.ts b/src/server/token/TokenDao.ts
index c5a7d99..75b7710 100644
--- a/src/server/token/TokenDao.ts
+++ b/src/server/token/TokenDao.ts
@@ -29,6 +29,7 @@ import {
} from "./TokenQueryResult";
import { TokenId, TokenNamespace } from "./Token";
+import { DocumentId } from "../../document/Document";
/**
@@ -91,14 +92,14 @@ export default class TokenDao
* The token entry is entered in the token log, and then the current
* entry is updated to reflect the changes. The operation is atomic.
*
- * @param quote_id unique quote identifier
+ * @param doc_id unique document identifier
* @param ns token namespace
* @param token token value
* @param data token data, if any
* @param status arbitrary token type
*/
updateToken(
- quote_id: number,
+ doc_id: DocumentId,
ns: TokenNamespace,
token_id: TokenId,
type: TokenType,
@@ -126,7 +127,7 @@ export default class TokenDao
return new Promise( ( resolve, reject ) =>
{
this._collection.update(
- { id: +quote_id },
+ { id: +doc_id },
{
$set: token_data,
$push: token_log
@@ -149,19 +150,19 @@ export default class TokenDao
/**
- * Retrieve existing token under the namespace NS, if any, for the quote
- * identified by QUOTE_ID
+ * Retrieve existing token under the namespace NS, if any, for the doc
+ * identified by DOC_ID
*
* If a TOKEN_ID is provided, only that token will be queried; otherwise,
* the most recently created token will be the subject of the query.
*
- * @param quote_id quote identifier
+ * @param doc_id document identifier
* @param ns token namespace
* @param token_id token identifier (unique to NS)
*
* @return token data
*/
- getToken( quote_id: number, ns: TokenNamespace, token_id: TokenId ):
+ getToken( doc_id: DocumentId, ns: TokenNamespace, token_id: TokenId ):
        Promise<TokenData>
{
const root = this._genRoot( ns ) + '.';
@@ -179,7 +180,7 @@ export default class TokenDao
return new Promise( ( resolve, reject ) =>
{
this._collection.findOne(
- { id: +quote_id },
+ { id: +doc_id },
{ fields: fields },
( err: Error|null, data: TokenQueryResult ) =>
{
diff --git a/test/server/token/TokenDaoTest.ts b/test/server/token/TokenDaoTest.ts
index ad06159..c992632 100644
--- a/test/server/token/TokenDaoTest.ts
+++ b/test/server/token/TokenDaoTest.ts
@@ -34,6 +34,8 @@ import {
TokenNamespace,
} from "../../../src/server/token/Token";
+import { DocumentId } from "../../../src/document/Document";
+
import { expect, use as chai_use } from 'chai';
chai_use( require( 'chai-as-promised' ) );
@@ -46,7 +48,7 @@ describe( 'server.token.TokenDao', () =>
it( 'updates token with given data', () =>
{
const field = 'foo_field';
- const qid = 12345;
+ const did = <DocumentId>12345;
const ns = 'namespace';
const tok_id = 'tok123';
const tok_type = 'DONE';
@@ -64,7 +66,7 @@ describe( 'server.token.TokenDao', () =>
data: data,
};
- expect( selector.id ).to.equal( qid );
+ expect( selector.id ).to.equal( did );
expect( given_data ).to.deep.equal( {
$set: {
@@ -87,7 +89,7 @@ describe( 'server.token.TokenDao', () =>
};
return new Sut( coll, field, () => timestamp )
- .updateToken( qid, ns, tok_id, tok_type, data );
+ .updateToken( did, ns, tok_id, tok_type, data );
} );
@@ -106,7 +108,11 @@ describe( 'server.token.TokenDao', () =>
return expect(
new Sut( coll, 'foo', () => 0 ).updateToken(
- 0, 'ns', 'id', 'DONE', null
+ <DocumentId>0,
+ 'ns',
+ 'id',
+ 'DONE',
+ null
)
).to.eventually.be.rejectedWith( expected_error );
} );
@@ -116,7 +122,7 @@ describe( 'server.token.TokenDao', () =>
describe( '#getToken', () =>
{
const field = 'get_field';
- const qid = 12345;
+ const did = <DocumentId>12345;
const ns = 'get_ns';
const expected_status: TokenStatus = {
@@ -212,7 +218,7 @@ describe( 'server.token.TokenDao', () =>
return expect(
new Sut( coll, field, () => 0 )
- .getToken( qid, ns, tok_id )
+ .getToken( did, ns, tok_id )
).to.eventually.deep.equal( expected );
} )
);
@@ -233,7 +239,7 @@ describe( 'server.token.TokenDao', () =>
return expect(
new Sut( coll, 'foo', () => 0 )
- .getToken( 0, 'ns', 'id' )
+ .getToken( <DocumentId>0, 'ns', 'id' )
).to.eventually.be.rejectedWith( expected_error );
} );
} );