1
0
Fork 0

TokenDao: "quote"=>"document" with nominal type

* src/document/Document.ts: New file.
* src/server/token/TokenDao.ts: quote=>document and use DocumentId.
* test/server/token/TokenDaoTest.ts: Likewise.
master
Mike Gerwitz 2019-09-10 12:18:51 -04:00
parent 54b3f0db72
commit 37f1b86ac1
3 changed files with 59 additions and 15 deletions

View File

@ -0,0 +1,37 @@
/**
* Document (quote) interface
*
* Copyright (C) 2010-2019 R-T Specialty, LLC.
*
* This file is part of the Liza Data Collection Framework.
*
* liza is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* The term "Quote" is synonymous with "Document"; this project is moving
* more toward the latter as it is further generalized.
*/
/**
 * Document identifier
 *
 * A branded (nominal) numeric id for a document record.  `NominalType`
 * is presumably an ambient project-wide helper that tags `number` with
 * the literal `'DocumentId'`, so a plain `number` cannot be passed where
 * a `DocumentId` is expected without an explicit cast — NOTE(review):
 * confirm where `NominalType` is declared; it is not imported here.
 */
export type DocumentId = NominalType<number, 'DocumentId'>;
/**
 * Quote (Document) id
 *
 * Where the term "Quote" is still used, this will allow for type
 * compatibility and an easy transition.
 *
 * Alias of {@link DocumentId}; the two are interchangeable by design
 * while the codebase migrates terminology from "quote" to "document".
 */
export type QuoteId = DocumentId;

View File

@ -29,6 +29,7 @@ import {
} from "./TokenQueryResult"; } from "./TokenQueryResult";
import { TokenId, TokenNamespace } from "./Token"; import { TokenId, TokenNamespace } from "./Token";
import { DocumentId } from "../../document/Document";
/** /**
@ -91,14 +92,14 @@ export default class TokenDao
* The token entry is entered in the token log, and then the current * The token entry is entered in the token log, and then the current
* entry is updated to reflect the changes. The operation is atomic. * entry is updated to reflect the changes. The operation is atomic.
* *
* @param quote_id unique quote identifier * @param doc_id unique document identifier
* @param ns token namespace * @param ns token namespace
* @param token token value * @param token token value
* @param data token data, if any * @param data token data, if any
* @param status arbitrary token type * @param status arbitrary token type
*/ */
updateToken( updateToken(
quote_id: number, doc_id: DocumentId,
ns: TokenNamespace, ns: TokenNamespace,
token_id: TokenId, token_id: TokenId,
type: TokenType, type: TokenType,
@ -126,7 +127,7 @@ export default class TokenDao
return new Promise( ( resolve, reject ) => return new Promise( ( resolve, reject ) =>
{ {
this._collection.update( this._collection.update(
{ id: +quote_id }, { id: +doc_id },
{ {
$set: token_data, $set: token_data,
$push: token_log $push: token_log
@ -149,19 +150,19 @@ export default class TokenDao
/** /**
* Retrieve existing token under the namespace NS, if any, for the quote * Retrieve existing token under the namespace NS, if any, for the doc
* identified by QUOTE_ID * identified by DOC_ID
* *
* If a TOKEN_ID is provided, only that token will be queried; otherwise, * If a TOKEN_ID is provided, only that token will be queried; otherwise,
* the most recently created token will be the subject of the query. * the most recently created token will be the subject of the query.
* *
* @param quote_id quote identifier * @param doc_id document identifier
* @param ns token namespace * @param ns token namespace
* @param token_id token identifier (unique to NS) * @param token_id token identifier (unique to NS)
* *
* @return token data * @return token data
*/ */
getToken( quote_id: number, ns: TokenNamespace, token_id: TokenId ): getToken( doc_id: DocumentId, ns: TokenNamespace, token_id: TokenId ):
Promise<TokenData|null> Promise<TokenData|null>
{ {
const root = this._genRoot( ns ) + '.'; const root = this._genRoot( ns ) + '.';
@ -179,7 +180,7 @@ export default class TokenDao
return new Promise( ( resolve, reject ) => return new Promise( ( resolve, reject ) =>
{ {
this._collection.findOne( this._collection.findOne(
{ id: +quote_id }, { id: +doc_id },
{ fields: fields }, { fields: fields },
( err: Error|null, data: TokenQueryResult ) => ( err: Error|null, data: TokenQueryResult ) =>
{ {

View File

@ -34,6 +34,8 @@ import {
TokenNamespace, TokenNamespace,
} from "../../../src/server/token/Token"; } from "../../../src/server/token/Token";
import { DocumentId } from "../../../src/document/Document";
import { expect, use as chai_use } from 'chai'; import { expect, use as chai_use } from 'chai';
chai_use( require( 'chai-as-promised' ) ); chai_use( require( 'chai-as-promised' ) );
@ -46,7 +48,7 @@ describe( 'server.token.TokenDao', () =>
it( 'updates token with given data', () => it( 'updates token with given data', () =>
{ {
const field = 'foo_field'; const field = 'foo_field';
const qid = 12345; const did = <DocumentId>12345;
const ns = <TokenNamespace>'namespace'; const ns = <TokenNamespace>'namespace';
const tok_id = <TokenId>'tok123'; const tok_id = <TokenId>'tok123';
const tok_type = 'DONE'; const tok_type = 'DONE';
@ -64,7 +66,7 @@ describe( 'server.token.TokenDao', () =>
data: data, data: data,
}; };
expect( selector.id ).to.equal( qid ); expect( selector.id ).to.equal( did );
expect( given_data ).to.deep.equal( { expect( given_data ).to.deep.equal( {
$set: { $set: {
@ -87,7 +89,7 @@ describe( 'server.token.TokenDao', () =>
}; };
return new Sut( coll, field, () => timestamp ) return new Sut( coll, field, () => timestamp )
.updateToken( qid, ns, tok_id, tok_type, data ); .updateToken( did, ns, tok_id, tok_type, data );
} ); } );
@ -106,7 +108,11 @@ describe( 'server.token.TokenDao', () =>
return expect( return expect(
new Sut( coll, 'foo', () => <UnixTimestamp>0 ).updateToken( new Sut( coll, 'foo', () => <UnixTimestamp>0 ).updateToken(
0, <TokenNamespace>'ns', <TokenId>'id', 'DONE', null <DocumentId>0,
<TokenNamespace>'ns',
<TokenId>'id',
'DONE',
null
) )
).to.eventually.be.rejectedWith( expected_error ); ).to.eventually.be.rejectedWith( expected_error );
} ); } );
@ -116,7 +122,7 @@ describe( 'server.token.TokenDao', () =>
describe( '#getToken', () => describe( '#getToken', () =>
{ {
const field = 'get_field'; const field = 'get_field';
const qid = 12345; const did = <DocumentId>12345;
const ns = <TokenNamespace>'get_ns'; const ns = <TokenNamespace>'get_ns';
const expected_status: TokenStatus = { const expected_status: TokenStatus = {
@ -212,7 +218,7 @@ describe( 'server.token.TokenDao', () =>
return expect( return expect(
new Sut( coll, field, () => <UnixTimestamp>0 ) new Sut( coll, field, () => <UnixTimestamp>0 )
.getToken( qid, ns, tok_id ) .getToken( did, ns, tok_id )
).to.eventually.deep.equal( expected ); ).to.eventually.deep.equal( expected );
} ) } )
); );
@ -233,7 +239,7 @@ describe( 'server.token.TokenDao', () =>
return expect( return expect(
new Sut( coll, 'foo', () => <UnixTimestamp>0 ) new Sut( coll, 'foo', () => <UnixTimestamp>0 )
.getToken( 0, <TokenNamespace>'ns', <TokenId>'id' ) .getToken( <DocumentId>0, <TokenNamespace>'ns', <TokenId>'id' )
).to.eventually.be.rejectedWith( expected_error ); ).to.eventually.be.rejectedWith( expected_error );
} ); } );
} ); } );