Server-side Data API support
The Data API stuff is pretty well coupled to the concept of a UI, and the code is a mess that was just extracted from Program.js, so this isn't the prettiest implementation. I hope that we'll have more time for it in the somewhat-near future, since this is going to be core to all server-side network requests for data.
commit
35af4bdf19
|
@ -21,7 +21,10 @@
|
|||
MAKEINFOHTML = $(MAKEINFO) --html --css-include liza.css
|
||||
|
||||
info_TEXINFOS = liza.texi
|
||||
liza_TEXINFOS = design.texi macros.texi config.texi liza.css \
|
||||
liza_TEXINFOS = macros.texi config.texi liza.css \
|
||||
design.texi assert.texi bucket.texi client.texi \
|
||||
dapi.texi pred.texi program.texi server.texi \
|
||||
validation.texi \
|
||||
diagram/event-graph.svg
|
||||
|
||||
diagram/%.svg: diagram/%.dot
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
@c This document is part of the Liza Data Collection Framework manual.
|
||||
@c Copyright (C) 2017 R-T Specialty, LLC.
|
||||
@c
|
||||
@c Permission is granted to copy, distribute and/or modify this document
|
||||
@c under the terms of the GNU Free Documentation License, Version 1.3
|
||||
@c or any later version published by the Free Software Foundation;
|
||||
@c with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
|
||||
@c Texts. A copy of the license is included in the section entitled ``GNU
|
||||
@c Free Documentation License''.
|
||||
|
||||
@node Assertions
|
||||
@chapter Assertions
|
||||
@maintstart
|
||||
@footnote{
|
||||
Assertions are compiled from the Program XML (@pxref{Program XML}).
|
||||
Rather than using a library,
|
||||
it compiles a mess of largely duplicate code inline.
|
||||
This system needs to be @emph{replaced},
|
||||
not modified.
|
||||
|
||||
A replacement can either be in the form of a library (removing most
|
||||
if not all code generation from the Program XML compiler),
|
||||
or possibly compile into classifications and use the classification
|
||||
system.
|
||||
@emph{The latter option is preferred,
|
||||
and would be more powerful with less maintenance.}}
|
||||
@maintend
|
||||
|
||||
@helpwanted
|
|
@ -0,0 +1,36 @@
|
|||
@c This document is part of the Liza Data Collection Framework manual.
|
||||
@c Copyright (C) 2017 R-T Specialty, LLC.
|
||||
@c
|
||||
@c Permission is granted to copy, distribute and/or modify this document
|
||||
@c under the terms of the GNU Free Documentation License, Version 1.3
|
||||
@c or any later version published by the Free Software Foundation;
|
||||
@c with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
|
||||
@c Texts. A copy of the license is included in the section entitled ``GNU
|
||||
@c Free Documentation License''.
|
||||
|
||||
@node Bucket
|
||||
@chapter Bucket
|
||||
@helpwanted
|
||||
|
||||
@menu
|
||||
* Value Assignment:Bucket Assignment. Writing data to the Bucket.
|
||||
* Bucket Diff:: Representing bucket changes.
|
||||
* Calculated Values:: Dynamic data derived from other values.
|
||||
@end menu
|
||||
|
||||
|
||||
@c TODO
|
||||
@node Bucket Assignment
|
||||
@section Bucket Value Assignment
|
||||
@helpwanted
|
||||
|
||||
|
||||
@node Bucket Diff
|
||||
@section Bucket Diff
|
||||
@cindex Bucket diff
|
||||
@helpwanted
|
||||
|
||||
|
||||
@node Calculated Values
|
||||
@section Calculated Values
|
||||
@helpwanted
|
|
@ -0,0 +1,98 @@
|
|||
@c This document is part of the Liza Data Collection Framework manual.
|
||||
@c Copyright (C) 2017 R-T Specialty, LLC.
|
||||
@c
|
||||
@c Permission is granted to copy, distribute and/or modify this document
|
||||
@c under the terms of the GNU Free Documentation License, Version 1.3
|
||||
@c or any later version published by the Free Software Foundation;
|
||||
@c with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
|
||||
@c Texts. A copy of the license is included in the section entitled ``GNU
|
||||
@c Free Documentation License''.
|
||||
|
||||
@node Client
|
||||
@chapter Client
|
||||
@maintstart
|
||||
@footnote{
|
||||
The client is largely managed by a single class,
|
||||
@srcref{src/client/Client.js, Client},
|
||||
which has grown out of control.
|
||||
@code{Client} mediates essentially the entire system.
|
||||
Code is to be extracted out of this class as it is touched.
|
||||
|
||||
The other system mammoth is @code{Ui} (@pxref{Program UI}).}
|
||||
@maintend
|
||||
|
||||
@helpwanted
|
||||
|
||||
@menu
|
||||
* Error Handling::
|
||||
@end menu
|
||||
|
||||
|
||||
@node Error Handling
|
||||
@section Error Handling
|
||||
|
||||
@maintenance{
|
||||
The complexity of this system and integration into legacy
|
||||
layers has caused maintenance trouble in the past.
|
||||
Each of the error checking layers need to be integrated
|
||||
to reduce complexity.}
|
||||
|
||||
@cindex Error
|
||||
@cindex Failure
|
||||
There are three layers of error checking:@footnote{
|
||||
Primarily for legacy reasons.
|
||||
They are being consolidated as the system is touched.}
|
||||
|
||||
@enumerate
|
||||
@item Required field checking@mdash{
|
||||
}whether all required questions have been answered.
|
||||
@item Type Validation@mdash{
|
||||
}verify that questions contain valid data according to their
|
||||
declared type. @ref{Validation}.
|
||||
@item Assertions@mdash{
|
||||
}arbitrary checks on data.
|
||||
@ref{Assertions}.
|
||||
@end enumerate
|
||||
|
||||
@cindex Required Field
|
||||
@cindex Field, Required
|
||||
@cindex Field, Fixed
|
||||
@cindex Error, Fixed
|
||||
@cindex Error, Required
|
||||
@cindex Fixed, Error
|
||||
Required fields fail serially@mdash{
|
||||
}the system will notify the user of the required field,
|
||||
and direct him/her to it (usually through scrolling).
|
||||
A field is marked as @dfn{fixed} according to the rules
|
||||
in @ref{Managing Error State}.
|
||||
|
||||
|
||||
@menu
|
||||
* Managing Error State:: Determining when failures should be marked as ``fixed''
|
||||
@end menu
|
||||
|
||||
|
||||
@node Managing Error State
|
||||
@subsection Managing Error State
|
||||
|
||||
@cindex Failure Stack
|
||||
@cindex Error Stack
|
||||
Each failure caused by assertions is associated with a
|
||||
@dfn{failure stack}.
|
||||
The stack represents the trail of assertions that have run,
|
||||
containing the ids of all values asserted against.
|
||||
When any field or classification changes that is represented on the
|
||||
failure stack,
|
||||
the failure for the failed field associated with that failure stack
|
||||
is cleared.
|
||||
|
||||
@exnotice{
|
||||
If an assertion for some question @var{foo} first checked the value
|
||||
of bucket field @var{bar},
|
||||
and within its failure checked the value @var{c:predicate},
|
||||
the failure stack would contain both of those ids.
|
||||
If either @var{bar} or the @var{predicate} classification changed,
|
||||
the question @var{foo} would have its error cleared.}
|
||||
|
||||
Error state is managed by
|
||||
@srcref{src/validate/ValidStateMonitor.js, ValidStateMonitor}.
|
|
@ -0,0 +1,149 @@
|
|||
@c This document is part of the Liza Data Collection Framework manual.
|
||||
@c Copyright (C) 2017 R-T Specialty, LLC.
|
||||
@c
|
||||
@c Permission is granted to copy, distribute and/or modify this document
|
||||
@c under the terms of the GNU Free Documentation License, Version 1.3
|
||||
@c or any later version published by the Free Software Foundation;
|
||||
@c with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
|
||||
@c Texts. A copy of the license is included in the section entitled ``GNU
|
||||
@c Free Documentation License''.
|
||||
|
||||
@node Data API
|
||||
@chapter Data API
|
||||
@maintenance{This is a complex system with too much logic lying in
|
||||
@srcrefjs{dapi,DataApiManager} (having been extracted
|
||||
from its old home in @srcrefjs{program,Program}).}
|
||||
|
||||
@helpwanted{}
|
||||
|
||||
The @dfn{Data API} is a declarative abstraction for accessing and
|
||||
processing remote data (e.g. a RESTful service).
|
||||
The name stems from how it is used@mdash{
|
||||
}to declare a remote API's inputs and outputs.
|
||||
|
||||
This system is generally used indirectly through the @progxmlref{}.@footnote{
|
||||
@proguicxref{Data API}.}
|
||||
|
||||
@tip{All interaction with this system should be had through the
|
||||
@srcrefjs{dapi,DataApiManager}.}
|
||||
|
||||
The @srcrefjs{dapi,DataApiManager} manages the entire operation@mdash{
|
||||
}from triggering the initial request,
|
||||
to performing mapping,
|
||||
to populating bucket data.
|
||||
It takes only a @srcrefjs{dapi,DataApiFactory} and @dapi{} definitions.
|
||||
|
||||
Definitions have the following schema:@footnote{
|
||||
There are poor design decisions that will likely persist
|
||||
indefinitely because of integration with other systems,
|
||||
so future extensions may be messy (especially in the case of
|
||||
@samp{retvals}).
|
||||
}
|
||||
|
||||
@verbatim
|
||||
{
|
||||
"type": "string",
|
||||
"source": "string",
|
||||
"method": "string",
|
||||
"params": {
|
||||
["string(name)"]: {
|
||||
"name": "string(name)",
|
||||
"default": {
|
||||
"type": "string",
|
||||
"value": "string"
|
||||
},
|
||||
...
|
||||
},
|
||||
},
|
||||
"retvals": [ "string", ... ],
|
||||
"static": [
|
||||
{
|
||||
["string(param)"]: "string",
|
||||
...
|
||||
},
|
||||
...
|
||||
],
|
||||
"static_nonempty": boolean,
|
||||
"static_multiple": boolean
|
||||
}
|
||||
@end verbatim
|
||||
|
||||
Each of the above fields are defined by:
|
||||
|
||||
@table @code
|
||||
@item type
|
||||
Any type supported by @srcrefjs{dapi,DataApiFactory} (e.g. @samp{rest}).
|
||||
|
||||
@item source
|
||||
Type-specific source of data.
|
||||
For e.g. @samp{rest}, this is a URI.
|
||||
|
||||
@item method
|
||||
Type-specific method for interacting with the API.
|
||||
For e.g. @samp{rest}, this is an HTTP@tie{}method.
|
||||
|
||||
@item params
|
||||
Key-value mapping of input parameter names (as received by @samp{source})
|
||||
to their default values.
|
||||
These inputs must be populated by the caller at the time of the request.
|
||||
|
||||
@item retvals
|
||||
Array of fields returned by the data source.
|
||||
|
||||
@item static
|
||||
Static values to prepend to the returned data.
|
||||
This is often used for adding ``please select'' text, for example.
|
||||
|
||||
@item static_nonempty
|
||||
Whether statics should be added when there is return data;
|
||||
otherwise,
|
||||
they will be added only if the response yields no results.
|
||||
|
||||
@item static_multiple
|
||||
Whether statics should be added only if multiple data are returned.
|
||||
For example,
|
||||
a ``please select'' is only useful if there is more than one
|
||||
option for the user to select from.
|
||||
When @samp{true},
|
||||
this has the convenient side-effect of auto-selecting the only
|
||||
result.
|
||||
@end table
|
||||
|
||||
An example definition appears in @ref{f:dapi-ex}.
|
||||
|
||||
@float Figure, f:dapi-ex
|
||||
@example
|
||||
@{
|
||||
"type": "rest",
|
||||
"source": "/foo/city",
|
||||
"method": "post",
|
||||
"params": @{
|
||||
"getVal": @{
|
||||
"name": "getVal",
|
||||
"default": @{
|
||||
"type": "string",
|
||||
"value": "getCityOptions"
|
||||
@}
|
||||
@},
|
||||
"zipcode": @{
|
||||
"name": "zipcode",
|
||||
"default": @{
|
||||
"type": "ref",
|
||||
"value": ""
|
||||
@}
|
||||
@}
|
||||
@},
|
||||
"retvals": [ "city", "id", "state", "county", "country" ],
|
||||
"static": [ @{
|
||||
"city": "(Please Select)",
|
||||
"id": "",
|
||||
"state": "",
|
||||
"county": "",
|
||||
"country": ""
|
||||
@} ],
|
||||
"static_nonempty": false,
|
||||
"static_multiple": true
|
||||
@}
|
||||
@end example
|
||||
@caption{Example @dapi{} definition}
|
||||
@end float
|
450
doc/design.texi
450
doc/design.texi
|
@ -34,10 +34,6 @@ The main components of the system are:
|
|||
and provides hooks that drive the rest of the system.
|
||||
@xref{Bucket}.
|
||||
|
||||
@cindex Calculated Value
|
||||
@item Calculated Values
|
||||
A small sub-system for calculating bucket values from other values.
|
||||
|
||||
@cindex Client
|
||||
@item Client
|
||||
Basic logic for navigating between steps,
|
||||
|
@ -47,6 +43,12 @@ The main components of the system are:
|
|||
etc.
|
||||
@xref{Client}.
|
||||
|
||||
@cindex Data API
|
||||
@item Data API
|
||||
Declarative abstraction for accessing and processing remote data
|
||||
(e.g. a RESTful service).
|
||||
@xref{Data API}.
|
||||
|
||||
@cindex Developer Dialog
|
||||
@item Developer Dialog
|
||||
Renders information about the system for debugging the client.
|
||||
|
@ -91,6 +93,7 @@ The main components of the system are:
|
|||
and other types of processing.
|
||||
Code is shared with the client,
|
||||
ensuring identical behavior where appropriate.
|
||||
@xref{Server}.
|
||||
|
||||
@cindex Type Validation
|
||||
@cindex Validation, Type
|
||||
|
@ -102,441 +105,4 @@ The main components of the system are:
|
|||
@xref{Validation}.
|
||||
@end table
|
||||
|
||||
More information about each can be found in their respective section.
|
||||
|
||||
|
||||
@menu
|
||||
* Assertions::
|
||||
* Bucket::
|
||||
* Client::
|
||||
* Predicate System::
|
||||
* Program::
|
||||
* Program UI::
|
||||
* Program XML::
|
||||
* Validation::
|
||||
@end menu
|
||||
|
||||
|
||||
|
||||
@node Assertions
|
||||
@section Assertions
|
||||
@maintstart
|
||||
@footnote{
|
||||
Assertions are compiled from the Program XML (@pxref{Program XML}).
|
||||
Rather than using a library,
|
||||
it compiles a mess of largely duplicate code inline.
|
||||
This system needs to be @emph{replaced},
|
||||
not modified.
|
||||
|
||||
A replacement can either be in the form of a library (removing most
|
||||
if not all code generation from the Program XML compiler),
|
||||
or possibly compile into classifications and use the classification
|
||||
system.
|
||||
@emph{The latter option is preferred,
|
||||
and would be more powerful with less maintenance.}}
|
||||
@maintend
|
||||
|
||||
@helpwanted
|
||||
|
||||
|
||||
|
||||
@node Bucket
|
||||
@section Bucket
|
||||
@helpwanted
|
||||
|
||||
@menu
|
||||
* Value Assignment:Bucket Assignment. Writing data to the Bucket.
|
||||
@end menu
|
||||
|
||||
|
||||
@c TODO
|
||||
@node Bucket Assignment
|
||||
@subsection Bucket Value Assignment
|
||||
@helpwanted
|
||||
|
||||
|
||||
|
||||
@node Client
|
||||
@section Client
|
||||
@maintstart
|
||||
@footnote{
|
||||
The client is largely managed by a single class,
|
||||
@srcref{src/client/Client.js, Client},
|
||||
which has grown out of control.
|
||||
@code{Client} mediates essentially the entire system.
|
||||
Code is to be extracted out of this class as it is touched.
|
||||
|
||||
The other system mammoth is @code{Ui} (@pxref{Program UI}).}
|
||||
@maintend
|
||||
|
||||
@helpwanted
|
||||
|
||||
@menu
|
||||
* Error Handling::
|
||||
@end menu
|
||||
|
||||
|
||||
@node Error Handling
|
||||
@subsection Error Handling
|
||||
|
||||
@maintenance{
|
||||
The complexity of this system and integration into legacy
|
||||
layers has caused maintenance trouble in the past.
|
||||
Each of the error checking layers need to be integrated
|
||||
to reduce complexity.}
|
||||
|
||||
@cindex Error
|
||||
@cindex Failure
|
||||
There are three layers of error checking:@footnote{
|
||||
Primarily for legacy reasons.
|
||||
They are being consolidated as the system is touched.}
|
||||
|
||||
@enumerate
|
||||
@item Required field checking@mdash{
|
||||
}whether all required questions have been answered.
|
||||
@item Type Validation@mdash{
|
||||
}verify that questions contain valid data according to their
|
||||
declared type. @ref{Validation}.
|
||||
@item Assertions@mdash{
|
||||
}arbitrary checks on data.
|
||||
@ref{Assertions}.
|
||||
@end enumerate
|
||||
|
||||
@cindex Required Field
|
||||
@cindex Field, Required
|
||||
@cindex Field, Fixed
|
||||
@cindex Error, Fixed
|
||||
@cindex Error, Required
|
||||
@cindex Fixed, Error
|
||||
Required fields fail serially@mdash{
|
||||
}the system will notify the user of the required field,
|
||||
and direct him/her to it (usually through scrolling).
|
||||
A field is marked as @dfn{fixed} according to the rules
|
||||
in @ref{Managing Error State}.
|
||||
|
||||
|
||||
@menu
|
||||
* Managing Error State:: Determining when failures should be marked as ``fixed''
|
||||
@end menu
|
||||
|
||||
|
||||
@node Managing Error State
|
||||
@subsubsection Managing Error State
|
||||
|
||||
@cindex Failure Stack
|
||||
@cindex Error Stack
|
||||
Each failure caused by assertions is associated with a
|
||||
@dfn{failure stack}.
|
||||
The stack represents the trail of assertions that have run,
|
||||
containing the ids of all values asserted against.
|
||||
When any field or classification changes that is represented on the
|
||||
failure stack,
|
||||
the failure for the failed field associated with that failure stack
|
||||
is cleared.
|
||||
|
||||
@exnotice{
|
||||
If an assertion for some question @var{foo} first checked the value
|
||||
of bucket field @var{bar},
|
||||
and within its failure checked the value @var{c:predicate},
|
||||
the failure stack would contain both of those ids.
|
||||
If either @var{bar} or the @var{predicate} classification changed,
|
||||
the question @var{foo} would have its error cleared.}
|
||||
|
||||
Error state is managed by
|
||||
@srcref{src/validate/ValidStateMonitor.js, ValidStateMonitor}.
|
||||
|
||||
|
||||
|
||||
@node Predicate System
|
||||
@section Predicate System
|
||||
@maintstart
|
||||
@footnote{
|
||||
New programs (using the old incarnation of TAME) use the classifier
|
||||
embedded into the rater by TAME.
|
||||
Old ones, however, still use the @dfn{Global Classifier}.
|
||||
This system isn't as well tested as TAME's@mdash{
|
||||
}which needs to work properly for the sake of calculating premium@mdash{
|
||||
}and has suffered from a number of bugs in the past.
|
||||
|
||||
The solution is to migrate all programs to TAME and remove that old
|
||||
code.}
|
||||
@maintend
|
||||
|
||||
@tip{
|
||||
For a practical application of these concepts, see its use in the
|
||||
Program@tie{}XML (@pxref{Specifying Predicates}).}
|
||||
|
||||
@cindex Predicate
|
||||
@cindex Classifier
|
||||
@cindex Applicability
|
||||
@cindex Domain of discourse, Predicate
|
||||
The @dfn{predicate system} determines the @dfn{applicability} of
|
||||
certain objects (like questions and assertions) by associating them
|
||||
with predicates.
|
||||
The domain of discourse (variables which may be quantified) is listed
|
||||
in @ref{t:predicate-dod}.
|
||||
|
||||
What it means for some object to be applicable depends on the context.
|
||||
|
||||
@float Table, t:predicate-dod
|
||||
@multitable @columnfractions 0.25 0.10 0.65
|
||||
@headitem Type @tab Prefix @tab Description
|
||||
|
||||
@item Classifications
|
||||
@tab @emph{None}
|
||||
@tab Results of applying an external @dfn{classifier} to the bucket
|
||||
(@pxref{Bucket}).
|
||||
|
||||
@item Bucket Truth Predicate
|
||||
@tab @code{q:}
|
||||
@tab
|
||||
Whether the given name in the bucket (@pxref{Bucket}) is
|
||||
non-empty and non-zero.
|
||||
The prefix @samp{q:} refers to its most common use
|
||||
case---questions (@pxref{Program UI,,Program@tie{}UI}).
|
||||
@end multitable
|
||||
@caption{Predicate system domain of discourse}
|
||||
@end float
|
||||
|
||||
This system is limited to universal quantification over the domain of
|
||||
discourse.
|
||||
For other quantifiers and higher-order logic,
|
||||
defer to one of the systems that contributes to the domain of
|
||||
discourse,
|
||||
like the classifier.@footnote{
|
||||
This is usually TAME.
|
||||
The Program XML also supports inline classifications
|
||||
with TAME's syntax (@pxref{Specifying Predicates}).}
|
||||
|
||||
Predicates are usually specified in the Program XML
|
||||
(@pxref{Specifying Predicates}) and compiled into the program
|
||||
(@pxref{Program}).
|
||||
|
||||
|
||||
|
||||
@node Program
|
||||
@section Program
|
||||
@maintstart
|
||||
@footnote{
|
||||
The @code{Program} class was one of the first prototypes created,
|
||||
and has evolved poorly with the rest of the system.
|
||||
It is the base class for all compiled programs,
|
||||
and it glues together too many other systems with a terrible
|
||||
API and little to no encapsulation.
|
||||
|
||||
With that said, it is one of the least touched classes (thus its
|
||||
state); developers rarely have the need to touch @code{Program}.}
|
||||
@maintend
|
||||
|
||||
|
||||
@cindex Program
|
||||
The @dfn{Program} is a declarative representation of an entire system.
|
||||
It is the highest level of abstraction from a data perspective.
|
||||
The user observes and interacts with a Program using the
|
||||
@ref{Program UI,,Program@tie{}UI}.
|
||||
|
||||
@cindex Program, XML
|
||||
Programs contain a lot of metadata that is not in a convenient
|
||||
human-readable (or modifiable) format,
|
||||
some of which are redundant.
|
||||
Programs are ideally compiled from a @ref{Program XML,,Program@tie{}XML}
|
||||
document.
|
||||
|
||||
|
||||
|
||||
@node Program UI
|
||||
@section Program UI
|
||||
@maintenance{
|
||||
The @code{Ui} class,
|
||||
in addition to @srcref{src/client/Client,Client} (@pxref{Client}),
|
||||
represent the two monoliths of the system.
|
||||
This mediates all UI-related tasks,
|
||||
and still has far too many concerns with far too many
|
||||
dependencies.
|
||||
Code is to be extracted out of this class as it is touched.
|
||||
}
|
||||
|
||||
|
||||
@cindex Program, User Interface
|
||||
@cindex User Interface, Program
|
||||
The @dfn{Program UI} renders a @ref{Program} as a form.
|
||||
|
||||
@cindex Step
|
||||
@cindex Group
|
||||
At the highest level,
|
||||
steps are rendered in a tab-like manner,
|
||||
above the main form content.
|
||||
A step contains groups,
|
||||
which in turn contain elements such as questions.
|
||||
Groups are delimited in some manner defined by their style
|
||||
(@pxref{Group Styles}).
|
||||
|
||||
@cindex Question
|
||||
@cindex Question, Value Formatting
|
||||
@cindex Bucket, Updating
|
||||
Questions are rendered as form fields.
|
||||
Any time the respective @ref{Bucket} field is changed,
|
||||
the form field is updated to reflect those changes,
|
||||
after having first been formatted with the appropriate validator
|
||||
(@pxref{Formatting Values}).
|
||||
When a question is changed by the user,
|
||||
the value is expected to be propagated to the Bucket
|
||||
(@pxref{Bucket Assignment}).
|
||||
|
||||
@cindex Navigation Bar
|
||||
@cindex User Interface, Navigation Bar
|
||||
@cindex User Interface, Button Navigation
|
||||
Navigation between steps can be done via the
|
||||
@dfn{Navigation Bar} above the step@tie{}content,
|
||||
or using ``Go@tie{}Back'' and ``Continue'' buttons at the foot of the
|
||||
step@tie{}content.
|
||||
|
||||
@cindex Sidebar
|
||||
A @dfn{Sidebar} is rendered adjacent to the step content.
|
||||
It displays the name of the Program,
|
||||
as well as configurable metadata (usually through the @samp{sidebar}
|
||||
node of the @ref{Program XML,,Program@tie{}XML}).
|
||||
It also displays question help text (also configured through the XML)
|
||||
and any error messages (@pxref{Error Handling}).
|
||||
|
||||
@menu
|
||||
* Group Styles:: Different ways of displaying groups of questions to
|
||||
the user.
|
||||
@end menu
|
||||
|
||||
|
||||
@node Group Styles
|
||||
@subsection Group Styles
|
||||
@refactor{
|
||||
Some group styles still use jQuery;
|
||||
they should be modified to use modern formatters and Liza DOM
|
||||
abstractions (see @srcrefraw{src/ui/field}
|
||||
and @srcrefraw{src/ui/styler}).}
|
||||
|
||||
@cindex Group, Styling
|
||||
Groups support a number of @dfn{group styles} that determine how
|
||||
they are delimited from other groups;
|
||||
how the elements they contain are rendered and laid out;
|
||||
and how multiple indexes are displayed, added, and removed.
|
||||
A list of available styles is detailed in @ref{t:group-styles}.
|
||||
|
||||
@float Table, t:group-styles
|
||||
@multitable @columnfractions 0.15 0.65 0.10 0.10
|
||||
@headitem Name @tab Description @tab Multi-Index? @tab Add/Remove Index?
|
||||
|
||||
@item @samp{default}
|
||||
@tab
|
||||
Groups are unstyled by default@mdash{
|
||||
}they render elements as flat fields like a traditional form.
|
||||
Only the first index of elements is rendered.
|
||||
@tab@center N
|
||||
@tab@center N
|
||||
|
||||
@item @samp{collapsetable}
|
||||
@tab
|
||||
Renders element label in the leftmost column like @samp{sidetable}.
|
||||
Indexes are groups of rows delimited by headings,
|
||||
which collapse the respective group of rows when clicked.
|
||||
@tab@center Y
|
||||
@tab@center Add
|
||||
|
||||
@item @samp{sidetable}
|
||||
@tab
|
||||
Renders elements as rows with label in the leftmost column rather
|
||||
than the top row.
|
||||
Each index is rendered as a column.
|
||||
@tab@center Y
|
||||
@tab@center Add
|
||||
|
||||
@item @samp{tabbedblock}
|
||||
@tab
|
||||
Each group is rendered as a block,
|
||||
with each index rendered as a tab to the right of it.
|
||||
Clicking a tab toggles the body content to the associated index.
|
||||
Elements are rendered within the box.
|
||||
@tab@center Y
|
||||
@tab@center N
|
||||
|
||||
@item @samp{tabbed}
|
||||
@tab
|
||||
Like @samp{default},
|
||||
but each index has a tab at the top of the group.
|
||||
Clicking a tab toggles the body content to the associated index.
|
||||
@tab@center Y
|
||||
@tab@center Y
|
||||
|
||||
@item @samp{table}
|
||||
@tab
|
||||
A vanilla table with elements as columns,
|
||||
their labels across the top row.
|
||||
Each index is rendered in its own row.
|
||||
@tab@center Y
|
||||
@tab@center Y
|
||||
@end multitable
|
||||
@caption{Group styles and index support}
|
||||
@end float
|
||||
|
||||
|
||||
|
||||
@node Program XML
|
||||
@section Program XML
|
||||
@helpwanted
|
||||
|
||||
@menu
|
||||
* Specifying Predicates::
|
||||
@end menu
|
||||
|
||||
|
||||
@node Specifying Predicates
|
||||
@subsection Specifying Predicates
|
||||
|
||||
Object predicates (@pxref{Predicate System}) are specified using the
|
||||
@xmlattr{when} attribute of certain nodes.
|
||||
It must contain a string of references understood by the system
|
||||
(see domain of discourse, @ref{Predicate System}),
|
||||
all of which must match for the predicate to be true.
|
||||
|
||||
@float Figure, f:pred-when
|
||||
@example
|
||||
<question id="describe" type="noyes"
|
||||
label="Any special notes for this location?" />
|
||||
|
||||
<question id="vacant_desc" type="textarea"
|
||||
when="q:describe vacant property"
|
||||
label="Show only when a vacant property with the
|
||||
question 'describe' non-empty and non-zero" />
|
||||
@end example
|
||||
@caption{Using the @xmlattr{when} attribute}
|
||||
@end float
|
||||
|
||||
In @ref{f:pred-when} above,
|
||||
question @samp{vacant_desc} will be applicable when @emph{all} of
|
||||
the values of @samp{vacant}, @samp{property},
|
||||
and@tie{}@samp{q:describe} are true.@footnote{
|
||||
@xref{Predicate System} for what ``true'' means for a particular
|
||||
variable in the domain of discourse.}
|
||||
Within the context of the @progxml,
|
||||
this concretely means that the classifications
|
||||
@samp{vacant} and@tie{}@samp{property} are true,
|
||||
and that the question @samp{describe} is answered ``yes''.
|
||||
It reads as a sentence:
|
||||
``@samp{vacant_desc}'' is applicable when we should @tie{}``describe
|
||||
a vacant property''.
|
||||
|
||||
|
||||
|
||||
@node Validation
|
||||
@section Validation
|
||||
@helpwanted
|
||||
|
||||
@menu
|
||||
* Formatting Values::
|
||||
@end menu
|
||||
|
||||
|
||||
@node Formatting Values
|
||||
@subsection Formatting Values
|
||||
|
||||
@cindex Question
|
||||
@cindex Question, Value Formatting
|
||||
@helpwanted
|
||||
More information about each can be found in their respective chapter/section.
|
||||
|
|
|
@ -76,6 +76,14 @@ Free Documentation License''.
|
|||
|
||||
@menu
|
||||
* Design and Architecture:Design. Design and structure of framework
|
||||
* Assertions::
|
||||
* Bucket::
|
||||
* Client::
|
||||
* Data API::
|
||||
* Predicate System::
|
||||
* Program::
|
||||
* Server::
|
||||
* Validation::
|
||||
* License:: Document License
|
||||
* Concept Index::
|
||||
@ifset DEVNOTES
|
||||
|
@ -89,6 +97,14 @@ Free Documentation License''.
|
|||
@end ifnottex
|
||||
|
||||
@include design.texi
|
||||
@include assert.texi
|
||||
@include bucket.texi
|
||||
@include client.texi
|
||||
@include dapi.texi
|
||||
@include pred.texi
|
||||
@include program.texi
|
||||
@include server.texi
|
||||
@include validation.texi
|
||||
@include license.texi
|
||||
|
||||
@node Concept Index
|
||||
|
|
|
@ -66,12 +66,17 @@
|
|||
@end ifset
|
||||
@end macro
|
||||
|
||||
@c documentation TODO
|
||||
@macro todo{text}
|
||||
@devnotice{TODO: \text\}
|
||||
@end macro
|
||||
|
||||
|
||||
@c indicate that help is needed to produce docs
|
||||
@macro helpwanted{}
|
||||
@cindex TODO, Missing Docs
|
||||
@dnindex Missing Docs
|
||||
@notice{There's nothing here yet. Maybe you can help?}
|
||||
@notice{There isn't much here yet. Maybe you can help?}
|
||||
@end macro
|
||||
|
||||
|
||||
|
@ -140,11 +145,15 @@ This system has maintenance concerns.
|
|||
@c the path to the file
|
||||
@ifset SRCURI
|
||||
@macro srcref{path, display}
|
||||
@url{@value{SRCURI}/\path\, @code{\display\}}
|
||||
@url{@value{SRCURI}/\path\, @file{\display\}}
|
||||
@end macro
|
||||
|
||||
@macro srcrefraw{path}
|
||||
@url{@value{SRCURI}/\path\, @code{\path\}}
|
||||
@url{@value{SRCURI}/\path\, @file{\path\}}
|
||||
@end macro
|
||||
|
||||
@macro srcrefjs{base,module}
|
||||
@srcref{src/\base\/\module\.js, \module\}
|
||||
@end macro
|
||||
@end ifset
|
||||
|
||||
|
@ -156,6 +165,12 @@ This system has maintenance concerns.
|
|||
@macro srcrefraw{path}
|
||||
@file{\path\}
|
||||
@end macro
|
||||
|
||||
@c intended to display a name without JS,
|
||||
@c so just do that rather than the actual path
|
||||
@macro srcrefjs{base,path}
|
||||
@srcrefraw{\base\/\path\}
|
||||
@end macro
|
||||
@end ifclear
|
||||
|
||||
|
||||
|
@ -169,7 +184,38 @@ This system has maintenance concerns.
|
|||
@end macro
|
||||
|
||||
|
||||
@c JS formatting
|
||||
@macro jsmethod{name}
|
||||
@code{#\name\}
|
||||
@end macro
|
||||
|
||||
|
||||
@c text to avoid repeated e.g. ties and other formatting
|
||||
@macro progxml
|
||||
Program@tie{}XML
|
||||
@end macro
|
||||
|
||||
@macro progxmlref
|
||||
@ref{Program XML,,Program@tie{}XML}
|
||||
@end macro
|
||||
|
||||
@macro dapi
|
||||
Data@tie{}API
|
||||
@end macro
|
||||
|
||||
@macro dapiref
|
||||
@dapi{} (@pxref{Data API,,Data@tie{}API})
|
||||
@end macro
|
||||
|
||||
@c todo: link to reference directly
|
||||
@macro proguicref{ref}
|
||||
`\ref\' @proguicrefsuffix
|
||||
@end macro
|
||||
|
||||
@macro proguicxref{ref}
|
||||
See `\ref\' @proguicrefsuffix
|
||||
@end macro
|
||||
|
||||
@macro proguicrefsuffix{}
|
||||
in the Liza Program@tie{}UI Compiler manual
|
||||
@end macro
|
||||
|
|
|
@ -0,0 +1,74 @@
|
|||
@c This document is part of the Liza Data Collection Framework manual.
|
||||
@c Copyright (C) 2017 R-T Specialty, LLC.
|
||||
@c
|
||||
@c Permission is granted to copy, distribute and/or modify this document
|
||||
@c under the terms of the GNU Free Documentation License, Version 1.3
|
||||
@c or any later version published by the Free Software Foundation;
|
||||
@c with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
|
||||
@c Texts. A copy of the license is included in the section entitled ``GNU
|
||||
@c Free Documentation License''.
|
||||
|
||||
@node Predicate System
|
||||
@chapter Predicate System
|
||||
@maintstart
|
||||
@footnote{
|
||||
New programs (using the old incarnation of TAME) use the classifier
|
||||
embedded into the rater by TAME.
|
||||
Old ones, however, still use the @dfn{Global Classifier}.
|
||||
This system isn't as well tested as TAME's@mdash{
|
||||
}which needs to work properly for the sake of calculating premium@mdash{
|
||||
}and has suffered from a number of bugs in the past.
|
||||
|
||||
The solution is to migrate all programs to TAME and remove that old
|
||||
code.}
|
||||
@maintend
|
||||
|
||||
@tip{
|
||||
For a practical application of these concepts, see its use in the
|
||||
Program@tie{}XML (@pxref{Specifying Predicates}).}
|
||||
|
||||
@cindex Predicate
|
||||
@cindex Classifier
|
||||
@cindex Applicability
|
||||
@cindex Domain of discourse, Predicate
|
||||
The @dfn{predicate system} determines the @dfn{applicability} of
|
||||
certain objects (like questions and assertions) by associating them
|
||||
with predicates.
|
||||
The domain of discourse (variables which may be quantified) is listed
|
||||
in @ref{t:predicate-dod}.
|
||||
|
||||
What it means for some object to be applicable depends on the context.
|
||||
|
||||
@float Table, t:predicate-dod
|
||||
@multitable @columnfractions 0.25 0.10 0.65
|
||||
@headitem Type @tab Prefix @tab Description
|
||||
|
||||
@item Classifications
|
||||
@tab @emph{None}
|
||||
@tab Results of applying an external @dfn{classifier} to the bucket
|
||||
(@pxref{Bucket}).
|
||||
|
||||
@item Bucket Truth Predicate
|
||||
@tab @code{q:}
|
||||
@tab
|
||||
Whether the given name in the bucket (@pxref{Bucket}) is
|
||||
non-empty and non-zero.
|
||||
The prefix @samp{q:} refers to its most common use
|
||||
case---questions (@pxref{Program UI,,Program@tie{}UI}).
|
||||
@end multitable
|
||||
@caption{Predicate system domain of discourse}
|
||||
@end float
|
||||
|
||||
This system is limited to universal quantification over the domain of
|
||||
discourse.
|
||||
For other quantifiers and higher-order logic,
|
||||
defer to one of the systems that contributes to the domain of
|
||||
discourse,
|
||||
like the classifier.@footnote{
|
||||
This is usually TAME.
|
||||
The Program XML also supports inline classifications
|
||||
with TAME's syntax (@pxref{Specifying Predicates}).}
|
||||
|
||||
Predicates are usually specified in the Program XML
|
||||
(@pxref{Specifying Predicates}) and compiled into the program
|
||||
(@pxref{Program}).
|
|
@ -0,0 +1,282 @@
|
|||
@c This document is part of the Liza Data Collection Framework manual.
|
||||
@c Copyright (C) 2017 R-T Specialty, LLC.
|
||||
@c
|
||||
@c Permission is granted to copy, distribute and/or modify this document
|
||||
@c under the terms of the GNU Free Documentation License, Version 1.3
|
||||
@c or any later version published by the Free Software Foundation;
|
||||
@c with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
|
||||
@c Texts. A copy of the license is included in the section entitled ``GNU
|
||||
@c Free Documentation License''.
|
||||
|
||||
|
||||
@node Program
|
||||
@chapter Program
|
||||
@maintstart
|
||||
@footnote{
|
||||
The @code{Program} class was one of the first prototypes created,
|
||||
and has evolved poorly with the rest of the system.
|
||||
It is the base class for all compiled programs,
|
||||
and it glues together too many other systems with a terrible
|
||||
API and little to no encapsulation.
|
||||
|
||||
With that said, it is one of the least touched classes (thus its
|
||||
state); developers rarely have the need to touch @code{Program}.}
|
||||
@maintend
|
||||
|
||||
|
||||
@cindex Program
|
||||
The @dfn{Program} is a declarative representation of an entire system.
|
||||
It is the highest level of abstraction from a data perspective.
|
||||
The user observes and interacts with a Program using the
|
||||
@ref{Program UI,,Program@tie{}UI}.
|
||||
|
||||
@cindex Program, XML
|
||||
Programs contain a lot of metadata that is not in a convenient
|
||||
human-readable (or modifiable) format,
|
||||
some of which are redundant.
|
||||
Programs are ideally compiled from a @ref{Program XML,,Program@tie{}XML}
|
||||
document.
|
||||
|
||||
@menu
|
||||
* Program UI::
|
||||
* Program XML::
|
||||
* Document Metadata:: Document-level data that cannot be modified by
|
||||
the client.
|
||||
@end menu
|
||||
|
||||
|
||||
@node Program UI
|
||||
@section Program UI
|
||||
@maintenance{
|
||||
The @code{Ui} class,
|
||||
in addition to @srcref{src/client/Client,Client} (@pxref{Client}),
|
||||
represent the two monoliths of the system.
|
||||
This mediates all UI-related tasks,
|
||||
and still has far too many concerns with far too many
|
||||
dependencies.
|
||||
Code is to be extracted out of this class as it is touched.
|
||||
}
|
||||
|
||||
|
||||
@cindex Program, User Interface
|
||||
@cindex User Interface, Program
|
||||
The @dfn{Program UI} renders a @ref{Program} as a form.
|
||||
|
||||
@cindex Step
|
||||
@cindex Group
|
||||
At the highest level,
|
||||
steps are rendered in a tab-like manner,
|
||||
above the main form content.
|
||||
A step contains groups,
|
||||
which in turn contain elements such as questions.
|
||||
Groups are delimited in some manner defined by their style
|
||||
(@pxref{Group Styles}).
|
||||
|
||||
@cindex Question
|
||||
@cindex Question, Value Formatting
|
||||
@cindex Bucket, Updating
|
||||
Questions are rendered as form fields.
|
||||
Any time the respective @ref{Bucket} field is changed,
|
||||
the form field is updated to reflect those changes,
|
||||
after having first been formatted with the appropriate validator
|
||||
(@pxref{Formatting Values}).
|
||||
When a question is changed by the user,
|
||||
the value is expected to be propagated to the Bucket
|
||||
(@pxref{Bucket Assignment}).
|
||||
|
||||
@cindex Navigation Bar
|
||||
@cindex User Interface, Navigation Bar
|
||||
@cindex User Interface, Button Navigation
|
||||
Navigation between steps can be done via the
|
||||
@dfn{Navigation Bar} above the step@tie{}content,
|
||||
or using ``Go@tie{}Back'' and ``Continue'' buttons at the foot of the
|
||||
step@tie{}content.
|
||||
|
||||
@cindex Sidebar
|
||||
A @dfn{Sidebar} is rendered adjacent to the step content.
|
||||
It displays the name of the Program,
|
||||
as well as configurable metadata (usually through the @samp{sidebar}
|
||||
node of the @ref{Program XML,,Program@tie{}XML}).
|
||||
It also displays question help text (also configured through the XML)
|
||||
and any error messages (@pxref{Error Handling}).
|
||||
|
||||
@menu
|
||||
* Group Styles:: Different ways of displaying groups of questions to
|
||||
the user.
|
||||
@end menu
|
||||
|
||||
|
||||
@node Group Styles
|
||||
@subsection Group Styles
|
||||
@refactor{
|
||||
Some group styles still use jQuery;
|
||||
they should be modified to use modern formatters and Liza DOM
|
||||
abstractions (see @srcrefraw{src/ui/field}
|
||||
and @srcrefraw{src/ui/styler}).}
|
||||
|
||||
@cindex Group, Styling
|
||||
Groups support a number of @dfn{group styles} that determine how
|
||||
they are delimited from other groups;
|
||||
how the elements they contain are rendered and laid out;
|
||||
and how multiple indexes are displayed, added, and removed.
|
||||
A list of available styles is detailed in @ref{t:group-styles}.
|
||||
|
||||
@float Table, t:group-styles
|
||||
@multitable @columnfractions 0.15 0.65 0.10 0.10
|
||||
@headitem Name @tab Description @tab Multi-Index? @tab Add/Remove Index?
|
||||
|
||||
@item @samp{default}
|
||||
@tab
|
||||
Groups are unstyled by default@mdash{
|
||||
}they render elements as flat fields like a traditional form.
|
||||
Only the first index of elements is rendered.
|
||||
@tab@center N
|
||||
@tab@center N
|
||||
|
||||
@item @samp{collapsetable}
|
||||
@tab
|
||||
Renders element label in the leftmost column like @samp{sidetable}.
|
||||
Indexes are groups of rows delimited by headings,
|
||||
which collapse the respective group of rows when clicked.
|
||||
@tab@center Y
|
||||
@tab@center Add
|
||||
|
||||
@item @samp{sidetable}
|
||||
@tab
|
||||
Renders elements as rows with label in the leftmost column rather
|
||||
than the top row.
|
||||
Each index is rendered as a column.
|
||||
@tab@center Y
|
||||
@tab@center Add
|
||||
|
||||
@item @samp{tabbedblock}
|
||||
@tab
|
||||
Each group is rendered as a block,
|
||||
with each index rendered as a tab to the right of it.
|
||||
Clicking a tab toggles the body content to the associated index.
|
||||
Elements are rendered within the box.
|
||||
@tab@center Y
|
||||
@tab@center N
|
||||
|
||||
@item @samp{tabbed}
|
||||
@tab
|
||||
Like @samp{default},
|
||||
but each index has a tab at the top of the group.
|
||||
Clicking a tab toggles the body content to the associated index.
|
||||
@tab@center Y
|
||||
@tab@center Y
|
||||
|
||||
@item @samp{table}
|
||||
@tab
|
||||
A vanilla table with elements as columns,
|
||||
their labels across the top row.
|
||||
Each index is rendered in its own row.
|
||||
@tab@center Y
|
||||
@tab@center Y
|
||||
@end multitable
|
||||
@caption{Group styles and index support}
|
||||
@end float
|
||||
|
||||
|
||||
|
||||
@node Program XML
|
||||
@section Program XML
|
||||
@helpwanted
|
||||
|
||||
@menu
|
||||
* Specifying Predicates::
|
||||
@end menu
|
||||
|
||||
|
||||
@node Specifying Predicates
|
||||
@subsection Specifying Predicates
|
||||
|
||||
Object predicates (@pxref{Predicate System}) are specified using the
|
||||
@xmlattr{when} attribute of certain nodes.
|
||||
It must contain a string of references understood by the system
|
||||
(see domain of discourse, @ref{Predicate System}),
|
||||
all of which must match for the predicate to be true.
|
||||
|
||||
@float Figure, f:pred-when
|
||||
@example
|
||||
<question id="describe" type="noyes"
|
||||
label="Any special notes for this location?" />
|
||||
|
||||
<question id="vacant_desc" type="textarea"
|
||||
when="q:describe vacant property"
|
||||
label="Show only when a vacant property with the
|
||||
question 'describe' non-empty and non-zero" />
|
||||
@end example
|
||||
@caption{Using the @xmlattr{when} attribute}
|
||||
@end float
|
||||
|
||||
In @ref{f:pred-when} above,
|
||||
question @samp{vacant_desc} will be applicable when @emph{all} of
|
||||
the values of @samp{vacant}, @samp{property},
|
||||
and@tie{}@samp{q:describe} are true.@footnote{
|
||||
@xref{Predicate System} for what ``true'' means for a particular
|
||||
variable in the domain of discourse.}
|
||||
Within the context of the @progxml,
|
||||
this concretely means that the classifications
|
||||
@samp{vacant} and@tie{}@samp{property} are true,
|
||||
and that the question @samp{describe} is answered ``yes''.
|
||||
It reads as a sentence:
|
||||
``@samp{vacant_desc}'' is applicable when we should @tie{}``describe
|
||||
a vacant property''.
|
||||
|
||||
|
||||
|
||||
@node Document Metadata
|
||||
@section Document Metadata
|
||||
@dfn{Document metadata} are metadata that describe certain aspects of the document;
|
||||
they are stored adjacent to the bucket in @samp{meta}@tie{}on the
|
||||
document root.@footnote{
|
||||
Terminology note: ``document'' and ``quote'' are the same thing;
|
||||
the latter is transitioning to the former for generality.}
|
||||
They should be used in place of a bucket field any time
|
||||
the client has no business knowing about the data.
|
||||
The @samp{meta} record is called the @dfn{Metabucket}.
|
||||
|
||||
@c don't use a dapi xref here; don't want to confuse the reader by
|
||||
@c directing them away from this section before they continue reading
|
||||
@tip{Metadata in the Metabucket should@tie{}@emph{not} be
|
||||
directly populated by external systems@mdash{
|
||||
}@dapi integration should be used instead (see below).}
|
||||
|
||||
Metadata can be populated using any@tie{}@dapiref@mdash{
|
||||
}return data populate the Metabucket in the same way that they
|
||||
populate the Bucket.
|
||||
Definitions are stored in @code{meta.fields},
|
||||
as shown in @ref{f:meta-fields}.
|
||||
|
||||
@float Figure, f:meta-fields
|
||||
@example
|
||||
"fields":@{
|
||||
["string(name)": @{
|
||||
"desc": "string",
|
||||
"dapi": @{
|
||||
"name": "string",
|
||||
"map": @{
|
||||
"string(dest field)": "string(source field)"
|
||||
@}
|
||||
@}
|
||||
@}
|
||||
@}
|
||||
@end example
|
||||
@caption{Format of @code{meta.fields}.}
|
||||
@end float
|
||||
|
||||
Further, a key-value mapping of all bucket fields that@mdash{
|
||||
}when modified,
|
||||
need to result in a metadata API@tie{}call@mdash{
|
||||
}are stored in the @code{mapis}@tie{}object;
|
||||
this is shown in @ref{f:mapis}.
|
||||
|
||||
@float Figure, f:mapis
|
||||
@example
|
||||
"mapis":@{
|
||||
["string(field name)"]: [ "string(dapi name)", ... ]
|
||||
@}
|
||||
@end example
|
||||
@caption{Format of @code{mapis}.}
|
||||
@end float
|
|
@ -0,0 +1,268 @@
|
|||
@c This document is part of the Liza Data Collection Framework manual.
|
||||
@c Copyright (C) 2017 R-T Specialty, LLC.
|
||||
@c
|
||||
@c Permission is granted to copy, distribute and/or modify this document
|
||||
@c under the terms of the GNU Free Documentation License, Version 1.3
|
||||
@c or any later version published by the Free Software Foundation;
|
||||
@c with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
|
||||
@c Texts. A copy of the license is included in the section entitled ``GNU
|
||||
@c Free Documentation License''.
|
||||
|
||||
@node Server
|
||||
@chapter Liza Server
|
||||
@maintenance{The @srcrefjs{server/daemon,Daemon} monolith and
|
||||
@srcrefjs{server,Server},
|
||||
among other things,
|
||||
need refactoring.}
|
||||
|
||||
@helpwanted{}
|
||||
|
||||
@cindex Server
|
||||
The @dfn{server}@footnote{
|
||||
@cindex Quote Server
|
||||
Which may also be referenced as ``quote server'' in certain legacy
|
||||
contexts,
|
||||
referring to Liza's origin as an insurance rating system.}
|
||||
is a RESTful service that serves as the HTTP server.
|
||||
It is designed to run under Node.js,
|
||||
motivated by the benefits of sharing code with the@tie{}Client
|
||||
(@pxref{Client}).
|
||||
The daemon is handled by the abstract @srcrefjs{server/daemon,Daemon}
|
||||
monolith,
|
||||
which requires that a concrete @code{#getEncryptionService}
|
||||
method be defined by a subtype or trait.
|
||||
An example script to start the server is shown in @ref{f:server-start}.
|
||||
|
||||
@cindex Encryption Service
|
||||
@tip{For local development,
|
||||
or to avoid use of any encryption service,
|
||||
use @srcrefjs{server/daemon,DevDaemon},
|
||||
which uses a dummy encryption service.}
|
||||
|
||||
@float Figure, f:server-start
|
||||
@example
|
||||
const @{ Daemon @} = require( 'liza' ).server.daemon;
|
||||
const port = 8080;
|
||||
const log_priority = 10;
|
||||
|
||||
Daemon( port, log_priority ).start();
|
||||
@end example
|
||||
@caption{Starting the server daemon}
|
||||
@end float
|
||||
|
||||
@cindex HTTP Server
|
||||
The HTTP server is managed by
|
||||
@srcrefjs{server/daemon,http_server}.
|
||||
|
||||
|
||||
@menu
|
||||
* Requests:: Handling HTTP requests.
|
||||
* Posting Data:: Handling step saves and other posts.
|
||||
* Server-Side Data API Calls:: Accessing external resources on the server.
|
||||
* Encryption Service:: Managing sensitive data.
|
||||
@end menu
|
||||
|
||||
|
||||
|
||||
@node Requests
|
||||
@section HTTP Requests
|
||||
@helpwanted{}
|
||||
|
||||
@cindex Session
|
||||
@cindex PHPSESSID
|
||||
@cindex Memcache
|
||||
Each HTTP request produces a @srcrefjs{server/request,UserRequest}
|
||||
associated with a @srcrefjs{server/request,UserSession}.
|
||||
Sessions are tightly coupled with PHP@footnote{
|
||||
They don't have to be@mdash{}refactoring is needed.};
|
||||
an existing PHP session is expected,
|
||||
as identified by the @samp{PHPSESSID} cookie.
|
||||
Sessions are shared via Memcache
|
||||
(see @srcrefjs{server/cache,ResilientMemcache}).@footnote{
|
||||
Via a @url{https://secure.php.net/manual/en/memcached.sessions.php,memcache session handler}.}
|
||||
If a session is not found (or is invalid),
|
||||
an HTTP@tie{}@code{500} status code is returned and the
|
||||
HTTP@tie{}request is aborted.
|
||||
|
||||
@cindex Timeout
|
||||
@cindex Request timeout
|
||||
Requests are subject to a 120@tie{}second timeout,
|
||||
after which the request will be served an HTTP@tie{}@code{408}
|
||||
status code.
|
||||
Note that this @emph{does not stop background processing}@mdash{
|
||||
}this timeout exists to prevent the user from hanging indefinitely.
|
||||
|
||||
@cindex Long-running requests
|
||||
@tip{If a process intends to perform background processing for any length
|
||||
of time (longer than a few seconds),
|
||||
it should complete the request as quickly as possible and
|
||||
use some other mechanism to report back progress
|
||||
(e.g. polling).}
|
||||
|
||||
The @srcrefjs{server/request,UserRequest} exposes raw request data with
|
||||
minor processing.
|
||||
|
||||
@table @strong
|
||||
@item Path (@jsmethod{getUri})
|
||||
The path component of the URI. The method name is unfortunate.
|
||||
|
||||
@item Query data (@jsmethod{getGetData})
|
||||
Query string processed into a key/value object.
|
||||
Despite the name,
|
||||
this is also populated if non-GET requests contain query strings.
|
||||
|
||||
@item POST data (@jsmethod{getPostData})
|
||||
POST data processed into an object as if it were a query string
|
||||
(just as @jsmethod{getGetData}).
|
||||
Since this requires data that is streamed asynchronously,
|
||||
this method takes a callback that waits for all data to become
|
||||
available;
|
||||
if the data are already available,
|
||||
it is immediately invoked with the processed POST data.
|
||||
|
||||
@item Cookies (@jsmethod{getCookies})
|
||||
Cookies parsed into a key/value object.
|
||||
|
||||
@item Remote address (@jsmethod{getRemoteAddr})
|
||||
IP address of the origin of the request.
|
||||
If the server is behind a proxy that sets the
|
||||
@samp{X-Forwarded-For} header,
|
||||
it is used instead.
|
||||
|
||||
@item Host address (@jsmethod{getHostAddr})
|
||||
Hostname of the server.
|
||||
If the server is behind a proxy that sets the
|
||||
@samp{X-Forwarded-Host} header,
|
||||
it is used instead.
|
||||
|
||||
@item Origin (@jsmethod{getOrigin})
|
||||
Origin of request.
|
||||
Only available if at least one of the @samp{Origin} or
|
||||
@samp{Referer} headers are set.
|
||||
This is useful mainly for determining the protocol and host while
|
||||
behind a proxy.
|
||||
|
||||
@item User agent (@jsmethod{getUserAgent})
|
||||
The user agent string of the request.
|
||||
|
||||
@item Session ID (@jsmethod{getSessionId})
|
||||
The user's unique session id (@samp{PHPSESSID}).
|
||||
|
||||
@item Session ID name (@jsmethod{getSessionIdName})
|
||||
The name of the cookie from which the session ID originated
|
||||
(hard-coded to @samp{PHPSESSID}).
|
||||
@end table
|
||||
|
||||
@todo{Document return format and writing response data.}
|
||||
|
||||
|
||||
|
||||
@node Posting Data
|
||||
@section Posting Data
|
||||
@cindex Post
|
||||
@cindex Bucket diff
|
||||
@cindex Step save
|
||||
A diff of the bucket data (@pxref{Bucket Diff}) is posted to the
|
||||
server on step@tie{}save.
|
||||
This operation is performed asynchronously@mdash{
|
||||
}the client need not wait for the step to save before the next can
|
||||
be requested.
|
||||
|
||||
Since validations are shared between the server and the client
|
||||
(@pxref{Validation}),
|
||||
saving should only fail in exception situations.
|
||||
Should a failure occur,
|
||||
the server will instruct the client to kick the user back to the
|
||||
previous step (@dfn{kickback}).
|
||||
|
||||
A step cannot be saved if it is locked;
|
||||
such attempts will result in an error.
|
||||
|
||||
To prevent a user from skipping steps,
|
||||
the client may post only one step past the last step that has
|
||||
successfully saved;
|
||||
otherwise, the user is kicked back to the last step that was saved.
|
||||
|
||||
Once those basic checks have passed,
|
||||
the document is updated:
|
||||
|
||||
@enumerate
|
||||
@item
|
||||
@cindex Data sanitization
|
||||
The diff is first @dfn{sanitized} to strip out unknown fields,
|
||||
internal fields posted by non-internal users,
|
||||
and to filter fields on permitted characters;
|
||||
|
||||
@item
|
||||
The sanitized diff is then applied to the existing bucket on the
|
||||
document;
|
||||
|
||||
@item
|
||||
@cindex Calculated values, server-side
|
||||
Calculated values marked for storage (@pxref{Calculated Values}) are
|
||||
re-calculated on the server (the values posted by the client have
|
||||
already been discarded by the first step in this list);
|
||||
|
||||
@item
|
||||
Server-side @dapi{} calls (@pxref{Data API}) are triggered using the
|
||||
diff as input data and an empty bucket for response storage
|
||||
(@pxref{Server-Side Data API Calls});
|
||||
|
||||
@item
|
||||
@cindex Premium calculation date
|
||||
The last premium calculation date is cleared (indicating that
|
||||
premiums are no longer valid);@footnote{
|
||||
This concept is tightly coupled with insurance;
|
||||
it should be factored out at some point.}
|
||||
|
||||
@item
|
||||
@cindex Encryption
|
||||
Data marked as sensitive is encrypted and the ciphertext written to
|
||||
the bucket in place of the plaintext (@pxref{Encryption Service});
|
||||
|
||||
@item
|
||||
@cindex Top visited step
|
||||
The current step is incremented and the @dfn{top visited
|
||||
step}@tie{} is set to the larger of the incremented step or the
|
||||
existing top visited step id; and then
|
||||
|
||||
@item
|
||||
The new document state and bucket data are written to the database.
|
||||
@end enumerate
|
||||
|
||||
|
||||
|
||||
@node Server-Side Data API Calls
|
||||
@section Server-Side Data API Calls
|
||||
@maintenance{This makes use of @srcrefjs{server/meta,DapiMetaSource}
|
||||
to encapsulate the horrible API of @srcrefjs{dapi,DataApiManager};
|
||||
the latter needs cleanup to remove the former.}
|
||||
|
||||
@cindex Data API
|
||||
@cindex Document metadata
|
||||
Server-side @dapi{} calls (@pxref{Data API}) are triggered on
|
||||
step save (@pxref{Posting Data}) and are handled much like they are
|
||||
on the client.
|
||||
Such calls are made automatically only for document metadata.
|
||||
Results of server-side calls are @emph{not} written to the bucket
|
||||
and are therefore useful for data that the client should not be
|
||||
permitted to modify;
|
||||
it also allows data to be kept secret from the client.@footnote{
|
||||
All bucket data is served to the client,
|
||||
with the exception of internal fields if the user is non-internal.}
|
||||
|
||||
@dapi{} results on the client can be mapped back to multiple bucket values;
|
||||
the server, however, has serious concerns with how data are
|
||||
propagated for data integrity and security reasons.
|
||||
Further,
|
||||
document metadata can be structured,
|
||||
unlike the Bucket which has a rigid matrix format (@pxref{Bucket}).
|
||||
Therefore,
|
||||
the entire response is mapped into the parent field;
|
||||
defined return values are used only for filtering.
|
||||
|
||||
|
||||
|
||||
@node Encryption Service
|
||||
@section Encryption Service
|
||||
@helpwanted
|
|
@ -0,0 +1,25 @@
|
|||
@c This document is part of the Liza Data Collection Framework manual.
|
||||
@c Copyright (C) 2017 R-T Specialty, LLC.
|
||||
@c
|
||||
@c Permission is granted to copy, distribute and/or modify this document
|
||||
@c under the terms of the GNU Free Documentation License, Version 1.3
|
||||
@c or any later version published by the Free Software Foundation;
|
||||
@c with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
|
||||
@c Texts. A copy of the license is included in the section entitled ``GNU
|
||||
@c Free Documentation License''.
|
||||
|
||||
@node Validation
|
||||
@chapter Validation
|
||||
@helpwanted
|
||||
|
||||
@menu
|
||||
* Formatting Values::
|
||||
@end menu
|
||||
|
||||
|
||||
@node Formatting Values
|
||||
@section Formatting Values
|
||||
|
||||
@cindex Question
|
||||
@cindex Question, Value Formatting
|
||||
@helpwanted
|
|
@ -19,18 +19,17 @@
|
|||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
var Class = require( 'easejs' ).Class,
|
||||
|
||||
HttpDataApi = require( './http/HttpDataApi' ),
|
||||
XhrHttpImpl = require( './http/XhrHttpImpl' ),
|
||||
JsonResponse = require( './format/JsonResponse' ),
|
||||
RestrictedDataApi = require( './RestrictedDataApi' ),
|
||||
StaticAdditionDataApi = require( './StaticAdditionDataApi' ),
|
||||
BucketDataApi = require( './BucketDataApi' );
|
||||
const Class = require( 'easejs' ).Class;
|
||||
const HttpDataApi = require( './http/HttpDataApi' );
|
||||
const XhrHttpImpl = require( './http/XhrHttpImpl' );
|
||||
const JsonResponse = require( './format/JsonResponse' );
|
||||
const RestrictedDataApi = require( './RestrictedDataApi' );
|
||||
const StaticAdditionDataApi = require( './StaticAdditionDataApi' );
|
||||
const BucketDataApi = require( './BucketDataApi' );
|
||||
|
||||
|
||||
/**
|
||||
* Instantiates the appropriate DataApi object for the givne service type
|
||||
* Instantiates the appropriate DataApi object for the given service type
|
||||
*/
|
||||
module.exports = Class( 'DataApiFactory',
|
||||
{
|
||||
|
@ -58,10 +57,12 @@ module.exports = Class( 'DataApiFactory',
|
|||
switch ( type )
|
||||
{
|
||||
case 'rest':
|
||||
const impl = this.createHttpImpl();
|
||||
|
||||
api = HttpDataApi.use( JsonResponse )(
|
||||
source,
|
||||
method.toUpperCase(),
|
||||
XhrHttpImpl( XMLHttpRequest )
|
||||
impl
|
||||
);
|
||||
break;
|
||||
|
||||
|
@ -83,6 +84,12 @@ module.exports = Class( 'DataApiFactory',
|
|||
StaticAdditionDataApi( api, nonempty, multiple, static_data ),
|
||||
desc
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'virtual protected createHttpImpl'()
|
||||
{
|
||||
return XhrHttpImpl( XMLHttpRequest );
|
||||
},
|
||||
} );
|
||||
|
||||
|
|
|
@ -98,9 +98,10 @@ module.exports = Class( 'DataApiManager' )
|
|||
'private _apis': {},
|
||||
|
||||
|
||||
__construct: function( api_factory )
|
||||
__construct: function( api_factory, apis )
|
||||
{
|
||||
this._dataApiFactory = api_factory;
|
||||
this.setApis( apis || {} );
|
||||
},
|
||||
|
||||
|
||||
|
|
|
@ -126,7 +126,7 @@ module.exports = Class( 'RestrictedDataApi' )
|
|||
// fail on unknown params
|
||||
if ( !( this._params[ name ] ) )
|
||||
{
|
||||
throw Error( 'Unkown param: ' + name );
|
||||
throw Error( 'Unknown param: ' + name );
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
/**
|
||||
* Error representing non-200 HTTP status code
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const { Class } = require( 'easejs' );
|
||||
|
||||
|
||||
/**
|
||||
* Represents error in performing HTTP request
|
||||
*/
|
||||
module.exports = Class( 'HttpError' )
|
||||
.extend( Error,
|
||||
{
|
||||
/**
|
||||
* HTTP status code
|
||||
* @type {number}
|
||||
*/
|
||||
'public statuscode': 500,
|
||||
|
||||
|
||||
/**
|
||||
* Set error message and HTTP status code
|
||||
*
|
||||
* The HTTP status code defaults to 500 if not set. No check is
|
||||
* performed to determine whether the given status code is a valid error
|
||||
* code.
|
||||
*
|
||||
* The mesage is _not_ automatically set from the status code.
|
||||
*
|
||||
* @param {string} message error message
|
||||
* @param {number=} statuscode HTTP status code
|
||||
*/
|
||||
__construct( message, statuscode )
|
||||
{
|
||||
this.statuscode = statuscode || 500;
|
||||
},
|
||||
} );
|
|
@ -47,5 +47,9 @@ module.exports = Interface( 'HttpImpl',
|
|||
*
|
||||
* @return {HttpImpl} self
|
||||
*/
|
||||
'public requestData': [ 'url', 'method', 'data', 'callback' ]
|
||||
'public requestData': [ 'url', 'method', 'data', 'callback' ],
|
||||
|
||||
// TODO: temporary to work around class extension bug; see
|
||||
// SpoofedNodeHttpImpl
|
||||
'public setOptions': [],
|
||||
} );
|
||||
|
|
|
@ -0,0 +1,249 @@
|
|||
/**
|
||||
* HTTP over Node.js-compatible API
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const { Class } = require( 'easejs' );
|
||||
const HttpImpl = require( './HttpImpl' );
|
||||
const HttpError = require( './HttpError' );
|
||||
|
||||
|
||||
/**
 * HTTP adapter using Node.js-compatible objects (e.g. its `http` modules)
 */
module.exports = Class( 'NodeHttpImpl' )
    .implement( HttpImpl )
    .extend(
{
    /**
     * Clients for desired protocols (e.g. HTTP(s))
     * @type {Object}
     */
    'private _protoHandlers': {},

    /**
     * URL parser
     * @type {url}
     */
    'private _urlParser': '',

    /**
     * Request origin
     * @type {string}
     */
    'private _origin': '',


    /**
     * Initialize with protocol handlers and URL parser
     *
     * `proto_handlers` must be a key-value mapping of the protocol string
     * to a handler object conforming to Node's http(s) APIs---that is, it
     * should provide a `#request` method.
     *
     * `origin` is prepended to all request URLs.
     *
     * @param {Object} proto_handlers protocol handler key-value map
     * @param {Object} url_parser URL parser
     * @param {string} origin request origin
     */
    constructor( proto_handlers, url_parser, origin )
    {
        this._protoHandlers = proto_handlers;
        this._urlParser = url_parser;

        // normalize to a string; an omitted origin means URLs are used
        // verbatim
        this._origin = ( origin !== undefined ) ? ''+origin : '';
    },


    /**
     * Perform HTTP request
     *
     * If the request is synchronous, it must still return the data via the
     * provided callback. The provided data is expected to be key-value if an
     * object is given, otherwise a string of binary data.
     *
     * Throws synchronously if no handler is registered for the URL's
     * protocol; all other failures are reported through `callback`.
     *
     * @param {string}                   url      destination URL
     * @param {string}                   method   RFC-2616-compliant HTTP method
     * @param {Object|string}            data     request params
     * @param {function(?Error, ?string)} callback server response callback
     *
     * @return {HttpImpl} self
     */
    'public requestData'( url, method, data, callback )
    {
        const options = this._parseUrl( url );
        const protocol = options.protocol.replace( /:$/, '' );
        const handler = this._protoHandlers[ protocol ];

        if ( !handler )
        {
            throw Error( `No handler for ${protocol}` );
        }

        this.setOptions( options, method, data );

        let forbid_end = false;

        const req = handler.request( options, res =>
        {
            // N.B. this local `data' (the response body accumulator)
            // intentionally shadows the request `data' parameter above
            let data = '';

            res.on( 'data', chunk => data += chunk );
            res.on( 'end', () =>
                !forbid_end && this.requestEnd( res, data, callback )
            );
        } );

        req.on( 'error', e =>
        {
            this.serveError( e, null, null, callback );

            // guarantee that the callback will not be invoked a second time
            // if something tries to end the request
            forbid_end = true;
        } );

        // only POSTs carry a request body; for other methods the data were
        // already appended to the query string by `#setOptions'
        if ( method === 'POST' )
        {
            req.write( data );
        }

        req.end();
    },


    /**
     * Parse given URL
     *
     * If the URL begins with a slash, the origin is prepended.
     *
     * @param {string} url URL
     *
     * @return {Object} parsed URL
     */
    'private _parseUrl'( url )
    {
        const origin = ( url[ 0 ] === '/' )
            ? this._origin
            : '';

        return this._urlParser.parse( origin + url );
    },


    /**
     * Set request options
     *
     * POST data are sent as a form-encoded body; data for all other
     * methods are appended to the path as a query string.
     *
     * TODO: public to work around a class extension trait bug; make
     * protected once fixed
     *
     * @param {Object} options request options
     * @param {string} method HTTP method
     * @param {string} data request data
     *
     * @return {undefined}
     */
    'virtual public setOptions'( options, method, data )
    {
        const { headers = {} } = options;

        options.method = method;

        if ( method === 'POST' )
        {
            headers[ 'Content-Type' ] = 'application/x-www-form-urlencoded';

            options.headers = headers;
        }
        else
        {
            // NOTE(review): this assumes the parsed path does not already
            // contain a query string
            if ( data )
            {
                options.path += '?' + data;
            }
        }
    },



    /**
     * Invoked when a request is completed
     *
     * Subtypes may override this method to handle their own request
     * processing before the continuation `callback` is invoked with the
     * final data.
     *
     * To override only error situations, see `#serveError`.
     *
     * @param {Object} res Node http.ServerResponse
     * @param {string} data raw response data
     * @param {function(?Error,?string)} callback completion continuation
     *
     * @return {undefined}
     */
    'virtual protected requestEnd'( res, data, callback )
    {
        if ( !this.isSuccessful( res ) )
        {
            this.serveError(
                HttpError( res.statusMessage, res.statusCode ),
                res,
                data,
                callback
            );

            return;
        }

        callback( null, data );
    },


    /**
     * Predicate to determine whether HTTP request was successful
     *
     * Non-2xx status codes represent failures.
     *
     * @param {Object} res Node http.ServerResponse
     *
     * @return {boolean} whether HTTP status code represents a success
     */
    'virtual protected isSuccessful'( res )
    {
        return ( +res.statusCode >= 200 ) && ( +res.statusCode < 300 );
    },


    /**
     * Invoke continuation `callback` with an error `e`
     *
     * Note that the raw response data (if any) are still passed to the
     * continuation alongside the error.
     *
     * @param {Error} e error
     * @param {Object} res Node http.ServerResponse
     * @param {string} data raw response data
     * @param {function(?Error,?data)} callback continuation
     *
     * @return {undefined}
     */
    'virtual protected serveError'( e, res, data, callback )
    {
        callback( e, data );
    },
} );
|
|
@ -0,0 +1,78 @@
|
|||
/**
|
||||
* Node-based HTTP client with session spoofing
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const { Trait } = require( 'easejs' );
|
||||
const HttpImpl = require( './HttpImpl' );
|
||||
|
||||
|
||||
/**
|
||||
* Spoof user session during request
|
||||
*
|
||||
* TODO: Implementing HttpImpl instead of overriding NodeHttpImpl to work
|
||||
* around a class extension bug; change once fixed.
|
||||
*/
|
||||
module.exports = Trait( 'SpoofedNodeHttpImpl' )
|
||||
.implement( HttpImpl )
|
||||
.extend(
|
||||
{
|
||||
/**
|
||||
* Session to spoof
|
||||
* @type {UserSession}
|
||||
*/
|
||||
'private _request': null,
|
||||
|
||||
|
||||
/**
|
||||
* Use session for spoofing requests
|
||||
*
|
||||
* @param {UserSession} session session to spoof
|
||||
*/
|
||||
__mixin( session )
|
||||
{
|
||||
this._request = session;
|
||||
},
|
||||
|
||||
|
||||
/**
|
||||
* Set request options to spoof session
|
||||
*
|
||||
* @param {Object} options request options
|
||||
* @param {string} method HTTP method
|
||||
* @param {string} data request data
|
||||
*
|
||||
* @return {Object} request headers
|
||||
*/
|
||||
'virtual abstract override public setOptions'( options, method, data )
|
||||
{
|
||||
const cookie = this._request.getSessionIdName() + '=' +
|
||||
this._request.getSessionId();
|
||||
|
||||
options.headers = {
|
||||
'User-Agent': this._request.getUserAgent(),
|
||||
'X-Forwarded-For': this._request.getRemoteAddr(),
|
||||
'Cookie': cookie,
|
||||
};
|
||||
|
||||
return this.__super( options, method, data );
|
||||
}
|
||||
} );
|
|
@ -19,8 +19,11 @@
|
|||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
var Class = require( 'easejs' ).Class,
|
||||
HttpImpl = require( './HttpImpl' );
|
||||
'use strict';
|
||||
|
||||
const Class = require( 'easejs' ).Class;
|
||||
const HttpImpl = require( './HttpImpl' );
|
||||
const HttpError = require( './HttpError' );
|
||||
|
||||
|
||||
/**
|
||||
|
@ -272,9 +275,15 @@ module.exports = Class( 'XhrHttpImpl' )
|
|||
*/
|
||||
'virtual protected serveError': function( req, callback )
|
||||
{
|
||||
var e = Error( req.status + " error from server" );
|
||||
var e = HttpError( req.status + " error from server" );
|
||||
e.status = req.status;
|
||||
|
||||
callback( e, req.responseText );
|
||||
},
|
||||
|
||||
|
||||
    'public setOptions'()
    {
        // TODO: remove (see HttpImpl)
    }
|
||||
} );
|
||||
|
|
|
@ -0,0 +1,73 @@
|
|||
/**
|
||||
* Facade for vanilla document server
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
const { Class } = require( 'easejs' );
|
||||
|
||||
const {
|
||||
bucket: {
|
||||
bucket_filter,
|
||||
QuoteDataBucket,
|
||||
},
|
||||
|
||||
dapi: {
|
||||
DataApiManager,
|
||||
},
|
||||
|
||||
server: {
|
||||
Server,
|
||||
|
||||
meta: {
|
||||
DapiMetaSource,
|
||||
},
|
||||
|
||||
request: {
|
||||
DataProcessor,
|
||||
JsonServerResponse,
|
||||
ServerDataApiFactory,
|
||||
},
|
||||
},
|
||||
} = require( '../..' );
|
||||
|
||||
|
||||
/**
|
||||
* Vanilla document server
|
||||
*/
|
||||
module.exports = Class( 'DocumentServer',
|
||||
{
|
||||
'public create': ( dao, logger, enc_service, origin_url ) => Server(
|
||||
new JsonServerResponse.create(),
|
||||
dao,
|
||||
logger,
|
||||
enc_service,
|
||||
|
||||
DataProcessor(
|
||||
bucket_filter,
|
||||
( apis, request ) => DataApiManager(
|
||||
ServerDataApiFactory(
|
||||
origin_url || request.getOrigin(),
|
||||
request
|
||||
),
|
||||
apis
|
||||
),
|
||||
DapiMetaSource( QuoteDataBucket )
|
||||
)
|
||||
),
|
||||
} );
|
|
@ -48,6 +48,9 @@ const {
|
|||
},
|
||||
|
||||
server: {
|
||||
request: {
|
||||
DataProcessor,
|
||||
},
|
||||
encsvc: {
|
||||
QuoteDataBucketCipher,
|
||||
},
|
||||
|
@ -109,13 +112,27 @@ module.exports = Class( 'Server' )
|
|||
*/
|
||||
'private _cache': null,
|
||||
|
||||
/**
|
||||
* Client-provided data processor
|
||||
* @type {DataProcessor}
|
||||
*/
|
||||
'private _dataProcessor': null,
|
||||
|
||||
'public __construct': function( response, dao, logger, encsvc )
|
||||
|
||||
'public __construct': function(
|
||||
response, dao, logger, encsvc, data_processor
|
||||
)
|
||||
{
|
||||
if ( !Class.isA( DataProcessor, data_processor ) )
|
||||
{
|
||||
throw TypeError( "Expected DataProcessor" );
|
||||
}
|
||||
|
||||
this.response = response;
|
||||
this.dao = dao;
|
||||
this.logger = logger;
|
||||
this._encService = encsvc;
|
||||
this._dataProcessor = data_processor;
|
||||
},
|
||||
|
||||
|
||||
|
@ -1115,14 +1132,19 @@ module.exports = Class( 'Server' )
|
|||
{
|
||||
try
|
||||
{
|
||||
var filtered = server._sanitizeBucketData(
|
||||
post_data.data, request, program
|
||||
var parsed_data = JSON.parse( post_data.data );
|
||||
var bucket = quote.getBucket();
|
||||
|
||||
const { filtered, dapis } = server._dataProcessor.processDiff(
|
||||
parsed_data, request, program, bucket
|
||||
);
|
||||
|
||||
quote.setData( filtered );
|
||||
|
||||
server._monitorMetadataPromise( quote, dapis );
|
||||
|
||||
// calculated values (store only)
|
||||
program.initQuote( quote.getBucket(), true );
|
||||
program.initQuote( bucket, true );
|
||||
}
|
||||
catch ( err )
|
||||
{
|
||||
|
@ -1150,33 +1172,27 @@ module.exports = Class( 'Server' )
|
|||
},
|
||||
|
||||
|
||||
/**
|
||||
* Sanitize the given bucket data
|
||||
*
|
||||
* Ensures that we are storing only "correct" data within our database. This
|
||||
* also strips any unknown bucket values, preventing users from using us as
|
||||
* their own personal database.
|
||||
*/
|
||||
'private _sanitizeBucketData': function(
|
||||
bucket_data, request, program, permit_null
|
||||
'private _monitorMetadataPromise'( quote, dapis )
|
||||
{
|
||||
dapis.map( promise => promise
|
||||
.then( ( { field, index, data } ) =>
|
||||
this.dao.saveQuoteMeta(
|
||||
quote,
|
||||
data,
|
||||
null,
|
||||
e => { throw e; }
|
||||
)
|
||||
{
|
||||
var data = JSON.parse( bucket_data ),
|
||||
types = program.meta.qtypes,
|
||||
ignore = {};
|
||||
|
||||
// if we're not internal, filter out the internal questions
|
||||
// (so they can't post to them)
|
||||
if ( request.getSession().isInternal() === false )
|
||||
{
|
||||
for ( id in program.internal )
|
||||
{
|
||||
ignore[ id ] = true;
|
||||
}
|
||||
}
|
||||
|
||||
// return the filtered data
|
||||
return bucket_filter.filter( data, types, ignore, permit_null );
|
||||
)
|
||||
.catch( e =>
|
||||
server.logger.log(
|
||||
server.logger.PRIORITY_ERROR,
|
||||
"Failed to save field %s[%s] metadata: %s",
|
||||
field,
|
||||
index,
|
||||
e.message
|
||||
)
|
||||
)
|
||||
);
|
||||
},
|
||||
|
||||
|
||||
|
@ -1619,8 +1635,10 @@ module.exports = Class( 'Server' )
|
|||
// sanitize, permitting nulls (since the diff will have them)
|
||||
try
|
||||
{
|
||||
var filtered = _self._sanitizeBucketData(
|
||||
post_data.data, request, program, true
|
||||
var data = JSON.parse( post_data.data );
|
||||
|
||||
var filtered = _self._dataProcessor.sanitizeDiff(
|
||||
data, request, program, true
|
||||
);
|
||||
}
|
||||
catch ( e )
|
||||
|
|
|
@ -42,8 +42,13 @@ const {
|
|||
QuoteDataBucket,
|
||||
},
|
||||
|
||||
dapi: {
|
||||
DataApiFactory,
|
||||
DataApiManager,
|
||||
},
|
||||
|
||||
server: {
|
||||
Server,
|
||||
DocumentServer,
|
||||
|
||||
db: {
|
||||
MongoServerDao,
|
||||
|
@ -70,7 +75,6 @@ const {
|
|||
|
||||
request: {
|
||||
CapturedUserResponse,
|
||||
JsonServerResponse,
|
||||
SessionSpoofHttpClient,
|
||||
UserResponse,
|
||||
},
|
||||
|
@ -104,14 +108,8 @@ exports.init = function( logger, enc_service )
|
|||
{ native_parser: false, safe: false }
|
||||
);
|
||||
|
||||
var dao = MongoServerDao( db );
|
||||
|
||||
server = Server(
|
||||
new JsonServerResponse.create(),
|
||||
dao,
|
||||
logger,
|
||||
enc_service
|
||||
);
|
||||
const dao = MongoServerDao( db );
|
||||
server = _createDocumentServer( dao, logger, enc_service );
|
||||
|
||||
server_cache = _createCache( server );
|
||||
server.init( server_cache, exports.rater );
|
||||
|
@ -150,6 +148,25 @@ exports.init = function( logger, enc_service )
|
|||
}
|
||||
|
||||
|
||||
/**
 * Construct the document server, deriving the request origin from the
 * environment
 *
 * A missing HTTP_ORIGIN_URL is tolerated (so that unconfigured local
 * development still works) but logged loudly, since falling back to the
 * origin of incoming requests is attacker-controllable.
 *
 * @param {MongoServerDao}    dao         document storage
 * @param {Logger}            logger      log sink
 * @param {EncryptionService} enc_service encryption service
 *
 * @return {Server} configured document server
 */
function _createDocumentServer( dao, logger, enc_service )
{
    const origin = process.env.HTTP_ORIGIN_URL || '';

    if ( origin === '' )
    {
        logger.log( logger.PRIORITY_IMPORTANT,
            "*** HTTP_ORIGIN_URL environment variable not set; " +
            "system will fall back to using the origin of HTTP requests, " +
            "meaning an attacker can control where server-side requests go! ***"
        );
    }

    return DocumentServer().create( dao, logger, enc_service, origin );
}
|
||||
|
||||
|
||||
function _initExportService( db, callback )
|
||||
{
|
||||
db.collection( 'quotes', function( err, collection )
|
||||
|
|
|
@ -521,6 +521,38 @@ module.exports = Class( 'MongoServerDao' )
|
|||
},
|
||||
|
||||
|
||||
/**
|
||||
* Save document metadata (meta field on document)
|
||||
*
|
||||
* Only the provided indexes will be modified (that is---data will be
|
||||
* merged with what is already in the database).
|
||||
*
|
||||
* @param {Quote} quote destination quote
|
||||
* @param {Object} new_meta bucket-formatted data to write
|
||||
* @param {Function} success callback on success
|
||||
* @param {Function} failure callback on error
|
||||
*
|
||||
* @return {undefined}
|
||||
*/
|
||||
'public saveQuoteMeta'( quote, new_meta, success, failure )
|
||||
{
|
||||
const update = {};
|
||||
|
||||
for ( var key in new_meta )
|
||||
{
|
||||
var meta = new_meta[ key ];
|
||||
|
||||
for ( var i in meta )
|
||||
{
|
||||
update[ 'meta.' + key + '.' + i ] =
|
||||
new_meta[ key ][ i ];
|
||||
}
|
||||
}
|
||||
|
||||
this.mergeData( quote, update, success, failure );
|
||||
},
|
||||
|
||||
|
||||
/**
|
||||
* Saves the quote lock state to the database
|
||||
*
|
||||
|
|
|
@ -0,0 +1,116 @@
|
|||
/**
|
||||
* Data-API-based metadata population
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const { Class } = require( 'easejs' );
|
||||
|
||||
|
||||
/**
|
||||
* Retrieve data for meta field using Data API
|
||||
*
|
||||
* TODO: The reason this class exists at all is to encapsulate the horrid
|
||||
* API. Once refactored, perhaps this class will no longer be necessary.
|
||||
*/
|
||||
module.exports = Class( 'DapiMetaSource',
|
||||
{
|
||||
/**
|
||||
* Metabucket constructor
|
||||
* @type {function()}
|
||||
*/
|
||||
'private _bucketf': null,
|
||||
|
||||
|
||||
/**
|
||||
* Initialize with metabucket constructor
|
||||
* @type {function()}
|
||||
*/
|
||||
constructor( bucketf )
|
||||
{
|
||||
this._bucketf = bucketf;
|
||||
},
|
||||
|
||||
|
||||
/**
|
||||
* Retrieve field data
|
||||
*
|
||||
* @param {string} field field name
|
||||
* @param {number} index field index
|
||||
* @param {DataApiManager} dapi_manager manager for dapi calls
|
||||
* @param {Object} dapi dapi descriptor
|
||||
* @param {Object} data dapi input data
|
||||
*
|
||||
* @return {Promise} object containing `field`, `index`, and return data
|
||||
*/
|
||||
'public getFieldData'( field, index, dapi_manager, dapi, data )
|
||||
{
|
||||
const metabucket = this._bucketf();
|
||||
|
||||
return new Promise( ( resolve, reject ) =>
|
||||
{
|
||||
dapi_manager.getApiData(
|
||||
dapi.name,
|
||||
data,
|
||||
( err, api_data ) =>
|
||||
{
|
||||
if ( api_data.length > 1 )
|
||||
{
|
||||
reject( Error(
|
||||
"Data API request produced more than one result"
|
||||
) );
|
||||
}
|
||||
|
||||
dapi_manager.setFieldData(
|
||||
dapi.name,
|
||||
index,
|
||||
api_data,
|
||||
dapi.value,
|
||||
'',
|
||||
false
|
||||
);
|
||||
|
||||
dapi_manager.expandFieldData(
|
||||
dapi.name,
|
||||
index,
|
||||
metabucket,
|
||||
dapi.mapdest,
|
||||
true,
|
||||
{
|
||||
[dapi.name]: {
|
||||
[index]: api_data[ 0 ][ dapi.value ],
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
resolve( {
|
||||
field: field,
|
||||
index: index,
|
||||
data: metabucket.getData(),
|
||||
} );
|
||||
},
|
||||
field,
|
||||
index,
|
||||
{},
|
||||
reject
|
||||
);
|
||||
} );
|
||||
},
|
||||
} );
|
|
@ -0,0 +1,260 @@
|
|||
/**
|
||||
* Manages DataAPI requests and return data
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const { Class } = require( 'easejs' );
|
||||
|
||||
const { QuoteDataBucket } = require( '../../' ).bucket;
|
||||
|
||||
|
||||
/**
 * Process data provided by the client
 *
 * TODO: This contains Data API and bucket merging logic that is better done
 * elsewhere.
 */
module.exports = Class( 'DataProcessor',
{
    /**
     * Bucket filter
     * @type {Object}
     */
    'private _filter': null,

    /**
     * Construct Data API manager
     * @type {function()}
     */
    'private _dapif': null,

    /**
     * Metadata source
     * @type {DapiMetaSource}
     */
    'private _metaSource': null,


    /**
     * Initialize processor
     *
     * @type {Object} filter bucket filter
     * @type {function()} dapif data API constructor
     * @type {DapiMetaSource} meta_source metadata source
     */
    constructor( filter, dapif, meta_source )
    {
        this._filter = filter;
        this._dapif = dapif;
        this._metaSource = meta_source;
    },


    /**
     * Process client-provided data diff
     *
     * This performs sanitization to ensure that we are storing only
     * "correct" data within our database. This also strips any unknown
     * bucket values, preventing users from using us as their own personal
     * database.
     *
     * @param {Object}      data    bucket diff data
     * @param {UserRequest} request submitting request
     * @param {Program}     program active program
     * @param {Bucket}      bucket  active bucket (dapi input source)
     *
     * @return {Object} processed diff (`filtered`) and dapi promises (`dapis`)
     */
    'public processDiff'( data, request, program, bucket )
    {
        const filtered = this.sanitizeDiff( data, request, program, false );
        const dapi_manager = this._dapif( program.apis, request );

        // array of promises for any dapi requests
        const dapis = this._triggerDapis(
            dapi_manager, program, data, bucket
        );

        return {
            filtered: filtered,
            dapis: dapis,
        };
    },


    /**
     * Sanitize client-provided data
     *
     * Internal fields will be stripped if the session is not
     * internal. Following that, the filter provided via the ctor will be
     * applied.
     *
     * `permit_null` should be used only in the case of bucket diffs, which
     * contain nulls as terminators.
     *
     * @param {Object}      data        client-provided data
     * @param {UserRequest} request     client request
     * @param {Program}     program     active program
     * @param {boolean}     permit_null whether null values should be retained
     *
     * @return {Object} filtered data
     */
    'public sanitizeDiff'( data, request, program, permit_null )
    {
        permit_null = ( permit_null === undefined ) ? false : permit_null;

        // note that `data' is mutated in place by the strip below
        if ( !request.getSession().isInternal() )
        {
            this._cleanInternals( data, program );
        }

        const types = program.meta.qtypes;
        return this._filter.filter( data, types, {}, permit_null );
    },


    /**
     * Strip internal fields from diff `data`
     *
     * Internal fields are defined by the program `program`.
     *
     * @param {Object}  data    bucket diff data
     * @param {Program} program active program
     *
     * @return {undefined}
     */
    'private _cleanInternals'( data, program )
    {
        for ( let id in program.internal )
        {
            delete data[ id ];
        }
    },


    /**
     * Trigger metadata Data API requests
     *
     * @param {DataApiManager} dapi_manager dapi manager
     * @param {Program}        program      active program
     * @param {Object}         data         client-provided data
     * @param {Bucket}         bucket       active bucket
     *
     * @return {Array} flattened array of dapi promises (one per field index)
     */
    'private _triggerDapis'( dapi_manager, program, data, bucket )
    {
        const {
            mapis = {},
            meta: {
                fields = {},
            },
        } = program;

        const dapi_fields = this._determineDapiFields( mapis, data );

        return Object.keys( dapi_fields ).map( field =>
        {
            const { dapi } = fields[ field ];
            const indexes = dapi_fields[ field ];

            // indexes is sparse; #map visits only the changed indexes
            return indexes.map( i =>
                this._metaSource.getFieldData(
                    field,
                    i,
                    dapi_manager,
                    dapi,
                    this._mapDapiData( dapi, bucket, i, data )
                )
            );
        } ).reduce( ( result, x ) => result.concat( x ), [] );
    },


    /**
     * Determine which fields require a Data API to be triggered
     *
     * @param {Object} mapis metadata dapi descriptors
     * @param {Object} data  client-provided data
     *
     * @return {Object} fields with indexes in need of dapi calls
     */
    'private _determineDapiFields'( mapis, data )
    {
        return Object.keys( mapis ).reduce(
            ( result, src_field ) =>
            {
                if ( data[ src_field ] === undefined )
                {
                    return result;
                }

                const fields = mapis[ src_field ];

                // get each index that changed
                fields.forEach( field =>
                {
                    result[ field ] = result[ field ] || [];

                    // note: `i' is a string key from Object.keys; the
                    // result is a sparse array of index keys
                    Object.keys( data[ src_field ] ).forEach( i =>
                    {
                        if ( data[ src_field ][ i ] !== undefined )
                        {
                            result[ field ][ i ] = i;
                        }
                    } );
                } );

                return result;
            },
            {}
        );
    },


    /**
     * Map data from bucket to dapi inputs
     *
     * Diff data take precedence over the current bucket contents, since
     * they represent the most recent (unsaved) state.
     *
     * @param {Object} dapi      Data API descriptor
     * @param {Bucket} bucket    active (source) bucket
     * @param {number} index     field index
     * @param {Object} diff_data client-provided data
     *
     * @return {Object} key/value dapi input data
     */
    'private _mapDapiData'( dapi, bucket, index, diff_data )
    {
        const { mapsrc } = dapi;

        return Object.keys( mapsrc ).reduce(
            ( result, srcid ) =>
            {
                const bucketid = mapsrc[ srcid ];

                const bdata = ( diff_data[ bucketid ] || [] )[ index ] ||
                    ( bucket.getDataByName( bucketid ) || [] )[ index ];

                result[ srcid ] = bdata || [];
                return result;
            },
            {}
        );
    },
} );
|
|
@ -0,0 +1,69 @@
|
|||
/**
|
||||
* Instantiate appropriate DataApi
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
const { Class } = require( 'easejs' );
|
||||
const {
|
||||
DataApiFactory,
|
||||
http: {
|
||||
NodeHttpImpl,
|
||||
SpoofedNodeHttpImpl,
|
||||
},
|
||||
} = require( '../..' ).dapi;
|
||||
|
||||
|
||||
/**
 * Instantiates the appropriate DataApi object for the given service type
 */
module.exports = Class( 'ServerDataApiFactory' )
    .extend( DataApiFactory,
{
    /**
     * Origin URL
     * @type {string}
     */
    'private _origin': '',

    /**
     * Request on behalf of user session
     * @type {UserSession}
     */
    'private _session': null,


    /**
     * Initialize with request origin and session to spoof
     *
     * @param {string}      origin  origin URL prepended to requests
     * @param {UserSession} session session on whose behalf requests are made
     */
    constructor( origin, session )
    {
        this._origin = ''+origin;
        this._session = session;
    },


    /**
     * Create Node-based HTTP implementation spoofing the user session
     *
     * @return {NodeHttpImpl} session-spoofing HTTP implementation
     */
    'override protected createHttpImpl'()
    {
        return NodeHttpImpl.use( SpoofedNodeHttpImpl( this._session ) )(
            {
                http: require( 'http' ),
                https: require( 'https' ),
            },
            require( 'url' ),
            this._origin
        );
    },
} );
|
|
@ -551,6 +551,22 @@ module.exports = Class.extend( require( 'events' ).EventEmitter,
|
|||
},
|
||||
|
||||
|
||||
    'public getHostAddr': function()
    {
        // prefer the proxy-provided host (when behind a reverse proxy),
        // falling back to the request's own Host header
        return this.request.headers['x-forwarded-host']
            || this.request.headers.host;
    },
|
||||
|
||||
|
||||
'public getOrigin': function()
|
||||
{
|
||||
const referrer = this.request.headers.referrer || "";
|
||||
|
||||
return this.request.headers.origin
|
||||
|| ( referrer.match( '^[a-z]+://[^/]+' ) || [] )[ 0 ];
|
||||
},
|
||||
|
||||
|
||||
'public getUserAgent': function()
|
||||
{
|
||||
return this.request.headers['user-agent'];
|
||||
|
|
|
@ -27,7 +27,11 @@ var dapi = require( '../../../' ).dapi,
|
|||
dummy_url = 'http://foo',
|
||||
dummy_impl = Class
|
||||
.implement( dapi.http.HttpImpl )
|
||||
.extend( { requestData: function( _, __, ___, ____ ) {} } )(),
|
||||
.extend(
|
||||
{
|
||||
requestData: function( _, __, ___, ____ ) {},
|
||||
setOptions() {},
|
||||
} )(),
|
||||
|
||||
dummy_sut = Sut( dummy_url, 'GET', dummy_impl );
|
||||
|
||||
|
@ -86,7 +90,9 @@ describe( 'HttpDataApi', function()
|
|||
{
|
||||
this.provided = arguments;
|
||||
c( this.err, this.data );
|
||||
}
|
||||
},
|
||||
|
||||
setOptions() {},
|
||||
} )();
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,47 @@
|
|||
/**
|
||||
* Tests error representing non-200 HTTP status code
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
// directive must precede all statements to take effect (it was
// previously after the `require' calls, where it was a no-op)
'use strict';

const { expect } = require( 'chai' );
const Sut = require( '../../../' ).dapi.http.HttpError;


describe( "HttpError", () =>
{
    it( "provides HTTP status code", () =>
    {
        const code = 418;

        expect( Sut( 'message', code ).statuscode )
            .to.equal( code );
    } );


    // just make sure overriding ctor calls parent
    it( "sets message", () =>
    {
        const message = 'foobar';

        expect( Sut( message ).message )
            .to.equal( message );
    } );
} );
|
|
@ -0,0 +1,460 @@
|
|||
/**
|
||||
* Test HTTP using Node.js-compatible API
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const { expect } = require( 'chai' );
|
||||
const { Class } = require( 'easejs' );
|
||||
|
||||
const {
|
||||
HttpImpl,
|
||||
NodeHttpImpl: Sut,
|
||||
HttpError,
|
||||
} = require( '../../../' ).dapi.http;
|
||||
|
||||
|
||||
describe( "NodeHttpImpl", () =>
|
||||
{
|
||||
it( 'is an HttpImpl', function()
|
||||
{
|
||||
var sut = Sut( function() {} );
|
||||
expect( Class.isA( HttpImpl, sut ) ).to.be.ok;
|
||||
} );
|
||||
|
||||
|
||||
[
|
||||
{
|
||||
label: "uses http for plain HTTP requests",
|
||||
protocol: 'http:',
|
||||
method: 'GET',
|
||||
},
|
||||
{
|
||||
label: "uses http for plain HTTP requests",
|
||||
protocol: 'https:',
|
||||
method: 'GET',
|
||||
}
|
||||
].forEach( ( { label, protocol, method } ) =>
|
||||
{
|
||||
it( label, done =>
|
||||
{
|
||||
const url_result = {
|
||||
protocol: protocol,
|
||||
hostname: 'host',
|
||||
port: 8888,
|
||||
path: 'foo',
|
||||
};
|
||||
|
||||
const url = _createMockUrl( given_url => url_result );
|
||||
|
||||
const data = {};
|
||||
const callback_expected = {};
|
||||
const callback = () => callback_expected;
|
||||
|
||||
const check = proto => ( opts, given_callback ) =>
|
||||
{
|
||||
expect( opts.protocol ).to.equal( proto );
|
||||
expect( opts.hostname ).to.equal( url_result.hostname );
|
||||
expect( opts.port ).to.equal( url_result.port );
|
||||
expect( opts.path ).to.equal( url_result.path );
|
||||
expect( opts.method ).to.equal( method );
|
||||
|
||||
given_callback( _createMockResp() );
|
||||
|
||||
done();
|
||||
};
|
||||
|
||||
const http = _createMockHttp( check( 'http:' ) );
|
||||
const https = _createMockHttp( check( 'https:' ) );
|
||||
|
||||
Sut( { http: http, https: https }, url )
|
||||
.requestData( '', method, data, callback );
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
describe( "given an origin", () =>
{
    it( "prepends to URL if URL begins with a slash", done =>
    {
        const origin = 'https://foo.com';
        const path   = '/quux/quuux';

        // BUG FIX: `res' was referenced below without ever being
        // declared in this scope, causing a ReferenceError once the
        // mock http invoked its callback
        const res = _createMockResp();

        const url = _createMockUrl( given_url =>
        {
            expect( given_url ).to.equal( origin + path );
            done();
        } );

        const http = _createMockHttp( ( _, callback ) =>
        {
            callback( res );
            res.trigger( 'end' );
        } );

        Sut( { http: http }, url, origin )
            .requestData( path, 'GET', {}, () => {} );
    } );


    it( "does not prepend to URL that does not begin with a slash", done =>
    {
        const origin = 'https://bar.com';
        const path   = 'http://foo.com/quux/quuux';

        // BUG FIX: previously undefined (see above)
        const res = _createMockResp();

        const url = _createMockUrl( given_url =>
        {
            expect( given_url ).to.equal( path );
            done();
        } );

        const http = _createMockHttp( ( _, callback ) =>
        {
            callback( res );
            res.trigger( 'end' );
        } );

        Sut( { http: http }, url, origin )
            .requestData( path, 'GET', {}, () => {} );
    } );
} );
|
||||
|
||||
|
||||
it( "returns response when no error", done =>
|
||||
{
|
||||
const res = _createMockResp();
|
||||
const chunks = [ 'a', 'b', 'c', 'd' ];
|
||||
|
||||
const http = _createMockHttp( ( _, callback ) =>
|
||||
{
|
||||
callback( res );
|
||||
|
||||
chunks.forEach( chunk => res.trigger( 'data', chunk ) );
|
||||
res.trigger( 'end' );
|
||||
} );
|
||||
|
||||
Sut( { http: http }, _createMockUrl() )
|
||||
.requestData( "", 'GET', '', ( e, data ) =>
|
||||
{
|
||||
expect( e ).to.equal( null );
|
||||
expect( data ).to.equal( chunks.join( '' ) );
|
||||
|
||||
done();
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
it( "adds data to query string on GET", done =>
|
||||
{
|
||||
const given_path = '/path';
|
||||
const expected_query = 'write data';
|
||||
|
||||
const res = _createMockResp();
|
||||
const url = _createMockUrl( given_url => ( {
|
||||
protocol: 'http:',
|
||||
path: given_path,
|
||||
} ) );
|
||||
|
||||
const http = _createMockHttp( ( options, callback ) =>
|
||||
{
|
||||
expect( options.path )
|
||||
.to.equal( given_path + '?' + expected_query );
|
||||
|
||||
callback( res );
|
||||
res.trigger( 'end' );
|
||||
} );
|
||||
|
||||
Sut( { http: http }, url )
|
||||
.requestData( "", 'GET', expected_query, done );
|
||||
} );
|
||||
|
||||
|
||||
it( "writes form data on POST", done =>
|
||||
{
|
||||
const expected_data = 'expected';
|
||||
const expected_write = 'write data';
|
||||
|
||||
const res = _createMockResp();
|
||||
|
||||
const http = _createMockHttp( ( options, callback ) =>
|
||||
{
|
||||
expect( http.req.written ).to.equal( expected_write );
|
||||
|
||||
expect( options.headers[ 'Content-Type' ] )
|
||||
.to.equal( 'application/x-www-form-urlencoded' );
|
||||
|
||||
callback( res );
|
||||
|
||||
// make sure we're still handling the response as well
|
||||
res.trigger( 'data', expected_data );
|
||||
res.trigger( 'end' );
|
||||
} );
|
||||
|
||||
Sut( { http: http }, _createMockUrl() )
|
||||
.requestData( "", 'POST', expected_write, ( e, data ) =>
|
||||
{
|
||||
expect( e ).to.equal( null );
|
||||
expect( data ).to.equal( expected_data );
|
||||
|
||||
done();
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
it( "returns error and response given non-200 status code", done =>
|
||||
{
|
||||
const res = _createMockResp();
|
||||
const http = _createMockHttp( ( _, callback ) =>
|
||||
{
|
||||
callback( res )
|
||||
|
||||
res.statusCode = 418;
|
||||
res.statusMessage = "I'm a teapot";
|
||||
|
||||
res.trigger( 'end' );
|
||||
} );
|
||||
|
||||
Sut( { http: http }, _createMockUrl() )
|
||||
.requestData( "", 'GET', '', ( e, data ) =>
|
||||
{
|
||||
expect( e ).to.be.instanceOf( HttpError );
|
||||
expect( e.message ).to.equal( res.statusMessage );
|
||||
expect( e.statuscode ).to.equal( res.statusCode );
|
||||
|
||||
done();
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
describe( "given a request error", () =>
|
||||
{
|
||||
it( "returns error with no response on request error", done =>
|
||||
{
|
||||
const error = Error( 'test error' );
|
||||
const http = _createMockHttp( () => {} );
|
||||
|
||||
Sut( { http: http }, _createMockUrl() )
|
||||
.requestData( "", 'GET', '', ( e, data ) =>
|
||||
{
|
||||
expect( data ).to.equal( null );
|
||||
expect( e ).to.equal( error );
|
||||
|
||||
done();
|
||||
} );
|
||||
|
||||
// request will be hanging at this point since we didn't call
|
||||
// the callback, so we can fail the request
|
||||
http.req.trigger( 'error', error );
|
||||
} );
|
||||
|
||||
// this should never happen in practice, but we want to defend
|
||||
// against it to make sure the callback is not invoked twice
|
||||
it( "will not complete request on end", () =>
|
||||
{
|
||||
let res = _createMockResp();
|
||||
|
||||
const http = _createMockHttp( ( _, callback ) =>
|
||||
{
|
||||
// allow hooking `end'
|
||||
callback( res );
|
||||
} );
|
||||
|
||||
Sut( { http: http }, _createMockUrl() )
|
||||
.requestData( "", 'GET', '', ( e, data ) =>
|
||||
{
|
||||
// will fail on successful callback
|
||||
expect( data ).to.equal( null );
|
||||
} );
|
||||
|
||||
http.req.trigger( 'error', Error() );
|
||||
|
||||
// do not invoke a second time (should do nothing)
|
||||
res.trigger( 'end' );
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
describe( "protected API", () =>
|
||||
{
|
||||
it( "allows overriding request end behavior", done =>
|
||||
{
|
||||
const expected_data = "expected";
|
||||
const e = Error( "test e" );
|
||||
const value = "resp data";
|
||||
const res = _createMockResp();
|
||||
|
||||
const http = _createMockHttp( ( _, callback ) =>
|
||||
{
|
||||
callback( res );
|
||||
|
||||
res.trigger( 'data', expected_data );
|
||||
res.trigger( 'end' );
|
||||
} );
|
||||
|
||||
Sut.extend(
|
||||
{
|
||||
'override requestEnd'( given_res, data, callback )
|
||||
{
|
||||
expect( given_res ).to.equal( res );
|
||||
expect( data ).to.equal( expected_data );
|
||||
|
||||
callback( e, value );
|
||||
},
|
||||
} )( { http: http }, _createMockUrl() )
|
||||
.requestData( "", 'GET', '', ( given_e, given_data ) =>
|
||||
{
|
||||
expect( given_e ).to.equal( e );
|
||||
expect( given_data ).to.equal( value );
|
||||
|
||||
done();
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
it( "allows overriding concept of success", done =>
|
||||
{
|
||||
const res = _createMockResp();
|
||||
|
||||
const http = _createMockHttp( ( _, callback ) =>
|
||||
{
|
||||
callback( res );
|
||||
res.trigger( 'end' );
|
||||
} );
|
||||
|
||||
// would normally be a failure
|
||||
res.statusCode = 500;
|
||||
|
||||
Sut.extend(
|
||||
{
|
||||
'override isSuccessful': ( given_res ) => true,
|
||||
} )( { http: http }, _createMockUrl() )
|
||||
.requestData( "", 'GET', '', ( e ) =>
|
||||
{
|
||||
expect( e ).to.equal( null );
|
||||
done();
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
it( "allows overriding error handling", done =>
|
||||
{
|
||||
const expected_e = Error( 'expected' );
|
||||
const error = {};
|
||||
const value = 'error data';
|
||||
|
||||
const http = _createMockHttp( ( _, callback ) =>
|
||||
{
|
||||
callback( _createMockResp() );
|
||||
} );
|
||||
|
||||
Sut.extend(
|
||||
{
|
||||
'override serveError'(
|
||||
given_e, given_res, given_data, callback
|
||||
)
|
||||
{
|
||||
expect( given_e ).to.equal( expected_e );
|
||||
expect( given_res ).to.equal( null );
|
||||
expect( given_data ).to.equal( null );
|
||||
|
||||
error.e = given_e;
|
||||
callback( error, value );
|
||||
},
|
||||
} )( { http: http }, _createMockUrl() )
|
||||
.requestData( "", 'GET', '', ( e, given_value ) =>
|
||||
{
|
||||
expect( e ).to.equal( error );
|
||||
expect( e.e ).to.equal( expected_e );
|
||||
expect( given_value ).to.equal( value );
|
||||
|
||||
done();
|
||||
} );
|
||||
|
||||
// we're still hanging the request since we haven't called the
|
||||
// callback in http
|
||||
http.req.trigger( 'error', expected_e );
|
||||
} );
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
/**
 * Create a mock of Node's `http'/`https' module
 *
 * The returned object exposes `request' and a `req' mock; invoking the
 * request callback is deferred until the caller ends the request, at
 * which point REQ_CALLBACK receives the request options and callback.
 *
 * @param {Function} req_callback invoked with ( options, callback ) on end
 *
 * @return {Object} mock http module
 */
const _createMockHttp = req_callback =>
{
    // event handlers, shared between the `req' mock and `request'
    const handlers = {};

    const req = Object.create( {
        written: '',

        on( name, hook )
        {
            handlers[ name ] = hook;
        },

        trigger( name, payload )
        {
            handlers[ name ]( payload );
        },

        end()
        {
            // thunk registered by #request below
            handlers.onend();
        },

        write( payload )
        {
            this.written = payload;
        },
    } );

    return Object.create( {
        req: req,

        request( options, callback )
        {
            // not a real event; just a convenient way to defer the
            // request callback until the request is ended
            handlers.onend = () => req_callback( options, callback );

            return this.req;
        },
    } );
};
|
||||
|
||||
|
||||
/**
 * Create a mock URL parser
 *
 * @param {Function=} callback parse implementation; defaults to a stub
 *                             yielding only an `http:' protocol
 *
 * @return {Object} mock url module
 */
const _createMockUrl = callback =>
{
    const default_parse = () => ( { protocol: 'http:' } );

    return { parse: callback || default_parse };
};
|
||||
|
||||
/**
 * Create a mock HTTP response
 *
 * Handlers registered via #on are invoked by #trigger; unregistered
 * events default to no-ops so that triggering them is harmless.
 *
 * @return {Object} mock response with 200 status code
 */
const _createMockResp = () =>
{
    // per-response event handlers (defaults are inert)
    const handlers = {
        data() {},
        end()  {},
    };

    return Object.create( {
        event: handlers,

        statusCode: 200,

        on( name, hook )
        {
            handlers[ name ] = hook;
        },

        trigger( name, payload )
        {
            handlers[ name ]( payload );
        }
    } );
};
|
|
@ -0,0 +1,84 @@
|
|||
/**
|
||||
* Tests Node-based HTTP client with session spoofing
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const { expect } = require( 'chai' );
|
||||
const {
|
||||
SpoofedNodeHttpImpl: Sut,
|
||||
NodeHttpImpl,
|
||||
} = require( '../../../' ).dapi.http;
|
||||
|
||||
|
||||
// FIX: suite was mislabeled `SpoofNodeHttpImpl'; the trait under test
// is SpoofedNodeHttpImpl
describe( 'SpoofedNodeHttpImpl', () =>
{
    it( "adds session headers", done =>
    {
        const user_agent  = 'Agent Foo';
        const forward_for = '::1';
        const sessname    = 'FOOSESSID';
        const sessid      = '12345';

        // assertions live in the mocked protocol handler, which is
        // invoked when the SUT issues the request
        const protos = {
            http: {
                request( given )
                {
                    expect( given.headers[ 'User-Agent' ] )
                        .to.equal( user_agent );
                    expect( given.headers[ 'X-Forwarded-For' ] )
                        .to.equal( forward_for );

                    expect( given.headers.Cookie )
                        .to.contain( sessname + '=' + sessid );

                    done();
                },
            },
        };

        const url = {
            parse: () => ( {
                protocol: 'http',
            } )
        };

        const session = getStubSession( {
            agent:       user_agent,
            forward_for: forward_for,
            sessname:    sessname,
            sessid:      sessid,
        } );

        // FIX: result was previously bound to an unused `given'
        NodeHttpImpl.use( Sut( session ) )( protos, url )
            .requestData( '', '', {}, () => {} );
    } );
} );
|
||||
|
||||
|
||||
/**
 * Create session stub exposing only the accessors used by
 * SpoofedNodeHttpImpl
 *
 * @param {Object} arg destructured agent, forward_for, sessname, sessid
 *
 * @return {Object} session stub
 */
function getStubSession( { agent, forward_for, sessname, sessid } )
{
    return {
        getUserAgent()     { return agent; },
        getRemoteAddr()    { return forward_for; },
        getSessionIdName() { return sessname; },
        getSessionId()     { return sessid; },
    };
}
|
|
@ -19,19 +19,24 @@
|
|||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
var dapi = require( '../../../' ).dapi,
|
||||
expect = require( 'chai' ).expect,
|
||||
Class = require( 'easejs' ).Class,
|
||||
HttpImpl = dapi.http.HttpImpl,
|
||||
Sut = dapi.http.XhrHttpImpl,
|
||||
'use strict';
|
||||
|
||||
DummyXhr = function()
|
||||
{
|
||||
const { expect } = require( 'chai' );
|
||||
const { Class } = require( 'easejs' );
|
||||
|
||||
const {
|
||||
HttpImpl,
|
||||
XhrHttpImpl: Sut,
|
||||
HttpError,
|
||||
} = require( '../../../' ).dapi.http;
|
||||
|
||||
const DummyXhr = function()
|
||||
{
|
||||
this.open = function()
|
||||
{
|
||||
DummyXhr.args = arguments;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
describe( 'XhrHttpImpl', function()
|
||||
|
@ -260,7 +265,7 @@ describe( 'XhrHttpImpl', function()
|
|||
Sut( StubXhr )
|
||||
.requestData( 'http://foo', 'GET', '', function( err, _ )
|
||||
{
|
||||
expect( err ).to.be.instanceOf( Error );
|
||||
expect( err ).to.be.instanceOf( HttpError );
|
||||
|
||||
expect( err.message ).to.contain(
|
||||
StubXhr.prototype.status
|
||||
|
|
|
@ -0,0 +1,162 @@
|
|||
/**
|
||||
* Tests Data-API-based metadata population
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const expect = require( 'chai' ).expect;
|
||||
const Sut = require( '../../../' ).server.meta.DapiMetaSource;
|
||||
|
||||
describe( "DapiMetaSource", () =>
|
||||
{
|
||||
it( "populates field with dapi response", () =>
|
||||
{
|
||||
const dapim = createStubDapiManager();
|
||||
const field_name = 'field_foo';
|
||||
const index = 1;
|
||||
|
||||
const dapi = {
|
||||
name: 'dapi_name',
|
||||
value: 'foo',
|
||||
mapdest: { map: 'dest' },
|
||||
};
|
||||
|
||||
// input data to dapi
|
||||
const given_data = {};
|
||||
|
||||
// dapi output data (response)
|
||||
const ret_data = [ {} ];
|
||||
|
||||
const bucket_result = {
|
||||
[dapi.name]: {
|
||||
[index]: ret_data[ 0 ][ dapi.value ],
|
||||
},
|
||||
};
|
||||
|
||||
const metabucket = getStubBucket();
|
||||
|
||||
// g prefix = "given"
|
||||
// all these show why we want to encapsulate this garbage
|
||||
dapim.getApiData = ( gapi, gdata, gcallback, gname, gindex ) =>
|
||||
{
|
||||
expect( gapi ).to.equal( dapi.name );
|
||||
expect( gdata ).to.equal( given_data );
|
||||
expect( gname ).to.equal( field_name );
|
||||
expect( gindex ).to.equal( index );
|
||||
|
||||
// make sure we handle async
|
||||
process.nextTick( () => gcallback( null, ret_data ) );
|
||||
};
|
||||
|
||||
dapim.setFieldData =
|
||||
( gname, gindex, gdata, gvalue, glabel, gunchanged ) =>
|
||||
{
|
||||
expect( gname ).to.equal( dapi.name );
|
||||
expect( gindex ).to.equal( index );
|
||||
expect( gdata ).to.equal( ret_data );
|
||||
expect( gvalue ).to.equal( dapi.value );
|
||||
expect( glabel ).to.equal( '' ); // unused
|
||||
expect( gunchanged ).to.equal( false );
|
||||
};
|
||||
|
||||
dapim.expandFieldData =
|
||||
( gname, gindex, gbucket, gmap, gpredictive, gdiff ) =>
|
||||
{
|
||||
expect( gname ).to.equal( dapi.name );
|
||||
expect( gindex ).to.equal( index );
|
||||
expect( gbucket ).to.equal( metabucket );
|
||||
expect( gmap ).to.equal( dapi.mapdest );
|
||||
expect( gpredictive ).to.equal( true );
|
||||
expect( gdiff ).to.deep.equal( bucket_result );
|
||||
|
||||
metabucket.getData = () => bucket_result;
|
||||
};
|
||||
|
||||
return Sut( () => metabucket )
|
||||
.getFieldData( field_name, index, dapim, dapi, given_data )
|
||||
.then( result =>
|
||||
{
|
||||
expect( result.field ).to.equal( field_name );
|
||||
expect( result.index ).to.equal( index );
|
||||
expect( result.data ).to.equal( bucket_result );
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
it( "rejects promise on error", () =>
|
||||
{
|
||||
const e = Error( "Test error" );
|
||||
const dapim = createStubDapiManager();
|
||||
|
||||
dapim.getApiData = ( _, __, ___, ____, _____, ______, failc ) =>
|
||||
{
|
||||
failc( e );
|
||||
};
|
||||
|
||||
return expect(
|
||||
Sut( () => getStubBucket() )
|
||||
.getFieldData( 'name', 0, dapim, {}, {} )
|
||||
).to.eventually.be.rejectedWith( e );
|
||||
} );
|
||||
|
||||
|
||||
it( "rejects if more than one result is returned from dapi", () =>
|
||||
{
|
||||
const dapim = createStubDapiManager();
|
||||
|
||||
dapim.getApiData = ( _, __, callback ) =>
|
||||
{
|
||||
// more than one result
|
||||
callback( null, [ {}, {} ] );
|
||||
};
|
||||
|
||||
return expect(
|
||||
Sut( () => getStubBucket() )
|
||||
.getFieldData( 'name', 0, dapim, {}, {} )
|
||||
).to.eventually.be.rejectedWith( Error );
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
/**
 * Create DataApiManager stub
 *
 * Each method is an independent no-op; tests overwrite them
 * individually with assertion-laden implementations.
 *
 * @return {Object} DataApiManager stub
 */
function createStubDapiManager()
{
    return {
        getApiData:      () => {},
        setFieldData:    () => {},
        expandFieldData: () => {},
    };
}
|
||||
|
||||
|
||||
/**
 * Create minimal bucket stub
 *
 * Tests overwrite #getData when they need a canned result.
 *
 * @return {Object} bucket stub
 */
function getStubBucket()
{
    return {
        setValues: () => {},
        getData:   () => {},
    };
}
|
||||
|
||||
|
||||
/**
 * Create quote-metadata persistence stub
 *
 * @return {Object} db stub
 */
function createStubDb()
{
    return {
        saveQuoteMeta: () => {},
    };
}
|
|
@ -0,0 +1,307 @@
|
|||
/**
|
||||
* Manages DataAPI requests and return data
|
||||
*
|
||||
* Copyright (C) 2017 R-T Specialty, LLC.
|
||||
*
|
||||
* This file is part of the Liza Data Collection Framework.
|
||||
*
|
||||
* liza is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const { Class } = require( 'easejs' );
|
||||
const { expect } = require( 'chai' );
|
||||
const Sut = require( '../../../' ).server.request.DataProcessor;
|
||||
|
||||
|
||||
describe( 'DataProcessor', () =>
|
||||
{
|
||||
[
|
||||
{
|
||||
label: "strips internal field data when not internal",
|
||||
data: {
|
||||
internal: [ "foo", "bar" ],
|
||||
foo: [ "bar", "baz" ],
|
||||
},
|
||||
internals: { internal: true },
|
||||
internal: false,
|
||||
expected: {
|
||||
foo: [ "bar", "baz" ],
|
||||
},
|
||||
},
|
||||
{
|
||||
label: "keeps internal field data when internal",
|
||||
data: {
|
||||
internal: [ "foo", "bar" ],
|
||||
foo: [ "bar", "baz" ],
|
||||
},
|
||||
internals: { internal: true },
|
||||
internal: true,
|
||||
expected: {
|
||||
internal: [ "foo", "bar" ],
|
||||
foo: [ "bar", "baz" ],
|
||||
},
|
||||
},
|
||||
].forEach( ( { label, internal, data, internals = {}, expected } ) =>
|
||||
{
|
||||
const { request, program, sut } =
|
||||
createSutFromStubs( internal, internals );
|
||||
|
||||
it( label, () =>
|
||||
{
|
||||
expect(
|
||||
sut.processDiff( data, request, program ).filtered
|
||||
).to.deep.equal( expected );
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
it( "passes data to bucket filter", () =>
|
||||
{
|
||||
const { request, program, meta_source } = createStubs();
|
||||
const data = {};
|
||||
const types = {};
|
||||
|
||||
program.meta.qtypes = types;
|
||||
|
||||
const filter = {
|
||||
filter( given_data, given_types, given_ignore, given_null )
|
||||
{
|
||||
expect( given_data ).to.equal( data );
|
||||
expect( given_types ).to.equal( types );
|
||||
expect( given_null ).to.equal( false );
|
||||
|
||||
// not used
|
||||
expect( given_ignore ).to.deep.equal( {} );
|
||||
|
||||
data.filtered = true;
|
||||
}
|
||||
};
|
||||
|
||||
Sut( filter, () => {}, meta_source )
|
||||
.processDiff( data, request, program );
|
||||
|
||||
expect( data.filtered ).to.equal( true );
|
||||
} );
|
||||
|
||||
|
||||
it( "instantiates dapi manager using program and session", done =>
|
||||
{
|
||||
const { filter, request, program } = createStubs();
|
||||
|
||||
const dapi_factory = ( given_apis, given_request ) =>
|
||||
{
|
||||
expect( given_apis ).to.equal( program.apis );
|
||||
expect( given_request ).to.equal( request );
|
||||
|
||||
done();
|
||||
}
|
||||
|
||||
Sut( filter, dapi_factory )
|
||||
.processDiff( {}, request, program );
|
||||
} );
|
||||
|
||||
|
||||
it( "invokes dapi manager when monitored bucket value changes", () =>
|
||||
{
|
||||
const triggered = {};
|
||||
|
||||
// g prefix = "given"
|
||||
const getFieldData = function( gfield, gindex, gdapim, gdapi, gdata)
|
||||
{
|
||||
triggered[ gdapi.name ] = triggered[ gdapi.name ] || [];
|
||||
triggered[ gdapi.name ][ gindex ] = arguments;
|
||||
|
||||
return Promise.resolve( true );
|
||||
};
|
||||
|
||||
const dapi_manager = {};
|
||||
|
||||
const {
|
||||
request,
|
||||
program,
|
||||
filter,
|
||||
meta_source,
|
||||
} = createStubs( false, {}, getFieldData );
|
||||
|
||||
const sut = Sut( filter, () => dapi_manager, meta_source );
|
||||
|
||||
program.meta.fields = {
|
||||
foo: {
|
||||
dapi: {
|
||||
name: 'dapi_foo',
|
||||
mapsrc: { ina: 'src', inb: 'src1' },
|
||||
},
|
||||
},
|
||||
bar: {
|
||||
dapi: {
|
||||
name: 'dapi_bar',
|
||||
mapsrc: { ina: 'src1' },
|
||||
},
|
||||
},
|
||||
baz: {
|
||||
dapi: {
|
||||
name: 'dapi_no_call',
|
||||
mapsrc: {},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
program.mapis = {
|
||||
src: [ 'foo', 'bar' ], // change
|
||||
src1: [ 'foo' ], // change
|
||||
src2: [ 'baz' ], // do not change
|
||||
};
|
||||
|
||||
// data changed
|
||||
const data = {
|
||||
src: [ 'src0', 'src1' ],
|
||||
src1: [ undefined, 'src11' ],
|
||||
};
|
||||
|
||||
const bucket = createStubBucket( {
|
||||
src: [ 'bsrc0', 'bsrc1' ],
|
||||
src1: [ 'bsrc10', 'bsrc11' ],
|
||||
} );
|
||||
|
||||
const { dapis } = sut.processDiff( data, request, program, bucket );
|
||||
|
||||
const expected = {
|
||||
dapi_foo: [
|
||||
{
|
||||
name: 'foo',
|
||||
data: {
|
||||
ina: data.src[ 0 ],
|
||||
inb: bucket.data.src1[ 0 ],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'foo',
|
||||
data: {
|
||||
ina: data.src[ 1 ],
|
||||
inb: data.src1[ 1 ],
|
||||
},
|
||||
},
|
||||
],
|
||||
dapi_bar: [
|
||||
undefined,
|
||||
{
|
||||
name: 'bar',
|
||||
data: {
|
||||
ina: data.src1[ 1 ],
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
for ( let dapi_name in expected )
|
||||
{
|
||||
let expected_call = expected[ dapi_name ];
|
||||
|
||||
for ( let i in expected_call )
|
||||
{
|
||||
let chk = expected_call[ i ];
|
||||
|
||||
if ( chk === undefined )
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
let [ gfield, gindex, gdapi_manager, gdapi, gdata ] =
|
||||
triggered[ dapi_name ][ i ];
|
||||
|
||||
expect( gfield ).to.equal( chk.name );
|
||||
expect( gdapi.name ).to.equal( dapi_name );
|
||||
expect( +gindex ).to.equal( +i );
|
||||
expect( gdapi_manager ).to.equal( dapi_manager );
|
||||
|
||||
// see mapsrc
|
||||
expect( gdata ).to.deep.equal( chk.data );
|
||||
}
|
||||
}
|
||||
|
||||
expect( triggered.dapi_no_call ).to.equal( undefined );
|
||||
|
||||
return Promise.all( dapis );
|
||||
} );
|
||||
} );
|
||||
|
||||
|
||||
/**
 * Create SUT alongside its stub collaborators
 *
 * Arguments are forwarded verbatim to createStubs.
 *
 * @return {Object} stubs plus instantiated `sut'
 */
function createSutFromStubs( /* see createStubs */ )
{
    const stubs = createStubs.apply( null, arguments );

    return {
        request:     stubs.request,
        program:     stubs.program,
        filter:      stubs.filter,
        meta_source: stubs.meta_source,
        sut:         Sut( stubs.filter, () => {}, stubs.meta_source ),
    };
}
|
||||
|
||||
|
||||
/**
 * Create common stub collaborators for DataProcessor tests
 *
 * @param {boolean=}  internal     whether session is internal (default false)
 * @param {Object=}   internals    internal field map (default {})
 * @param {Function=} getFieldData meta source getFieldData override
 *
 * @return {Object} request, program, filter, and meta_source stubs
 */
function createStubs( internal, internals, getFieldData )
{
    return {
        request:     createStubUserRequest( internal || false ),
        program:     createStubProgram( internals || {} ),
        filter:      { filter: data => data },
        meta_source: createStubDapiMetaSource( getFieldData ),
    };
}
|
||||
|
||||
|
||||
/**
 * Create user request stub whose session reports the given
 * internal-user flag
 *
 * A fresh session object is produced on each #getSession call, as
 * with the real request.
 *
 * @param {boolean} internal whether session is internal
 *
 * @return {Object} request stub
 */
function createStubUserRequest( internal )
{
    return {
        getSession()
        {
            return { isInternal: () => internal };
        },
    };
}
|
||||
|
||||
|
||||
/**
 * Create Program stub
 *
 * @param {Object} internal_fields map of internal field names
 *
 * @return {Object} program stub with empty metadata and API maps
 */
function createStubProgram( internal_fields )
{
    const program = {
        internal: internal_fields,
        apis:     {},
    };

    // metadata containers begin empty; tests populate as needed
    program.meta = { qtypes: {}, fields: {} };

    return program;
}
|
||||
|
||||
|
||||
/**
 * Create DapiMetaSource stub
 *
 * @param {Function=} getFieldData override; defaults to an inert
 *                                 no-op of matching arity
 *
 * @return {Object} meta source stub
 */
function createStubDapiMetaSource( getFieldData )
{
    const default_impl =
        function( field, index, dapi_manager, dapi, data ) {};

    return { getFieldData: getFieldData || default_impl };
}
|
||||
|
||||
|
||||
/**
 * Create bucket stub backed by the given data
 *
 * @param {Object} data bucket data keyed by field name
 *
 * @return {Object} bucket stub exposing `data' and #getDataByName
 */
function createStubBucket( data )
{
    return {
        data: data,

        getDataByName: name => data[ name ],
    };
}
|
Loading…
Reference in New Issue