Bar graph added.
node_modules/vega-transforms/LICENSE (27 lines, generated, vendored, Normal file)
@@ -0,0 +1,27 @@
Copyright (c) 2015-2018, University of Washington Interactive Data Lab
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
node_modules/vega-transforms/README.md (50 lines, generated, vendored, Normal file)
@@ -0,0 +1,50 @@
# vega-transforms

Data processing transforms for Vega dataflows.

This package provides the following Vega data transforms:

- [**Aggregate**](https://vega.github.io/vega/docs/transforms/aggregate/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Aggregate.js "Source")
- [**Bin**](https://vega.github.io/vega/docs/transforms/bin/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Bin.js "Source")
- [**Collect**](https://vega.github.io/vega/docs/transforms/collect/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Collect.js "Source")
- [**CountPattern**](https://vega.github.io/vega/docs/transforms/countpattern/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/CountPattern.js "Source")
- [**Cross**](https://vega.github.io/vega/docs/transforms/cross/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Cross.js "Source")
- [**Density**](https://vega.github.io/vega/docs/transforms/density/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Density.js "Source")
- [**DotBin**](https://vega.github.io/vega/docs/transforms/dotbin/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/DotBin.js "Source")
- [**Extent**](https://vega.github.io/vega/docs/transforms/extent/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Extent.js "Source")
- [**Filter**](https://vega.github.io/vega/docs/transforms/filter/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Filter.js "Source")
- [**Flatten**](https://vega.github.io/vega/docs/transforms/flatten/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Flatten.js "Source")
- [**Fold**](https://vega.github.io/vega/docs/transforms/fold/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Fold.js "Source")
- [**Formula**](https://vega.github.io/vega/docs/transforms/formula/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Formula.js "Source")
- [**Impute**](https://vega.github.io/vega/docs/transforms/Impute/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Impute.js "Source")
- [**JoinAggregate**](https://vega.github.io/vega/docs/transforms/joinaggregate/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/JoinAggregate.js "Source")
- [**KDE**](https://vega.github.io/vega/docs/transforms/kde/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/KDE.js "Source")
- [**Lookup**](https://vega.github.io/vega/docs/transforms/lookup/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Lookup.js "Source")
- [**Pivot**](https://vega.github.io/vega/docs/transforms/pivot/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Pivot.js "Source")
- [**Project**](https://vega.github.io/vega/docs/transforms/project/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Project.js "Source")
- [**Quantile**](https://vega.github.io/vega/docs/transforms/quantile/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Quantile.js "Source")
- [**Sample**](https://vega.github.io/vega/docs/transforms/sample/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Sample.js "Source")
- [**Sequence**](https://vega.github.io/vega/docs/transforms/sequence/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Sequence.js "Source")
- [**TimeUnit**](https://vega.github.io/vega/docs/transforms/timeunit/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/TimeUnit.js "Source")
- [**Window**](https://vega.github.io/vega/docs/transforms/window/) [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Window.js "Source")

And provides the following internal transforms:

- **Compare** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Compare.js "Source")
- **Expression** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Expression.js "Source")
- **Facet** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Facet.js "Source")
- **Field** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Field.js "Source")
- **Generate** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Generate.js "Source")
- **Key** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Key.js "Source")
- **MultiExtent** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/MultiExtent.js "Source")
- **MultiValues** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/MultiValues.js "Source")
- **Params** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Params.js "Source")
- **PreFacet** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/PreFacet.js "Source")
- **Proxy** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Proxy.js "Source")
- **Relay** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Relay.js "Source")
- **Sieve** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Sieve.js "Source")
- **Subflow** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Subflow.js "Source")
- **TupleIndex** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/TupleIndex.js "Source")
- **Values** [<>](https://github.com/vega/vega/blob/master/packages/vega-transforms/src/Values.js "Source")

For more information about data stream transforms, see the [Vega transform documentation](https://vega.github.io/vega/docs/transforms/).
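The vendored README above only lists the transforms; it does not show how they are consumed. As a rough illustration (a minimal sketch assuming the conventional vega-dataflow wiring, not code taken from this commit), the exported constructors are added to a `Dataflow` and chained through `pulse` parameters:

```js
// Minimal sketch, assumed API: count tuples per 'category'
// by chaining collect -> aggregate -> collect.
import {Dataflow, changeset} from 'vega-dataflow';
import {field} from 'vega-util';
import {aggregate, collect} from 'vega-transforms';

const df = new Dataflow();
const input = df.add(collect);                    // gathers raw input tuples
const counts = df.add(aggregate, {                // group-by count (default op)
  groupby: [field('category')],
  pulse: input
});
const output = df.add(collect, {pulse: counts});  // gathers aggregated tuples

df.pulse(input, changeset().insert([
  {category: 'a'}, {category: 'a'}, {category: 'b'}
])).run();

console.log(output.value);
// expected shape: [{category: 'a', count: 2}, {category: 'b', count: 1}]
```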
node_modules/vega-transforms/build/vega-transforms.js (3676 lines, generated, vendored, Normal file)
File diff suppressed because it is too large
node_modules/vega-transforms/build/vega-transforms.min.js (1 line, generated, vendored, Normal file)
File diff suppressed because one or more lines are too long
node_modules/vega-transforms/index.js (40 lines, generated, vendored, Normal file)
@@ -0,0 +1,40 @@
export {default as aggregate} from './src/Aggregate';
export {default as bin} from './src/Bin';
export {default as collect} from './src/Collect';
export {default as compare} from './src/Compare';
export {default as countpattern} from './src/CountPattern';
export {default as cross} from './src/Cross';
export {default as density} from './src/Density';
export {default as dotbin} from './src/DotBin';
export {default as expression} from './src/Expression';
export {default as extent} from './src/Extent';
export {default as facet} from './src/Facet';
export {default as field} from './src/Field';
export {default as filter} from './src/Filter';
export {default as flatten} from './src/Flatten';
export {default as fold} from './src/Fold';
export {default as formula} from './src/Formula';
export {default as generate} from './src/Generate';
export {default as impute} from './src/Impute';
export {default as joinaggregate} from './src/JoinAggregate';
export {default as kde} from './src/KDE';
export {default as key} from './src/Key';
export {default as load} from './src/Load';
export {default as lookup} from './src/Lookup';
export {default as multiextent} from './src/MultiExtent';
export {default as multivalues} from './src/MultiValues';
export {default as params} from './src/Params';
export {default as pivot} from './src/Pivot';
export {default as prefacet} from './src/PreFacet';
export {default as project} from './src/Project';
export {default as proxy} from './src/Proxy';
export {default as quantile} from './src/Quantile';
export {default as relay} from './src/Relay';
export {default as sample} from './src/Sample';
export {default as sequence} from './src/Sequence';
export {default as sieve} from './src/Sieve';
export {default as subflow} from './src/Subflow';
export {default as timeunit} from './src/TimeUnit';
export {default as tupleindex} from './src/TupleIndex';
export {default as values} from './src/Values';
export {default as window} from './src/Window';
node_modules/vega-transforms/package.json (75 lines, generated, vendored, Normal file)
@@ -0,0 +1,75 @@
{
  "_from": "vega-transforms@~4.9.0",
  "_id": "vega-transforms@4.9.0",
  "_inBundle": false,
  "_integrity": "sha512-xsgvkHsyKgEWdCB86DVts2Zu6fJ+cGjpc56MpcCWPArNuhcUSugivIoTAFAh8w7QempQBsAtnPrnbaytMYOJ8w==",
  "_location": "/vega-transforms",
  "_phantomChildren": {},
  "_requested": {
    "type": "range",
    "registry": true,
    "raw": "vega-transforms@~4.9.0",
    "name": "vega-transforms",
    "escapedName": "vega-transforms",
    "rawSpec": "~4.9.0",
    "saveSpec": null,
    "fetchSpec": "~4.9.0"
  },
  "_requiredBy": [
    "/vega"
  ],
  "_resolved": "https://registry.npmjs.org/vega-transforms/-/vega-transforms-4.9.0.tgz",
  "_shasum": "272be37adb8565633ccde68f64079fb5029a8ac9",
  "_spec": "vega-transforms@~4.9.0",
  "_where": "/home/prabhatdev/Documents/opensource/gitHubStats/waka-readme-stats/node_modules/vega",
  "author": {
    "name": "Jeffrey Heer",
    "url": "http://idl.cs.washington.edu"
  },
  "bugs": {
    "url": "https://github.com/vega/vega/issues"
  },
  "bundleDependencies": false,
  "dependencies": {
    "d3-array": "^2.4.0",
    "vega-dataflow": "^5.7.0",
    "vega-statistics": "^1.7.5",
    "vega-time": "^2.0.1",
    "vega-util": "^1.14.0"
  },
  "deprecated": false,
  "description": "Data processing transforms for Vega dataflows.",
  "gitHead": "62565bbe084a422c4a0cbc6e19c6f7c45a3e5137",
  "homepage": "https://github.com/vega/vega#readme",
  "keywords": [
    "vega",
    "dataflow",
    "transforms",
    "filter",
    "aggregate",
    "project",
    "window",
    "sort",
    "bin",
    "data"
  ],
  "license": "BSD-3-Clause",
  "main": "build/vega-transforms.js",
  "module": "index",
  "name": "vega-transforms",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/vega/vega.git"
  },
  "scripts": {
    "build": "yarn rollup",
    "postbuild": "terser build/vega-transforms.js -c -m -o build/vega-transforms.min.js",
    "postpublish": "git push && git push --tags",
    "prebuild": "rimraf build && mkdir build",
    "prepublishOnly": "yarn test && yarn build",
    "pretest": "yarn prebuild && yarn rollup",
    "rollup": "rollup -g d3-array:d3,vega-dataflow:vega,vega-statistics:vega,vega-time:vega,vega-util:vega -f umd -n vega.transforms -o build/vega-transforms.js -- index.js",
    "test": "tape 'test/**/*-test.js'"
  },
  "version": "4.9.0"
}
node_modules/vega-transforms/src/Aggregate.js (363 lines, generated, vendored, Normal file)
@@ -0,0 +1,363 @@
|
||||
import {groupkey} from './util/AggregateKeys';
|
||||
import {ValidAggregateOps, compileMeasures, createMeasure, measureName} from './util/AggregateOps';
|
||||
import TupleStore from './util/TupleStore';
|
||||
import {Transform, ingest, replace} from 'vega-dataflow';
|
||||
import {accessorFields, accessorName, array, error, inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Group-by aggregation operator.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {Array<function(object): *>} [params.groupby] - An array of accessors to groupby.
|
||||
* @param {Array<function(object): *>} [params.fields] - An array of accessors to aggregate.
|
||||
* @param {Array<string>} [params.ops] - An array of strings indicating aggregation operations.
|
||||
* @param {Array<string>} [params.as] - An array of output field names for aggregated values.
|
||||
* @param {boolean} [params.cross=false] - A flag indicating that the full
|
||||
* cross-product of groupby values should be generated, including empty cells.
|
||||
* If true, the drop parameter is ignored and empty cells are retained.
|
||||
* @param {boolean} [params.drop=true] - A flag indicating if empty cells should be removed.
|
||||
*/
|
||||
export default function Aggregate(params) {
|
||||
Transform.call(this, null, params);
|
||||
|
||||
this._adds = []; // array of added output tuples
|
||||
this._mods = []; // array of modified output tuples
|
||||
this._alen = 0; // number of active added tuples
|
||||
this._mlen = 0; // number of active modified tuples
|
||||
this._drop = true; // should empty aggregation cells be removed
|
||||
this._cross = false; // produce full cross-product of group-by values
|
||||
|
||||
this._dims = []; // group-by dimension accessors
|
||||
this._dnames = []; // group-by dimension names
|
||||
|
||||
this._measures = []; // collection of aggregation monoids
|
||||
this._countOnly = false; // flag indicating only count aggregation
|
||||
this._counts = null; // collection of count fields
|
||||
this._prev = null; // previous aggregation cells
|
||||
|
||||
this._inputs = null; // array of dependent input tuple field names
|
||||
this._outputs = null; // array of output tuple field names
|
||||
}
|
||||
|
||||
Aggregate.Definition = {
|
||||
'type': 'Aggregate',
|
||||
'metadata': {'generates': true, 'changes': true},
|
||||
'params': [
|
||||
{ 'name': 'groupby', 'type': 'field', 'array': true },
|
||||
{ 'name': 'ops', 'type': 'enum', 'array': true, 'values': ValidAggregateOps },
|
||||
{ 'name': 'fields', 'type': 'field', 'null': true, 'array': true },
|
||||
{ 'name': 'as', 'type': 'string', 'null': true, 'array': true },
|
||||
{ 'name': 'drop', 'type': 'boolean', 'default': true },
|
||||
{ 'name': 'cross', 'type': 'boolean', 'default': false },
|
||||
{ 'name': 'key', 'type': 'field' }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Aggregate, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var aggr = this,
|
||||
out = pulse.fork(pulse.NO_SOURCE | pulse.NO_FIELDS),
|
||||
mod = _.modified();
|
||||
|
||||
aggr.stamp = out.stamp;
|
||||
|
||||
if (aggr.value && (mod || pulse.modified(aggr._inputs, true))) {
|
||||
aggr._prev = aggr.value;
|
||||
aggr.value = mod ? aggr.init(_) : {};
|
||||
pulse.visit(pulse.SOURCE, t => aggr.add(t));
|
||||
} else {
|
||||
aggr.value = aggr.value || aggr.init(_);
|
||||
pulse.visit(pulse.REM, t => aggr.rem(t));
|
||||
pulse.visit(pulse.ADD, t => aggr.add(t));
|
||||
}
|
||||
|
||||
// Indicate output fields and return aggregate tuples.
|
||||
out.modifies(aggr._outputs);
|
||||
|
||||
// Should empty cells be dropped?
|
||||
aggr._drop = _.drop !== false;
|
||||
|
||||
// If domain cross-product requested, generate empty cells as needed
|
||||
// and ensure that empty cells are not dropped
|
||||
if (_.cross && aggr._dims.length > 1) {
|
||||
aggr._drop = false;
|
||||
aggr.cross();
|
||||
}
|
||||
|
||||
if (pulse.clean() && aggr._drop) {
|
||||
out.clean(true).runAfter(() => this.clean());
|
||||
}
|
||||
|
||||
return aggr.changes(out);
|
||||
};
|
||||
|
||||
prototype.cross = function() {
|
||||
var aggr = this,
|
||||
curr = aggr.value,
|
||||
dims = aggr._dnames,
|
||||
vals = dims.map(function() { return {}; }),
|
||||
n = dims.length;
|
||||
|
||||
// collect all group-by domain values
|
||||
function collect(cells) {
|
||||
var key, i, t, v;
|
||||
for (key in cells) {
|
||||
t = cells[key].tuple;
|
||||
for (i=0; i<n; ++i) {
|
||||
vals[i][(v = t[dims[i]])] = v;
|
||||
}
|
||||
}
|
||||
}
|
||||
collect(aggr._prev);
|
||||
collect(curr);
|
||||
|
||||
// iterate over key cross-product, create cells as needed
|
||||
function generate(base, tuple, index) {
|
||||
var name = dims[index],
|
||||
v = vals[index++],
|
||||
k, key;
|
||||
|
||||
for (k in v) {
|
||||
tuple[name] = v[k];
|
||||
key = base ? base + '|' + k : k;
|
||||
if (index < n) generate(key, tuple, index);
|
||||
else if (!curr[key]) aggr.cell(key, tuple);
|
||||
}
|
||||
}
|
||||
generate('', {}, 0);
|
||||
};
|
||||
|
||||
prototype.init = function(_) {
|
||||
// initialize input and output fields
|
||||
var inputs = (this._inputs = []),
|
||||
outputs = (this._outputs = []),
|
||||
inputMap = {};
|
||||
|
||||
function inputVisit(get) {
|
||||
var fields = array(accessorFields(get)),
|
||||
i = 0, n = fields.length, f;
|
||||
for (; i<n; ++i) {
|
||||
if (!inputMap[f=fields[i]]) {
|
||||
inputMap[f] = 1;
|
||||
inputs.push(f);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// initialize group-by dimensions
|
||||
this._dims = array(_.groupby);
|
||||
this._dnames = this._dims.map(function(d) {
|
||||
var dname = accessorName(d);
|
||||
inputVisit(d);
|
||||
outputs.push(dname);
|
||||
return dname;
|
||||
});
|
||||
this.cellkey = _.key ? _.key : groupkey(this._dims);
|
||||
|
||||
// initialize aggregate measures
|
||||
this._countOnly = true;
|
||||
this._counts = [];
|
||||
this._measures = [];
|
||||
|
||||
var fields = _.fields || [null],
|
||||
ops = _.ops || ['count'],
|
||||
as = _.as || [],
|
||||
n = fields.length,
|
||||
map = {},
|
||||
field, op, m, mname, outname, i;
|
||||
|
||||
if (n !== ops.length) {
|
||||
error('Unmatched number of fields and aggregate ops.');
|
||||
}
|
||||
|
||||
for (i=0; i<n; ++i) {
|
||||
field = fields[i];
|
||||
op = ops[i];
|
||||
|
||||
if (field == null && op !== 'count') {
|
||||
error('Null aggregate field specified.');
|
||||
}
|
||||
mname = accessorName(field);
|
||||
outname = measureName(op, mname, as[i]);
|
||||
outputs.push(outname);
|
||||
|
||||
if (op === 'count') {
|
||||
this._counts.push(outname);
|
||||
continue;
|
||||
}
|
||||
|
||||
m = map[mname];
|
||||
if (!m) {
|
||||
inputVisit(field);
|
||||
m = (map[mname] = []);
|
||||
m.field = field;
|
||||
this._measures.push(m);
|
||||
}
|
||||
|
||||
if (op !== 'count') this._countOnly = false;
|
||||
m.push(createMeasure(op, outname));
|
||||
}
|
||||
|
||||
this._measures = this._measures.map(function(m) {
|
||||
return compileMeasures(m, m.field);
|
||||
});
|
||||
|
||||
return {}; // aggregation cells (this.value)
|
||||
};
|
||||
|
||||
// -- Cell Management -----
|
||||
|
||||
prototype.cellkey = groupkey();
|
||||
|
||||
prototype.cell = function(key, t) {
|
||||
var cell = this.value[key];
|
||||
if (!cell) {
|
||||
cell = this.value[key] = this.newcell(key, t);
|
||||
this._adds[this._alen++] = cell;
|
||||
} else if (cell.num === 0 && this._drop && cell.stamp < this.stamp) {
|
||||
cell.stamp = this.stamp;
|
||||
this._adds[this._alen++] = cell;
|
||||
} else if (cell.stamp < this.stamp) {
|
||||
cell.stamp = this.stamp;
|
||||
this._mods[this._mlen++] = cell;
|
||||
}
|
||||
return cell;
|
||||
};
|
||||
|
||||
prototype.newcell = function(key, t) {
|
||||
var cell = {
|
||||
key: key,
|
||||
num: 0,
|
||||
agg: null,
|
||||
tuple: this.newtuple(t, this._prev && this._prev[key]),
|
||||
stamp: this.stamp,
|
||||
store: false
|
||||
};
|
||||
|
||||
if (!this._countOnly) {
|
||||
var measures = this._measures,
|
||||
n = measures.length, i;
|
||||
|
||||
cell.agg = Array(n);
|
||||
for (i=0; i<n; ++i) {
|
||||
cell.agg[i] = new measures[i](cell);
|
||||
}
|
||||
}
|
||||
|
||||
if (cell.store) {
|
||||
cell.data = new TupleStore();
|
||||
}
|
||||
|
||||
return cell;
|
||||
};
|
||||
|
||||
prototype.newtuple = function(t, p) {
|
||||
var names = this._dnames,
|
||||
dims = this._dims,
|
||||
x = {}, i, n;
|
||||
|
||||
for (i=0, n=dims.length; i<n; ++i) {
|
||||
x[names[i]] = dims[i](t);
|
||||
}
|
||||
|
||||
return p ? replace(p.tuple, x) : ingest(x);
|
||||
};
|
||||
|
||||
prototype.clean = function() {
|
||||
const cells = this.value;
|
||||
for (const key in cells) {
|
||||
if (cells[key].num === 0) {
|
||||
delete cells[key];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// -- Process Tuples -----
|
||||
|
||||
prototype.add = function(t) {
|
||||
var key = this.cellkey(t),
|
||||
cell = this.cell(key, t),
|
||||
agg, i, n;
|
||||
|
||||
cell.num += 1;
|
||||
if (this._countOnly) return;
|
||||
|
||||
if (cell.store) cell.data.add(t);
|
||||
|
||||
agg = cell.agg;
|
||||
for (i=0, n=agg.length; i<n; ++i) {
|
||||
agg[i].add(agg[i].get(t), t);
|
||||
}
|
||||
};
|
||||
|
||||
prototype.rem = function(t) {
|
||||
var key = this.cellkey(t),
|
||||
cell = this.cell(key, t),
|
||||
agg, i, n;
|
||||
|
||||
cell.num -= 1;
|
||||
if (this._countOnly) return;
|
||||
|
||||
if (cell.store) cell.data.rem(t);
|
||||
|
||||
agg = cell.agg;
|
||||
for (i=0, n=agg.length; i<n; ++i) {
|
||||
agg[i].rem(agg[i].get(t), t);
|
||||
}
|
||||
};
|
||||
|
||||
prototype.celltuple = function(cell) {
|
||||
var tuple = cell.tuple,
|
||||
counts = this._counts,
|
||||
agg, i, n;
|
||||
|
||||
// consolidate stored values
|
||||
if (cell.store) {
|
||||
cell.data.values();
|
||||
}
|
||||
|
||||
// update tuple properties
|
||||
for (i=0, n=counts.length; i<n; ++i) {
|
||||
tuple[counts[i]] = cell.num;
|
||||
}
|
||||
if (!this._countOnly) {
|
||||
agg = cell.agg;
|
||||
for (i=0, n=agg.length; i<n; ++i) {
|
||||
agg[i].set(tuple);
|
||||
}
|
||||
}
|
||||
|
||||
return tuple;
|
||||
};
|
||||
|
||||
prototype.changes = function(out) {
|
||||
var adds = this._adds,
|
||||
mods = this._mods,
|
||||
prev = this._prev,
|
||||
drop = this._drop,
|
||||
add = out.add,
|
||||
rem = out.rem,
|
||||
mod = out.mod,
|
||||
cell, key, i, n;
|
||||
|
||||
if (prev) for (key in prev) {
|
||||
cell = prev[key];
|
||||
if (!drop || cell.num) rem.push(cell.tuple);
|
||||
}
|
||||
|
||||
for (i=0, n=this._alen; i<n; ++i) {
|
||||
add.push(this.celltuple(adds[i]));
|
||||
adds[i] = null; // for garbage collection
|
||||
}
|
||||
|
||||
for (i=0, n=this._mlen; i<n; ++i) {
|
||||
cell = mods[i];
|
||||
(cell.num === 0 && drop ? rem : mod).push(this.celltuple(cell));
|
||||
mods[i] = null; // for garbage collection
|
||||
}
|
||||
|
||||
this._alen = this._mlen = 0; // reset list of active cells
|
||||
this._prev = null;
|
||||
return out;
|
||||
};
|
||||
node_modules/vega-transforms/src/Bin.js (114 lines, generated, vendored, Normal file)
@@ -0,0 +1,114 @@
|
||||
import {Transform} from 'vega-dataflow';
|
||||
import {bin} from 'vega-statistics';
|
||||
import {accessor, accessorFields, accessorName, inherits, toNumber} from 'vega-util';
|
||||
|
||||
// epsilon bias to offset floating point error (#1737)
|
||||
const EPSILON = 1e-14;
|
||||
|
||||
/**
|
||||
* Generates a binning function for discretizing data.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator. The
|
||||
* provided values should be valid options for the {@link bin} function.
|
||||
* @param {function(object): *} params.field - The data field to bin.
|
||||
*/
|
||||
export default function Bin(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
Bin.Definition = {
|
||||
'type': 'Bin',
|
||||
'metadata': {'modifies': true},
|
||||
'params': [
|
||||
{ 'name': 'field', 'type': 'field', 'required': true },
|
||||
{ 'name': 'interval', 'type': 'boolean', 'default': true },
|
||||
{ 'name': 'anchor', 'type': 'number' },
|
||||
{ 'name': 'maxbins', 'type': 'number', 'default': 20 },
|
||||
{ 'name': 'base', 'type': 'number', 'default': 10 },
|
||||
{ 'name': 'divide', 'type': 'number', 'array': true, 'default': [5, 2] },
|
||||
{ 'name': 'extent', 'type': 'number', 'array': true, 'length': 2, 'required': true },
|
||||
{ 'name': 'span', 'type': 'number' },
|
||||
{ 'name': 'step', 'type': 'number' },
|
||||
{ 'name': 'steps', 'type': 'number', 'array': true },
|
||||
{ 'name': 'minstep', 'type': 'number', 'default': 0 },
|
||||
{ 'name': 'nice', 'type': 'boolean', 'default': true },
|
||||
{ 'name': 'name', 'type': 'string' },
|
||||
{ 'name': 'as', 'type': 'string', 'array': true, 'length': 2, 'default': ['bin0', 'bin1'] }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Bin, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var band = _.interval !== false,
|
||||
bins = this._bins(_),
|
||||
start = bins.start,
|
||||
step = bins.step,
|
||||
as = _.as || ['bin0', 'bin1'],
|
||||
b0 = as[0],
|
||||
b1 = as[1],
|
||||
flag;
|
||||
|
||||
if (_.modified()) {
|
||||
pulse = pulse.reflow(true);
|
||||
flag = pulse.SOURCE;
|
||||
} else {
|
||||
flag = pulse.modified(accessorFields(_.field)) ? pulse.ADD_MOD : pulse.ADD;
|
||||
}
|
||||
|
||||
pulse.visit(flag, band
|
||||
? function(t) {
|
||||
var v = bins(t);
|
||||
// minimum bin value (inclusive)
|
||||
t[b0] = v;
|
||||
// maximum bin value (exclusive)
|
||||
// use convoluted math for better floating point agreement
|
||||
// see https://github.com/vega/vega/issues/830
|
||||
// infinite values propagate through this formula! #2227
|
||||
t[b1] = v == null ? null : start + step * (1 + (v - start) / step);
|
||||
}
|
||||
: function(t) { t[b0] = bins(t); }
|
||||
);
|
||||
|
||||
return pulse.modifies(band ? as : b0);
|
||||
};
|
||||
|
||||
prototype._bins = function(_) {
|
||||
if (this.value && !_.modified()) {
|
||||
return this.value;
|
||||
}
|
||||
|
||||
var field = _.field,
|
||||
bins = bin(_),
|
||||
step = bins.step,
|
||||
start = bins.start,
|
||||
stop = start + Math.ceil((bins.stop - start) / step) * step,
|
||||
a, d;
|
||||
|
||||
if ((a = _.anchor) != null) {
|
||||
d = a - (start + step * Math.floor((a - start) / step));
|
||||
start += d;
|
||||
stop += d;
|
||||
}
|
||||
|
||||
var f = function(t) {
|
||||
var v = toNumber(field(t));
|
||||
return v == null ? null
|
||||
: v < start ? -Infinity
|
||||
: v > stop ? +Infinity
|
||||
: (
|
||||
v = Math.max(start, Math.min(v, stop - step)),
|
||||
start + step * Math.floor(EPSILON + (v - start) / step)
|
||||
);
|
||||
};
|
||||
|
||||
f.start = start;
|
||||
f.stop = bins.stop;
|
||||
f.step = step;
|
||||
|
||||
return this.value = accessor(
|
||||
f,
|
||||
accessorFields(field),
|
||||
_.name || 'bin_' + accessorName(field)
|
||||
);
|
||||
};
|
||||
node_modules/vega-transforms/src/Collect.js (44 lines, generated, vendored, Normal file)
@@ -0,0 +1,44 @@
import SortedList from './util/SortedList';
import {Transform, stableCompare, tupleid} from 'vega-dataflow';
import {inherits} from 'vega-util';

/**
 * Collects all data tuples that pass through this operator.
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {function(*,*): number} [params.sort] - An optional
 *   comparator function for additionally sorting the collected tuples.
 */
export default function Collect(params) {
  Transform.call(this, [], params);
}

Collect.Definition = {
  'type': 'Collect',
  'metadata': {'source': true},
  'params': [
    { 'name': 'sort', 'type': 'compare' }
  ]
};

var prototype = inherits(Collect, Transform);

prototype.transform = function(_, pulse) {
  var out = pulse.fork(pulse.ALL),
      list = SortedList(tupleid, this.value, out.materialize(out.ADD).add),
      sort = _.sort,
      mod = pulse.changed() || (sort &&
            (_.modified('sort') || pulse.modified(sort.fields)));

  out.visit(out.REM, list.remove);

  this.modified(mod);
  this.value = out.source = list.data(stableCompare(sort), mod);

  // propagate tree root if defined
  if (pulse.source && pulse.source.root) {
    this.value.root = pulse.source.root;
  }

  return out;
};
node_modules/vega-transforms/src/Compare.js (22 lines, generated, vendored, Normal file)
@@ -0,0 +1,22 @@
import {Operator} from 'vega-dataflow';
import {compare, inherits} from 'vega-util';

/**
 * Generates a comparator function.
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {Array<string|function>} params.fields - The fields to compare.
 * @param {Array<string>} [params.orders] - The sort orders.
 *   Each entry should be one of "ascending" (default) or "descending".
 */
export default function Compare(params) {
  Operator.call(this, null, update, params);
}

inherits(Compare, Operator);

function update(_) {
  return (this.value && !_.modified())
    ? this.value
    : compare(_.fields, _.orders);
}
node_modules/vega-transforms/src/CountPattern.js (116 lines, generated, vendored, Normal file)
@@ -0,0 +1,116 @@
|
||||
import {Transform, ingest} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Count regexp-defined pattern occurrences in a text field.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {function(object): *} params.field - An accessor for the text field.
|
||||
* @param {string} [params.pattern] - RegExp string defining the text pattern.
|
||||
* @param {string} [params.case] - One of 'lower', 'upper' or null (mixed) case.
|
||||
* @param {string} [params.stopwords] - RegExp string of words to ignore.
|
||||
*/
|
||||
export default function CountPattern(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
CountPattern.Definition = {
|
||||
'type': 'CountPattern',
|
||||
'metadata': {'generates': true, 'changes': true},
|
||||
'params': [
|
||||
{ 'name': 'field', 'type': 'field', 'required': true },
|
||||
{ 'name': 'case', 'type': 'enum', 'values': ['upper', 'lower', 'mixed'], 'default': 'mixed' },
|
||||
{ 'name': 'pattern', 'type': 'string', 'default': '[\\w"]+' },
|
||||
{ 'name': 'stopwords', 'type': 'string', 'default': '' },
|
||||
{ 'name': 'as', 'type': 'string', 'array': true, 'length': 2, 'default': ['text', 'count'] }
|
||||
]
|
||||
};
|
||||
|
||||
function tokenize(text, tcase, match) {
|
||||
switch (tcase) {
|
||||
case 'upper': text = text.toUpperCase(); break;
|
||||
case 'lower': text = text.toLowerCase(); break;
|
||||
}
|
||||
return text.match(match);
|
||||
}
|
||||
|
||||
var prototype = inherits(CountPattern, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
function process(update) {
|
||||
return function(tuple) {
|
||||
var tokens = tokenize(get(tuple), _.case, match) || [], t;
|
||||
for (var i=0, n=tokens.length; i<n; ++i) {
|
||||
if (!stop.test(t = tokens[i])) update(t);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
var init = this._parameterCheck(_, pulse),
|
||||
counts = this._counts,
|
||||
match = this._match,
|
||||
stop = this._stop,
|
||||
get = _.field,
|
||||
as = _.as || ['text', 'count'],
|
||||
add = process(function(t) { counts[t] = 1 + (counts[t] || 0); }),
|
||||
rem = process(function(t) { counts[t] -= 1; });
|
||||
|
||||
if (init) {
|
||||
pulse.visit(pulse.SOURCE, add);
|
||||
} else {
|
||||
pulse.visit(pulse.ADD, add);
|
||||
pulse.visit(pulse.REM, rem);
|
||||
}
|
||||
|
||||
return this._finish(pulse, as); // generate output tuples
|
||||
};
|
||||
|
||||
prototype._parameterCheck = function(_, pulse) {
|
||||
var init = false;
|
||||
|
||||
if (_.modified('stopwords') || !this._stop) {
|
||||
this._stop = new RegExp('^' + (_.stopwords || '') + '$', 'i');
|
||||
init = true;
|
||||
}
|
||||
|
||||
if (_.modified('pattern') || !this._match) {
|
||||
this._match = new RegExp((_.pattern || '[\\w\']+'), 'g');
|
||||
init = true;
|
||||
}
|
||||
|
||||
if (_.modified('field') || pulse.modified(_.field.fields)) {
|
||||
init = true;
|
||||
}
|
||||
|
||||
if (init) this._counts = {};
|
||||
return init;
|
||||
};
|
||||
|
||||
prototype._finish = function(pulse, as) {
|
||||
var counts = this._counts,
|
||||
tuples = this._tuples || (this._tuples = {}),
|
||||
text = as[0],
|
||||
count = as[1],
|
||||
out = pulse.fork(pulse.NO_SOURCE | pulse.NO_FIELDS),
|
||||
w, t, c;
|
||||
|
||||
for (w in counts) {
|
||||
t = tuples[w];
|
||||
c = counts[w] || 0;
|
||||
if (!t && c) {
|
||||
tuples[w] = (t = ingest({}));
|
||||
t[text] = w;
|
||||
t[count] = c;
|
||||
out.add.push(t);
|
||||
} else if (c === 0) {
|
||||
if (t) out.rem.push(t);
|
||||
counts[w] = null;
|
||||
tuples[w] = null;
|
||||
} else if (t[count] !== c) {
|
||||
t[count] = c;
|
||||
out.mod.push(t);
|
||||
}
|
||||
}
|
||||
|
||||
return out.modifies(as);
|
||||
};
|
||||
node_modules/vega-transforms/src/Cross.js (69 lines, generated, vendored, Normal file)
@@ -0,0 +1,69 @@
import {Transform, ingest} from 'vega-dataflow';
import {inherits, truthy} from 'vega-util';

/**
 * Perform a cross-product of a tuple stream with itself.
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {function(object):boolean} [params.filter] - An optional filter
 *   function for selectively including tuples in the cross product.
 * @param {Array<string>} [params.as] - The names of the output fields.
 */
export default function Cross(params) {
  Transform.call(this, null, params);
}

Cross.Definition = {
  'type': 'Cross',
  'metadata': {'generates': true},
  'params': [
    { 'name': 'filter', 'type': 'expr' },
    { 'name': 'as', 'type': 'string', 'array': true, 'length': 2, 'default': ['a', 'b'] }
  ]
};

var prototype = inherits(Cross, Transform);

prototype.transform = function(_, pulse) {
  var out = pulse.fork(pulse.NO_SOURCE),
      data = this.value,
      as = _.as || ['a', 'b'],
      a = as[0], b = as[1],
      reset = !data
        || pulse.changed(pulse.ADD_REM)
        || _.modified('as')
        || _.modified('filter');

  if (reset) {
    if (data) out.rem = data;
    data = pulse.materialize(pulse.SOURCE).source;
    out.add = this.value = cross(data, a, b, _.filter || truthy);
  } else {
    out.mod = data;
  }

  out.source = this.value;
  return out.modifies(as);
};

function cross(input, a, b, filter) {
  var data = [],
      t = {},
      n = input.length,
      i = 0,
      j, left;

  for (; i<n; ++i) {
    t[a] = left = input[i];
    for (j=0; j<n; ++j) {
      t[b] = input[j];
      if (filter(t)) {
        data.push(ingest(t));
        t = {};
        t[a] = left;
      }
    }
  }

  return data;
}
node_modules/vega-transforms/src/Density.js (129 lines, generated, vendored, Normal file)
@@ -0,0 +1,129 @@
|
||||
import parseDist from './util/Distributions';
|
||||
import {Transform, ingest} from 'vega-dataflow';
|
||||
import {sampleCurve} from 'vega-statistics';
|
||||
import {error, extent, inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Grid sample points for a probability density. Given a distribution and
|
||||
* a sampling extent, will generate points suitable for plotting either
|
||||
* PDF (probability density function) or CDF (cumulative distribution
|
||||
* function) curves.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {object} params.distribution - The probability distribution. This
|
||||
* is an object parameter dependent on the distribution type.
|
||||
* @param {string} [params.method='pdf'] - The distribution method to sample.
|
||||
* One of 'pdf' or 'cdf'.
|
||||
* @param {Array<number>} [params.extent] - The [min, max] extent over which
|
||||
* to sample the distribution. This argument is required in most cases, but
|
||||
* can be omitted if the distribution (e.g., 'kde') supports a 'data' method
|
||||
* that returns numerical sample points from which the extent can be deduced.
|
||||
* @param {number} [params.minsteps=25] - The minimum number of curve samples
|
||||
* for plotting the density.
|
||||
* @param {number} [params.maxsteps=200] - The maximum number of curve samples
|
||||
* for plotting the density.
|
||||
* @param {number} [params.steps] - The exact number of curve samples for
|
||||
* plotting the density. If specified, overrides both minsteps and maxsteps
|
||||
* to set an exact number of uniform samples. Useful in conjunction with
|
||||
* a fixed extent to ensure consistent sample points for stacked densities.
|
||||
*/
|
||||
export default function Density(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
var distributions = [
|
||||
{
|
||||
'key': {'function': 'normal'},
|
||||
'params': [
|
||||
{ 'name': 'mean', 'type': 'number', 'default': 0 },
|
||||
{ 'name': 'stdev', 'type': 'number', 'default': 1 }
|
||||
]
|
||||
},
|
||||
{
|
||||
'key': {'function': 'lognormal'},
|
||||
'params': [
|
||||
{ 'name': 'mean', 'type': 'number', 'default': 0 },
|
||||
{ 'name': 'stdev', 'type': 'number', 'default': 1 }
|
||||
]
|
||||
},
|
||||
{
|
||||
'key': {'function': 'uniform'},
|
||||
'params': [
|
||||
{ 'name': 'min', 'type': 'number', 'default': 0 },
|
||||
{ 'name': 'max', 'type': 'number', 'default': 1 }
|
||||
]
|
||||
},
|
||||
{
|
||||
'key': {'function': 'kde'},
|
||||
'params': [
|
||||
{ 'name': 'field', 'type': 'field', 'required': true },
|
||||
{ 'name': 'from', 'type': 'data' },
|
||||
{ 'name': 'bandwidth', 'type': 'number', 'default': 0 }
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
var mixture = {
|
||||
'key': {'function': 'mixture'},
|
||||
'params': [
|
||||
{ 'name': 'distributions', 'type': 'param', 'array': true,
|
||||
'params': distributions },
|
||||
{ 'name': 'weights', 'type': 'number', 'array': true }
|
||||
]
|
||||
};
|
||||
|
||||
Density.Definition = {
|
||||
'type': 'Density',
|
||||
'metadata': {'generates': true},
|
||||
'params': [
|
||||
{ 'name': 'extent', 'type': 'number', 'array': true, 'length': 2 },
|
||||
{ 'name': 'steps', 'type': 'number' },
|
||||
{ 'name': 'minsteps', 'type': 'number', 'default': 25 },
|
||||
{ 'name': 'maxsteps', 'type': 'number', 'default': 200 },
|
||||
{ 'name': 'method', 'type': 'string', 'default': 'pdf',
|
||||
'values': ['pdf', 'cdf'] },
|
||||
{ 'name': 'distribution', 'type': 'param',
|
||||
'params': distributions.concat(mixture) },
|
||||
{ 'name': 'as', 'type': 'string', 'array': true,
|
||||
'default': ['value', 'density'] }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Density, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var out = pulse.fork(pulse.NO_SOURCE | pulse.NO_FIELDS);
|
||||
|
||||
if (!this.value || pulse.changed() || _.modified()) {
|
||||
var dist = parseDist(_.distribution, source(pulse)),
|
||||
minsteps = _.steps || _.minsteps || 25,
|
||||
maxsteps = _.steps || _.maxsteps || 200,
|
||||
method = _.method || 'pdf';
|
||||
|
||||
if (method !== 'pdf' && method !== 'cdf') {
|
||||
error('Invalid density method: ' + method);
|
||||
}
|
||||
if (!_.extent && !dist.data) {
|
||||
error('Missing density extent parameter.');
|
||||
}
|
||||
method = dist[method];
|
||||
|
||||
var as = _.as || ['value', 'density'],
|
||||
domain = _.extent || extent(dist.data()),
|
||||
values = sampleCurve(method, domain, minsteps, maxsteps).map(v => {
|
||||
var tuple = {};
|
||||
tuple[as[0]] = v[0];
|
||||
tuple[as[1]] = v[1];
|
||||
return ingest(tuple);
|
||||
});
|
||||
|
||||
if (this.value) out.rem = this.value;
|
||||
this.value = out.add = out.source = values;
|
||||
}
|
||||
|
||||
return out;
|
||||
};
|
||||
|
||||
function source(pulse) {
|
||||
return function() { return pulse.materialize(pulse.SOURCE).source; };
|
||||
}
|
||||
node_modules/vega-transforms/src/DotBin.js (75 lines, generated, vendored, Normal file)
@@ -0,0 +1,75 @@
import {partition} from './util/util';
import {Transform, stableCompare} from 'vega-dataflow';
import {dotbin} from 'vega-statistics';
import {extent, identity, inherits, span} from 'vega-util';

const Output = 'bin';

/**
 * Dot density binning for dot plot construction.
 * Based on Leland Wilkinson, Dot Plots, The American Statistician, 1999.
 * https://www.cs.uic.edu/~wilkinson/Publications/dotplots.pdf
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {function(object): *} params.field - The value field to bin.
 * @param {Array<function(object): *>} [params.groupby] - An array of accessors to groupby.
 * @param {number} [params.step] - The step size (bin width) within which dots should be
 *   stacked. Defaults to 1/30 of the extent of the data *field*.
 * @param {boolean} [params.smooth=false] - A boolean flag indicating if dot density
 *   stacks should be smoothed to reduce variance.
 */
export default function DotBin(params) {
  Transform.call(this, null, params);
}

DotBin.Definition = {
  'type': 'DotBin',
  'metadata': {'modifies': true},
  'params': [
    { 'name': 'field', 'type': 'field', 'required': true },
    { 'name': 'groupby', 'type': 'field', 'array': true },
    { 'name': 'step', 'type': 'number' },
    { 'name': 'smooth', 'type': 'boolean', 'default': false },
    { 'name': 'as', 'type': 'string', 'default': Output }
  ]
};

const prototype = inherits(DotBin, Transform);

prototype.transform = function(_, pulse) {
  if (this.value && !(_.modified() || pulse.changed())) {
    return pulse; // early exit
  }

  const source = pulse.materialize(pulse.SOURCE).source,
        groups = partition(pulse.source, _.groupby, identity),
        smooth = _.smooth || false,
        field = _.field,
        step = _.step || autostep(source, field),
        sort = stableCompare((a, b) => field(a) - field(b)),
        as = _.as || Output,
        n = groups.length;

  // compute dotplot bins per group
  let min = Infinity, max = -Infinity, i = 0, j;
  for (; i<n; ++i) {
    const g = groups[i].sort(sort);
    j = -1;
    for (const v of dotbin(g, step, smooth, field)) {
      if (v < min) min = v;
      if (v > max) max = v;
      g[++j][as] = v;
    }
  }

  this.value = {
    start: min,
    stop: max,
    step: step
  };
  return pulse.reflow(true).modifies(as);
};

function autostep(data, field) {
  return span(extent(data, field)) / 30;
}
node_modules/vega-transforms/src/Expression.js (29 lines, generated, vendored, Normal file)
@@ -0,0 +1,29 @@
import {Operator} from 'vega-dataflow';
import {accessor, accessorFields, accessorName, inherits} from 'vega-util';

/**
 * Wraps an expression function with access to external parameters.
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {function} params.expr - The expression function. The
 *   function should accept both a datum and a parameter object.
 *   This operator's value will be a new function that wraps the
 *   expression function with access to this operator's parameters.
 */
export default function Expression(params) {
  Operator.call(this, null, update, params);
  this.modified(true);
}

inherits(Expression, Operator);

function update(_) {
  var expr = _.expr;
  return this.value && !_.modified('expr')
    ? this.value
    : accessor(
        datum => expr(datum, _),
        accessorFields(expr),
        accessorName(expr)
      );
}
node_modules/vega-transforms/src/Extent.js (56 lines, generated, vendored, Normal file)
@@ -0,0 +1,56 @@
import {Transform} from 'vega-dataflow';
import {accessorName, inherits, toNumber} from 'vega-util';

/**
 * Computes extents (min/max) for a data field.
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {function(object): *} params.field - The field over which to compute extends.
 */
export default function Extent(params) {
  Transform.call(this, [undefined, undefined], params);
}

Extent.Definition = {
  'type': 'Extent',
  'metadata': {},
  'params': [
    { 'name': 'field', 'type': 'field', 'required': true }
  ]
};

var prototype = inherits(Extent, Transform);

prototype.transform = function(_, pulse) {
  var extent = this.value,
      field = _.field,
      min = extent[0],
      max = extent[1],
      mod;

  mod = pulse.changed()
     || pulse.modified(field.fields)
     || _.modified('field');

  if (mod || min == null) {
    min = +Infinity;
    max = -Infinity;
  }

  pulse.visit(mod ? pulse.SOURCE : pulse.ADD, function(t) {
    var v = toNumber(field(t));
    if (v != null) {
      // NaNs will fail all comparisons!
      if (v < min) min = v;
      if (v > max) max = v;
    }
  });

  if (!Number.isFinite(min) || !Number.isFinite(max)) {
    let name = accessorName(field);
    if (name) name = ` for field "${name}"`;
    pulse.dataflow.warn(`Infinite extent${name}: [${min}, ${max}]`);
    min = max = undefined;
  }
  this.value = [min, max];
};
node_modules/vega-transforms/src/Facet.js (140 lines, generated, vendored, Normal file)
@@ -0,0 +1,140 @@
|
||||
import Subflow from './Subflow';
|
||||
import {Transform, tupleid} from 'vega-dataflow';
|
||||
import {fastmap, hasOwnProperty, inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Facets a dataflow into a set of subflows based on a key.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {function(Dataflow, string): Operator} params.subflow - A function
|
||||
* that generates a subflow of operators and returns its root operator.
|
||||
* @param {function(object): *} params.key - The key field to facet by.
|
||||
*/
|
||||
export default function Facet(params) {
|
||||
Transform.call(this, {}, params);
|
||||
this._keys = fastmap(); // cache previously calculated key values
|
||||
|
||||
// keep track of active subflows, use as targets array for listeners
|
||||
// this allows us to limit propagation to only updated subflows
|
||||
const a = this._targets = [];
|
||||
a.active = 0;
|
||||
a.forEach = f => {
|
||||
for (let i=0, n=a.active; i<n; ++i) {
|
||||
f(a[i], i, a);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const prototype = inherits(Facet, Transform);
|
||||
|
||||
prototype.activate = function(flow) {
|
||||
this._targets[this._targets.active++] = flow;
|
||||
};
|
||||
|
||||
// parent argument provided by PreFacet subclass
|
||||
prototype.subflow = function(key, flow, pulse, parent) {
|
||||
let flows = this.value,
|
||||
sf = hasOwnProperty(flows, key) && flows[key],
|
||||
df, p;
|
||||
|
||||
if (!sf) {
|
||||
p = parent || (p = this._group[key]) && p.tuple;
|
||||
df = pulse.dataflow;
|
||||
sf = new Subflow(pulse.fork(pulse.NO_SOURCE), this);
|
||||
df.add(sf).connect(flow(df, key, p));
|
||||
flows[key] = sf;
|
||||
this.activate(sf);
|
||||
} else if (sf.value.stamp < pulse.stamp) {
|
||||
sf.init(pulse);
|
||||
this.activate(sf);
|
||||
}
|
||||
|
||||
return sf;
|
||||
};
|
||||
|
||||
prototype.clean = function() {
|
||||
const flows = this.value;
|
||||
for (const key in flows) {
|
||||
if (flows[key].count === 0) {
|
||||
const detach = flows[key].detachSubflow;
|
||||
if (detach) detach();
|
||||
delete flows[key];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
prototype.initTargets = function() {
|
||||
const a = this._targets,
|
||||
n = a.length;
|
||||
for (let i=0; i<n && a[i] != null; ++i) {
|
||||
a[i] = null; // ensure old flows can be garbage collected
|
||||
}
|
||||
a.active = 0;
|
||||
};
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
const df = pulse.dataflow,
|
||||
key = _.key,
|
||||
flow = _.subflow,
|
||||
cache = this._keys,
|
||||
rekey = _.modified('key'),
|
||||
subflow = key => this.subflow(key, flow, pulse);
|
||||
|
||||
this._group = _.group || {};
|
||||
this.initTargets(); // reset list of active subflows
|
||||
|
||||
pulse.visit(pulse.REM, t => {
|
||||
const id = tupleid(t),
|
||||
k = cache.get(id);
|
||||
if (k !== undefined) {
|
||||
cache.delete(id);
|
||||
subflow(k).rem(t);
|
||||
}
|
||||
});
|
||||
|
||||
pulse.visit(pulse.ADD, t => {
|
||||
const k = key(t);
|
||||
cache.set(tupleid(t), k);
|
||||
subflow(k).add(t);
|
||||
});
|
||||
|
||||
if (rekey || pulse.modified(key.fields)) {
|
||||
pulse.visit(pulse.MOD, t => {
|
||||
const id = tupleid(t),
|
||||
k0 = cache.get(id),
|
||||
k1 = key(t);
|
||||
if (k0 === k1) {
|
||||
subflow(k1).mod(t);
|
||||
} else {
|
||||
cache.set(id, k1);
|
||||
subflow(k0).rem(t);
|
||||
subflow(k1).add(t);
|
||||
}
|
||||
});
|
||||
} else if (pulse.changed(pulse.MOD)) {
|
||||
pulse.visit(pulse.MOD, t => {
|
||||
subflow(cache.get(tupleid(t))).mod(t);
|
||||
});
|
||||
}
|
||||
|
||||
if (rekey) {
|
||||
pulse.visit(pulse.REFLOW, t => {
|
||||
const id = tupleid(t),
|
||||
k0 = cache.get(id),
|
||||
k1 = key(t);
|
||||
if (k0 !== k1) {
|
||||
cache.set(id, k1);
|
||||
subflow(k0).rem(t);
|
||||
subflow(k1).add(t);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (pulse.clean()) {
|
||||
df.runAfter(() => { this.clean(); cache.clean(); });
|
||||
} else if (cache.empty > df.cleanThreshold) {
|
||||
df.runAfter(cache.clean);
|
||||
}
|
||||
|
||||
return pulse;
|
||||
};
|
||||
node_modules/vega-transforms/src/Field.js (23 lines, generated, vendored, Normal file)
@@ -0,0 +1,23 @@
import {Operator} from 'vega-dataflow';
import {array, field, inherits, isArray} from 'vega-util';

/**
 * Generates one or more field accessor functions.
 * If the 'name' parameter is an array, an array of field accessors
 * will be created and the 'as' parameter will be ignored.
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {string} params.name - The field name(s) to access.
 * @param {string} params.as - The accessor function name.
 */
export default function Field(params) {
  Operator.call(this, null, update, params);
}

inherits(Field, Operator);

function update(_) {
  return (this.value && !_.modified()) ? this.value
    : isArray(_.name) ? array(_.name).map(function(f) { return field(f); })
    : field(_.name, _.as);
}
node_modules/vega-transforms/src/Filter.js (70 lines, generated, vendored, Normal file)
@@ -0,0 +1,70 @@
import {Transform, tupleid} from 'vega-dataflow';
import {fastmap, inherits} from 'vega-util';

/**
 * Filters data tuples according to a predicate function.
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {function(object): *} params.expr - The predicate expression function
 *   that determines a tuple's filter status. Truthy values pass the filter.
 */
export default function Filter(params) {
  Transform.call(this, fastmap(), params);
}

Filter.Definition = {
  'type': 'Filter',
  'metadata': {'changes': true},
  'params': [
    { 'name': 'expr', 'type': 'expr', 'required': true }
  ]
};

var prototype = inherits(Filter, Transform);

prototype.transform = function(_, pulse) {
  var df = pulse.dataflow,
      cache = this.value, // cache ids of filtered tuples
      output = pulse.fork(),
      add = output.add,
      rem = output.rem,
      mod = output.mod,
      test = _.expr,
      isMod = true;

  pulse.visit(pulse.REM, function(t) {
    var id = tupleid(t);
    if (!cache.has(id)) rem.push(t);
    else cache.delete(id);
  });

  pulse.visit(pulse.ADD, function(t) {
    if (test(t, _)) add.push(t);
    else cache.set(tupleid(t), 1);
  });

  function revisit(t) {
    var id = tupleid(t),
        b = test(t, _),
        s = cache.get(id);
    if (b && s) {
      cache.delete(id);
      add.push(t);
    } else if (!b && !s) {
      cache.set(id, 1);
      rem.push(t);
    } else if (isMod && b && !s) {
      mod.push(t);
    }
  }

  pulse.visit(pulse.MOD, revisit);

  if (_.modified()) {
    isMod = false;
    pulse.visit(pulse.REFLOW, revisit);
  }

  if (cache.empty > df.cleanThreshold) df.runAfter(cache.clean);
  return output;
};
node_modules/vega-transforms/src/Flatten.js (66 lines, generated, vendored, Normal file)
@@ -0,0 +1,66 @@
|
||||
import {fieldNames} from './util/util';
|
||||
import {Transform, derive} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Flattens array-typed field values into new data objects.
|
||||
* If multiple fields are specified, they are treated as parallel arrays,
|
||||
* with output values included for each matching index (or null if missing).
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {Array<function(object): *>} params.fields - An array of field
|
||||
* accessors for the tuple fields that should be flattened.
|
||||
* @param {string} [params.index] - Optional output field name for index
|
||||
* value. If unspecified, no index field is included in the output.
|
||||
* @param {Array<string>} [params.as] - Output field names for flattened
|
||||
* array fields. Any unspecified fields will use the field name provided
|
||||
* by the fields accessors.
|
||||
*/
|
||||
export default function Flatten(params) {
|
||||
Transform.call(this, [], params);
|
||||
}
|
||||
|
||||
Flatten.Definition = {
|
||||
'type': 'Flatten',
|
||||
'metadata': {'generates': true},
|
||||
'params': [
|
||||
{ 'name': 'fields', 'type': 'field', 'array': true, 'required': true },
|
||||
{ 'name': 'index', 'type': 'string' },
|
||||
{ 'name': 'as', 'type': 'string', 'array': true }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Flatten, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var out = pulse.fork(pulse.NO_SOURCE),
|
||||
fields = _.fields,
|
||||
as = fieldNames(fields, _.as || []),
|
||||
index = _.index || null,
|
||||
m = as.length;
|
||||
|
||||
// remove any previous results
|
||||
out.rem = this.value;
|
||||
|
||||
// generate flattened tuples
|
||||
pulse.visit(pulse.SOURCE, function(t) {
|
||||
var arrays = fields.map(f => f(t)),
|
||||
maxlen = arrays.reduce((l, a) => Math.max(l, a.length), 0),
|
||||
i = 0, j, d, v;
|
||||
|
||||
for (; i<maxlen; ++i) {
|
||||
d = derive(t);
|
||||
for (j=0; j<m; ++j) {
|
||||
d[as[j]] = (v = arrays[j][i]) == null ? null : v;
|
||||
}
|
||||
if (index) {
|
||||
d[index] = i;
|
||||
}
|
||||
out.add.push(d);
|
||||
}
|
||||
});
|
||||
|
||||
this.value = out.source = out.add;
|
||||
if (index) out.modifies(index);
|
||||
return out.modifies(as);
|
||||
};
|
||||
51
node_modules/vega-transforms/src/Fold.js
generated
vendored
Normal file
51
node_modules/vega-transforms/src/Fold.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
import {Transform, derive} from 'vega-dataflow';
|
||||
import {accessorName, inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Folds one more tuple fields into multiple tuples in which the field
|
||||
* name and values are available under new 'key' and 'value' fields.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {function(object): *} params.fields - An array of field accessors
|
||||
* for the tuple fields that should be folded.
|
||||
* @param {Array<string>} [params.as] - Output field names for folded key
|
||||
* and value fields, defaults to ['key', 'value'].
|
||||
*/
|
||||
export default function Fold(params) {
|
||||
Transform.call(this, [], params);
|
||||
}
|
||||
|
||||
Fold.Definition = {
|
||||
'type': 'Fold',
|
||||
'metadata': {'generates': true},
|
||||
'params': [
|
||||
{ 'name': 'fields', 'type': 'field', 'array': true, 'required': true },
|
||||
{ 'name': 'as', 'type': 'string', 'array': true, 'length': 2, 'default': ['key', 'value'] }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Fold, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var out = pulse.fork(pulse.NO_SOURCE),
|
||||
fields = _.fields,
|
||||
fnames = fields.map(accessorName),
|
||||
as = _.as || ['key', 'value'],
|
||||
k = as[0],
|
||||
v = as[1],
|
||||
n = fields.length;
|
||||
|
||||
out.rem = this.value;
|
||||
|
||||
pulse.visit(pulse.SOURCE, function(t) {
|
||||
for (var i=0, d; i<n; ++i) {
|
||||
d = derive(t);
|
||||
d[k] = fnames[i];
|
||||
d[v] = fields[i](t);
|
||||
out.add.push(d);
|
||||
}
|
||||
});
|
||||
|
||||
this.value = out.source = out.add;
|
||||
return out.modifies(as);
|
||||
};
|
||||
48
node_modules/vega-transforms/src/Formula.js
generated
vendored
Normal file
48
node_modules/vega-transforms/src/Formula.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
import {Transform} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Invokes a function for each data tuple and saves the results as a new field.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {function(object): *} params.expr - The formula function to invoke for each tuple.
|
||||
* @param {string} params.as - The field name under which to save the result.
|
||||
* @param {boolean} [params.initonly=false] - If true, the formula is applied to
|
||||
* added tuples only, and does not update in response to modifications.
|
||||
*/
|
||||
export default function Formula(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
Formula.Definition = {
|
||||
'type': 'Formula',
|
||||
'metadata': {'modifies': true},
|
||||
'params': [
|
||||
{ 'name': 'expr', 'type': 'expr', 'required': true },
|
||||
{ 'name': 'as', 'type': 'string', 'required': true },
|
||||
{ 'name': 'initonly', 'type': 'boolean' }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Formula, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var func = _.expr,
|
||||
as = _.as,
|
||||
mod = _.modified(),
|
||||
flag = _.initonly ? pulse.ADD
|
||||
: mod ? pulse.SOURCE
|
||||
: pulse.modified(func.fields) || pulse.modified(as) ? pulse.ADD_MOD
|
||||
: pulse.ADD;
|
||||
|
||||
if (mod) {
|
||||
// parameters updated, need to reflow
|
||||
pulse = pulse.materialize().reflow(true);
|
||||
}
|
||||
|
||||
if (!_.initonly) {
|
||||
pulse.modifies(as);
|
||||
}
|
||||
|
||||
return pulse.visit(flag, t => t[as] = func(t, _));
|
||||
};
|
||||
47
node_modules/vega-transforms/src/Generate.js
generated
vendored
Normal file
47
node_modules/vega-transforms/src/Generate.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
import {Transform, ingest} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Generates data tuples using a provided generator function.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {function(Parameters): object} params.generator - A tuple generator
|
||||
* function. This function is given the operator parameters as input.
|
||||
* Changes to any additional parameters will not trigger re-calculation
|
||||
* of previously generated tuples. Only future tuples are affected.
|
||||
* @param {number} params.size - The number of tuples to produce.
|
||||
*/
|
||||
export default function Generate(params) {
|
||||
Transform.call(this, [], params);
|
||||
}
|
||||
|
||||
var prototype = inherits(Generate, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var data = this.value,
|
||||
out = pulse.fork(pulse.ALL),
|
||||
num = _.size - data.length,
|
||||
gen = _.generator,
|
||||
add, rem, t;
|
||||
|
||||
if (num > 0) {
|
||||
// need more tuples, generate and add
|
||||
for (add=[]; --num >= 0;) {
|
||||
add.push(t = ingest(gen(_)));
|
||||
data.push(t);
|
||||
}
|
||||
out.add = out.add.length
|
||||
? out.materialize(out.ADD).add.concat(add)
|
||||
: add;
|
||||
} else {
|
||||
// need fewer tuples, remove
|
||||
rem = data.slice(0, -num);
|
||||
out.rem = out.rem.length
|
||||
? out.materialize(out.REM).rem.concat(rem)
|
||||
: rem;
|
||||
data = data.slice(-num);
|
||||
}
|
||||
|
||||
out.source = this.value = data;
|
||||
return out;
|
||||
};
|
||||
138
node_modules/vega-transforms/src/Impute.js
generated
vendored
Normal file
138
node_modules/vega-transforms/src/Impute.js
generated
vendored
Normal file
@@ -0,0 +1,138 @@
|
||||
import {Transform, ingest} from 'vega-dataflow';
|
||||
import {accessorName, error, inherits} from 'vega-util';
|
||||
import {max, mean, median, min} from 'd3-array';
|
||||
|
||||
var Methods = {
|
||||
value: 'value',
|
||||
median: median,
|
||||
mean: mean,
|
||||
min: min,
|
||||
max: max
|
||||
};
|
||||
|
||||
var Empty = [];
|
||||
|
||||
/**
|
||||
* Impute missing values.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {function(object): *} params.field - The value field to impute.
|
||||
* @param {Array<function(object): *>} [params.groupby] - An array of
|
||||
* accessors to determine series within which to perform imputation.
|
||||
* @param {function(object): *} params.key - An accessor for a key value.
|
||||
* Each key value should be unique within a group. New tuples will be
|
||||
* imputed for any key values that are not found within a group.
|
||||
* @param {Array<*>} [params.keyvals] - Optional array of required key
|
||||
* values. New tuples will be imputed for any key values that are not
|
||||
* found within a group. In addition, these values will be automatically
|
||||
* augmented with the key values observed in the input data.
|
||||
* @param {string} [method='value'] - The imputation method to use. One of
|
||||
* 'value', 'mean', 'median', 'max', 'min'.
|
||||
* @param {*} [value=0] - The constant value to use for imputation
|
||||
* when using method 'value'.
|
||||
*/
|
||||
export default function Impute(params) {
|
||||
Transform.call(this, [], params);
|
||||
}
|
||||
|
||||
Impute.Definition = {
|
||||
'type': 'Impute',
|
||||
'metadata': {'changes': true},
|
||||
'params': [
|
||||
{ 'name': 'field', 'type': 'field', 'required': true },
|
||||
{ 'name': 'key', 'type': 'field', 'required': true },
|
||||
{ 'name': 'keyvals', 'array': true },
|
||||
{ 'name': 'groupby', 'type': 'field', 'array': true },
|
||||
{ 'name': 'method', 'type': 'enum', 'default': 'value',
|
||||
'values': ['value', 'mean', 'median', 'max', 'min'] },
|
||||
{ 'name': 'value', 'default': 0 }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Impute, Transform);
|
||||
|
||||
function getValue(_) {
|
||||
var m = _.method || Methods.value, v;
|
||||
|
||||
if (Methods[m] == null) {
|
||||
error('Unrecognized imputation method: ' + m);
|
||||
} else if (m === Methods.value) {
|
||||
v = _.value !== undefined ? _.value : 0;
|
||||
return function() { return v; };
|
||||
} else {
|
||||
return Methods[m];
|
||||
}
|
||||
}
|
||||
|
||||
function getField(_) {
|
||||
var f = _.field;
|
||||
return function(t) { return t ? f(t) : NaN; };
|
||||
}
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var out = pulse.fork(pulse.ALL),
|
||||
impute = getValue(_),
|
||||
field = getField(_),
|
||||
fName = accessorName(_.field),
|
||||
kName = accessorName(_.key),
|
||||
gNames = (_.groupby || []).map(accessorName),
|
||||
groups = partition(pulse.source, _.groupby, _.key, _.keyvals),
|
||||
curr = [],
|
||||
prev = this.value,
|
||||
m = groups.domain.length,
|
||||
group, value, gVals, kVal, g, i, j, l, n, t;
|
||||
|
||||
for (g=0, l=groups.length; g<l; ++g) {
|
||||
group = groups[g];
|
||||
gVals = group.values;
|
||||
value = NaN;
|
||||
|
||||
// add tuples for missing values
|
||||
for (j=0; j<m; ++j) {
|
||||
if (group[j] != null) continue;
|
||||
kVal = groups.domain[j];
|
||||
|
||||
t = {_impute: true};
|
||||
for (i=0, n=gVals.length; i<n; ++i) t[gNames[i]] = gVals[i];
|
||||
t[kName] = kVal;
|
||||
t[fName] = Number.isNaN(value) ? (value = impute(group, field)) : value;
|
||||
|
||||
curr.push(ingest(t));
|
||||
}
|
||||
}
|
||||
|
||||
// update pulse with imputed tuples
|
||||
if (curr.length) out.add = out.materialize(out.ADD).add.concat(curr);
|
||||
if (prev.length) out.rem = out.materialize(out.REM).rem.concat(prev);
|
||||
this.value = curr;
|
||||
|
||||
return out;
|
||||
};
|
||||
|
||||
function partition(data, groupby, key, keyvals) {
|
||||
var get = function(f) { return f(t); },
|
||||
groups = [],
|
||||
domain = keyvals ? keyvals.slice() : [],
|
||||
kMap = {},
|
||||
gMap = {}, gVals, gKey,
|
||||
group, i, j, k, n, t;
|
||||
|
||||
domain.forEach(function(k, i) { kMap[k] = i + 1; });
|
||||
|
||||
for (i=0, n=data.length; i<n; ++i) {
|
||||
t = data[i];
|
||||
k = key(t);
|
||||
j = kMap[k] || (kMap[k] = domain.push(k));
|
||||
|
||||
gKey = (gVals = groupby ? groupby.map(get) : Empty) + '';
|
||||
if (!(group = gMap[gKey])) {
|
||||
group = (gMap[gKey] = []);
|
||||
groups.push(group);
|
||||
group.values = gVals;
|
||||
}
|
||||
group[j-1] = t;
|
||||
}
|
||||
|
||||
groups.domain = domain;
|
||||
return groups;
|
||||
}
|
||||
70
node_modules/vega-transforms/src/JoinAggregate.js
generated
vendored
Normal file
70
node_modules/vega-transforms/src/JoinAggregate.js
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
import Aggregate from './Aggregate';
|
||||
import {ValidAggregateOps} from './util/AggregateOps';
|
||||
import {extend, inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Extend input tuples with aggregate values.
|
||||
* Calcuates aggregate values and joins them with the input stream.
|
||||
* @constructor
|
||||
*/
|
||||
export default function JoinAggregate(params) {
|
||||
Aggregate.call(this, params);
|
||||
}
|
||||
|
||||
JoinAggregate.Definition = {
|
||||
'type': 'JoinAggregate',
|
||||
'metadata': {'modifies': true},
|
||||
'params': [
|
||||
{ 'name': 'groupby', 'type': 'field', 'array': true },
|
||||
{ 'name': 'fields', 'type': 'field', 'null': true, 'array': true },
|
||||
{ 'name': 'ops', 'type': 'enum', 'array': true, 'values': ValidAggregateOps },
|
||||
{ 'name': 'as', 'type': 'string', 'null': true, 'array': true },
|
||||
{ 'name': 'key', 'type': 'field' }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(JoinAggregate, Aggregate);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var aggr = this,
|
||||
mod = _.modified(),
|
||||
cells;
|
||||
|
||||
// process all input tuples to calculate aggregates
|
||||
if (aggr.value && (mod || pulse.modified(aggr._inputs, true))) {
|
||||
cells = aggr.value = mod ? aggr.init(_) : {};
|
||||
pulse.visit(pulse.SOURCE, function(t) { aggr.add(t); });
|
||||
} else {
|
||||
cells = aggr.value = aggr.value || this.init(_);
|
||||
pulse.visit(pulse.REM, function(t) { aggr.rem(t); });
|
||||
pulse.visit(pulse.ADD, function(t) { aggr.add(t); });
|
||||
}
|
||||
|
||||
// update aggregation cells
|
||||
aggr.changes();
|
||||
|
||||
// write aggregate values to input tuples
|
||||
pulse.visit(pulse.SOURCE, function(t) {
|
||||
extend(t, cells[aggr.cellkey(t)].tuple);
|
||||
});
|
||||
|
||||
return pulse.reflow(mod).modifies(this._outputs);
|
||||
};
|
||||
|
||||
prototype.changes = function() {
|
||||
var adds = this._adds,
|
||||
mods = this._mods,
|
||||
i, n;
|
||||
|
||||
for (i=0, n=this._alen; i<n; ++i) {
|
||||
this.celltuple(adds[i]);
|
||||
adds[i] = null; // for garbage collection
|
||||
}
|
||||
|
||||
for (i=0, n=this._mlen; i<n; ++i) {
|
||||
this.celltuple(mods[i]);
|
||||
mods[i] = null; // for garbage collection
|
||||
}
|
||||
|
||||
this._alen = this._mlen = 0; // reset list of active cells
|
||||
};
|
||||
108
node_modules/vega-transforms/src/KDE.js
generated
vendored
Normal file
108
node_modules/vega-transforms/src/KDE.js
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
import {partition} from './util/util';
|
||||
import {randomKDE} from 'vega-statistics';
|
||||
import {Transform, ingest} from 'vega-dataflow';
|
||||
import {sampleCurve} from 'vega-statistics';
|
||||
import {accessorName, error, extent, inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Compute kernel density estimates (KDE) for one or more data groups.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {Array<function(object): *>} [params.groupby] - An array of accessors
|
||||
* to groupby.
|
||||
* @param {function(object): *} params.field - An accessor for the data field
|
||||
* to estimate.
|
||||
* @param {number} [params.bandwidth=0] - The KDE kernel bandwidth.
|
||||
* If zero or unspecified, the bandwidth is automatically determined.
|
||||
* @param {boolean} [params.counts=false] - A boolean flag indicating if the
|
||||
* output values should be probability estimates (false, default) or
|
||||
* smoothed counts (true).
|
||||
* @param {string} [params.cumulative=false] - A boolean flag indicating if a
|
||||
* density (false) or cumulative distribution (true) should be generated.
|
||||
* @param {Array<number>} [params.extent] - The domain extent over which to
|
||||
* plot the density. If unspecified, the [min, max] data extent is used.
|
||||
* @param {string} [params.resolve='independent'] - Indicates how parameters for
|
||||
* multiple densities should be resolved. If "independent" (the default), each
|
||||
* density may have its own domain extent and dynamic number of curve sample
|
||||
* steps. If "shared", the KDE transform will ensure that all densities are
|
||||
* defined over a shared domain and curve steps, enabling stacking.
|
||||
* @param {number} [params.minsteps=25] - The minimum number of curve samples
|
||||
* for plotting the density.
|
||||
* @param {number} [params.maxsteps=200] - The maximum number of curve samples
|
||||
* for plotting the density.
|
||||
* @param {number} [params.steps] - The exact number of curve samples for
|
||||
* plotting the density. If specified, overrides both minsteps and maxsteps
|
||||
* to set an exact number of uniform samples. Useful in conjunction with
|
||||
* a fixed extent to ensure consistent sample points for stacked densities.
|
||||
*/
|
||||
export default function KDE(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
KDE.Definition = {
|
||||
'type': 'KDE',
|
||||
'metadata': {'generates': true},
|
||||
'params': [
|
||||
{ 'name': 'groupby', 'type': 'field', 'array': true },
|
||||
{ 'name': 'field', 'type': 'field', 'required': true },
|
||||
{ 'name': 'cumulative', 'type': 'boolean', 'default': false },
|
||||
{ 'name': 'counts', 'type': 'boolean', 'default': false },
|
||||
{ 'name': 'bandwidth', 'type': 'number', 'default': 0 },
|
||||
{ 'name': 'extent', 'type': 'number', 'array': true, 'length': 2 },
|
||||
{ 'name': 'resolve', 'type': 'enum', 'values': ['shared', 'independent'], 'default': 'independent' },
|
||||
{ 'name': 'steps', 'type': 'number' },
|
||||
{ 'name': 'minsteps', 'type': 'number', 'default': 25 },
|
||||
{ 'name': 'maxsteps', 'type': 'number', 'default': 200 },
|
||||
{ 'name': 'as', 'type': 'string', 'array': true, 'default': ['value', 'density'] }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(KDE, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var out = pulse.fork(pulse.NO_SOURCE | pulse.NO_FIELDS);
|
||||
|
||||
if (!this.value || pulse.changed() || _.modified()) {
|
||||
const source = pulse.materialize(pulse.SOURCE).source,
|
||||
groups = partition(source, _.groupby, _.field),
|
||||
names = (_.groupby || []).map(accessorName),
|
||||
bandwidth = _.bandwidth,
|
||||
method = _.cumulative ? 'cdf' : 'pdf',
|
||||
as = _.as || ['value', 'density'],
|
||||
values = [];
|
||||
|
||||
let domain = _.extent,
|
||||
minsteps = _.steps || _.minsteps || 25,
|
||||
maxsteps = _.steps || _.maxsteps || 200;
|
||||
|
||||
if (method !== 'pdf' && method !== 'cdf') {
|
||||
error('Invalid density method: ' + method);
|
||||
}
|
||||
|
||||
if (_.resolve === 'shared') {
|
||||
if (!domain) domain = extent(source, _.field);
|
||||
minsteps = maxsteps = _.steps || maxsteps;
|
||||
}
|
||||
|
||||
groups.forEach(g => {
|
||||
const density = randomKDE(g, bandwidth)[method],
|
||||
scale = _.counts ? g.length : 1,
|
||||
local = domain || extent(g);
|
||||
|
||||
sampleCurve(density, local, minsteps, maxsteps).forEach(v => {
|
||||
const t = {};
|
||||
for (let i=0; i<names.length; ++i) {
|
||||
t[names[i]] = g.dims[i];
|
||||
}
|
||||
t[as[0]] = v[0];
|
||||
t[as[1]] = v[1] * scale;
|
||||
values.push(ingest(t));
|
||||
});
|
||||
});
|
||||
|
||||
if (this.value) out.rem = this.value;
|
||||
this.value = out.add = out.source = values;
|
||||
}
|
||||
|
||||
return out;
|
||||
};
|
||||
21
node_modules/vega-transforms/src/Key.js
generated
vendored
Normal file
21
node_modules/vega-transforms/src/Key.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
import {Operator} from 'vega-dataflow';
|
||||
import {inherits, key} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Generates a key function.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {Array<string>} params.fields - The field name(s) for the key function.
|
||||
* @param {boolean} params.flat - A boolean flag indicating if the field names
|
||||
* should be treated as flat property names, side-stepping nested field
|
||||
* lookups normally indicated by dot or bracket notation.
|
||||
*/
|
||||
export default function Key(params) {
|
||||
Operator.call(this, null, update, params);
|
||||
}
|
||||
|
||||
inherits(Key, Operator);
|
||||
|
||||
function update(_) {
|
||||
return (this.value && !_.modified()) ? this.value : key(_.fields, _.flat);
|
||||
}
|
||||
60
node_modules/vega-transforms/src/Load.js
generated
vendored
Normal file
60
node_modules/vega-transforms/src/Load.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
import {Transform, ingest} from 'vega-dataflow';
|
||||
import {array, inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Load and parse data from an external source. Marshalls parameter
|
||||
* values and then invokes the Dataflow request method.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {string} params.url - The URL to load from.
|
||||
* @param {object} params.format - The data format options.
|
||||
*/
|
||||
export default function Load(params) {
|
||||
Transform.call(this, [], params);
|
||||
this._pending = null;
|
||||
}
|
||||
|
||||
var prototype = inherits(Load, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
const df = pulse.dataflow;
|
||||
|
||||
if (this._pending) {
|
||||
// update state and return pulse
|
||||
return output(this, pulse, this._pending);
|
||||
}
|
||||
|
||||
if (stop(_)) return pulse.StopPropagation;
|
||||
|
||||
if (_.values) {
|
||||
// parse and ingest values, return output pulse
|
||||
return output(this, pulse, df.parse(_.values, _.format));
|
||||
} else if (_.async) {
|
||||
// return promise for non-blocking async loading
|
||||
const p = df.request(_.url, _.format).then(res => {
|
||||
this._pending = array(res.data);
|
||||
return df => df.touch(this);
|
||||
});
|
||||
return {async: p};
|
||||
} else {
|
||||
// return promise for synchronous loading
|
||||
return df.request(_.url, _.format)
|
||||
.then(res => output(this, pulse, array(res.data)));
|
||||
}
|
||||
};
|
||||
|
||||
function stop(_) {
|
||||
return _.modified('async') && !(
|
||||
_.modified('values') || _.modified('url') || _.modified('format')
|
||||
);
|
||||
}
|
||||
|
||||
function output(op, pulse, data) {
|
||||
data.forEach(ingest);
|
||||
const out = pulse.fork(pulse.NO_FIELDS & pulse.NO_SOURCE);
|
||||
out.rem = op.value;
|
||||
op.value = out.source = out.add = data;
|
||||
op._pending = null;
|
||||
if (out.rem.length) out.clean(true);
|
||||
return out;
|
||||
}
|
||||
86
node_modules/vega-transforms/src/Lookup.js
generated
vendored
Normal file
86
node_modules/vega-transforms/src/Lookup.js
generated
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
import {Transform} from 'vega-dataflow';
|
||||
import {accessorName, error, inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Extend tuples by joining them with values from a lookup table.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {Map} params.index - The lookup table map.
|
||||
* @param {Array<function(object): *} params.fields - The fields to lookup.
|
||||
* @param {Array<string>} params.as - Output field names for each lookup value.
|
||||
* @param {*} [params.default] - A default value to use if lookup fails.
|
||||
*/
|
||||
export default function Lookup(params) {
|
||||
Transform.call(this, {}, params);
|
||||
}
|
||||
|
||||
Lookup.Definition = {
|
||||
'type': 'Lookup',
|
||||
'metadata': {'modifies': true},
|
||||
'params': [
|
||||
{ 'name': 'index', 'type': 'index', 'params': [
|
||||
{'name': 'from', 'type': 'data', 'required': true },
|
||||
{'name': 'key', 'type': 'field', 'required': true }
|
||||
] },
|
||||
{ 'name': 'values', 'type': 'field', 'array': true },
|
||||
{ 'name': 'fields', 'type': 'field', 'array': true, 'required': true },
|
||||
{ 'name': 'as', 'type': 'string', 'array': true },
|
||||
{ 'name': 'default', 'default': null }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Lookup, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var out = pulse,
|
||||
as = _.as,
|
||||
keys = _.fields,
|
||||
index = _.index,
|
||||
values = _.values,
|
||||
defaultValue = _.default==null ? null : _.default,
|
||||
reset = _.modified(),
|
||||
flag = reset ? pulse.SOURCE : pulse.ADD,
|
||||
n = keys.length,
|
||||
set, m, mods;
|
||||
|
||||
if (values) {
|
||||
m = values.length;
|
||||
|
||||
if (n > 1 && !as) {
|
||||
error('Multi-field lookup requires explicit "as" parameter.');
|
||||
}
|
||||
if (as && as.length !== n * m) {
|
||||
error('The "as" parameter has too few output field names.');
|
||||
}
|
||||
as = as || values.map(accessorName);
|
||||
|
||||
set = function(t) {
|
||||
for (var i=0, k=0, j, v; i<n; ++i) {
|
||||
v = index.get(keys[i](t));
|
||||
if (v == null) for (j=0; j<m; ++j, ++k) t[as[k]] = defaultValue;
|
||||
else for (j=0; j<m; ++j, ++k) t[as[k]] = values[j](v);
|
||||
}
|
||||
};
|
||||
} else {
|
||||
if (!as) {
|
||||
error('Missing output field names.');
|
||||
}
|
||||
|
||||
set = function(t) {
|
||||
for (var i=0, v; i<n; ++i) {
|
||||
v = index.get(keys[i](t));
|
||||
t[as[i]] = v==null ? defaultValue : v;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if (reset) {
|
||||
out = pulse.reflow(true);
|
||||
} else {
|
||||
mods = keys.some(function(k) { return pulse.modified(k.fields); });
|
||||
flag |= (mods ? pulse.MOD : 0);
|
||||
}
|
||||
pulse.visit(flag, set);
|
||||
|
||||
return out.modifies(as);
|
||||
};
|
||||
32
node_modules/vega-transforms/src/MultiExtent.js
generated
vendored
Normal file
32
node_modules/vega-transforms/src/MultiExtent.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
import {Operator} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Computes global min/max extents over a collection of extents.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {Array<Array<number>>} params.extents - The input extents.
|
||||
*/
|
||||
export default function MultiExtent(params) {
|
||||
Operator.call(this, null, update, params);
|
||||
}
|
||||
|
||||
inherits(MultiExtent, Operator);
|
||||
|
||||
function update(_) {
|
||||
if (this.value && !_.modified()) {
|
||||
return this.value;
|
||||
}
|
||||
|
||||
var min = +Infinity,
|
||||
max = -Infinity,
|
||||
ext = _.extents,
|
||||
i, n, e;
|
||||
|
||||
for (i=0, n=ext.length; i<n; ++i) {
|
||||
e = ext[i];
|
||||
if (e[0] < min) min = e[0];
|
||||
if (e[1] > max) max = e[1];
|
||||
}
|
||||
return [min, max];
|
||||
}
|
||||
20
node_modules/vega-transforms/src/MultiValues.js
generated
vendored
Normal file
20
node_modules/vega-transforms/src/MultiValues.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import {Operator} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Merge a collection of value arrays.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {Array<Array<*>>} params.values - The input value arrrays.
|
||||
*/
|
||||
export default function MultiValues(params) {
|
||||
Operator.call(this, null, update, params);
|
||||
}
|
||||
|
||||
inherits(MultiValues, Operator);
|
||||
|
||||
function update(_) {
|
||||
return (this.value && !_.modified())
|
||||
? this.value
|
||||
: _.values.reduce(function(data, _) { return data.concat(_); }, []);
|
||||
}
|
||||
20
node_modules/vega-transforms/src/Params.js
generated
vendored
Normal file
20
node_modules/vega-transforms/src/Params.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import {Transform} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Operator whose value is simply its parameter hash. This operator is
|
||||
* useful for enabling reactive updates to values of nested objects.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
*/
|
||||
export default function Params(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
inherits(Params, Transform);
|
||||
|
||||
Params.prototype.transform = function(_, pulse) {
|
||||
this.modified(_.modified());
|
||||
this.value = _;
|
||||
return pulse.fork(pulse.NO_SOURCE | pulse.NO_FIELDS); // do not pass tuples
|
||||
};
|
||||
102
node_modules/vega-transforms/src/Pivot.js
generated
vendored
Normal file
102
node_modules/vega-transforms/src/Pivot.js
generated
vendored
Normal file
@@ -0,0 +1,102 @@
|
||||
import Aggregate from './Aggregate';
|
||||
import {ValidAggregateOps} from './util/AggregateOps';
|
||||
import {accessor, accessorFields, inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Aggregate and pivot selected field values to become new fields.
|
||||
* This operator is useful to construction cross-tabulations.
|
||||
* @constructor
|
||||
* @param {Array<function(object): *>} [params.groupby] - An array of accessors
|
||||
* to groupby. These fields act just like groupby fields of an Aggregate transform.
|
||||
* @param {function(object): *} params.field - The field to pivot on. The unique
|
||||
* values of this field become new field names in the output stream.
|
||||
* @param {function(object): *} params.value - The field to populate pivoted fields.
|
||||
* The aggregate values of this field become the values of the new pivoted fields.
|
||||
* @param {string} [params.op] - The aggregation operation for the value field,
|
||||
* applied per cell in the output stream. The default is "sum".
|
||||
* @param {number} [params.limit] - An optional parameter indicating the maximum
|
||||
* number of pivoted fields to generate. The pivoted field names are sorted in
|
||||
* ascending order prior to enforcing the limit.
|
||||
*/
|
||||
export default function Pivot(params) {
|
||||
Aggregate.call(this, params);
|
||||
}
|
||||
|
||||
Pivot.Definition = {
|
||||
'type': 'Pivot',
|
||||
'metadata': {'generates': true, 'changes': true},
|
||||
'params': [
|
||||
{ 'name': 'groupby', 'type': 'field', 'array': true },
|
||||
{ 'name': 'field', 'type': 'field', 'required': true },
|
||||
{ 'name': 'value', 'type': 'field', 'required': true },
|
||||
{ 'name': 'op', 'type': 'enum', 'values': ValidAggregateOps, 'default': 'sum' },
|
||||
{ 'name': 'limit', 'type': 'number', 'default': 0 },
|
||||
{ 'name': 'key', 'type': 'field' }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Pivot, Aggregate);
|
||||
|
||||
prototype._transform = prototype.transform;
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
return this._transform(aggregateParams(_, pulse), pulse);
|
||||
};
|
||||
|
||||
// Shoehorn a pivot transform into an aggregate transform!
|
||||
// First collect all unique pivot field values.
|
||||
// Then generate aggregate fields for each output pivot field.
|
||||
function aggregateParams(_, pulse) {
|
||||
var key = _.field,
|
||||
value = _.value,
|
||||
op = (_.op === 'count' ? '__count__' : _.op) || 'sum',
|
||||
fields = accessorFields(key).concat(accessorFields(value)),
|
||||
keys = pivotKeys(key, _.limit || 0, pulse);
|
||||
|
||||
// if data stream content changes, pivot fields may change
|
||||
// flag parameter modification to ensure re-initialization
|
||||
if (pulse.changed()) _.set('__pivot__', null, null, true);
|
||||
|
||||
return {
|
||||
key: _.key,
|
||||
groupby: _.groupby,
|
||||
ops: keys.map(function() { return op; }),
|
||||
fields: keys.map(function(k) { return get(k, key, value, fields); }),
|
||||
as: keys.map(function(k) { return k + ''; }),
|
||||
modified: _.modified.bind(_)
|
||||
};
|
||||
}
|
||||
|
||||
// Generate aggregate field accessor.
|
||||
// Output NaN for non-existent values; aggregator will ignore!
|
||||
function get(k, key, value, fields) {
|
||||
return accessor(
|
||||
function(d) { return key(d) === k ? value(d) : NaN; },
|
||||
fields,
|
||||
k + ''
|
||||
);
|
||||
}
|
||||
|
||||
// Collect (and optionally limit) all unique pivot values.
|
||||
function pivotKeys(key, limit, pulse) {
|
||||
var map = {},
|
||||
list = [];
|
||||
|
||||
pulse.visit(pulse.SOURCE, function(t) {
|
||||
var k = key(t);
|
||||
if (!map[k]) {
|
||||
map[k] = 1;
|
||||
list.push(k);
|
||||
}
|
||||
});
|
||||
|
||||
// TODO? Move this comparator to vega-util?
|
||||
list.sort(function(u, v) {
|
||||
return (u<v||u==null) && v!=null ? -1
|
||||
: (u>v||v==null) && u!=null ? 1
|
||||
: ((v=v instanceof Date?+v:v),(u=u instanceof Date?+u:u))!==u && v===v ? -1
|
||||
: v!==v && u===u ? 1 : 0;
|
||||
});
|
||||
|
||||
return limit ? list.slice(0, limit) : list;
|
||||
}
|
||||
57
node_modules/vega-transforms/src/PreFacet.js
generated
vendored
Normal file
57
node_modules/vega-transforms/src/PreFacet.js
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
import Facet from './Facet';
|
||||
import {ingest, tupleid} from 'vega-dataflow';
|
||||
import {accessorFields, error, inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Partitions pre-faceted data into tuple subflows.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {function(Dataflow, string): Operator} params.subflow - A function
|
||||
* that generates a subflow of operators and returns its root operator.
|
||||
* @param {function(object): Array<object>} params.field - The field
|
||||
* accessor for an array of subflow tuple objects.
|
||||
*/
|
||||
export default function PreFacet(params) {
|
||||
Facet.call(this, params);
|
||||
}
|
||||
|
||||
const prototype = inherits(PreFacet, Facet);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
const flow = _.subflow,
|
||||
field = _.field,
|
||||
subflow = t => this.subflow(tupleid(t), flow, pulse, t);
|
||||
|
||||
if (_.modified('field') || field && pulse.modified(accessorFields(field))) {
|
||||
error('PreFacet does not support field modification.');
|
||||
}
|
||||
|
||||
this.initTargets(); // reset list of active subflows
|
||||
|
||||
if (field) {
|
||||
pulse.visit(pulse.MOD, t => {
|
||||
const sf = subflow(t);
|
||||
field(t).forEach(_ => sf.mod(_));
|
||||
});
|
||||
|
||||
pulse.visit(pulse.ADD, t => {
|
||||
const sf = subflow(t);
|
||||
field(t).forEach(_ => sf.add(ingest(_)));
|
||||
});
|
||||
|
||||
pulse.visit(pulse.REM, t => {
|
||||
const sf = subflow(t);
|
||||
field(t).forEach(_ => sf.rem(_));
|
||||
});
|
||||
} else {
|
||||
pulse.visit(pulse.MOD, t => subflow(t).mod(t));
|
||||
pulse.visit(pulse.ADD, t => subflow(t).add(t));
|
||||
pulse.visit(pulse.REM, t => subflow(t).rem(t));
|
||||
}
|
||||
|
||||
if (pulse.clean()) {
|
||||
pulse.runAfter(() => this.clean());
|
||||
}
|
||||
|
||||
return pulse;
|
||||
};
|
||||
73
node_modules/vega-transforms/src/Project.js
generated
vendored
Normal file
73
node_modules/vega-transforms/src/Project.js
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
import {fieldNames} from './util/util';
|
||||
import {Transform, ingest, rederive, tupleid} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Performs a relational projection, copying selected fields from source
|
||||
* tuples to a new set of derived tuples.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {Array<function(object): *} params.fields - The fields to project,
|
||||
* as an array of field accessors. If unspecified, all fields will be
|
||||
* copied with names unchanged.
|
||||
* @param {Array<string>} [params.as] - Output field names for each projected
|
||||
* field. Any unspecified fields will use the field name provided by
|
||||
* the field accessor.
|
||||
*/
|
||||
export default function Project(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
Project.Definition = {
|
||||
'type': 'Project',
|
||||
'metadata': {'generates': true, 'changes': true},
|
||||
'params': [
|
||||
{ 'name': 'fields', 'type': 'field', 'array': true },
|
||||
{ 'name': 'as', 'type': 'string', 'null': true, 'array': true }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Project, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var fields = _.fields,
|
||||
as = fieldNames(_.fields, _.as || []),
|
||||
derive = fields
|
||||
? function(s, t) { return project(s, t, fields, as); }
|
||||
: rederive,
|
||||
out, lut;
|
||||
|
||||
if (this.value) {
|
||||
lut = this.value;
|
||||
} else {
|
||||
pulse = pulse.addAll();
|
||||
lut = this.value = {};
|
||||
}
|
||||
|
||||
out = pulse.fork(pulse.NO_SOURCE);
|
||||
|
||||
pulse.visit(pulse.REM, function(t) {
|
||||
var id = tupleid(t);
|
||||
out.rem.push(lut[id]);
|
||||
lut[id] = null;
|
||||
});
|
||||
|
||||
pulse.visit(pulse.ADD, function(t) {
|
||||
var dt = derive(t, ingest({}));
|
||||
lut[tupleid(t)] = dt;
|
||||
out.add.push(dt);
|
||||
});
|
||||
|
||||
pulse.visit(pulse.MOD, function(t) {
|
||||
out.mod.push(derive(t, lut[tupleid(t)]));
|
||||
});
|
||||
|
||||
return out;
|
||||
};
|
||||
|
||||
function project(s, t, fields, as) {
|
||||
for (var i=0, n=fields.length; i<n; ++i) {
|
||||
t[as[i]] = fields[i](s);
|
||||
}
|
||||
return t;
|
||||
}
|
||||
22
node_modules/vega-transforms/src/Proxy.js
generated
vendored
Normal file
22
node_modules/vega-transforms/src/Proxy.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
import {Transform} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Proxy the value of another operator as a pure signal value.
|
||||
* Ensures no tuples are propagated.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {*} params.value - The value to proxy, becomes the value of this operator.
|
||||
*/
|
||||
export default function Proxy(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
var prototype = inherits(Proxy, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
this.value = _.value;
|
||||
return _.modified('value')
|
||||
? pulse.fork(pulse.NO_SOURCE | pulse.NO_FIELDS)
|
||||
: pulse.StopPropagation;
|
||||
};
|
||||
78
node_modules/vega-transforms/src/Quantile.js
generated
vendored
Normal file
78
node_modules/vega-transforms/src/Quantile.js
generated
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
import {partition} from './util/util';
|
||||
import {Transform, ingest} from 'vega-dataflow';
|
||||
import {quantiles} from 'vega-statistics';
|
||||
import {accessorName, inherits} from 'vega-util';
|
||||
import {range} from 'd3-array';
|
||||
|
||||
/**
|
||||
* Generates sample quantile values from an input data stream.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {function(object): *} params.field - An accessor for the data field
|
||||
* over which to calculate quantile values.
|
||||
* @param {Array<function(object): *>} [params.groupby] - An array of accessors
|
||||
* to groupby.
|
||||
* @param {Array<number>} [params.probs] - An array of probabilities in
|
||||
* the range (0, 1) for which to compute quantile values. If not specified,
|
||||
* the *step* parameter will be used.
|
||||
* @param {Array<number>} [params.step=0.01] - A probability step size for
|
||||
* sampling quantile values. All values from one-half the step size up to
|
||||
* 1 (exclusive) will be sampled. This parameter is only used if the
|
||||
* *quantiles* parameter is not provided.
|
||||
*/
|
||||
export default function Quantile(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
Quantile.Definition = {
|
||||
'type': 'Quantile',
|
||||
'metadata': {'generates': true, 'changes': true},
|
||||
'params': [
|
||||
{ 'name': 'groupby', 'type': 'field', 'array': true },
|
||||
{ 'name': 'field', 'type': 'field', 'required': true },
|
||||
{ 'name': 'probs', 'type': 'number', 'array': true },
|
||||
{ 'name': 'step', 'type': 'number', 'default': 0.01 },
|
||||
{ 'name': 'as', 'type': 'string', 'array': true, 'default': ['prob', 'value'] }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Quantile, Transform);
|
||||
|
||||
var EPSILON = 1e-14;
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var out = pulse.fork(pulse.NO_SOURCE | pulse.NO_FIELDS),
|
||||
as = _.as || ['prob', 'value'];
|
||||
|
||||
if (this.value && !_.modified() && !pulse.changed()) {
|
||||
out.source = this.value;
|
||||
return out;
|
||||
}
|
||||
|
||||
const source = pulse.materialize(pulse.SOURCE).source,
|
||||
groups = partition(source, _.groupby, _.field),
|
||||
names = (_.groupby || []).map(accessorName),
|
||||
values = [],
|
||||
step = _.step || 0.01,
|
||||
p = _.probs || range(step/2, 1 - EPSILON, step),
|
||||
n = p.length;
|
||||
|
||||
groups.forEach(g => {
|
||||
const q = quantiles(g, p);
|
||||
|
||||
for (let i=0; i<n; ++i) {
|
||||
const t = {};
|
||||
for (let i=0; i<names.length; ++i) {
|
||||
t[names[i]] = g.dims[i];
|
||||
}
|
||||
t[as[0]] = p[i];
|
||||
t[as[1]] = q[i];
|
||||
values.push(ingest(t));
|
||||
}
|
||||
});
|
||||
|
||||
if (this.value) out.rem = this.value;
|
||||
this.value = out.add = out.source = values;
|
||||
|
||||
return out;
|
||||
};
|
||||
58
node_modules/vega-transforms/src/Relay.js
generated
vendored
Normal file
58
node_modules/vega-transforms/src/Relay.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
import {Transform, derive, tupleid} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Relays a data stream between data processing pipelines.
|
||||
* If the derive parameter is set, this transform will create derived
|
||||
* copies of observed tuples. This provides derived data streams in which
|
||||
* modifications to the tuples do not pollute an upstream data source.
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {number} [params.derive=false] - Boolean flag indicating if
|
||||
* the transform should make derived copies of incoming tuples.
|
||||
* @constructor
|
||||
*/
|
||||
export default function Relay(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
var prototype = inherits(Relay, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var out, lut;
|
||||
|
||||
if (this.value) {
|
||||
lut = this.value;
|
||||
} else {
|
||||
out = pulse = pulse.addAll();
|
||||
lut = this.value = {};
|
||||
}
|
||||
|
||||
if (_.derive) {
|
||||
out = pulse.fork(pulse.NO_SOURCE);
|
||||
|
||||
pulse.visit(pulse.REM, t => {
|
||||
var id = tupleid(t);
|
||||
out.rem.push(lut[id]);
|
||||
lut[id] = null;
|
||||
});
|
||||
|
||||
pulse.visit(pulse.ADD, t => {
|
||||
var dt = derive(t);
|
||||
lut[tupleid(t)] = dt;
|
||||
out.add.push(dt);
|
||||
});
|
||||
|
||||
pulse.visit(pulse.MOD, t => {
|
||||
var dt = lut[tupleid(t)], k;
|
||||
for (k in t) {
|
||||
dt[k] = t[k];
|
||||
// down stream writes may overwrite re-derived tuples
|
||||
// conservatively mark all source fields as modified
|
||||
out.modifies(k);
|
||||
}
|
||||
out.mod.push(dt);
|
||||
});
|
||||
}
|
||||
|
||||
return out;
|
||||
};
|
||||
109
node_modules/vega-transforms/src/Sample.js
generated
vendored
Normal file
109
node_modules/vega-transforms/src/Sample.js
generated
vendored
Normal file
@@ -0,0 +1,109 @@
|
||||
import {Transform, tupleid} from 'vega-dataflow';
|
||||
import {random} from 'vega-statistics';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Samples tuples passing through this operator.
|
||||
* Uses reservoir sampling to maintain a representative sample.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {number} [params.size=1000] - The maximum number of samples.
|
||||
*/
|
||||
export default function Sample(params) {
|
||||
Transform.call(this, [], params);
|
||||
this.count = 0;
|
||||
}
|
||||
|
||||
Sample.Definition = {
|
||||
'type': 'Sample',
|
||||
'metadata': {},
|
||||
'params': [
|
||||
{ 'name': 'size', 'type': 'number', 'default': 1000 }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Sample, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var out = pulse.fork(pulse.NO_SOURCE),
|
||||
mod = _.modified('size'),
|
||||
num = _.size,
|
||||
res = this.value,
|
||||
cnt = this.count,
|
||||
cap = 0,
|
||||
map = res.reduce(function(m, t) {
|
||||
m[tupleid(t)] = 1;
|
||||
return m;
|
||||
}, {});
|
||||
|
||||
// sample reservoir update function
|
||||
function update(t) {
|
||||
var p, idx;
|
||||
|
||||
if (res.length < num) {
|
||||
res.push(t);
|
||||
} else {
|
||||
idx = ~~((cnt + 1) * random());
|
||||
if (idx < res.length && idx >= cap) {
|
||||
p = res[idx];
|
||||
if (map[tupleid(p)]) out.rem.push(p); // eviction
|
||||
res[idx] = t;
|
||||
}
|
||||
}
|
||||
++cnt;
|
||||
}
|
||||
|
||||
if (pulse.rem.length) {
|
||||
// find all tuples that should be removed, add to output
|
||||
pulse.visit(pulse.REM, function(t) {
|
||||
var id = tupleid(t);
|
||||
if (map[id]) {
|
||||
map[id] = -1;
|
||||
out.rem.push(t);
|
||||
}
|
||||
--cnt;
|
||||
});
|
||||
|
||||
// filter removed tuples out of the sample reservoir
|
||||
res = res.filter(function(t) { return map[tupleid(t)] !== -1; });
|
||||
}
|
||||
|
||||
if ((pulse.rem.length || mod) && res.length < num && pulse.source) {
|
||||
// replenish sample if backing data source is available
|
||||
cap = cnt = res.length;
|
||||
pulse.visit(pulse.SOURCE, function(t) {
|
||||
// update, but skip previously sampled tuples
|
||||
if (!map[tupleid(t)]) update(t);
|
||||
});
|
||||
cap = -1;
|
||||
}
|
||||
|
||||
if (mod && res.length > num) {
|
||||
for (var i=0, n=res.length-num; i<n; ++i) {
|
||||
map[tupleid(res[i])] = -1;
|
||||
out.rem.push(res[i]);
|
||||
}
|
||||
res = res.slice(n);
|
||||
}
|
||||
|
||||
if (pulse.mod.length) {
|
||||
// propagate modified tuples in the sample reservoir
|
||||
pulse.visit(pulse.MOD, function(t) {
|
||||
if (map[tupleid(t)]) out.mod.push(t);
|
||||
});
|
||||
}
|
||||
|
||||
if (pulse.add.length) {
|
||||
// update sample reservoir
|
||||
pulse.visit(pulse.ADD, update);
|
||||
}
|
||||
|
||||
if (pulse.add.length || cap < 0) {
|
||||
// output newly added tuples
|
||||
out.add = res.filter(function(t) { return !map[tupleid(t)]; });
|
||||
}
|
||||
|
||||
this.count = cnt;
|
||||
this.value = out.source = res;
|
||||
return out;
|
||||
};
|
||||
47
node_modules/vega-transforms/src/Sequence.js
generated
vendored
Normal file
47
node_modules/vega-transforms/src/Sequence.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
import {Transform, ingest} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
import {range} from 'd3-array';
|
||||
|
||||
/**
|
||||
* Generates data tuples for a specified sequence range of numbers.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {number} params.start - The first number in the sequence.
|
||||
* @param {number} params.stop - The last number (exclusive) in the sequence.
|
||||
* @param {number} [params.step=1] - The step size between numbers in the sequence.
|
||||
*/
|
||||
export default function Sequence(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
Sequence.Definition = {
|
||||
'type': 'Sequence',
|
||||
'metadata': {'generates': true, 'changes': true},
|
||||
'params': [
|
||||
{ 'name': 'start', 'type': 'number', 'required': true },
|
||||
{ 'name': 'stop', 'type': 'number', 'required': true },
|
||||
{ 'name': 'step', 'type': 'number', 'default': 1 },
|
||||
{ 'name': 'as', 'type': 'string', 'default': 'data' }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(Sequence, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
if (this.value && !_.modified()) return;
|
||||
|
||||
var out = pulse.materialize().fork(pulse.MOD),
|
||||
as = _.as || 'data';
|
||||
|
||||
out.rem = this.value ? pulse.rem.concat(this.value) : pulse.rem;
|
||||
|
||||
this.value = range(_.start, _.stop, _.step || 1).map(function(v) {
|
||||
var t = {};
|
||||
t[as] = v;
|
||||
return ingest(t);
|
||||
});
|
||||
|
||||
out.add = pulse.add.concat(this.value);
|
||||
|
||||
return out;
|
||||
};
|
||||
22
node_modules/vega-transforms/src/Sieve.js
generated
vendored
Normal file
22
node_modules/vega-transforms/src/Sieve.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
import {Transform} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Propagates a new pulse without any tuples so long as the input
|
||||
* pulse contains some added, removed or modified tuples.
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @constructor
|
||||
*/
|
||||
export default function Sieve(params) {
|
||||
Transform.call(this, null, params);
|
||||
this.modified(true); // always treat as modified
|
||||
}
|
||||
|
||||
var prototype = inherits(Sieve, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
this.value = pulse.source;
|
||||
return pulse.changed()
|
||||
? pulse.fork(pulse.NO_SOURCE | pulse.NO_FIELDS)
|
||||
: pulse.StopPropagation;
|
||||
};
|
||||
72
node_modules/vega-transforms/src/Subflow.js
generated
vendored
Normal file
72
node_modules/vega-transforms/src/Subflow.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
import {Operator} from 'vega-dataflow';
|
||||
import {inherits} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Provides a bridge between a parent transform and a target subflow that
|
||||
* consumes only a subset of the tuples that pass through the parent.
|
||||
* @constructor
|
||||
* @param {Pulse} pulse - A pulse to use as the value of this operator.
|
||||
* @param {Transform} parent - The parent transform (typically a Facet instance).
|
||||
*/
|
||||
export default function Subflow(pulse, parent) {
|
||||
Operator.call(this, pulse);
|
||||
this.parent = parent;
|
||||
this.count = 0;
|
||||
}
|
||||
|
||||
var prototype = inherits(Subflow, Operator);
|
||||
|
||||
/**
|
||||
* Routes pulses from this subflow to a target transform.
|
||||
* @param {Transform} target - A transform that receives the subflow of tuples.
|
||||
*/
|
||||
prototype.connect = function(target) {
|
||||
this.detachSubflow = target.detachSubflow;
|
||||
this.targets().add(target);
|
||||
return (target.source = this);
|
||||
};
|
||||
|
||||
/**
|
||||
* Add an 'add' tuple to the subflow pulse.
|
||||
* @param {Tuple} t - The tuple being added.
|
||||
*/
|
||||
prototype.add = function(t) {
|
||||
this.count += 1;
|
||||
this.value.add.push(t);
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a 'rem' tuple to the subflow pulse.
|
||||
* @param {Tuple} t - The tuple being removed.
|
||||
*/
|
||||
prototype.rem = function(t) {
|
||||
this.count -= 1;
|
||||
this.value.rem.push(t);
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a 'mod' tuple to the subflow pulse.
|
||||
* @param {Tuple} t - The tuple being modified.
|
||||
*/
|
||||
prototype.mod = function(t) {
|
||||
this.value.mod.push(t);
|
||||
};
|
||||
|
||||
/**
|
||||
* Re-initialize this operator's pulse value.
|
||||
* @param {Pulse} pulse - The pulse to copy from.
|
||||
* @see Pulse.init
|
||||
*/
|
||||
prototype.init = function(pulse) {
|
||||
this.value.init(pulse, pulse.NO_SOURCE);
|
||||
};
|
||||
|
||||
/**
|
||||
* Evaluate this operator. This method overrides the
|
||||
* default behavior to simply return the contained pulse value.
|
||||
* @return {Pulse}
|
||||
*/
|
||||
prototype.evaluate = function() {
|
||||
// assert: this.value.stamp === pulse.stamp
|
||||
return this.value;
|
||||
};
|
||||
100
node_modules/vega-transforms/src/TimeUnit.js
generated
vendored
Normal file
100
node_modules/vega-transforms/src/TimeUnit.js
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
import {Transform} from 'vega-dataflow';
|
||||
import {
|
||||
TIME_UNITS, timeBin, timeFloor, timeInterval, timeUnits,
|
||||
utcFloor, utcInterval
|
||||
} from 'vega-time';
|
||||
import {accessorFields, extent, inherits, peek} from 'vega-util';
|
||||
|
||||
/**
|
||||
* Discretize dates to specific time units.
|
||||
* @constructor
|
||||
* @param {object} params - The parameters for this operator.
|
||||
* @param {function(object): *} params.field - The data field containing date/time values.
|
||||
*/
|
||||
export default function TimeUnit(params) {
|
||||
Transform.call(this, null, params);
|
||||
}
|
||||
|
||||
const OUTPUT = ['unit0', 'unit1'];
|
||||
|
||||
TimeUnit.Definition = {
|
||||
'type': 'TimeUnit',
|
||||
'metadata': {'modifies': true},
|
||||
'params': [
|
||||
{ 'name': 'field', 'type': 'field', 'required': true },
|
||||
{ 'name': 'interval', 'type': 'boolean', 'default': true },
|
||||
{ 'name': 'units', 'type': 'enum', 'values': TIME_UNITS, 'array': true },
|
||||
{ 'name': 'step', 'type': 'number', 'default': 1 },
|
||||
{ 'name': 'maxbins', 'type': 'number', 'default': 40 },
|
||||
{ 'name': 'extent', 'type': 'date', 'array': true},
|
||||
{ 'name': 'timezone', 'type': 'enum', 'default': 'local', 'values': ['local', 'utc'] },
|
||||
{ 'name': 'as', 'type': 'string', 'array': true, 'length': 2, 'default': OUTPUT }
|
||||
]
|
||||
};
|
||||
|
||||
var prototype = inherits(TimeUnit, Transform);
|
||||
|
||||
prototype.transform = function(_, pulse) {
|
||||
var field = _.field,
|
||||
band = _.interval !== false,
|
||||
utc = _.timezone === 'utc',
|
||||
floor = this._floor(_, pulse),
|
||||
offset = (utc ? utcInterval : timeInterval)(floor.unit).offset,
|
||||
as = _.as || OUTPUT,
|
||||
u0 = as[0],
|
||||
u1 = as[1],
|
||||
min = floor.start || Infinity,
|
||||
max = floor.stop || -Infinity,
|
||||
step = floor.step,
|
||||
flag = pulse.ADD;
|
||||
|
||||
if (_.modified() || pulse.modified(accessorFields(field))) {
|
||||
pulse = pulse.reflow(true);
|
||||
flag = pulse.SOURCE;
|
||||
min = Infinity;
|
||||
max = -Infinity;
|
||||
}
|
||||
|
||||
pulse.visit(flag, function(t) {
|
||||
var v = field(t), a, b;
|
||||
if (v == null) {
|
||||
t[u0] = null;
|
||||
if (band) t[u1] = null;
|
||||
} else {
|
||||
t[u0] = a = b = floor(v);
|
||||
if (band) t[u1] = b = offset(a, step);
|
||||
if (a < min) min = a;
|
||||
if (b > max) max = b;
|
||||
}
|
||||
});
|
||||
|
||||
floor.start = min;
|
||||
floor.stop = max;
|
||||
|
||||
return pulse.modifies(band ? as : u0);
|
||||
};
|
||||
|
||||
prototype._floor = function(_, pulse) {
|
||||
const utc = _.timezone === 'utc';
|
||||
|
||||
// get parameters
|
||||
let {units, step} = _.units
|
||||
? {units: _.units, step: _.step || 1}
|
||||
: timeBin({
|
||||
extent: _.extent || extent(pulse.materialize(pulse.SOURCE).source, _.field),
|
||||
maxbins: _.maxbins
|
||||
});
|
||||
|
||||
// check / standardize time units
|
||||
units = timeUnits(units);
|
||||
|
||||
const prev = this.value || {},
|
||||
floor = (utc ? utcFloor : timeFloor)(units, step);
|
||||
|
||||
floor.unit = peek(units);
|
||||
floor.units = units;
|
||||
floor.step = step;
|
||||
floor.start = prev.start;
|
||||
floor.stop = prev.stop;
|
||||
return this.value = floor;
|
||||
};
38
node_modules/vega-transforms/src/TupleIndex.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
import {Transform} from 'vega-dataflow';
import {fastmap, inherits} from 'vega-util';

/**
 * An index that maps from unique, string-coerced, field values to tuples.
 * Assumes that the field serves as a unique key with no duplicate values.
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {function(object): *} params.field - The field accessor to index.
 */
export default function TupleIndex(params) {
  Transform.call(this, fastmap(), params);
}

var prototype = inherits(TupleIndex, Transform);

prototype.transform = function(_, pulse) {
  var df = pulse.dataflow,
      field = _.field,
      index = this.value,
      mod = true;

  function set(t) { index.set(field(t), t); }

  if (_.modified('field') || pulse.modified(field.fields)) {
    index.clear();
    pulse.visit(pulse.SOURCE, set);
  } else if (pulse.changed()) {
    pulse.visit(pulse.REM, function(t) { index.delete(field(t)); });
    pulse.visit(pulse.ADD, set);
  } else {
    mod = false;
  }

  this.modified(mod);
  if (index.empty > df.cleanThreshold) df.runAfter(index.clean);
  return pulse.fork();
};
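// Conceptual sketch (illustrative only, not the transform's actual fastmap
// storage): the transform maintains a string-keyed index from unique field
// values to their backing tuples, e.g. keyed by a hypothetical 'id' field.
const exampleTuples = [{id: 1, label: 'a'}, {id: 2, label: 'b'}];
const exampleIndex = new Map(exampleTuples.map(t => [String(t.id), t]));
// exampleIndex.get('2') -> {id: 2, label: 'b'}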
32
node_modules/vega-transforms/src/Values.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
import {Transform, stableCompare} from 'vega-dataflow';
import {inherits} from 'vega-util';

/**
 * Extracts an array of values. Assumes the source data has already been
 * reduced as needed (e.g., by an upstream Aggregate transform).
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {function(object): *} params.field - The domain field to extract.
 * @param {function(*,*): number} [params.sort] - An optional
 *   comparator function for sorting the values. The comparator will be
 *   applied to backing tuples prior to value extraction.
 */
export default function Values(params) {
  Transform.call(this, null, params);
}

var prototype = inherits(Values, Transform);

prototype.transform = function(_, pulse) {
  var run = !this.value
    || _.modified('field')
    || _.modified('sort')
    || pulse.changed()
    || (_.sort && pulse.modified(_.sort.fields));

  if (run) {
    this.value = (_.sort
      ? pulse.source.slice().sort(stableCompare(_.sort))
      : pulse.source).map(_.field);
  }
};
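// Conceptual sketch (illustrative, hypothetical tuples): with a sort comparator
// the backing tuples are ordered first, then the field values are extracted,
// mirroring the transform body above.
const exampleSource = [{k: 'b', v: 2}, {k: 'a', v: 1}];
const exampleValues = exampleSource.slice()
  .sort((a, b) => (a.k < b.k ? -1 : a.k > b.k ? 1 : 0))
  .map(t => t.v);
// exampleValues -> [1, 2]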
142
node_modules/vega-transforms/src/Window.js
generated
vendored
Normal file
@@ -0,0 +1,142 @@
import {groupkey} from './util/AggregateKeys';
import {ValidAggregateOps} from './util/AggregateOps';
import SortedList from './util/SortedList';
import {ValidWindowOps} from './util/WindowOps';
import WindowState from './util/WindowState';
import {Transform, stableCompare, tupleid} from 'vega-dataflow';
import {constant, inherits} from 'vega-util';
import {bisector} from 'd3-array';

/**
 * Perform window calculations and write results to the input stream.
 * @constructor
 * @param {object} params - The parameters for this operator.
 * @param {function(*,*): number} [params.sort] - A comparator function for sorting tuples within a window.
 * @param {Array<function(object): *>} [params.groupby] - An array of accessors by which to partition tuples into separate windows.
 * @param {Array<string>} params.ops - An array of strings indicating window operations to perform.
 * @param {Array<function(object): *>} [params.fields] - An array of accessors
 *   for data fields to use as inputs to window operations.
 * @param {Array<*>} [params.params] - An array of parameter values for window operations.
 * @param {Array<string>} [params.as] - An array of output field names for window operations.
 * @param {Array<number>} [params.frame] - Window frame definition as two-element array.
 * @param {boolean} [params.ignorePeers=false] - If true, base window frame boundaries on row
 *   number alone, ignoring peers with identical sort values. If false (default),
 *   the window boundaries will be adjusted to include peer values.
 */
export default function Window(params) {
  Transform.call(this, {}, params);
  this._mlen = 0;
  this._mods = [];
}

Window.Definition = {
  'type': 'Window',
  'metadata': {'modifies': true},
  'params': [
    { 'name': 'sort', 'type': 'compare' },
    { 'name': 'groupby', 'type': 'field', 'array': true },
    { 'name': 'ops', 'type': 'enum', 'array': true, 'values': ValidWindowOps.concat(ValidAggregateOps) },
    { 'name': 'params', 'type': 'number', 'null': true, 'array': true },
    { 'name': 'fields', 'type': 'field', 'null': true, 'array': true },
    { 'name': 'as', 'type': 'string', 'null': true, 'array': true },
    { 'name': 'frame', 'type': 'number', 'null': true, 'array': true, 'length': 2, 'default': [null, 0] },
    { 'name': 'ignorePeers', 'type': 'boolean', 'default': false }
  ]
};
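// Illustrative parameter set (hypothetical field names, not part of the vendored
// source): per the Definition above, a window transform that ranks rows and keeps
// a running sum of 'value' within each 'category' partition could be described
// roughly as follows.
const exampleWindowParams = {
  type: 'window',
  sort: {field: 'value', order: 'ascending'},
  groupby: ['category'],
  ops: ['rank', 'sum'],
  fields: [null, 'value'],
  as: ['rank', 'running_total'],
  frame: [null, 0]
};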

var prototype = inherits(Window, Transform);

prototype.transform = function(_, pulse) {
  var self = this,
      state = self.state,
      mod = _.modified(),
      cmp = stableCompare(_.sort),
      i, n;

  this.stamp = pulse.stamp;

  // initialize window state
  if (!state || mod) {
    state = self.state = new WindowState(_);
  }

  // retrieve group for a tuple
  var key = groupkey(_.groupby);
  function group(t) { return self.group(key(t)); }

  // partition input tuples
  if (mod || pulse.modified(state.inputs)) {
    self.value = {};
    pulse.visit(pulse.SOURCE, function(t) { group(t).add(t); });
  } else {
    pulse.visit(pulse.REM, function(t) { group(t).remove(t); });
    pulse.visit(pulse.ADD, function(t) { group(t).add(t); });
  }

  // perform window calculations for each modified partition
  for (i=0, n=self._mlen; i<n; ++i) {
    processPartition(self._mods[i], state, cmp, _);
  }
  self._mlen = 0;
  self._mods = [];

  // TODO don't reflow everything?
  return pulse.reflow(mod).modifies(state.outputs);
};

prototype.group = function(key) {
  var self = this,
      group = self.value[key];

  if (!group) {
    group = self.value[key] = SortedList(tupleid);
    group.stamp = -1;
  }

  if (group.stamp < self.stamp) {
    group.stamp = self.stamp;
    self._mods[self._mlen++] = group;
  }

  return group;
};

function processPartition(list, state, cmp, _) {
  var sort = _.sort,
      range = sort && !_.ignorePeers,
      frame = _.frame || [null, 0],
      data = list.data(cmp), // use cmp for stable sort
      n = data.length,
      i = 0,
      b = range ? bisector(sort) : null,
      w = {
        i0: 0, i1: 0, p0: 0, p1: 0, index: 0,
        data: data, compare: sort || constant(-1)
      };

  for (state.init(); i<n; ++i) {
    setWindow(w, frame, i, n);
    if (range) adjustRange(w, b);
    state.update(w, data[i]);
  }
}

function setWindow(w, f, i, n) {
  w.p0 = w.i0;
  w.p1 = w.i1;
  w.i0 = f[0] == null ? 0 : Math.max(0, i - Math.abs(f[0]));
  w.i1 = f[1] == null ? n : Math.min(n, i + Math.abs(f[1]) + 1);
  w.index = i;
}
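// Worked sketch (illustrative): with frame [-2, 0] at row index i = 5 of n = 10
// rows, setWindow selects rows 3 through 5 (i1 is an exclusive upper bound).
const exampleFrameWindow = {i0: 0, i1: 0, p0: 0, p1: 0, index: 0};
setWindow(exampleFrameWindow, [-2, 0], 5, 10);
// exampleFrameWindow.i0 -> 3, exampleFrameWindow.i1 -> 6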

// if frame type is 'range', adjust window for peer values
function adjustRange(w, bisect) {
  var r0 = w.i0,
      r1 = w.i1 - 1,
      c = w.compare,
      d = w.data,
      n = d.length - 1;

  if (r0 > 0 && !c(d[r0], d[r0-1])) w.i0 = bisect.left(d, d[r0]);
  if (r1 < n && !c(d[r1], d[r1+1])) w.i1 = bisect.right(d, d[r1]);
}
19
node_modules/vega-transforms/src/util/AggregateKeys.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
export function multikey(f) {
  return function(x) {
    var n = f.length,
        i = 1,
        k = String(f[0](x));

    for (; i<n; ++i) {
      k += '|' + f[i](x);
    }

    return k;
  };
}

export function groupkey(fields) {
  return !fields || !fields.length ? function() { return ''; }
    : fields.length === 1 ? fields[0]
    : multikey(fields);
}
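// Usage sketch (illustrative, hypothetical accessors): groupkey joins the
// string-coerced result of each accessor with '|'.
const exampleKey = groupkey([d => d.city, d => d.year]);
// exampleKey({city: 'Seattle', year: 2020}) -> 'Seattle|2020'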
214
node_modules/vega-transforms/src/util/AggregateOps.js
generated
vendored
Normal file
@@ -0,0 +1,214 @@
import {extend, identity} from 'vega-util';

export function measureName(op, field, as) {
  return as || (op + (!field ? '' : '_' + field));
}

const noop = () => {};

const base_op = {
  init: noop,
  add: noop,
  rem: noop,
  idx: 0
};

export const AggregateOps = {
  values: {
    init: m => m.cell.store = true,
    value: m => m.cell.data.values(),
    idx: -1
  },
  count: {
    value: m => m.cell.num
  },
  __count__: {
    value: m => m.missing + m.valid
  },
  missing: {
    value: m => m.missing
  },
  valid: {
    value: m => m.valid
  },
  sum: {
    init: m => m.sum = 0,
    value: m => m.sum,
    add: (m, v) => m.sum += +v,
    rem: (m, v) => m.sum -= v
  },
  product: {
    init: m => m.product = 1,
    value: m => m.valid ? m.product : undefined,
    add: (m, v) => m.product *= v,
    rem: (m, v) => m.product /= v
  },
  mean: {
    init: m => m.mean = 0,
    value: m => m.valid ? m.mean : undefined,
    add: (m, v) => (m.mean_d = v - m.mean, m.mean += m.mean_d / m.valid),
    rem: (m, v) => (m.mean_d = v - m.mean, m.mean -= m.valid ? m.mean_d / m.valid : m.mean)
  },
  average: {
    value: m => m.valid ? m.mean : undefined,
    req: ['mean'], idx: 1
  },
  variance: {
    init: m => m.dev = 0,
    value: m => m.valid > 1 ? m.dev / (m.valid - 1) : undefined,
    add: (m, v) => m.dev += m.mean_d * (v - m.mean),
    rem: (m, v) => m.dev -= m.mean_d * (v - m.mean),
    req: ['mean'], idx: 1
  },
  variancep: {
    value: m => m.valid > 1 ? m.dev / m.valid : undefined,
    req: ['variance'], idx: 2
  },
  stdev: {
    value: m => m.valid > 1 ? Math.sqrt(m.dev / (m.valid - 1)) : undefined,
    req: ['variance'], idx: 2
  },
  stdevp: {
    value: m => m.valid > 1 ? Math.sqrt(m.dev / m.valid) : undefined,
    req: ['variance'], idx: 2
  },
  stderr: {
    value: m => m.valid > 1 ? Math.sqrt(m.dev / (m.valid * (m.valid - 1))) : undefined,
    req: ['variance'], idx: 2
  },
  distinct: {
    value: m => m.cell.data.distinct(m.get),
    req: ['values'], idx: 3
  },
  ci0: {
    value: m => m.cell.data.ci0(m.get),
    req: ['values'], idx: 3
  },
  ci1: {
    value: m => m.cell.data.ci1(m.get),
    req: ['values'], idx: 3
  },
  median: {
    value: m => m.cell.data.q2(m.get),
    req: ['values'], idx: 3
  },
  q1: {
    value: m => m.cell.data.q1(m.get),
    req: ['values'], idx: 3
  },
  q3: {
    value: m => m.cell.data.q3(m.get),
    req: ['values'], idx: 3
  },
  min: {
    init: m => m.min = undefined,
    value: m => m.min = (Number.isNaN(m.min) ? m.cell.data.min(m.get) : m.min),
    add: (m, v) => { if (v < m.min || m.min === undefined) m.min = v; },
    rem: (m, v) => { if (v <= m.min) m.min = NaN; },
    req: ['values'], idx: 4
  },
  max: {
    init: m => m.max = undefined,
    value: m => m.max = (Number.isNaN(m.max) ? m.cell.data.max(m.get) : m.max),
    add: (m, v) => { if (v > m.max || m.max === undefined) m.max = v; },
    rem: (m, v) => { if (v >= m.max) m.max = NaN; },
    req: ['values'], idx: 4
  },
  argmin: {
    init: m => m.argmin = undefined,
    value: m => m.argmin || m.cell.data.argmin(m.get),
    add: (m, v, t) => { if (v < m.min) m.argmin = t; },
    rem: (m, v) => { if (v <= m.min) m.argmin = undefined; },
    req: ['min', 'values'], idx: 3
  },
  argmax: {
    init: m => m.argmax = undefined,
    value: m => m.argmax || m.cell.data.argmax(m.get),
    add: (m, v, t) => { if (v > m.max) m.argmax = t; },
    rem: (m, v) => { if (v >= m.max) m.argmax = undefined; },
    req: ['max', 'values'], idx: 3
  }
};

export const ValidAggregateOps = Object.keys(AggregateOps);

function measure(key, value) {
  return out => extend({
    name: key,
    out: out || key
  }, base_op, value);
}

ValidAggregateOps.forEach(key => {
  AggregateOps[key] = measure(key, AggregateOps[key]);
});

export function createMeasure(op, name) {
  return AggregateOps[op](name);
}

function compareIndex(a, b) {
  return a.idx - b.idx;
}

function resolve(agg) {
  const map = {};
  agg.forEach(a => map[a.name] = a);

  const getreqs = a => {
    if (!a.req) return;
    a.req.forEach(key => {
      if (!map[key]) getreqs(map[key] = AggregateOps[key]());
    });
  };
  agg.forEach(getreqs);

  return Object.values(map).sort(compareIndex);
}

function init() {
  this.valid = 0;
  this.missing = 0;
  this._ops.forEach(op => op.init(this));
}

function add(v, t) {
  if (v == null || v === '') { ++this.missing; return; }
  if (v !== v) return;
  ++this.valid;
  this._ops.forEach(op => op.add(this, v, t));
}

function rem(v, t) {
  if (v == null || v === '') { --this.missing; return; }
  if (v !== v) return;
  --this.valid;
  this._ops.forEach(op => op.rem(this, v, t));
}

function set(t) {
  this._out.forEach(op => t[op.out] = op.value(this));
  return t;
}

export function compileMeasures(agg, field) {
  var get = field || identity,
      ops = resolve(agg),
      out = agg.slice().sort(compareIndex);

  function ctr(cell) {
    this._ops = ops;
    this._out = out;
    this.cell = cell;
    this.init();
  }

  ctr.prototype.init = init;
  ctr.prototype.add = add;
  ctr.prototype.rem = rem;
  ctr.prototype.set = set;
  ctr.prototype.get = get;
  ctr.fields = agg.map(op => op.out);

  return ctr;
}
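// Usage sketch (illustrative, hypothetical output names): resolve() pulls in the
// required upstream ops ('variance' and 'mean' for stdev) automatically.
const exampleMeasureClass = compileMeasures(
  [createMeasure('mean', 'mean_v'), createMeasure('stdev', 'stdev_v')],
  t => t.v
);
// exampleMeasureClass.fields -> ['mean_v', 'stdev_v']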
60
node_modules/vega-transforms/src/util/Distributions.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
import {
  randomKDE,
  randomLogNormal,
  randomMixture,
  randomNormal,
  randomUniform
} from 'vega-statistics';

import {error, hasOwnProperty} from 'vega-util';

var Distributions = {
  kde: randomKDE,
  mixture: randomMixture,
  normal: randomNormal,
  lognormal: randomLogNormal,
  uniform: randomUniform
};

var DISTRIBUTIONS = 'distributions',
    FUNCTION = 'function',
    FIELD = 'field';

/**
 * Parse a parameter object for a probability distribution.
 * @param {object} def - The distribution parameter object.
 * @param {function():Array<object>} - A method for requesting
 *   source data. Used for distributions (such as KDE) that
 *   require sample data points. This method will only be
 *   invoked if the 'from' parameter for a target data source
 *   is not provided. Typically this method returns backing
 *   source data for a Pulse object.
 * @return {object} - The output distribution object.
 */
export default function parse(def, data) {
  var func = def[FUNCTION];
  if (!hasOwnProperty(Distributions, func)) {
    error('Unknown distribution function: ' + func);
  }

  var d = Distributions[func]();

  for (var name in def) {
    // if data field, extract values
    if (name === FIELD) {
      d.data((def.from || data()).map(def[name]));
    }

    // if distribution mixture, recurse to parse each definition
    else if (name === DISTRIBUTIONS) {
      d[name](def[name].map(function(_) { return parse(_, data); }));
    }

    // otherwise, simply set the parameter
    else if (typeof d[name] === FUNCTION) {
      d[name](def[name]);
    }
  }

  return d;
}
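// Usage sketch (illustrative; assumes the vega-statistics normal distribution
// exposes mean/stdev setter methods): the data callback is only invoked for
// sample-based distributions such as 'kde' when no 'from' array is given.
const exampleNormal = parse({function: 'normal', mean: 5, stdev: 2}, () => []);
// exampleNormal.sample() draws from the configured distribution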
32
node_modules/vega-transforms/src/util/SortedList.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
import {merge} from 'vega-util';

export default function(idFunc, source, input) {
  var $ = idFunc,
      data = source || [],
      add = input || [],
      rem = {},
      cnt = 0;

  return {
    add: function(t) { add.push(t); },
    remove: function(t) { rem[$(t)] = ++cnt; },
    size: function() { return data.length; },
    data: function(compare, resort) {
      if (cnt) {
        data = data.filter(function(t) { return !rem[$(t)]; });
        rem = {};
        cnt = 0;
      }
      if (resort && compare) {
        data.sort(compare);
      }
      if (add.length) {
        data = compare
          ? merge(compare, data, add.sort(compare))
          : data.concat(add);
        add = [];
      }
      return data;
    }
  };
}
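// Usage sketch (illustrative, as it would appear in a hypothetical consumer
// module that imports the default export): additions are buffered and merged in
// on the next data() call; the comparator is only applied at read time.
import SortedList from './SortedList';
const exampleList = SortedList(t => t.id);
exampleList.add({id: 2});
exampleList.add({id: 1});
exampleList.data((a, b) => a.id - b.id);  // -> [{id: 1}, {id: 2}]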
137
node_modules/vega-transforms/src/util/TupleStore.js
generated
vendored
Normal file
@@ -0,0 +1,137 @@
import {tupleid} from 'vega-dataflow';
import {bootstrapCI, quartiles} from 'vega-statistics';
import {extentIndex, field, hasOwnProperty} from 'vega-util';

export default function TupleStore(key) {
  this._key = key ? field(key) : tupleid;
  this.reset();
}

var prototype = TupleStore.prototype;

prototype.reset = function() {
  this._add = [];
  this._rem = [];
  this._ext = null;
  this._get = null;
  this._q = null;
};

prototype.add = function(v) {
  this._add.push(v);
};

prototype.rem = function(v) {
  this._rem.push(v);
};

prototype.values = function() {
  this._get = null;
  if (this._rem.length === 0) return this._add;

  var a = this._add,
      r = this._rem,
      k = this._key,
      n = a.length,
      m = r.length,
      x = Array(n - m),
      map = {}, i, j, v;

  // use unique key field to clear removed values
  for (i=0; i<m; ++i) {
    map[k(r[i])] = 1;
  }
  for (i=0, j=0; i<n; ++i) {
    if (map[k(v = a[i])]) {
      map[k(v)] = 0;
    } else {
      x[j++] = v;
    }
  }

  this._rem = [];
  return (this._add = x);
};

// memoizing statistics methods

prototype.distinct = function(get) {
  var v = this.values(),
      n = v.length,
      map = {},
      count = 0, s;

  while (--n >= 0) {
    s = get(v[n]) + '';
    if (!hasOwnProperty(map, s)) {
      map[s] = 1;
      ++count;
    }
  }

  return count;
};

prototype.extent = function(get) {
  if (this._get !== get || !this._ext) {
    var v = this.values(),
        i = extentIndex(v, get);
    this._ext = [v[i[0]], v[i[1]]];
    this._get = get;
  }
  return this._ext;
};

prototype.argmin = function(get) {
  return this.extent(get)[0] || {};
};

prototype.argmax = function(get) {
  return this.extent(get)[1] || {};
};

prototype.min = function(get) {
  var m = this.extent(get)[0];
  return m != null ? get(m) : undefined;
};

prototype.max = function(get) {
  var m = this.extent(get)[1];
  return m != null ? get(m) : undefined;
};

prototype.quartile = function(get) {
  if (this._get !== get || !this._q) {
    this._q = quartiles(this.values(), get);
    this._get = get;
  }
  return this._q;
};

prototype.q1 = function(get) {
  return this.quartile(get)[0];
};

prototype.q2 = function(get) {
  return this.quartile(get)[1];
};

prototype.q3 = function(get) {
  return this.quartile(get)[2];
};

prototype.ci = function(get) {
  if (this._get !== get || !this._ci) {
    this._ci = bootstrapCI(this.values(), 1000, 0.05, get);
    this._get = get;
  }
  return this._ci;
};

prototype.ci0 = function(get) {
  return this.ci(get)[0];
};

prototype.ci1 = function(get) {
  return this.ci(get)[1];
};
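// Usage sketch (illustrative, hypothetical key and tuples): values() consolidates
// pending adds/removes via the key field, and statistics such as min/max are then
// memoized per accessor.
const exampleStore = new TupleStore('id');
exampleStore.add({id: 1, v: 10});
exampleStore.add({id: 2, v: 30});
exampleStore.add({id: 3, v: 20});
exampleStore.rem({id: 2, v: 30});
// exampleStore.min(t => t.v) -> 10, exampleStore.max(t => t.v) -> 20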
148
node_modules/vega-transforms/src/util/WindowOps.js
generated
vendored
Normal file
@@ -0,0 +1,148 @@
import {error, zero} from 'vega-util';

export function WindowOp(op, field, param, as) {
  let fn = WindowOps[op](field, param);
  return {
    init: fn.init || zero,
    update: function(w, t) { t[as] = fn.next(w); }
  };
}

export const WindowOps = {
  row_number: function() {
    return {
      next: w => w.index + 1
    };
  },
  rank: function() {
    let rank;
    return {
      init: () => rank = 1,
      next: w => {
        let i = w.index,
            data = w.data;
        return (i && w.compare(data[i - 1], data[i])) ? (rank = i + 1) : rank;
      }
    };
  },
  dense_rank: function() {
    let drank;
    return {
      init: () => drank = 1,
      next: w => {
        let i = w.index,
            d = w.data;
        return (i && w.compare(d[i - 1], d[i])) ? ++drank : drank;
      }
    };
  },
  percent_rank: function() {
    let rank = WindowOps.rank(),
        next = rank.next;
    return {
      init: rank.init,
      next: w => (next(w) - 1) / (w.data.length - 1)
    };
  },
  cume_dist: function() {
    let cume;
    return {
      init: () => cume = 0,
      next: w => {
        let i = w.index,
            d = w.data,
            c = w.compare;
        if (cume < i) {
          while (i + 1 < d.length && !c(d[i], d[i + 1])) ++i;
          cume = i;
        }
        return (1 + cume) / d.length;
      }
    };
  },
  ntile: function(field, num) {
    num = +num;
    if (!(num > 0)) error('ntile num must be greater than zero.');
    let cume = WindowOps.cume_dist(),
        next = cume.next;
    return {
      init: cume.init,
      next: w => Math.ceil(num * next(w))
    };
  },

  lag: function(field, offset) {
    offset = +offset || 1;
    return {
      next: w => {
        let i = w.index - offset;
        return i >= 0 ? field(w.data[i]) : null;
      }
    };
  },
  lead: function(field, offset) {
    offset = +offset || 1;
    return {
      next: w => {
        let i = w.index + offset,
            d = w.data;
        return i < d.length ? field(d[i]) : null;
      }
    };
  },

  first_value: function(field) {
    return {
      next: w => field(w.data[w.i0])
    };
  },
  last_value: function(field) {
    return {
      next: w => field(w.data[w.i1 - 1])
    };
  },
  nth_value: function(field, nth) {
    nth = +nth;
    if (!(nth > 0)) error('nth_value nth must be greater than zero.');
    return {
      next: w => {
        let i = w.i0 + (nth - 1);
        return i < w.i1 ? field(w.data[i]) : null;
      }
    };
  },

  prev_value: function(field) {
    let prev;
    return {
      init: () => prev = null,
      next: w => {
        let v = field(w.data[w.index]);
        return v != null ? (prev = v) : prev;
      }
    };
  },
  next_value: function(field) {
    let v, i;
    return {
      init: () => (v = null, i = -1),
      next: w => {
        let d = w.data;
        return w.index <= i ? v
          : (i = find(field, d, w.index)) < 0
            ? (i = d.length, v = null)
            : (v = field(d[i]));
      }
    };
  }
};

function find(field, data, index) {
  for (let n = data.length; index < n; ++index) {
    let v = field(data[index]);
    if (v != null) return index;
  }
  return -1;
}

export var ValidWindowOps = Object.keys(WindowOps);
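// Usage sketch (illustrative, hypothetical window object): each operator exposes
// init/next over a window descriptor w with {index, i0, i1, data, compare}.
const exampleLag = WindowOps.lag(t => t.v, 1);
const exampleW = {index: 1, i0: 0, i1: 2, data: [{v: 10}, {v: 20}], compare: () => -1};
// exampleLag.next(exampleW) -> 10 (the value of the previous row)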
146
node_modules/vega-transforms/src/util/WindowState.js
generated
vendored
Normal file
@@ -0,0 +1,146 @@
import {compileMeasures, createMeasure, measureName} from './AggregateOps';
import TupleStore from './TupleStore';
import {WindowOp, WindowOps} from './WindowOps';
import {accessorFields, accessorName, array, error, hasOwnProperty} from 'vega-util';

export default function WindowState(_) {
  let self = this,
      ops = array(_.ops),
      fields = array(_.fields),
      params = array(_.params),
      as = array(_.as),
      outputs = self.outputs = [],
      windows = self.windows = [],
      inputs = {},
      map = {},
      countOnly = true,
      counts = [],
      measures = [];

  function visitInputs(f) {
    array(accessorFields(f)).forEach(_ => inputs[_] = 1);
  }
  visitInputs(_.sort);

  ops.forEach(function(op, i) {
    let field = fields[i],
        mname = accessorName(field),
        name = measureName(op, mname, as[i]);

    visitInputs(field);
    outputs.push(name);

    // Window operation
    if (hasOwnProperty(WindowOps, op)) {
      windows.push(WindowOp(op, fields[i], params[i], name));
    }

    // Aggregate operation
    else {
      if (field == null && op !== 'count') {
        error('Null aggregate field specified.');
      }
      if (op === 'count') {
        counts.push(name);
        return;
      }

      countOnly = false;
      let m = map[mname];
      if (!m) {
        m = (map[mname] = []);
        m.field = field;
        measures.push(m);
      }
      m.push(createMeasure(op, name));
    }
  });

  if (counts.length || measures.length) {
    self.cell = cell(measures, counts, countOnly);
  }

  self.inputs = Object.keys(inputs);
}

const prototype = WindowState.prototype;

prototype.init = function() {
  this.windows.forEach(_ => _.init());
  if (this.cell) this.cell.init();
};

prototype.update = function(w, t) {
  let self = this,
      cell = self.cell,
      wind = self.windows,
      data = w.data,
      m = wind && wind.length,
      j;

  if (cell) {
    for (j=w.p0; j<w.i0; ++j) cell.rem(data[j]);
    for (j=w.p1; j<w.i1; ++j) cell.add(data[j]);
    cell.set(t);
  }
  for (j=0; j<m; ++j) wind[j].update(w, t);
};
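// Worked sketch (illustrative): sliding the aggregate frame from [p0, p1) = [0, 3)
// to [i0, i1) = [1, 4) removes exactly row 0 and adds exactly row 3, mirroring the
// two loops in prototype.update above.
const exampleMove = {p0: 0, p1: 3, i0: 1, i1: 4};
const removedRows = [], addedRows = [];
for (let j = exampleMove.p0; j < exampleMove.i0; ++j) removedRows.push(j);  // [0]
for (let j = exampleMove.p1; j < exampleMove.i1; ++j) addedRows.push(j);    // [3]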

function cell(measures, counts, countOnly) {
  measures = measures.map(m => compileMeasures(m, m.field));

  let cell = {
    num: 0,
    agg: null,
    store: false,
    count: counts
  };

  if (!countOnly) {
    var n = measures.length,
        a = cell.agg = Array(n),
        i = 0;
    for (; i<n; ++i) a[i] = new measures[i](cell);
  }

  if (cell.store) {
    var store = cell.data = new TupleStore();
  }

  cell.add = function(t) {
    cell.num += 1;
    if (countOnly) return;
    if (store) store.add(t);
    for (let i=0; i<n; ++i) {
      a[i].add(a[i].get(t), t);
    }
  };

  cell.rem = function(t) {
    cell.num -= 1;
    if (countOnly) return;
    if (store) store.rem(t);
    for (let i=0; i<n; ++i) {
      a[i].rem(a[i].get(t), t);
    }
  };

  cell.set = function(t) {
    let i, n;

    // consolidate stored values
    if (store) store.values();

    // update tuple properties
    for (i=0, n=counts.length; i<n; ++i) t[counts[i]] = cell.num;
    if (!countOnly) for (i=0, n=a.length; i<n; ++i) a[i].set(t);
  };

  cell.init = function() {
    cell.num = 0;
    if (store) store.reset();
    for (let i=0; i<n; ++i) a[i].init();
  };

  return cell;
}
34
node_modules/vega-transforms/src/util/util.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
import {accessorName} from 'vega-util';

// use either provided alias or accessor field name
export function fieldNames(fields, as) {
  if (!fields) return null;
  return fields.map(function(f, i) {
    return as[i] || accessorName(f);
  });
}

export function partition(data, groupby, field) {
  var groups = [],
      get = function(f) { return f(t); },
      map, i, n, t, k, g;

  // partition data points into groups
  if (groupby == null) {
    groups.push(data.map(field));
  } else {
    for (map={}, i=0, n=data.length; i<n; ++i) {
      t = data[i];
      k = groupby.map(get);
      g = map[k];
      if (!g) {
        map[k] = (g = []);
        g.dims = k;
        groups.push(g);
      }
      g.push(field(t));
    }
  }

  return groups;
}
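// Usage sketch (illustrative, hypothetical data): group values of field 'v' by
// the accessor for 'g'; each group also records its groupby key values on .dims.
const exampleGroups = partition(
  [{g: 'a', v: 1}, {g: 'b', v: 2}, {g: 'a', v: 3}],
  [t => t.g],
  t => t.v
);
// exampleGroups -> [[1, 3], [2]]; exampleGroups[0].dims -> ['a']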