// Bundled vega-lite build vendored into wakapi-readme-stats (28305 lines, 860 KiB, JavaScript).
"use strict";
|
|
|
|
// ---------------------------------------------------------------------------
// Babel runtime helpers (machine-generated; do not hand-edit logic).
// They emulate ES2015+ features (destructuring, classes, spread, for-of,
// typeof symbols) for ES5 environments.
// ---------------------------------------------------------------------------

// Destructure an array-ish value into head elements plus rest.
function _toArray(arr) { return _arrayWithHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableRest(); }

// super.prop read: Reflect.get when available, else walk the prototype chain.
function _get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { _get = Reflect.get; } else { _get = function _get(target, property, receiver) { var base = _superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return _get(target, property, receiver || target); }

// Find the prototype-chain object that owns `property` (or null).
function _superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = _getPrototypeOf(object); if (object === null) break; } return object; }

// class X extends Y: wire prototype and static inheritance.
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }

// Build the super() constructor-call helper for a derived class.
function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }

// Constructor-return semantics: an object/function return overrides `this`.
function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }

// Throw if `this` is used before super() has run.
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }

// Object.getPrototypeOf with a __proto__ fallback for old engines.
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }

// Define an enumerable/writable property (computed keys, class fields).
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }

// Reflect.construct emulation (new Parent(...args) with optional new.target).
function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }

// Feature test: is a spec-compliant Reflect.construct available?
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }

// Object.setPrototypeOf with a __proto__ fallback for old engines.
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }

// Guard: class constructors must be invoked with `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

// Install method/accessor descriptors onto a class (non-enumerable).
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }

// Attach prototype and static members produced by the class transform.
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }

// Array destructuring with an element limit: const [a, b] = x.
function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); }

// Last-resort destructuring failure.
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }

// Pull up to `i` elements from an iterable, closing the iterator properly.
function _iterableToArrayLimit(arr, i) { if (typeof Symbol === "undefined" || !(Symbol.iterator in Object(arr))) return; var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }

// Fast path: real arrays destructure as themselves.
function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }

// Spread: [...x] for arrays, iterables, and array-likes.
function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); }

// Last-resort spread failure.
function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }

// Materialize any iterable into an array.
function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && Symbol.iterator in Object(iter)) return Array.from(iter); }

// Fast path: copy a real array for spreading.
function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); }

// for-of transform: returns {s: start, n: next, e: error, f: finish} so the
// loop can close iterators and re-throw captured errors correctly.
function _createForOfIteratorHelper(o, allowArrayLike) { var it; if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e2) { throw _e2; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = o[Symbol.iterator](); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e3) { didErr = true; err = _e3; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }

// Convert Map/Set/Arguments/typed arrays/strings to plain arrays.
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }

// Copy the first `len` (default: all) entries of an array-like into an array.
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }

// typeof that reports "symbol" correctly on engines with polyfilled Symbol.
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
|
|
|
|
(function (global, factory) {
|
|
(typeof exports === "undefined" ? "undefined" : _typeof(exports)) === 'object' && typeof module !== 'undefined' ? factory(exports) : typeof define === 'function' && define.amd ? define(['exports'], factory) : (global = global || self, factory(global.vegaLite = {}));
|
|
})(void 0, function (exports) {
|
|
'use strict';
|
|
|
|
// Version string of the bundled vega-lite library.
var version = "4.14.0";
|
|
|
|
/**
 * Annotate an accessor function with metadata and return it.
 * @param fn     the accessor function (mutated in place)
 * @param fields array of field names the accessor depends on (defaults to [])
 * @param name   the accessor's display name (stored as fn.fname)
 * @returns the same function, now carrying .fields and .fname
 */
function accessor(fn, fields, name) {
  var annotated = fn;
  annotated.fields = fields || [];
  annotated.fname = name;
  return annotated;
}
|
|
|
|
/**
 * Build a getter function for an access path (array of keys).
 * Single-key paths get a specialized direct-property getter.
 */
function getter(path) {
  if (path.length === 1) {
    return get1(path[0]);
  }
  return getN(path);
}

// Getter for a one-element path: obj[field].
var get1 = function get1(field) {
  return function (obj) {
    return obj[field];
  };
};

// Getter for a multi-element path: walks obj[p0][p1]... in order.
var getN = function getN(path) {
  var depth = path.length;
  return function (obj) {
    var value = obj;
    for (var level = 0; level < depth; ++level) {
      value = value[path[level]];
    }
    return value;
  };
};
|
|
|
|
/**
 * Centralized throw helper used throughout the module.
 * @throws {Error} always, with the given message
 */
function error(message) {
  throw new Error(message);
}
|
|
|
|
/**
 * Split a field access path string into an array of unescaped key strings.
 * Supports dot notation ("a.b"), bracket notation with single or double
 * quotes ("a['b c']", 'a["b"]'), numeric brackets ("a[0]"), and backslash
 * escapes. Calls error() (which throws) on unbalanced brackets or quotes.
 */
function splitAccessPath(p) {
  var path = [],
      q = null,   // currently open quote character, or null when unquoted
      b = 0,      // bracket state: segment start while inside [...], -1 right after a quoted segment closes, 0 otherwise
      n = p.length,
      s = '',     // unescaped prefix accumulated for the current segment
      i,          // start index of the current segment in p
      j,          // scan index
      c;          // current character
  p = p + ''; // coerce input to string

  // Emit the current segment [i, j) — prefixed by any escaped text in s —
  // and advance the segment start past j.
  function push() {
    path.push(s + p.substring(i, j));
    s = '';
    i = j + 1;
  }

  for (i = j = 0; j < n; ++j) {
    c = p[j];

    if (c === '\\') {
      // Backslash escape: keep text before it, then take the next char verbatim.
      s += p.substring(i, j);
      s += p.substring(++j, ++j);
      i = j;
    } else if (c === q) {
      // Matching close quote ends a quoted bracket segment.
      push();
      q = null;
      b = -1; // expect the closing ']' next
    } else if (q) {
      // Inside a quoted segment: characters are taken verbatim by push().
      continue;
    } else if (i === b && c === '"') {
      // Opening double quote immediately after '['.
      i = j + 1;
      q = c;
    } else if (i === b && c === "'") {
      // Opening single quote immediately after '['.
      i = j + 1;
      q = c;
    } else if (c === '.' && !b) {
      // Dot separator (only meaningful outside brackets).
      if (j > i) {
        push();
      } else {
        i = j + 1;
      }
    } else if (c === '[') {
      if (j > i) push();
      b = i = j + 1;
    } else if (c === ']') {
      if (!b) error('Access path missing open bracket: ' + p);
      if (b > 0) push(); // unquoted bracket content (e.g. a numeric index)
      b = 0;
      i = j + 1;
    }
  }

  // Any non-zero bracket state or open quote at the end is a syntax error.
  if (b) error('Access path missing closing bracket: ' + p);
  if (q) error('Access path missing closing quote: ' + p);

  // Flush a trailing unterminated segment (j is bumped so push() takes
  // the remainder of the string; substring clamps past the end).
  if (j > i) {
    j++;
    push();
  }

  return path;
}
|
|
|
|
/**
 * Create a named accessor for a field access path string.
 * @param field access path, e.g. "a.b" or "a['b c']"
 * @param name  optional accessor name (defaults to the field string)
 * @param opt   optional object; opt.get(path), if provided, replaces the
 *              default getter factory
 */
function field(field, name, opt) {
  var path = splitAccessPath(field);
  // For single-segment paths, report the unescaped segment as the dependency.
  field = path.length === 1 ? path[0] : field;
  return accessor((opt && opt.get || getter)(path), [field], name || field);
}
|
|
|
|
// Shared empty dependency list for constant accessors.
var empty = [];

// Accessor for the "id" field.
var id = field('id');

// Identity accessor: returns its argument unchanged.
var identity = accessor(function (_) {
  return _;
}, empty, 'identity');

// Constant accessor: always 0.
var zero = accessor(function () {
  return 0;
}, empty, 'zero');

// Constant accessor: always 1.
var one = accessor(function () {
  return 1;
}, empty, 'one');

// Constant accessor: always true.
var truthy = accessor(function () {
  return true;
}, empty, 'true');

// Constant accessor: always false.
var falsy = accessor(function () {
  return false;
}, empty, 'false');
|
|
|
|
/**
 * Invoke console[method] with a severity prefix followed by the caller's
 * arguments (an arguments object or other array-like).
 */
function log(method, level, input) {
  var args = [level].concat([].slice.call(input));
  console[method].apply(console, args); // eslint-disable-line no-console
}
|
|
|
|
// Logging levels, in order of increasing verbosity. ($1 suffix avoids
// shadowing the global Error constructor.)
var None = 0;
var Error$1 = 1;
var Warn = 2;
var Info = 3;
var Debug = 4;
|
|
|
|
/**
 * Create a logger with a mutable verbosity level and chainable
 * error/warn/info/debug methods. Messages above the current level are
 * suppressed.
 * @param _      initial level (defaults to None, i.e. silent)
 * @param method optional console method name to use for ALL output;
 *               otherwise 'error' / 'warn' / 'log' per severity
 */
function logger(_, method) {
  var _level = _ || None; // closure state: current verbosity

  return {
    // Getter/setter: with an argument, sets the level and returns `this`
    // for chaining; without, returns the current level.
    level: function level(_) {
      if (arguments.length) {
        _level = +_;
        return this;
      } else {
        return _level;
      }
    },
    error: function error() {
      if (_level >= Error$1) log(method || 'error', 'ERROR', arguments);
      return this;
    },
    warn: function warn() {
      if (_level >= Warn) log(method || 'warn', 'WARN', arguments);
      return this;
    },
    info: function info() {
      if (_level >= Info) log(method || 'log', 'INFO', arguments);
      return this;
    },
    debug: function debug() {
      if (_level >= Debug) log(method || 'log', 'DEBUG', arguments);
      return this;
    }
  };
}
|
|
|
|
// Native array test, aliased for brevity across this module.
var isArray = Array.isArray;

/**
 * True when _ is an object in the Object(_) sense: any non-null object,
 * array, or function (primitives and null return false).
 */
function isObject(_) {
  return _ !== null && (typeof _ === 'object' || typeof _ === 'function');
}
|
|
|
|
/**
 * Guard used by writeConfig/mergeConfig against prototype pollution when
 * merging (possibly untrusted) config objects. The original only rejected
 * '__proto__'; 'constructor' and 'prototype' are now rejected as well as
 * defense in depth, since a merge that recurses through those keys can
 * reach an object's prototype chain.
 */
var isLegalKey = function isLegalKey(key) {
  return key !== '__proto__' && key !== 'constructor' && key !== 'prototype';
};
|
|
|
|
/**
 * Merge any number of config objects left-to-right into a fresh object.
 * 'signals' arrays are merged by name (later sources win); 'legend' merges
 * one level deep for its layout entry only; 'style' merges one level deep
 * for all entries; all other object values overwrite without deep merging.
 * Keys are filtered through isLegalKey (prototype-pollution guard).
 */
function mergeConfig() {
  for (var _len = arguments.length, configs = new Array(_len), _key = 0; _key < _len; _key++) {
    configs[_key] = arguments[_key];
  }

  return configs.reduce(function (out, source) {
    for (var key in source) {
      if (key === 'signals') {
        // for signals, we merge the signals arrays
        // source signals take precedence over
        // existing signals with the same name
        out.signals = mergeNamed(out.signals, source.signals);
      } else {
        // otherwise, merge objects subject to recursion constraints
        // for legend block, recurse for the layout entry only
        // for style block, recurse for all properties
        // otherwise, no recursion: objects overwrite, no merging
        var r = key === 'legend' ? {
          layout: 1
        } : key === 'style' ? true : null;
        writeConfig(out, key, source[key], r);
      }
    }

    return out;
  }, {});
}
|
|
|
|
/**
 * Write `value` under output[key], optionally merging one level deep.
 * @param recurse true to recurse into every sub-key, or an object whose
 *        truthy entries name the sub-keys to recurse into; falsy to
 *        overwrite wholesale.
 * Keys rejected by isLegalKey are silently skipped (prototype-pollution
 * guard).
 */
function writeConfig(output, key, value, recurse) {
  if (!isLegalKey(key)) return;
  var k, o;

  if (isObject(value) && !isArray(value)) {
    // Merge into the existing object value, creating it if absent/non-object.
    o = isObject(output[key]) ? output[key] : output[key] = {};

    for (k in value) {
      if (recurse && (recurse === true || recurse[k])) {
        writeConfig(o, k, value[k]);
      } else if (isLegalKey(k)) {
        o[k] = value[k];
      }
    }
  } else {
    // Arrays and non-objects overwrite the previous value entirely.
    output[key] = value;
  }
}
|
|
|
|
/**
 * Merge two arrays of named entries (objects with a .name property).
 * Entries from b take precedence over entries in a with the same name;
 * order is b's entries first, then the unmatched entries of a.
 * If a is null/undefined, b is returned as-is.
 */
function mergeNamed(a, b) {
  if (a == null) return b;

  var seen = {};
  var merged = [];

  var addUnlessSeen = function (entry) {
    if (seen[entry.name]) return;
    seen[entry.name] = 1;
    merged.push(entry);
  };

  // b first so its entries win name collisions.
  b.forEach(addUnlessSeen);
  a.forEach(addUnlessSeen);
  return merged;
}
|
|
|
|
/**
 * Normalize a value to an array: null/undefined become [], arrays pass
 * through unchanged, and any other value is wrapped as a one-element array.
 */
function array(_) {
  if (_ == null) {
    return [];
  }
  return isArray(_) ? _ : [_];
}
|
|
|
|
// Type predicate: true when _ is callable.
function isFunction(_) {
  return typeof _ === 'function';
}
|
|
|
|
// Cached Object.prototype.hasOwnProperty so the check works even on objects
// that shadow or lack the method.
var hop = Object.prototype.hasOwnProperty;

// True when `property` is an own (non-inherited) property of `object`.
function hasOwnProperty(object, property) {
  return hop.call(object, property);
}
|
|
|
|
// Primitive type predicates. Boxed wrapper objects (new Boolean/Number/
// String) return false, since typeof reports 'object' for them.
function isBoolean(_) {
  return typeof _ === 'boolean';
}

function isNumber(_) {
  return typeof _ === 'number';
}

function isString(_) {
  return typeof _ === 'string';
}
|
|
|
|
/**
 * Serialize a value as valid JSON that is also valid JavaScript source.
 * Arrays are serialized element-wise (recursively); objects and strings go
 * through JSON.stringify; other primitives are returned unchanged.
 */
function $(x) {
  return isArray(x) ? '[' + x.map($) + ']' : isObject(x) || isString(x) ? // Output valid JSON and JS source strings.
  // See http://timelessrepo.com/json-isnt-a-javascript-subset
  // Use global regexes so that EVERY occurrence of the JS line separators
  // U+2028/U+2029 is escaped. (A string pattern to .replace only replaces
  // the first occurrence, leaving later ones to break JS parsing.)
  JSON.stringify(x).replace(/\u2028/g, "\\u2028").replace(/\u2029/g, "\\u2029") : x;
}
|
|
|
|
/**
 * Build a lookup object mapping each array element (as a string key) to
 * true, for O(1) membership tests via `key in set`.
 */
function toSet(_) {
  var set = {};
  var count = _.length;
  for (var index = 0; index < count; ++index) {
    set[_[index]] = true;
  }
  return set;
}
|
|
/*! *****************************************************************************
|
|
Copyright (c) Microsoft Corporation.
|
|
Permission to use, copy, modify, and/or distribute this software for any
|
|
purpose with or without fee is hereby granted.
|
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
|
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
|
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
|
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
|
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
|
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
|
PERFORMANCE OF THIS SOFTWARE.
|
|
***************************************************************************** */
|
|
|
|
|
|
/**
 * TypeScript __rest helper: copy s's own enumerable properties into a new
 * object, excluding any whose keys appear in the array e. Enumerable own
 * symbol properties are copied too (when the engine supports symbols).
 */
function __rest(s, e) {
  var t = {};

  for (var p in s) {
    if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
  }

  // Copy enumerable own symbol-keyed properties not excluded by e.
  if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
    if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
  }
  return t;
}
|
|
|
|
// Polyfills for Array.prototype.flat / flatMap on pre-ES2019 engines.
// NOTE(review): extends native prototypes — kept as-is because the rest of
// the bundle relies on .flat()/.flatMap() being available.
Array.prototype.flat || Object.defineProperty(Array.prototype, "flat", {
  configurable: !0,
  value: function r() {
    // Depth defaults to 1; non-numeric arguments also mean depth 1.
    var t = isNaN(arguments[0]) ? 1 : Number(arguments[0]);
    return t ? Array.prototype.reduce.call(this, function (a, e) {
      return Array.isArray(e) ? a.push.apply(a, r.call(e, t - 1)) : a.push(e), a;
    }, []) : Array.prototype.slice.call(this);
  },
  writable: !0
}), Array.prototype.flatMap || Object.defineProperty(Array.prototype, "flatMap", {
  configurable: !0,
  value: function value(r) {
    // map with the caller's arguments, then flatten one level.
    return Array.prototype.map.apply(this, arguments).flat();
  },
  writable: !0
});
|
|
|
|
/**
 * Rollup CommonJS interop shim: execute fn with a synthetic module object
 * and return its module.exports.
 */
function createCommonjsModule(fn, basedir, module) {
  return module = {
    path: basedir,
    exports: {},
    require: function require(path, base) {
      return commonjsRequire(path, base === undefined || base === null ? module.path : base);
    }
  }, fn(module, module.exports), module.exports;
}

// Dynamic require is not supported in this bundle; any call throws.
function commonjsRequire() {
  throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
}
|
|
|
|
// Vendored `clone` npm package wrapped as a CommonJS module: deep copy with
// circular-reference support for plain objects, arrays, Map, Set, Promise,
// Date, RegExp, Buffer, and Error instances.
var clone_1 = createCommonjsModule(function (module) {
  var clone = function () {
    // instanceof that tolerates a null/undefined type.
    function _instanceof(obj, type) {
      return type != null && obj instanceof type;
    }

    // Where Map/Set/Promise do not exist, substitute dummy constructors that
    // no value will ever be an instanceof.
    var nativeMap;

    try {
      nativeMap = Map;
    } catch (_) {
      // maybe a reference error because no `Map`. Give it a dummy value that no
      // value will ever be an instanceof.
      nativeMap = function nativeMap() {};
    }

    var nativeSet;

    try {
      nativeSet = Set;
    } catch (_) {
      nativeSet = function nativeSet() {};
    }

    var nativePromise;

    try {
      nativePromise = Promise;
    } catch (_) {
      nativePromise = function nativePromise() {};
    }
    /**
     * Clones (copies) an Object using deep copying.
     *
     * This function supports circular references by default, but if you are certain
     * there are no circular references in your object, you can save some CPU time
     * by calling clone(obj, false).
     *
     * Caution: if `circular` is false and `parent` contains circular references,
     * your program may enter an infinite loop and crash.
     *
     * @param `parent` - the object to be cloned
     * @param `circular` - set to true if the object to be cloned may contain
     *    circular references. (optional - true by default)
     * @param `depth` - set to a number if the object is only to be cloned to
     *    a particular depth. (optional - defaults to Infinity)
     * @param `prototype` - sets the prototype to be used when cloning an object.
     *    (optional - defaults to parent prototype).
     * @param `includeNonEnumerable` - set to true if the non-enumerable properties
     *    should be cloned as well. Non-enumerable properties on the prototype
     *    chain will be ignored. (optional - false by default)
     */

    function clone(parent, circular, depth, prototype, includeNonEnumerable) {
      // Options-object call style: clone(parent, {circular, depth, ...}).
      if (_typeof(circular) === 'object') {
        depth = circular.depth;
        prototype = circular.prototype;
        includeNonEnumerable = circular.includeNonEnumerable;
        circular = circular.circular;
      } // maintain two arrays for circular references, where corresponding parents
      // and children have the same index

      var allParents = [];
      var allChildren = [];
      var useBuffer = typeof Buffer != 'undefined';
      if (typeof circular == 'undefined') circular = true;
      if (typeof depth == 'undefined') depth = Infinity; // recurse this function so we don't reset allParents and allChildren

      function _clone(parent, depth) {
        // cloning null always returns null
        if (parent === null) return null;
        if (depth === 0) return parent;
        var child;
        var proto;

        // Primitives (and functions) are returned as-is.
        if (_typeof(parent) != 'object') {
          return parent;
        }

        // Choose the container type for the clone.
        if (_instanceof(parent, nativeMap)) {
          child = new nativeMap();
        } else if (_instanceof(parent, nativeSet)) {
          child = new nativeSet();
        } else if (_instanceof(parent, nativePromise)) {
          // Promises are cloned by chaining: the clone settles with a deep
          // copy of the original's settled value.
          child = new nativePromise(function (resolve, reject) {
            parent.then(function (value) {
              resolve(_clone(value, depth - 1));
            }, function (err) {
              reject(_clone(err, depth - 1));
            });
          });
        } else if (clone.__isArray(parent)) {
          child = [];
        } else if (clone.__isRegExp(parent)) {
          child = new RegExp(parent.source, __getRegExpFlags(parent));
          if (parent.lastIndex) child.lastIndex = parent.lastIndex;
        } else if (clone.__isDate(parent)) {
          child = new Date(parent.getTime());
        } else if (useBuffer && Buffer.isBuffer(parent)) {
          if (Buffer.allocUnsafe) {
            // Node.js >= 4.5.0
            child = Buffer.allocUnsafe(parent.length);
          } else {
            // Older Node.js versions
            child = new Buffer(parent.length);
          }

          parent.copy(child);
          return child;
        } else if (_instanceof(parent, Error)) {
          child = Object.create(parent);
        } else {
          if (typeof prototype == 'undefined') {
            proto = Object.getPrototypeOf(parent);
            child = Object.create(proto);
          } else {
            child = Object.create(prototype);
            proto = prototype;
          }
        }

        if (circular) {
          var index = allParents.indexOf(parent);

          // Already-cloned ancestor: reuse its clone to preserve cycles.
          if (index != -1) {
            return allChildren[index];
          }

          allParents.push(parent);
          allChildren.push(child);
        }

        if (_instanceof(parent, nativeMap)) {
          parent.forEach(function (value, key) {
            var keyChild = _clone(key, depth - 1);

            var valueChild = _clone(value, depth - 1);

            child.set(keyChild, valueChild);
          });
        }

        if (_instanceof(parent, nativeSet)) {
          parent.forEach(function (value) {
            var entryChild = _clone(value, depth - 1);

            child.add(entryChild);
          });
        }

        for (var i in parent) {
          var attrs;

          if (proto) {
            attrs = Object.getOwnPropertyDescriptor(proto, i);
          }

          // Skip properties backed by a prototype accessor with no setter.
          if (attrs && attrs.set == null) {
            continue;
          }

          child[i] = _clone(parent[i], depth - 1);
        }

        if (Object.getOwnPropertySymbols) {
          var symbols = Object.getOwnPropertySymbols(parent);

          for (var i = 0; i < symbols.length; i++) {
            // Don't need to worry about cloning a symbol because it is a primitive,
            // like a number or string.
            var symbol = symbols[i];
            var descriptor = Object.getOwnPropertyDescriptor(parent, symbol);

            if (descriptor && !descriptor.enumerable && !includeNonEnumerable) {
              continue;
            }

            child[symbol] = _clone(parent[symbol], depth - 1);

            if (!descriptor.enumerable) {
              Object.defineProperty(child, symbol, {
                enumerable: false
              });
            }
          }
        }

        if (includeNonEnumerable) {
          var allPropertyNames = Object.getOwnPropertyNames(parent);

          for (var i = 0; i < allPropertyNames.length; i++) {
            var propertyName = allPropertyNames[i];
            var descriptor = Object.getOwnPropertyDescriptor(parent, propertyName);

            if (descriptor && descriptor.enumerable) {
              continue;
            }

            child[propertyName] = _clone(parent[propertyName], depth - 1);
            Object.defineProperty(child, propertyName, {
              enumerable: false
            });
          }
        }

        return child;
      }

      return _clone(parent, depth);
    }
    /**
     * Simple flat clone using prototype, accepts only objects, usefull for property
     * override on FLAT configuration object (no nested props).
     *
     * USE WITH CAUTION! This may not behave as you wish if you do not know how this
     * works.
     */

    clone.clonePrototype = function clonePrototype(parent) {
      if (parent === null) return null;

      var c = function c() {};

      c.prototype = parent;
      return new c();
    }; // private utility functions

    function __objToStr(o) {
      return Object.prototype.toString.call(o);
    }

    clone.__objToStr = __objToStr;

    function __isDate(o) {
      return _typeof(o) === 'object' && __objToStr(o) === '[object Date]';
    }

    clone.__isDate = __isDate;

    function __isArray(o) {
      return _typeof(o) === 'object' && __objToStr(o) === '[object Array]';
    }

    clone.__isArray = __isArray;

    function __isRegExp(o) {
      return _typeof(o) === 'object' && __objToStr(o) === '[object RegExp]';
    }

    clone.__isRegExp = __isRegExp;

    // Reconstruct a regex's flag string from its boolean flag properties.
    function __getRegExpFlags(re) {
      var flags = '';
      if (re.global) flags += 'g';
      if (re.ignoreCase) flags += 'i';
      if (re.multiline) flags += 'm';
      return flags;
    }

    clone.__getRegExpFlags = __getRegExpFlags;
    return clone;
  }();

  if (module.exports) {
    module.exports = clone;
  }
}); // do not edit .js files directly - edit src/index.jst
|
|
|
|
/**
 * Vendored fast-deep-equal: structural equality for JSON-like values, plus
 * special handling for arrays, RegExp, and objects with custom valueOf /
 * toString (covers Date). NaN compares equal to NaN.
 */
var fastDeepEqual = function equal(a, b) {
  if (a === b) return true;

  if (a && b && _typeof(a) == 'object' && _typeof(b) == 'object') {
    // Different constructors can never be deep-equal.
    if (a.constructor !== b.constructor) return false;
    var length, i, keys;

    if (Array.isArray(a)) {
      length = a.length;
      if (length != b.length) return false;

      for (i = length; i-- !== 0;) {
        if (!equal(a[i], b[i])) return false;
      }

      return true;
    }

    if (a.constructor === RegExp) return a.source === b.source && a.flags === b.flags;
    // Custom valueOf/toString (e.g. Date) compare by their primitive form.
    if (a.valueOf !== Object.prototype.valueOf) return a.valueOf() === b.valueOf();
    if (a.toString !== Object.prototype.toString) return a.toString() === b.toString();
    keys = Object.keys(a);
    length = keys.length;
    if (length !== Object.keys(b).length) return false;

    // First verify b has every own key of a, then compare values recursively.
    for (i = length; i-- !== 0;) {
      if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false;
    }

    for (i = length; i-- !== 0;) {
      var key = keys[i];
      if (!equal(a[key], b[key])) return false;
    }

    return true;
  } // true if both NaN, false otherwise

  return a !== a && b !== b;
};
|
|
|
|
/**
 * Vendored fast-json-stable-stringify: deterministic JSON serialization
 * with sorted object keys. `opts` may be a comparator function or an object
 * {cmp, cycles}; when cycles is true, circular references serialize as
 * "__cycle__" instead of throwing a TypeError.
 */
var fastJsonStableStringify = function fastJsonStableStringify(data, opts) {
  if (!opts) opts = {};
  if (typeof opts === 'function') opts = {
    cmp: opts
  };
  var cycles = typeof opts.cycles === 'boolean' ? opts.cycles : false;

  // Adapt the user comparator from ({key, value}, {key, value}) pairs to the
  // plain key strings that Array#sort supplies.
  var cmp = opts.cmp && function (f) {
    return function (node) {
      return function (a, b) {
        var aobj = {
          key: a,
          value: node[a]
        };
        var bobj = {
          key: b,
          value: node[b]
        };
        return f(aobj, bobj);
      };
    };
  }(opts.cmp);

  // Stack of ancestor nodes for cycle detection.
  var seen = [];
  return function stringify(node) {
    // Honor toJSON, matching JSON.stringify semantics.
    if (node && node.toJSON && typeof node.toJSON === 'function') {
      node = node.toJSON();
    }

    if (node === undefined) return;
    if (typeof node == 'number') return isFinite(node) ? '' + node : 'null';
    if (_typeof(node) !== 'object') return JSON.stringify(node);
    var i, out;

    if (Array.isArray(node)) {
      out = '[';

      for (i = 0; i < node.length; i++) {
        if (i) out += ',';
        out += stringify(node[i]) || 'null';
      }

      return out + ']';
    }

    if (node === null) return 'null';

    if (seen.indexOf(node) !== -1) {
      if (cycles) return JSON.stringify('__cycle__');
      throw new TypeError('Converting circular structure to JSON');
    }

    var seenIndex = seen.push(node) - 1;
    var keys = Object.keys(node).sort(cmp && cmp(node));
    out = '';

    for (i = 0; i < keys.length; i++) {
      var key = keys[i];
      var value = stringify(node[key]);
      // Skip keys whose values serialize to nothing (e.g. undefined).
      if (!value) continue;
      if (out) out += ',';
      out += JSON.stringify(key) + ':' + value;
    }

    // Pop this node off the ancestor stack before returning.
    seen.splice(seenIndex, 1);
    return '{' + out + '}';
  }(data);
};
|
|
|
|
// Predicates classifying a logical-composition node by its operator key.
// A node is an OR / AND / NOT composition when the corresponding property
// is truthy (a non-empty operand).

function isLogicalOr(op) {
  return Boolean(op.or);
}

function isLogicalAnd(op) {
  return Boolean(op.and);
}

function isLogicalNot(op) {
  return Boolean(op.not);
}
|
|
|
|
/**
 * Invoke fn on every leaf predicate of a not/and/or logical composition,
 * recursing through nested compositions in order.
 */
function forEachLeaf(op, fn) {
  if (isLogicalNot(op)) {
    forEachLeaf(op.not, fn);
  } else if (isLogicalAnd(op)) {
    // Transpiled for-of over op.and.
    var _iterator = _createForOfIteratorHelper(op.and),
        _step;

    try {
      for (_iterator.s(); !(_step = _iterator.n()).done;) {
        var subop = _step.value;
        forEachLeaf(subop, fn);
      }
    } catch (err) {
      _iterator.e(err);
    } finally {
      _iterator.f();
    }
  } else if (isLogicalOr(op)) {
    // Transpiled for-of over op.or.
    var _iterator2 = _createForOfIteratorHelper(op.or),
        _step2;

    try {
      for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
        var _subop = _step2.value;
        forEachLeaf(_subop, fn);
      }
    } catch (err) {
      _iterator2.e(err);
    } finally {
      _iterator2.f();
    }
  } else {
    // Leaf predicate: apply the callback.
    fn(op);
  }
}
|
|
|
|
/**
 * Rebuild a not/and/or logical composition, applying `normalizer` to each
 * leaf predicate and returning a new tree with the same structure.
 */
function normalizeLogicalComposition(op, normalizer) {
  if (isLogicalNot(op)) {
    return {
      not: normalizeLogicalComposition(op.not, normalizer)
    };
  }

  if (isLogicalAnd(op)) {
    return {
      and: op.and.map(function (child) {
        return normalizeLogicalComposition(child, normalizer);
      })
    };
  }

  if (isLogicalOr(op)) {
    return {
      or: op.or.map(function (child) {
        return normalizeLogicalComposition(child, normalizer);
      })
    };
  }

  // Leaf predicate.
  return normalizer(op);
}
|
|
|
|
// Public aliases for the vendored deep-equal and deep-clone implementations.
var deepEqual = fastDeepEqual;
var duplicate = clone_1;
|
|
/**
|
|
* Creates an object composed of the picked object properties.
|
|
*
|
|
* var object = {'a': 1, 'b': '2', 'c': 3};
|
|
* pick(object, ['a', 'c']);
|
|
* // → {'a': 1, 'c': 3}
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
|
|
|
/**
 * Creates an object composed of the picked own properties of obj.
 * Properties listed in props but absent from obj are skipped.
 */
function pick(obj, props) {
  var copy = {};

  // Transpiled for-of over props.
  var _iterator3 = _createForOfIteratorHelper(props),
      _step3;

  try {
    for (_iterator3.s(); !(_step3 = _iterator3.n()).done;) {
      var prop = _step3.value;

      if (hasOwnProperty(obj, prop)) {
        copy[prop] = obj[prop];
      }
    }
  } catch (err) {
    _iterator3.e(err);
  } finally {
    _iterator3.f();
  }

  return copy;
}
|
|
/**
|
|
* The opposite of _.pick; this method creates an object composed of the own
|
|
* and inherited enumerable string keyed properties of object that are not omitted.
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
|
|
|
|
|
/**
 * The opposite of pick: shallow-copy obj, then delete the listed props.
 */
function omit(obj, props) {
  var copy = Object.assign({}, obj);

  // Transpiled for-of over props.
  var _iterator4 = _createForOfIteratorHelper(props),
      _step4;

  try {
    for (_iterator4.s(); !(_step4 = _iterator4.n()).done;) {
      var prop = _step4.value;
      delete copy[prop];
    }
  } catch (err) {
    _iterator4.e(err);
  } finally {
    _iterator4.f();
  }

  return copy;
}
|
|
/**
|
|
* Monkey patch Set so that `stringify` produces a string representation of sets.
|
|
*/
|
|
|
|
|
|
// Monkey patch Set so that `stringify` produces a string representation of
// sets, e.g. Set("a","b").
// NOTE(review): extends a native prototype; kept as-is since downstream
// serialization in this bundle relies on it.
Set.prototype['toJSON'] = function () {
  return "Set(".concat(_toConsumableArray(this).map(function (x) {
    return fastJsonStableStringify(x);
  }).join(','), ")");
};
|
|
/**
|
|
* Converts any object to a string representation that can be consumed by humans.
|
|
*/
|
|
|
|
|
|
// Deterministic (sorted-key) JSON stringification for human-readable output.
var stringify = fastJsonStableStringify;
|
|
/**
|
|
* Converts any object to a string of limited size, or a number.
|
|
*/
|
|
|
|
/**
 * Converts any object to a string of limited size, or a number.
 * Numbers pass through unchanged; values whose string form is under 250
 * characters are returned as that string; longer strings are reduced to a
 * 32-bit integer hash (Java String.hashCode style).
 */
function _hash(a) {
  if (isNumber(a)) {
    return a;
  }

  var str = isString(a) ? a : fastJsonStableStringify(a); // short strings can be used as hash directly, longer strings are hashed to reduce memory usage

  if (str.length < 250) {
    return str;
  } // from http://werxltd.com/wp/2010/05/13/javascript-implementation-of-javas-string-hashcode-method/

  var h = 0;

  for (var i = 0; i < str.length; i++) {
    var char = str.charCodeAt(i);
    h = (h << 5) - h + char;
    h = h & h; // Convert to 32bit integer
  }

  return h;
}
|
|
|
|
/**
 * True for exactly the two "disabled" sentinel values null and false —
 * NOT for undefined, 0, '', or NaN.
 */
function isNullOrFalse(x) {
  return x === null || x === false;
}
|
|
|
|
/**
 * indexOf-based membership test (strict equality semantics, so NaN is
 * never considered present).
 */
function contains(array, item) {
  return array.indexOf(item) !== -1;
}
|
|
/**
|
|
* Returns true if any item returns true.
|
|
*/
|
|
|
|
|
|
/**
 * Returns true if any item returns true for f(value, key, index);
 * stops at the first match.
 */
function some(arr, f) {
  var i = 0;

  // Transpiled for-of over arr.entries(), destructuring [key, value].
  var _iterator5 = _createForOfIteratorHelper(arr.entries()),
      _step5;

  try {
    for (_iterator5.s(); !(_step5 = _iterator5.n()).done;) {
      var _step5$value = _slicedToArray(_step5.value, 2),
          k = _step5$value[0],
          a = _step5$value[1];

      if (f(a, k, i++)) {
        return true;
      }
    }
  } catch (err) {
    _iterator5.e(err);
  } finally {
    _iterator5.f();
  }

  return false;
}
|
|
/**
|
|
* Returns true if all items return true.
|
|
*/
|
|
|
|
|
|
/**
 * Returns true if all items return true for f(value, key, index);
 * stops at the first failure.
 */
function every(arr, f) {
  var i = 0;

  // Transpiled for-of over arr.entries(), destructuring [key, value].
  var _iterator6 = _createForOfIteratorHelper(arr.entries()),
      _step6;

  try {
    for (_iterator6.s(); !(_step6 = _iterator6.n()).done;) {
      var _step6$value = _slicedToArray(_step6.value, 2),
          k = _step6$value[0],
          a = _step6$value[1];

      if (!f(a, k, i++)) {
        return false;
      }
    }
  } catch (err) {
    _iterator6.e(err);
  } finally {
    _iterator6.f();
  }

  return true;
}
|
|
/**
 * recursively merges src into dest (in place) and returns dest.
 * Nullish sources are skipped by substituting an empty object.
 */
function mergeDeep(dest) {
  // Transpiled rest parameter: gather every argument after `dest` into `src`.
  for (var _len2 = arguments.length, src = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) {
    src[_key2 - 1] = arguments[_key2];
  }

  // Later sources win over earlier ones because they are merged last.
  for (var _i2 = 0, _src = src; _i2 < _src.length; _i2++) {
    var s = _src[_i2];
    deepMerge_(dest, s !== null && s !== void 0 ? s : {}); // transpiled `s ?? {}`
  }

  return dest;
}
|
|
|
|
// Merge every own enumerable key of src into dest via writeConfig.
// NOTE(review): recursively merging arbitrary objects can be a prototype-
// pollution vector if `src` ever comes from untrusted input — confirm callers
// only pass trusted config objects.
function deepMerge_(dest, src) {
  var _iterator7 = _createForOfIteratorHelper(keys(src)),
      _step7;

  try {
    for (_iterator7.s(); !(_step7 = _iterator7.n()).done;) {
      var property = _step7.value;
      // writeConfig is defined elsewhere in this file; the `true` flag
      // presumably selects deep-merge behavior — TODO confirm at its definition.
      writeConfig(dest, property, src[property], true);
    }
  } catch (err) {
    _iterator7.e(err);
  } finally {
    _iterator7.f();
  }
}
|
|
|
|
/**
 * Returns the values whose key `f(value)` has not been seen before,
 * preserving first-occurrence order.
 */
function unique(values, f) {
  var results = [];
  // Seen-key table. NOTE: plain-object keys are stringified, so keys that
  // coerce to the same string (e.g. 1 and '1') collide.
  var u = {};
  var v;

  var _iterator8 = _createForOfIteratorHelper(values),
      _step8;

  try {
    for (_iterator8.s(); !(_step8 = _iterator8.n()).done;) {
      var val = _step8.value;
      v = f(val);

      // Skip values whose key was already emitted.
      if (v in u) {
        continue;
      }

      u[v] = 1;
      results.push(val);
    }
  } catch (err) {
    _iterator8.e(err);
  } finally {
    _iterator8.f();
  }

  return results;
}
|
|
|
|
/**
 * Returns true when two Set-like collections hold exactly the same elements
 * (membership tested via `b.has`).
 */
function setEqual(a, b) {
  // Equal size plus one-way inclusion implies set equality.
  if (a.size !== b.size) {
    return false;
  }

  var _iterator9 = _createForOfIteratorHelper(a),
      _step9;

  try {
    for (_iterator9.s(); !(_step9 = _iterator9.n()).done;) {
      var e = _step9.value;

      if (!b.has(e)) {
        return false;
      }
    }
  } catch (err) {
    _iterator9.e(err);
  } finally {
    _iterator9.f();
  }

  return true;
}
|
|
|
|
/**
 * Returns true when any element of iterable `a` is a member of set-like `b`.
 */
function hasIntersection(a, b) {
  var _iterator10 = _createForOfIteratorHelper(a),
      _step10;

  try {
    for (_iterator10.s(); !(_step10 = _iterator10.n()).done;) {
      var key = _step10.value;

      // Short-circuit on the first shared element.
      if (b.has(key)) {
        return true;
      }
    }
  } catch (err) {
    _iterator10.e(err);
  } finally {
    _iterator10.f();
  }

  return false;
}
|
|
|
|
/**
 * Returns the set of all access-path prefixes for the given field paths.
 * E.g. the path "a.b.c" contributes "a", "a[b]" and "a[b][c]".
 */
function prefixGenerator(a) {
  var prefixes = new Set();

  var _iterator11 = _createForOfIteratorHelper(a),
      _step11;

  try {
    // Babel hoists the loop body into _loop because it closes over `_step11`.
    var _loop = function _loop() {
      var x = _step11.value;
      var splitField = splitAccessPath(x); // Wrap every element other than the first in `[]`

      var wrappedWithAccessors = splitField.map(function (y, i) {
        return i === 0 ? y : "[".concat(y, "]");
      });
      // Cumulative joins: prefix i is the concatenation of parts 0..i.
      var computedPrefixes = wrappedWithAccessors.map(function (_, i) {
        return wrappedWithAccessors.slice(0, i + 1).join('');
      });

      var _iterator12 = _createForOfIteratorHelper(computedPrefixes),
          _step12;

      try {
        for (_iterator12.s(); !(_step12 = _iterator12.n()).done;) {
          var y = _step12.value;
          prefixes.add(y);
        }
      } catch (err) {
        _iterator12.e(err);
      } finally {
        _iterator12.f();
      }
    };

    for (_iterator11.s(); !(_step11 = _iterator11.n()).done;) {
      _loop();
    }
  } catch (err) {
    _iterator11.e(err);
  } finally {
    _iterator11.f();
  }

  return prefixes;
}
|
|
/**
 * Returns true if a and b have an intersection. Also returns true if a or b
 * are undefined, since this means we don't know what fields a node produces
 * or depends on.
 */
function fieldIntersection(a, b) {
  var unknown = a === undefined || b === undefined;

  if (unknown) {
    return true;
  }

  return hasIntersection(prefixGenerator(a), prefixGenerator(b));
} // eslint-disable-next-line @typescript-eslint/ban-types
|
|
|
|
|
|
/**
 * Returns true when the object has no enumerable own keys.
 */
function isEmpty(obj) {
  return Object.keys(obj).length === 0;
} // This is a stricter version of Object.keys but with better types. See https://github.com/Microsoft/TypeScript/pull/12253#issuecomment-263132208
|
|
|
|
|
|
// Alias of Object.keys (typed more strictly in the original TypeScript
// source; in the compiled output it is the plain built-in).
var keys = Object.keys;
// Alias of Object.values.
var vals = Object.values;
|
|
|
|
/**
 * Returns the own enumerable {key, value} pairs of an object.
 * Unlike Object.entries, each pair is an object rather than a [k, v] tuple.
 */
function entries(x) {
  var _entries = [];

  for (var k in x) {
    // NOTE(review): `hasOwnProperty(x, k)` is a module-level helper (not the
    // prototype method) — presumably it filters inherited keys out of the
    // for...in walk; confirm at its definition.
    if (hasOwnProperty(x, k)) {
      _entries.push({
        key: k,
        value: x[k]
      });
    }
  }

  return _entries;
}
|
|
|
|
/**
 * Returns true only for the boolean primitives (not Boolean wrapper objects).
 */
function isBoolean$1(b) {
  return typeof b === "boolean";
}
|
|
/**
 * Convert a string into a valid variable name.
 */
function varName(s) {
  // Replace every non-word character (anything besides a-zA-Z0-9_) with '_'.
  var sanitized = s.replace(/\W/g, '_');

  // Prefix with '_' when the ORIGINAL string starts with digits, since an
  // identifier may not begin with a number.
  var needsPrefix = /^\d+/.test(s);
  return (needsPrefix ? '_' : '') + sanitized;
}
|
|
|
|
/**
 * Recursively render a logical-operand tree ({not}/{and}/{or} nodes) into a
 * parenthesized boolean expression string; `cb` renders the leaf predicates.
 */
function logicalExpr(op, cb) {
  if (isLogicalNot(op)) {
    return '!(' + logicalExpr(op.not, cb) + ')';
  } else if (isLogicalAnd(op)) {
    // Each conjunct is individually parenthesized to preserve precedence.
    return '(' + op.and.map(function (and) {
      return logicalExpr(and, cb);
    }).join(') && (') + ')';
  } else if (isLogicalOr(op)) {
    return '(' + op.or.map(function (or) {
      return logicalExpr(or, cb);
    }).join(') || (') + ')';
  } else {
    // Leaf predicate: delegate rendering to the callback.
    return cb(op);
  }
}
|
|
/**
 * Delete nested property of an object, and delete the ancestors of the
 * property if they become empty.
 *
 * NOTE: consumes `orderedProps` — shift() mutates the caller's array.
 *
 * @returns true if `obj` is empty after the deletion.
 */
function deleteNestedProperty(obj, orderedProps) {
  // Base case: walked past the leaf — report "empty" so the parent deletes it.
  if (orderedProps.length === 0) {
    return true;
  }

  var prop = orderedProps.shift(); // eslint-disable-line @typescript-eslint/no-non-null-assertion

  // Recurse first; only remove this level's key when the subtree emptied out.
  if (prop in obj && deleteNestedProperty(obj[prop], orderedProps)) {
    delete obj[prop];
  }

  return isEmpty(obj);
}
|
|
|
|
/**
 * Uppercase the first character of a string, leaving the rest untouched.
 * Returns '' for the empty string.
 */
function titleCase(s) {
  // String#substr is deprecated (Annex B); slice(1) is the standard
  // equivalent for "everything after the first character".
  return s.charAt(0).toUpperCase() + s.slice(1);
}
|
|
/**
 * Converts a path to an access path with datum, guarded against missing
 * intermediate objects, e.g. `datum["a"] && datum["a"]["b"]`.
 * @param path The field name.
 * @param datum The string to use for `datum`.
 */
function accessPathWithDatum(path) {
  // Transpiled default parameter: datum = 'datum'.
  var datum = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'datum';
  var pieces = splitAccessPath(path);
  var prefixes = [];

  // Build one guarded accessor per prefix length (1..n); joining them with
  // && makes the full access safe when intermediate objects are missing.
  for (var i = 1; i <= pieces.length; i++) {
    // `$` is presumably vega-util's string quoter for each path piece —
    // TODO confirm against the import list.
    var prefix = "[".concat(pieces.slice(0, i).map($).join(']['), "]");
    prefixes.push("".concat(datum).concat(prefix));
  }

  return prefixes.join(' && ');
}
|
|
/**
 * Return access with datum to the flattened field, e.g. `datum["a.b"]`.
 *
 * @param path The field name.
 * @param datum The string to use for `datum`.
 */
function flatAccessWithDatum(path) {
  // Transpiled default parameter: datum = 'datum'.
  var datum = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'datum';
  return "".concat(datum, "[").concat($(splitAccessPath(path).join('.')), "]");
}
|
|
|
|
/**
 * Backslash-escape the characters that are meaningful inside an access path:
 * brackets, dots and quotes.
 */
function escapePathAccess(string) {
  var specialChars = /(\[|\]|\.|'|")/g;
  return string.replace(specialChars, '\\$1');
}
|
|
/**
 * Replaces path accesses with access to non-nested field.
 * For example, `foo["bar"].baz` becomes `foo\\.bar\\.baz`.
 */
function replacePathInField(path) {
  // Escape each piece, then join with a literal backslash-dot separator.
  return "".concat(splitAccessPath(path).map(escapePathAccess).join('\\.'));
}
|
|
/**
 * Replace all occurrences of a string with another string.
 *
 * @param string the string to replace in
 * @param find the literal substring to replace
 * @param replacement the replacement
 */
function replaceAll(string, find, replacement) {
  // Escape regex metacharacters so `find` is treated as a literal, then run
  // a single global regex replace.
  var literalPattern = find.replace(/[-/\\^$*+?.()|[\]{}]/g, '\\$&');
  return string.replace(new RegExp(literalPattern, 'g'), replacement);
}
|
|
/**
 * Remove path accesses with access from field.
 * For example, `foo["bar"].baz` becomes `foo.bar.baz`.
 */
function removePathFromField(path) {
  return "".concat(splitAccessPath(path).join('.'));
}
|
|
/**
 * Count the depth of the path. Returns 1 for fields that are not nested,
 * and 0 for a falsy (empty/undefined) path.
 */
function accessPathDepth(path) {
  if (!path) {
    return 0;
  }

  return splitAccessPath(path).length;
}
|
|
/**
 * This is a replacement for chained || for numeric properties or properties
 * that respect null, so that 0 (and null/false) will be included.
 *
 * @returns the first argument that is not undefined, or undefined if all are.
 */
function getFirstDefined() {
  for (var idx = 0; idx < arguments.length; idx++) {
    var candidate = arguments[idx];

    if (candidate !== undefined) {
      return candidate;
    }
  }

  return undefined;
} // variable used to generate id
|
|
|
|
|
|
// Monotonically increasing counter backing uniqueId().
var idCounter = 42;
/**
 * Returns a new unique id every time it gets called (sequential, not random).
 *
 * Has side effect! (increments the module-level counter)
 */

function uniqueId(prefix) {
  var id = ++idCounter;
  // With a prefix the result is the string "<prefix><id>"; otherwise the
  // bare number is returned.
  return prefix ? String(prefix) + id : id;
}
|
|
|
|
/**
 * Prefix `name` with the '__' internal marker unless it already carries it.
 */
function internalField(name) {
  if (isInternalField(name)) {
    return name;
  }

  return "__".concat(name);
}
|
|
|
|
/**
 * Returns true when the field name starts with the '__' internal prefix.
 */
function isInternalField(name) {
  return name.slice(0, 2) === '__';
}
|
|
/**
 * Normalize angle to be within [0,360). Undefined passes through.
 */
function normalizeAngle(angle) {
  if (angle === undefined) {
    return undefined;
  }

  // Double modulo so that negative inputs also land in [0, 360).
  var wrapped = angle % 360;
  return (wrapped + 360) % 360;
}
|
|
/**
 * Returns whether the passed in value is a valid number, or a string that
 * coerces to one.
 */
function isNumeric(value) {
  if (isNumber(value)) {
    return true;
  }

  // A numeric string must both coerce cleanly (isNaN) and parse as a float;
  // the two checks together reject '', '  ', and partial parses like '1x'.
  var coercesToNumber = !isNaN(value);
  var parsesAsFloat = !isNaN(parseFloat(value));
  return coercesToNumber && parsesAsFloat;
}
|
|
|
|
var CONDITIONAL_AXIS_PROP_INDEX = {
|
|
domainColor: {
|
|
part: 'domain',
|
|
vgProp: 'stroke'
|
|
},
|
|
labelAlign: {
|
|
part: 'labels',
|
|
vgProp: 'align'
|
|
},
|
|
labelBaseline: {
|
|
part: 'labels',
|
|
vgProp: 'baseline'
|
|
},
|
|
labelColor: {
|
|
part: 'labels',
|
|
vgProp: 'fill'
|
|
},
|
|
labelFont: {
|
|
part: 'labels',
|
|
vgProp: 'font'
|
|
},
|
|
labelFontSize: {
|
|
part: 'labels',
|
|
vgProp: 'fontSize'
|
|
},
|
|
labelFontStyle: {
|
|
part: 'labels',
|
|
vgProp: 'fontStyle'
|
|
},
|
|
labelFontWeight: {
|
|
part: 'labels',
|
|
vgProp: 'fontWeight'
|
|
},
|
|
labelOpacity: {
|
|
part: 'labels',
|
|
vgProp: 'opacity'
|
|
},
|
|
labelOffset: null,
|
|
labelPadding: null,
|
|
gridColor: {
|
|
part: 'grid',
|
|
vgProp: 'stroke'
|
|
},
|
|
gridDash: {
|
|
part: 'grid',
|
|
vgProp: 'strokeDash'
|
|
},
|
|
gridDashOffset: {
|
|
part: 'grid',
|
|
vgProp: 'strokeDashOffset'
|
|
},
|
|
gridOpacity: {
|
|
part: 'grid',
|
|
vgProp: 'opacity'
|
|
},
|
|
gridWidth: {
|
|
part: 'grid',
|
|
vgProp: 'strokeWidth'
|
|
},
|
|
tickColor: {
|
|
part: 'ticks',
|
|
vgProp: 'stroke'
|
|
},
|
|
tickDash: {
|
|
part: 'ticks',
|
|
vgProp: 'strokeDash'
|
|
},
|
|
tickDashOffset: {
|
|
part: 'ticks',
|
|
vgProp: 'strokeDashOffset'
|
|
},
|
|
tickOpacity: {
|
|
part: 'ticks',
|
|
vgProp: 'opacity'
|
|
},
|
|
tickSize: null,
|
|
tickWidth: {
|
|
part: 'ticks',
|
|
vgProp: 'strokeWidth'
|
|
},
|
|
titleColor: {
|
|
part: 'title',
|
|
vgProp: 'fill'
|
|
},
|
|
title: null // title supports signal, let's use it.
|
|
|
|
};
|
|
|
|
/**
 * Returns a truthy value (the `condition` member) when `v` is a conditional
 * axis value; otherwise returns the falsy `v` itself or undefined.
 */
function isConditionalAxisValue(v) {
  if (!v) {
    return v;
  }

  return v['condition'];
}
|
|
|
|
var AXIS_PARTS = ['domain', 'grid', 'labels', 'ticks', 'title'];
|
|
/**
|
|
* A dictionary listing whether a certain axis property is applicable for only main axes or only grid axes.
|
|
*/
|
|
|
|
var AXIS_PROPERTY_TYPE = {
|
|
grid: 'grid',
|
|
gridCap: 'grid',
|
|
gridColor: 'grid',
|
|
gridDash: 'grid',
|
|
gridDashOffset: 'grid',
|
|
gridOpacity: 'grid',
|
|
gridScale: 'grid',
|
|
gridWidth: 'grid',
|
|
orient: 'main',
|
|
bandPosition: 'both',
|
|
aria: 'main',
|
|
description: 'main',
|
|
domain: 'main',
|
|
domainCap: 'main',
|
|
domainColor: 'main',
|
|
domainDash: 'main',
|
|
domainDashOffset: 'main',
|
|
domainOpacity: 'main',
|
|
domainWidth: 'main',
|
|
format: 'main',
|
|
formatType: 'main',
|
|
labelAlign: 'main',
|
|
labelAngle: 'main',
|
|
labelBaseline: 'main',
|
|
labelBound: 'main',
|
|
labelColor: 'main',
|
|
labelFlush: 'main',
|
|
labelFlushOffset: 'main',
|
|
labelFont: 'main',
|
|
labelFontSize: 'main',
|
|
labelFontStyle: 'main',
|
|
labelFontWeight: 'main',
|
|
labelLimit: 'main',
|
|
labelLineHeight: 'main',
|
|
labelOffset: 'main',
|
|
labelOpacity: 'main',
|
|
labelOverlap: 'main',
|
|
labelPadding: 'main',
|
|
labels: 'main',
|
|
labelSeparation: 'main',
|
|
maxExtent: 'main',
|
|
minExtent: 'main',
|
|
offset: 'both',
|
|
position: 'main',
|
|
tickCap: 'main',
|
|
tickColor: 'main',
|
|
tickDash: 'main',
|
|
tickDashOffset: 'main',
|
|
tickMinStep: 'main',
|
|
tickOffset: 'both',
|
|
tickOpacity: 'main',
|
|
tickRound: 'both',
|
|
ticks: 'main',
|
|
tickSize: 'main',
|
|
tickWidth: 'both',
|
|
title: 'main',
|
|
titleAlign: 'main',
|
|
titleAnchor: 'main',
|
|
titleAngle: 'main',
|
|
titleBaseline: 'main',
|
|
titleColor: 'main',
|
|
titleFont: 'main',
|
|
titleFontSize: 'main',
|
|
titleFontStyle: 'main',
|
|
titleFontWeight: 'main',
|
|
titleLimit: 'main',
|
|
titleLineHeight: 'main',
|
|
titleOpacity: 'main',
|
|
titlePadding: 'main',
|
|
titleX: 'main',
|
|
titleY: 'main',
|
|
encode: 'both',
|
|
scale: 'both',
|
|
tickBand: 'both',
|
|
tickCount: 'both',
|
|
tickExtra: 'both',
|
|
translate: 'both',
|
|
values: 'both',
|
|
zindex: 'both' // this is actually set afterward, so it doesn't matter
|
|
|
|
};
|
|
var COMMON_AXIS_PROPERTIES_INDEX = {
|
|
orient: 1,
|
|
aria: 1,
|
|
bandPosition: 1,
|
|
description: 1,
|
|
domain: 1,
|
|
domainCap: 1,
|
|
domainColor: 1,
|
|
domainDash: 1,
|
|
domainDashOffset: 1,
|
|
domainOpacity: 1,
|
|
domainWidth: 1,
|
|
format: 1,
|
|
formatType: 1,
|
|
grid: 1,
|
|
gridCap: 1,
|
|
gridColor: 1,
|
|
gridDash: 1,
|
|
gridDashOffset: 1,
|
|
gridOpacity: 1,
|
|
gridWidth: 1,
|
|
labelAlign: 1,
|
|
labelAngle: 1,
|
|
labelBaseline: 1,
|
|
labelBound: 1,
|
|
labelColor: 1,
|
|
labelFlush: 1,
|
|
labelFlushOffset: 1,
|
|
labelFont: 1,
|
|
labelFontSize: 1,
|
|
labelFontStyle: 1,
|
|
labelFontWeight: 1,
|
|
labelLimit: 1,
|
|
labelLineHeight: 1,
|
|
labelOffset: 1,
|
|
labelOpacity: 1,
|
|
labelOverlap: 1,
|
|
labelPadding: 1,
|
|
labels: 1,
|
|
labelSeparation: 1,
|
|
maxExtent: 1,
|
|
minExtent: 1,
|
|
offset: 1,
|
|
position: 1,
|
|
tickBand: 1,
|
|
tickCap: 1,
|
|
tickColor: 1,
|
|
tickCount: 1,
|
|
tickDash: 1,
|
|
tickDashOffset: 1,
|
|
tickExtra: 1,
|
|
tickMinStep: 1,
|
|
tickOffset: 1,
|
|
tickOpacity: 1,
|
|
tickRound: 1,
|
|
ticks: 1,
|
|
tickSize: 1,
|
|
tickWidth: 1,
|
|
title: 1,
|
|
titleAlign: 1,
|
|
titleAnchor: 1,
|
|
titleAngle: 1,
|
|
titleBaseline: 1,
|
|
titleColor: 1,
|
|
titleFont: 1,
|
|
titleFontSize: 1,
|
|
titleFontStyle: 1,
|
|
titleFontWeight: 1,
|
|
titleLimit: 1,
|
|
titleLineHeight: 1,
|
|
titleOpacity: 1,
|
|
titlePadding: 1,
|
|
titleX: 1,
|
|
titleY: 1,
|
|
translate: 1,
|
|
values: 1,
|
|
zindex: 1
|
|
};
|
|
var AXIS_PROPERTIES_INDEX = Object.assign(Object.assign({}, COMMON_AXIS_PROPERTIES_INDEX), {
|
|
style: 1,
|
|
labelExpr: 1,
|
|
encoding: 1
|
|
});
|
|
|
|
/**
 * Returns true when `prop` is a known Vega-Lite axis property.
 */
function isAxisProperty(prop) {
  return AXIS_PROPERTIES_INDEX[prop] ? true : false;
}
|
|
/**
|
|
* All types of primitive marks.
|
|
*/
|
|
|
|
|
|
var Mark = {
|
|
arc: 'arc',
|
|
area: 'area',
|
|
bar: 'bar',
|
|
image: 'image',
|
|
line: 'line',
|
|
point: 'point',
|
|
rect: 'rect',
|
|
rule: 'rule',
|
|
text: 'text',
|
|
tick: 'tick',
|
|
trail: 'trail',
|
|
circle: 'circle',
|
|
square: 'square',
|
|
geoshape: 'geoshape'
|
|
};
|
|
var ARC = Mark.arc;
|
|
var AREA = Mark.area;
|
|
var BAR = Mark.bar;
|
|
var IMAGE = Mark.image;
|
|
var LINE = Mark.line;
|
|
var POINT = Mark.point;
|
|
var RECT = Mark.rect;
|
|
var RULE = Mark.rule;
|
|
var TEXT = Mark.text;
|
|
var TICK = Mark.tick;
|
|
var TRAIL = Mark.trail;
|
|
var CIRCLE = Mark.circle;
|
|
var SQUARE = Mark.square;
|
|
var GEOSHAPE = Mark.geoshape;
|
|
|
|
/**
 * Returns true for continuous path-based marks: line, area, trail.
 */
function isPathMark(m) {
  return ['line', 'area', 'trail'].indexOf(m) !== -1;
}
|
|
|
|
/**
 * Returns true for rect-based marks: rect, bar, image, and arc
 * (arc is rect/interval in polar coordinates).
 */
function isRectBasedMark(m) {
  return ['rect', 'bar', 'image', 'arc'].indexOf(m) !== -1;
}
|
|
|
|
var PRIMITIVE_MARKS = keys(Mark);
|
|
|
|
/**
 * Returns a truthy value (the mark type) when `mark` is a mark-definition
 * object rather than a plain mark-type string.
 */
function isMarkDef(mark) {
  return mark.type;
}
|
|
|
|
var PRIMITIVE_MARK_INDEX = toSet(PRIMITIVE_MARKS);
|
|
var STROKE_CONFIG = ['stroke', 'strokeWidth', 'strokeDash', 'strokeDashOffset', 'strokeOpacity', 'strokeJoin', 'strokeMiterLimit'];
|
|
var FILL_CONFIG = ['fill', 'fillOpacity'];
|
|
var FILL_STROKE_CONFIG = [].concat(STROKE_CONFIG, FILL_CONFIG);
|
|
var VL_ONLY_MARK_CONFIG_INDEX = {
|
|
color: 1,
|
|
filled: 1,
|
|
invalid: 1,
|
|
order: 1,
|
|
radius2: 1,
|
|
theta2: 1,
|
|
timeUnitBand: 1,
|
|
timeUnitBandPosition: 1
|
|
};
|
|
var VL_ONLY_MARK_CONFIG_PROPERTIES = keys(VL_ONLY_MARK_CONFIG_INDEX);
|
|
var VL_ONLY_MARK_SPECIFIC_CONFIG_PROPERTY_INDEX = {
|
|
area: ['line', 'point'],
|
|
bar: ['binSpacing', 'continuousBandSize', 'discreteBandSize'],
|
|
rect: ['binSpacing', 'continuousBandSize', 'discreteBandSize'],
|
|
line: ['point'],
|
|
tick: ['bandSize', 'thickness']
|
|
};
|
|
var defaultMarkConfig = {
|
|
color: '#4c78a8',
|
|
invalid: 'filter',
|
|
timeUnitBand: 1
|
|
};
|
|
var BAR_CORNER_RADIUS_INDEX = {
|
|
horizontal: ['cornerRadiusTopRight', 'cornerRadiusBottomRight'],
|
|
vertical: ['cornerRadiusTopLeft', 'cornerRadiusTopRight']
|
|
};
|
|
var DEFAULT_RECT_BAND_SIZE = 5;
|
|
var defaultBarConfig = {
|
|
binSpacing: 1,
|
|
continuousBandSize: DEFAULT_RECT_BAND_SIZE,
|
|
timeUnitBandPosition: 0.5
|
|
};
|
|
var defaultRectConfig = {
|
|
binSpacing: 0,
|
|
continuousBandSize: DEFAULT_RECT_BAND_SIZE,
|
|
timeUnitBandPosition: 0.5
|
|
};
|
|
var defaultTickConfig = {
|
|
thickness: 1
|
|
};
|
|
|
|
/**
 * Extract the mark type from either a mark-definition object or a bare
 * mark-type string.
 */
function getMarkType(m) {
  if (isMarkDef(m)) {
    return m.type;
  }

  return m;
}
|
|
|
|
/**
 * Returns true when the spec is a unit spec, i.e. it carries a `mark`
 * property (own or inherited — `in` walks the prototype chain).
 */
function isUnitSpec(spec) {
  return 'mark' in spec;
}
|
|
|
|
// Transpiled ES2015 class: a normalizer for one composite mark type that
// delegates the actual rewriting to the `run` function supplied at
// construction time.
var CompositeMarkNormalizer = /*#__PURE__*/function () {
  /**
   * @param name the composite mark type this normalizer handles
   * @param run the normalization function invoked for matching specs
   */
  function CompositeMarkNormalizer(name, run) {
    _classCallCheck(this, CompositeMarkNormalizer);

    this.name = name;
    this.run = run;
  }

  _createClass(CompositeMarkNormalizer, [{
    key: "hasMatchingType",
    // Returns true when `spec` is a unit spec whose mark type equals this
    // normalizer's name; non-unit specs never match.
    value: function hasMatchingType(spec) {
      if (isUnitSpec(spec)) {
        return getMarkType(spec.mark) === this.name;
      }

      return false;
    }
  }]);

  return CompositeMarkNormalizer;
}();
|
|
|
|
var AGGREGATE_OP_INDEX = {
|
|
argmax: 1,
|
|
argmin: 1,
|
|
average: 1,
|
|
count: 1,
|
|
distinct: 1,
|
|
product: 1,
|
|
max: 1,
|
|
mean: 1,
|
|
median: 1,
|
|
min: 1,
|
|
missing: 1,
|
|
q1: 1,
|
|
q3: 1,
|
|
ci0: 1,
|
|
ci1: 1,
|
|
stderr: 1,
|
|
stdev: 1,
|
|
stdevp: 1,
|
|
sum: 1,
|
|
valid: 1,
|
|
values: 1,
|
|
variance: 1,
|
|
variancep: 1
|
|
};
|
|
var MULTIDOMAIN_SORT_OP_INDEX = {
|
|
count: 1,
|
|
min: 1,
|
|
max: 1
|
|
};
|
|
|
|
/**
 * Returns true when `a` is an argmin aggregate definition ({argmin: field}).
 */
function isArgminDef(a) {
  return Boolean(a) && Boolean(a['argmin']);
}
|
|
|
|
/**
 * Returns true when `a` is an argmax aggregate definition ({argmax: field}).
 */
function isArgmaxDef(a) {
  return Boolean(a) && Boolean(a['argmax']);
}
|
|
|
|
/**
 * Returns true when `a` is a string naming a supported aggregate operation
 * (a key of AGGREGATE_OP_INDEX).
 */
function isAggregateOp(a) {
  return isString(a) && !!AGGREGATE_OP_INDEX[a];
}
|
|
|
|
// Aggregate ops that count records rather than measure field values.
var COUNTING_OPS = ['count', 'valid', 'missing', 'distinct'];

/**
 * Returns true when the aggregate is one of the counting operations.
 */
function isCountingAggregateOp(aggregate) {
  return isString(aggregate) && contains(COUNTING_OPS, aggregate);
}
|
|
|
|
/**
 * Returns true when the aggregate is 'min' or 'max'.
 */
function isMinMaxOp(aggregate) {
  return isString(aggregate) && contains(['min', 'max'], aggregate);
}
|
|
/** Additive-based aggregation operations. These can be applied to stack. */
|
|
|
|
|
|
var SUM_OPS = ['count', 'sum', 'distinct', 'valid', 'missing'];
|
|
/**
|
|
* Aggregation operators that always produce values within the range [domainMin, domainMax].
|
|
*/
|
|
|
|
var SHARED_DOMAIN_OPS = ['mean', 'average', 'median', 'q1', 'q3', 'min', 'max'];
|
|
var SHARED_DOMAIN_OP_INDEX = toSet(SHARED_DOMAIN_OPS);
|
|
/*
|
|
* Constants and utilities for encoding channels (Visual variables)
|
|
* such as 'x', 'y', 'color'.
|
|
*/
|
|
// Facet
|
|
|
|
var ROW = 'row';
|
|
var COLUMN = 'column';
|
|
var FACET = 'facet'; // Position
|
|
|
|
var X = 'x';
|
|
var Y = 'y';
|
|
var X2 = 'x2';
|
|
var Y2 = 'y2'; // Arc-Position
|
|
|
|
var RADIUS = 'radius';
|
|
var RADIUS2 = 'radius2';
|
|
var THETA = 'theta';
|
|
var THETA2 = 'theta2'; // Geo Position
|
|
|
|
var LATITUDE = 'latitude';
|
|
var LONGITUDE = 'longitude';
|
|
var LATITUDE2 = 'latitude2';
|
|
var LONGITUDE2 = 'longitude2'; // Mark property with scale
|
|
|
|
var COLOR = 'color';
|
|
var FILL = 'fill';
|
|
var STROKE = 'stroke';
|
|
var SHAPE = 'shape';
|
|
var SIZE = 'size';
|
|
var ANGLE = 'angle';
|
|
var OPACITY = 'opacity';
|
|
var FILLOPACITY = 'fillOpacity';
|
|
var STROKEOPACITY = 'strokeOpacity';
|
|
var STROKEWIDTH = 'strokeWidth';
|
|
var STROKEDASH = 'strokeDash'; // Non-scale channel
|
|
|
|
var TEXT$1 = 'text';
|
|
var ORDER = 'order';
|
|
var DETAIL = 'detail';
|
|
var KEY = 'key';
|
|
var TOOLTIP = 'tooltip';
|
|
var HREF = 'href';
|
|
var URL = 'url';
|
|
var DESCRIPTION = 'description';
|
|
var POSITION_CHANNEL_INDEX = {
|
|
x: 1,
|
|
y: 1,
|
|
x2: 1,
|
|
y2: 1
|
|
};
|
|
var POLAR_POSITION_CHANNEL_INDEX = {
|
|
theta: 1,
|
|
theta2: 1,
|
|
radius: 1,
|
|
radius2: 1
|
|
};
|
|
|
|
function isPolarPositionChannel(c) {
|
|
return c in POLAR_POSITION_CHANNEL_INDEX;
|
|
}
|
|
|
|
var GEO_POSIITON_CHANNEL_INDEX = {
|
|
longitude: 1,
|
|
longitude2: 1,
|
|
latitude: 1,
|
|
latitude2: 1
|
|
};
|
|
|
|
/**
 * Map a geo channel to the cartesian position channel it projects onto:
 * latitude -> y, longitude -> x (and their secondaries likewise).
 * Returns undefined for any other channel.
 */
function getPositionChannelFromLatLong(channel) {
  switch (channel) {
    case 'latitude':
      return 'y';

    case 'latitude2':
      return 'y2';

    case 'longitude':
      return 'x';

    case 'longitude2':
      return 'x2';
  }
}
|
|
|
|
function isGeoPositionChannel(c) {
|
|
return c in GEO_POSIITON_CHANNEL_INDEX;
|
|
}
|
|
|
|
var GEOPOSITION_CHANNELS = keys(GEO_POSIITON_CHANNEL_INDEX);
|
|
var UNIT_CHANNEL_INDEX = Object.assign(Object.assign(Object.assign(Object.assign({}, POSITION_CHANNEL_INDEX), POLAR_POSITION_CHANNEL_INDEX), GEO_POSIITON_CHANNEL_INDEX), {
|
|
// color
|
|
color: 1,
|
|
fill: 1,
|
|
stroke: 1,
|
|
// other non-position with scale
|
|
opacity: 1,
|
|
fillOpacity: 1,
|
|
strokeOpacity: 1,
|
|
strokeWidth: 1,
|
|
strokeDash: 1,
|
|
size: 1,
|
|
angle: 1,
|
|
shape: 1,
|
|
// channels without scales
|
|
order: 1,
|
|
text: 1,
|
|
detail: 1,
|
|
key: 1,
|
|
tooltip: 1,
|
|
href: 1,
|
|
url: 1,
|
|
description: 1
|
|
});
|
|
|
|
/**
 * Returns true for the color-like channels: color, fill, stroke.
 */
function isColorChannel(channel) {
  switch (channel) {
    case COLOR:
    case FILL:
    case STROKE:
      return true;

    default:
      return false;
  }
}
|
|
|
|
var FACET_CHANNEL_INDEX = {
|
|
row: 1,
|
|
column: 1,
|
|
facet: 1
|
|
};
|
|
var FACET_CHANNELS = keys(FACET_CHANNEL_INDEX);
|
|
var CHANNEL_INDEX = Object.assign(Object.assign({}, UNIT_CHANNEL_INDEX), FACET_CHANNEL_INDEX);
|
|
var CHANNELS = keys(CHANNEL_INDEX);
|
|
|
|
var SINGLE_DEF_CHANNEL_INDEX = __rest(CHANNEL_INDEX, ["order", "detail", "tooltip"]);
|
|
|
|
var SINGLE_DEF_UNIT_CHANNEL_INDEX = __rest(SINGLE_DEF_CHANNEL_INDEX, ["row", "column", "facet"]);
|
|
|
|
function isSingleDefUnitChannel(str) {
|
|
return !!SINGLE_DEF_UNIT_CHANNEL_INDEX[str];
|
|
}
|
|
|
|
function isChannel(str) {
|
|
return !!CHANNEL_INDEX[str];
|
|
}
|
|
|
|
var SECONDARY_RANGE_CHANNEL = [X2, Y2, LATITUDE2, LONGITUDE2, THETA2, RADIUS2];
|
|
|
|
/**
 * Returns true when the channel is a secondary range channel (x2, y2, …),
 * i.e. when its main range channel differs from itself.
 */
function isSecondaryRangeChannel(c) {
  return c !== getMainRangeChannel(c);
}
|
|
/**
|
|
* Get the main channel for a range channel. E.g. `x` for `x2`.
|
|
*/
|
|
|
|
|
|
function getMainRangeChannel(channel) {
|
|
switch (channel) {
|
|
case X2:
|
|
return X;
|
|
|
|
case Y2:
|
|
return Y;
|
|
|
|
case LATITUDE2:
|
|
return LATITUDE;
|
|
|
|
case LONGITUDE2:
|
|
return LONGITUDE;
|
|
|
|
case THETA2:
|
|
return THETA;
|
|
|
|
case RADIUS2:
|
|
return RADIUS;
|
|
}
|
|
|
|
return channel;
|
|
}
|
|
|
|
function getVgPositionChannel(channel) {
|
|
if (isPolarPositionChannel(channel)) {
|
|
switch (channel) {
|
|
case THETA:
|
|
return 'startAngle';
|
|
|
|
case THETA2:
|
|
return 'endAngle';
|
|
|
|
case RADIUS:
|
|
return 'outerRadius';
|
|
|
|
case RADIUS2:
|
|
return 'innerRadius';
|
|
}
|
|
}
|
|
|
|
return channel;
|
|
}
|
|
/**
|
|
* Get the main channel for a range channel. E.g. `x` for `x2`.
|
|
*/
|
|
|
|
|
|
function getSecondaryRangeChannel(channel) {
|
|
switch (channel) {
|
|
case X:
|
|
return X2;
|
|
|
|
case Y:
|
|
return Y2;
|
|
|
|
case LATITUDE:
|
|
return LATITUDE2;
|
|
|
|
case LONGITUDE:
|
|
return LONGITUDE2;
|
|
|
|
case THETA:
|
|
return THETA2;
|
|
|
|
case RADIUS:
|
|
return RADIUS2;
|
|
}
|
|
|
|
return undefined;
|
|
}
|
|
|
|
/**
 * Map a position channel to the size property it spans:
 * x/x2 -> 'width', y/y2 -> 'height'; undefined for everything else.
 */
function getSizeChannel(channel) {
  switch (channel) {
    case X:
    case X2:
      return 'width';

    case Y:
    case Y2:
      return 'height';
  }

  return undefined;
}
|
|
/**
|
|
* Get the main channel for a range channel. E.g. `x` for `x2`.
|
|
*/
|
|
|
|
|
|
function getOffsetChannel(channel) {
|
|
switch (channel) {
|
|
case X:
|
|
return 'xOffset';
|
|
|
|
case Y:
|
|
return 'yOffset';
|
|
|
|
case X2:
|
|
return 'x2Offset';
|
|
|
|
case Y2:
|
|
return 'y2Offset';
|
|
|
|
case THETA:
|
|
return 'thetaOffset';
|
|
|
|
case RADIUS:
|
|
return 'radiusOffset';
|
|
|
|
case THETA2:
|
|
return 'theta2Offset';
|
|
|
|
case RADIUS2:
|
|
return 'radius2Offset';
|
|
}
|
|
|
|
return undefined;
|
|
} // NONPOSITION_CHANNELS = UNIT_CHANNELS without X, Y, X2, Y2;
|
|
|
|
|
|
var // The rest of unit channels then have scale
|
|
NONPOSITION_CHANNEL_INDEX = __rest(UNIT_CHANNEL_INDEX, ["x", "y", "x2", "y2", "latitude", "longitude", "latitude2", "longitude2", "theta", "theta2", "radius", "radius2"]);
|
|
|
|
var NONPOSITION_CHANNELS = keys(NONPOSITION_CHANNEL_INDEX);
|
|
var POSITION_SCALE_CHANNEL_INDEX = {
|
|
x: 1,
|
|
y: 1
|
|
};
|
|
var POSITION_SCALE_CHANNELS = keys(POSITION_SCALE_CHANNEL_INDEX);
|
|
|
|
function isXorY(channel) {
|
|
return channel in POSITION_SCALE_CHANNEL_INDEX;
|
|
}
|
|
|
|
var POLAR_POSITION_SCALE_CHANNEL_INDEX = {
|
|
theta: 1,
|
|
radius: 1
|
|
};
|
|
var POLAR_POSITION_SCALE_CHANNELS = keys(POLAR_POSITION_SCALE_CHANNEL_INDEX);
|
|
|
|
function getPositionScaleChannel(sizeType) {
|
|
return sizeType === 'width' ? X : Y;
|
|
} // NON_POSITION_SCALE_CHANNEL = SCALE_CHANNELS without X, Y
|
|
|
|
|
|
var NONPOSITION_SCALE_CHANNEL_INDEX = __rest(NONPOSITION_CHANNEL_INDEX, ["text", "tooltip", "href", "url", "description", "detail", "key", "order"]);
|
|
|
|
var NONPOSITION_SCALE_CHANNELS = keys(NONPOSITION_SCALE_CHANNEL_INDEX);
|
|
|
|
/**
 * Returns true when the channel is a non-position channel.
 * NOTE(review): despite the name, this checks NONPOSITION_CHANNEL_INDEX (all
 * non-position channels) rather than NONPOSITION_SCALE_CHANNEL_INDEX, so
 * scale-less channels such as text/tooltip/href also return true — confirm
 * this matches the intended upstream behavior before changing it.
 */
function isNonPositionScaleChannel(channel) {
  return !!NONPOSITION_CHANNEL_INDEX[channel];
}
|
|
/**
|
|
* @returns whether Vega supports legends for a particular channel
|
|
*/
|
|
|
|
|
|
function supportLegend(channel) {
|
|
switch (channel) {
|
|
case COLOR:
|
|
case FILL:
|
|
case STROKE:
|
|
case SIZE:
|
|
case SHAPE:
|
|
case OPACITY:
|
|
case STROKEWIDTH:
|
|
case STROKEDASH:
|
|
return true;
|
|
|
|
case FILLOPACITY:
|
|
case STROKEOPACITY:
|
|
case ANGLE:
|
|
return false;
|
|
}
|
|
} // Declare SCALE_CHANNEL_INDEX
|
|
|
|
|
|
var SCALE_CHANNEL_INDEX = Object.assign(Object.assign(Object.assign({}, POSITION_SCALE_CHANNEL_INDEX), POLAR_POSITION_SCALE_CHANNEL_INDEX), NONPOSITION_SCALE_CHANNEL_INDEX);
|
|
/** List of channels with scales */
|
|
|
|
var SCALE_CHANNELS = keys(SCALE_CHANNEL_INDEX);
|
|
|
|
function isScaleChannel(channel) {
|
|
return !!SCALE_CHANNEL_INDEX[channel];
|
|
}
|
|
/**
|
|
* Return whether a channel supports a particular mark type.
|
|
* @param channel channel name
|
|
* @param mark the mark type
|
|
* @return whether the mark supports the channel
|
|
*/
|
|
|
|
|
|
function supportMark(channel, mark) {
|
|
return getSupportedMark(channel)[mark];
|
|
}
|
|
|
|
var ALL_MARKS = {
|
|
// all marks
|
|
arc: 'always',
|
|
area: 'always',
|
|
bar: 'always',
|
|
circle: 'always',
|
|
geoshape: 'always',
|
|
image: 'always',
|
|
line: 'always',
|
|
rule: 'always',
|
|
point: 'always',
|
|
rect: 'always',
|
|
square: 'always',
|
|
trail: 'always',
|
|
text: 'always',
|
|
tick: 'always'
|
|
};
|
|
|
|
var ALL_MARKS_EXCEPT_GEOSHAPE = __rest(ALL_MARKS, ["geoshape"]);
|
|
/**
|
|
* Return a dictionary showing whether a channel supports mark type.
|
|
* @param channel
|
|
* @return A dictionary mapping mark types to 'always', 'binned', or undefined
|
|
*/
|
|
|
|
|
|
function getSupportedMark(channel) {
|
|
switch (channel) {
|
|
case COLOR:
|
|
case FILL:
|
|
case STROKE: // falls through
|
|
|
|
case DESCRIPTION:
|
|
case DETAIL:
|
|
case KEY:
|
|
case TOOLTIP:
|
|
case HREF:
|
|
case ORDER: // TODO: revise (order might not support rect, which is not stackable?)
|
|
|
|
case OPACITY:
|
|
case FILLOPACITY:
|
|
case STROKEOPACITY:
|
|
case STROKEWIDTH: // falls through
|
|
|
|
case FACET:
|
|
case ROW: // falls through
|
|
|
|
case COLUMN:
|
|
return ALL_MARKS;
|
|
|
|
case X:
|
|
case Y:
|
|
case LATITUDE:
|
|
case LONGITUDE:
|
|
// all marks except geoshape. geoshape does not use X, Y -- it uses a projection
|
|
return ALL_MARKS_EXCEPT_GEOSHAPE;
|
|
|
|
case X2:
|
|
case Y2:
|
|
case LATITUDE2:
|
|
case LONGITUDE2:
|
|
return {
|
|
area: 'always',
|
|
bar: 'always',
|
|
image: 'always',
|
|
rect: 'always',
|
|
rule: 'always',
|
|
circle: 'binned',
|
|
point: 'binned',
|
|
square: 'binned',
|
|
tick: 'binned',
|
|
line: 'binned',
|
|
trail: 'binned'
|
|
};
|
|
|
|
case SIZE:
|
|
return {
|
|
point: 'always',
|
|
tick: 'always',
|
|
rule: 'always',
|
|
circle: 'always',
|
|
square: 'always',
|
|
bar: 'always',
|
|
text: 'always',
|
|
line: 'always',
|
|
trail: 'always'
|
|
};
|
|
|
|
case STROKEDASH:
|
|
return {
|
|
line: 'always',
|
|
point: 'always',
|
|
tick: 'always',
|
|
rule: 'always',
|
|
circle: 'always',
|
|
square: 'always',
|
|
bar: 'always',
|
|
geoshape: 'always'
|
|
};
|
|
|
|
case SHAPE:
|
|
return {
|
|
point: 'always',
|
|
geoshape: 'always'
|
|
};
|
|
|
|
case TEXT$1:
|
|
return {
|
|
text: 'always'
|
|
};
|
|
|
|
case ANGLE:
|
|
return {
|
|
point: 'always',
|
|
square: 'always',
|
|
text: 'always'
|
|
};
|
|
|
|
case URL:
|
|
return {
|
|
image: 'always'
|
|
};
|
|
|
|
case THETA:
|
|
return {
|
|
text: 'always',
|
|
arc: 'always'
|
|
};
|
|
|
|
case RADIUS:
|
|
return {
|
|
text: 'always',
|
|
arc: 'always'
|
|
};
|
|
|
|
case THETA2:
|
|
case RADIUS2:
|
|
return {
|
|
arc: 'always'
|
|
};
|
|
}
|
|
}
|
|
|
|
function rangeType(channel) {
|
|
switch (channel) {
|
|
case X:
|
|
case Y:
|
|
case THETA:
|
|
case RADIUS:
|
|
case SIZE:
|
|
case ANGLE:
|
|
case STROKEWIDTH:
|
|
case OPACITY:
|
|
case FILLOPACITY:
|
|
case STROKEOPACITY: // X2 and Y2 use X and Y scales, so they similarly have continuous range. [falls through]
|
|
|
|
case X2:
|
|
case Y2:
|
|
case THETA2:
|
|
case RADIUS2:
|
|
return undefined;
|
|
|
|
case FACET:
|
|
case ROW:
|
|
case COLUMN:
|
|
case SHAPE:
|
|
case STROKEDASH: // TEXT, TOOLTIP, URL, and HREF have no scale but have discrete output [falls through]
|
|
|
|
case TEXT$1:
|
|
case TOOLTIP:
|
|
case HREF:
|
|
case URL:
|
|
case DESCRIPTION:
|
|
return 'discrete';
|
|
// Color can be either continuous or discrete, depending on scale type.
|
|
|
|
case COLOR:
|
|
case FILL:
|
|
case STROKE:
|
|
return 'flexible';
|
|
// No scale, no range type.
|
|
|
|
case LATITUDE:
|
|
case LONGITUDE:
|
|
case LATITUDE2:
|
|
case LONGITUDE2:
|
|
case DETAIL:
|
|
case KEY:
|
|
case ORDER:
|
|
return undefined;
|
|
}
|
|
}
|
|
/**
|
|
* Create a key for the bin configuration. Not for prebinned bin.
|
|
*/
|
|
|
|
|
|
function binToString(bin) {
|
|
if (isBoolean(bin)) {
|
|
bin = normalizeBin(bin, undefined);
|
|
}
|
|
|
|
return 'bin' + keys(bin).map(function (p) {
|
|
return isSelectionExtent(bin[p]) ? varName("_".concat(p, "_").concat(Object.entries(bin[p]))) : varName("_".concat(p, "_").concat(bin[p]));
|
|
}).join('');
|
|
}
|
|
/**
|
|
* Vega-Lite should bin the data.
|
|
*/
|
|
|
|
|
|
function isBinning(bin) {
  if (bin === true) {
    return true;
  }
  return isBinParams(bin) && !bin.binned;
}
/**
 * The data is already binned and so Vega-Lite should not bin it again.
 */
function isBinned(bin) {
  if (bin === 'binned') {
    return true;
  }
  return isBinParams(bin) && bin.binned === true;
}

function isBinParams(bin) {
  return isObject(bin);
}

function isSelectionExtent(extent) {
  // Preserve optional-chaining semantics: nullish input yields undefined.
  if (extent === null || extent === undefined) {
    return undefined;
  }
  return extent['selection'];
}
|
|
|
|
function autoMaxBins(channel) {
  // Facets, size-like channels, and color channels shouldn't have too many
  // bins; we choose 6 like shape to simplify the rule.
  var smallBinChannels = [ROW, COLUMN, SIZE, COLOR, FILL, STROKE, STROKEWIDTH, OPACITY, FILLOPACITY, STROKEOPACITY, SHAPE];

  if (smallBinChannels.indexOf(channel) !== -1) {
    return 6; // Vega's "shape" has 6 distinct values
  }

  if (channel === STROKEDASH) {
    return 4; // We only provide 5 different stroke dash values (but 4 is more effective)
  }

  return 10;
}
|
|
|
|
function invalidSpec(spec) {
  return 'Invalid specification ' + JSON.stringify(spec) + '. Make sure the specification includes at least one of the following properties: "mark", "layer", "facet", "hconcat", "vconcat", "concat", or "repeat".';
} // FIT


var FIT_NON_SINGLE = 'Autosize "fit" only works for single views and layered views.';

function containerSizeNonSingle(name) {
  var label = name === 'width' ? 'Width' : 'Height';
  return label + ' "container" only works for single views and layered views.';
}

function containerSizeNotCompatibleWithAutosize(name) {
  var label = name === 'width' ? 'Width' : 'Height';
  var direction = name === 'width' ? 'x' : 'y';
  return label + ' "container" only works well with autosize "fit" or "fit-' + direction + '".';
}

function droppingFit(channel) {
  if (!channel) {
    return 'Dropping "fit" because spec has discrete size.';
  }
  return 'Dropping "fit-' + channel + '" because spec has discrete ' + getSizeChannel(channel) + '.';
} // VIEW SIZE
|
|
|
|
|
|
function unknownField(channel) {
  return 'Unknown field for ' + channel + '. Cannot calculate view size.';
} // SELECTION


function cannotProjectOnChannelWithoutField(channel) {
  return 'Cannot project a selection on encoding channel "' + channel + '", which has no field.';
}

function cannotProjectAggregate(channel, aggregate) {
  return 'Cannot project a selection on encoding channel "' + channel + '" as it uses an aggregate function ("' + aggregate + '").';
}

function nearestNotSupportForContinuous(mark) {
  return 'The "nearest" transform is not supported for ' + mark + ' marks.';
}

function selectionNotSupported(mark) {
  return 'Selection not supported for ' + mark + ' yet.';
}

function selectionNotFound(name) {
  return 'Cannot find a selection named "' + name + '".';
}
|
|
|
|
var SCALE_BINDINGS_CONTINUOUS = 'Scale bindings are currently only supported for scales with unbinned, continuous domains.';
var LEGEND_BINDINGS_MUST_HAVE_PROJECTION = 'Legend bindings are only supported for selections over an individual field or encoding channel.';

function noSameUnitLookup(name) {
  return 'Cannot define and lookup the "' + name + '" selection in the same view. ' + 'Try moving the lookup into a second, layered view?';
}

var NEEDS_SAME_SELECTION = 'The same selection must be used to override scale domains in a layered view.';
var INTERVAL_INITIALIZED_WITH_X_Y = 'Interval selections should be initialized using "x" and/or "y" keys.'; // REPEAT

function noSuchRepeatedValue(field) {
  return 'Unknown repeated value "' + field + '".';
}

function columnsNotSupportByRowCol(type) {
  return 'The "columns" property cannot be used when "' + type + '" has nested row/column.';
} // CONCAT / REPEAT
|
|
|
|
|
|
var CONCAT_CANNOT_SHARE_AXIS = 'Axes cannot be shared in concatenated or repeated views yet (https://github.com/vega/vega-lite/issues/2415).'; // DATA

function unrecognizedParse(p) {
  return 'Unrecognized parse "' + p + '".';
}

function differentParse(field, local, ancestor) {
  return 'An ancestor parsed field "' + field + '" as ' + ancestor + ' but a child wants to parse the field as ' + local + '.';
}

var ADD_SAME_CHILD_TWICE = 'Attempt to add the same child twice.'; // TRANSFORMS

function invalidTransformIgnored(transform) {
  return 'Ignoring an invalid transform: ' + stringify(transform) + '.';
}

var NO_FIELDS_NEEDS_AS = 'If "from.fields" is not specified, "as" has to be a string that specifies the key to be used for the data from the secondary source.'; // ENCODING & FACET

function customFormatTypeNotAllowed(channel) {
  return 'Config.customFormatTypes is not true, thus custom format type and format for channel ' + channel + ' are dropped.';
}
|
|
|
|
function projectionOverridden(opt) {
  var parentProjection = opt.parentProjection;
  var projection = opt.projection;
  return "Layer's shared projection " + stringify(parentProjection) + ' is overridden by a child projection ' + stringify(projection) + '.';
}

var REPLACE_ANGLE_WITH_THETA = 'Arc marks uses theta channel rather than angle, replacing angle with theta.';

function primitiveChannelDef(channel, type, value) {
  return 'Channel ' + channel + ' is a ' + type + '. Converted to {value: ' + stringify(value) + '}.';
}

function invalidFieldType(type) {
  return 'Invalid field type "' + type + '".';
}

function invalidFieldTypeForCountAggregate(type, aggregate) {
  return 'Invalid field type "' + type + '" for aggregate: "' + aggregate + '", using "quantitative" instead.';
}

function invalidAggregate(aggregate) {
  return 'Invalid aggregation operator "' + aggregate + '".';
}

function droppingColor(type, opt) {
  var fill = opt.fill;
  var stroke = opt.stroke;
  // Name whichever more specific color channel(s) caused color to be dropped.
  var which = fill && stroke ? 'fill and stroke' : fill ? 'fill' : 'stroke';
  return 'Dropping color ' + type + ' as the plot also has ' + which + '.';
}

function emptyFieldDef(fieldDef, channel) {
  return 'Dropping ' + stringify(fieldDef) + ' from channel "' + channel + '" since it does not contain any data field, datum, value, or signal.';
}
|
|
|
|
var LINE_WITH_VARYING_SIZE = 'Line marks cannot encode size with a non-groupby field. You may want to use trail marks instead.';

function incompatibleChannel(channel, markOrFacet, when) {
  var condition = when ? ' when ' + when : '';
  return channel + ' dropped as it is incompatible with "' + markOrFacet + '"' + condition + '.';
}

function invalidEncodingChannel(channel) {
  return channel + '-encoding is dropped as ' + channel + ' is not a valid encoding channel.';
}

function facetChannelShouldBeDiscrete(channel) {
  return channel + ' encoding should be discrete (ordinal / nominal / binned).';
}

function facetChannelDropped(channels) {
  var verb = channels.length > 1 ? 'are' : 'is';
  return 'Facet encoding dropped as ' + channels.join(' and ') + ' ' + verb + ' also specified.';
}

function discreteChannelCannotEncode(channel, type) {
  var quality = type === 'ordinal' ? 'order' : 'magnitude';
  return 'Using discrete channel "' + channel + '" to encode "' + type + '" field can be misleading as it does not encode ' + quality + '.';
} // MARK
|
|
|
|
|
|
function lineWithRange(hasX2, hasY2) {
  var channels = hasX2 ? (hasY2 ? 'x2 and y2' : 'x2') : 'y2';
  return 'Line mark is for continuous lines and thus cannot be used with ' + channels + '. We will use the rule mark (line segments) instead.';
}

function orientOverridden(original, actual) {
  return 'Specified orient "' + original + '" overridden with "' + actual + '".';
}

var RANGE_STEP_DEPRECATED = "Scale's \"rangeStep\" is deprecated and will be removed in Vega-Lite 5.0. Please use \"width\"/\"height\": {\"step\": ...} instead. See https://vega.github.io/vega-lite/docs/size.html.";

function cannotUseScalePropertyWithNonColor(prop) {
  return 'Cannot use the scale property "' + prop + '" with non-color channel.';
}
|
|
|
|
function unaggregateDomainHasNoEffectForRawField(fieldDef) {
  return 'Using unaggregated domain with raw field has no effect (' + stringify(fieldDef) + ').';
}

function unaggregateDomainWithNonSharedDomainOp(aggregate) {
  return 'Unaggregated domain not applicable for "' + aggregate + '" since it produces values outside the origin domain of the source data.';
}

function unaggregatedDomainWithLogScale(fieldDef) {
  return 'Unaggregated domain is currently unsupported for log scale (' + stringify(fieldDef) + ').';
}

function cannotApplySizeToNonOrientedMark(mark) {
  return 'Cannot apply size to non-oriented mark "' + mark + '".';
}

function scaleTypeNotWorkWithChannel(channel, scaleType, defaultScaleType) {
  return 'Channel "' + channel + '" does not work with "' + scaleType + '" scale. We are using "' + defaultScaleType + '" scale instead.';
}

function scaleTypeNotWorkWithFieldDef(scaleType, defaultScaleType) {
  return 'FieldDef does not work with "' + scaleType + '" scale. We are using "' + defaultScaleType + '" scale instead.';
}

function scalePropertyNotWorkWithScaleType(scaleType, propName, channel) {
  return channel + '-scale\'s "' + propName + '" is dropped as it does not work with ' + scaleType + ' scale.';
}

function stepDropped(channel) {
  var positional = channel === 'width' ? 'x' : 'y';
  return 'The step for "' + channel + '" is dropped because the ' + positional + ' is continuous.';
}
|
|
|
|
function mergeConflictingProperty(property, propertyOf, v1, v2) {
  return 'Conflicting ' + propertyOf.toString() + ' property "' + property.toString() + '" (' + stringify(v1) + ' and ' + stringify(v2) + '). Using ' + stringify(v1) + '.';
}

function mergeConflictingDomainProperty(property, propertyOf, v1, v2) {
  return 'Conflicting ' + propertyOf.toString() + ' property "' + property.toString() + '" (' + stringify(v1) + ' and ' + stringify(v2) + '). Using the union of the two domains.';
}

function independentScaleMeansIndependentGuide(channel) {
  return 'Setting the scale to be independent for "' + channel + '" means we also have to set the guide (axis or legend) to be independent.';
}

function domainSortDropped(sort) {
  return 'Dropping sort property ' + stringify(sort) + ' as unioned domains only support boolean or op "count", "min", and "max".';
}

var MORE_THAN_ONE_SORT = 'Domains that should be unioned has conflicting sort properties. Sort will be set to true.';
var FACETED_INDEPENDENT_DIFFERENT_SOURCES = 'Detected faceted independent scales that union domain of multiple fields from different data sources. We will use the first field. The result view size may be incorrect.';
var FACETED_INDEPENDENT_SAME_FIELDS_DIFFERENT_SOURCES = 'Detected faceted independent scales that union domain of the same fields from different source. We will assume that this is the same field from a different fork of the same data source. However, if this is not the case, the result view size may be incorrect.';
var FACETED_INDEPENDENT_SAME_SOURCE = 'Detected faceted independent scales that union domain of multiple fields from the same data source. We will use the first field. The result view size may be incorrect.'; // STACK
|
|
|
|
function cannotStackRangedMark(channel) {
  return 'Cannot stack "' + channel + '" if there is already "' + channel + '2".';
}

function cannotStackNonLinearScale(scaleType) {
  return 'Cannot stack non-linear scale (' + scaleType + ').';
}

function stackNonSummativeAggregate(aggregate) {
  return 'Stacking is applied even though the aggregate function is non-summative ("' + aggregate + '").';
} // TIMEUNIT

function invalidTimeUnit(unitName, value) {
  return 'Invalid ' + unitName + ': ' + stringify(value) + '.';
}

function droppedDay(d) {
  return 'Dropping day from datetime ' + stringify(d) + ' as day cannot be combined with other units.';
}
|
|
|
|
function errorBarCenterAndExtentAreNotNeeded(center, extent) {
  // Build "extent ", "center ", or "extent and center " depending on inputs.
  var subject = (extent ? 'extent ' : '') + (extent && center ? 'and ' : '') + (center ? 'center ' : '');
  var verb = extent && center ? 'are ' : 'is ';
  return subject + verb + 'not needed when data are aggregated.';
}

function errorBarCenterIsUsedWithWrongExtent(center, extent, mark) {
  return center + ' is not usually used with ' + extent + ' for ' + mark + '.';
}
|
|
|
|
/**
 * Warning for a composite error-bar/band spec whose continuous axis carries
 * its own aggregate; the composite mark performs the aggregation itself.
 * Fixes the "agregates" typo in the emitted warning message.
 */
function errorBarContinuousAxisHasCustomizedAggregate(aggregate, compositeMark) {
  return 'Continuous axis should not have customized aggregation function ' + aggregate + '; ' + compositeMark + ' already aggregates the axis.';
}
|
|
|
|
function errorBand1DNotSupport(property) {
  return '1D error band does not support ' + property + '.';
} // CHANNEL


function channelRequiredForBinned(channel) {
  return 'Channel ' + channel + ' is required for "binned" bin.';
}

function channelShouldNotBeUsedForBinned(channel) {
  return 'Channel ' + channel + ' should not be used with "binned" bin.';
}

function domainRequiredForThresholdScale(channel) {
  return 'Domain for ' + channel + ' is required for threshold scale.';
}
|
|
/**
|
|
* Vega-Lite's singleton logger utility.
|
|
*/
|
|
|
|
|
|
// TypeScript emit helper: write a "private field" stored in a WeakMap keyed by
// the instance. (The original `undefined && undefined.__classPrivateFieldSet`
// guard always evaluated to undefined, so only the fallback function remains.)
var __classPrivateFieldSet = function (receiver, privateMap, value) {
  if (privateMap.has(receiver)) {
    privateMap.set(receiver, value);
    return value;
  }
  throw new TypeError("attempted to set private field on non-instance");
};

// TypeScript emit helper: read a "private field" stored in a WeakMap keyed by
// the instance.
var __classPrivateFieldGet = function (receiver, privateMap) {
  if (privateMap.has(receiver)) {
    return privateMap.get(receiver);
  }
  throw new TypeError("attempted to get private field on non-instance");
};
|
|
/**
|
|
* Main (default) Vega Logger instance for Vega-Lite.
|
|
*/
|
|
|
|
|
|
var main = logger(Warn);
// The currently-active logger; swapped out by set() and restored by reset().
var current = main;
|
|
/**
|
|
* Set the singleton logger to be a custom logger.
|
|
*/
|
|
|
|
/**
 * Set the singleton logger to be a custom logger; returns the new logger.
 */
function set(newLogger) {
  return (current = newLogger);
}
/**
 * Reset the singleton logger to the default Vega Logger; returns it.
 */
function reset() {
  return (current = main);
}

/** Forward warn() calls (any arity) to the active logger. */
function warn() {
  current.warn.apply(current, arguments);
}

/** Forward debug() calls (any arity) to the active logger. */
function debug() {
  current.debug.apply(current, arguments);
} // DateTime definition object
|
|
|
|
|
|
/** True when o is an object carrying at least one date-time part (year, month, ...). */
function isDateTime(o) {
  if (!o || !isObject(o)) {
    return false;
  }
  // TIMEUNIT_PARTS is a plain array of the single time-unit part names.
  for (var i = 0; i < TIMEUNIT_PARTS.length; i++) {
    if (TIMEUNIT_PARTS[i] in o) {
      return true;
    }
  }
  return false;
}
|
|
|
|
// Lower-case month/day names plus their three-letter abbreviations, used to
// normalize string-valued DateTime parts.
var MONTHS = ['january', 'february', 'march', 'april', 'may', 'june', 'july', 'august', 'september', 'october', 'november', 'december'];
var SHORT_MONTHS = MONTHS.map(function (month) {
  return month.slice(0, 3);
});
var DAYS = ['sunday', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday'];
var SHORT_DAYS = DAYS.map(function (day) {
  return day.slice(0, 3);
});
|
|
|
|
/** Normalize a 1-based quarter (number or numeric string) to 0-based; throws on non-numbers. */
function normalizeQuarter(q) {
  var value = isNumeric(q) ? +q : q;

  if (!isNumber(value)) {
    // Invalid quarter
    throw new Error(invalidTimeUnit('quarter', value));
  }

  if (value > 4) {
    warn(invalidTimeUnit('quarter', value));
  }

  // We accept 1-based quarter, so need to readjust to 0-based quarter
  return value - 1;
}
|
|
|
|
/** Normalize a month (1-based number, numeric string, or name/abbreviation) to 0-based. */
function normalizeMonth(m) {
  var value = isNumeric(m) ? +m : m;

  if (isNumber(value)) {
    // We accept 1-based month, so need to readjust to 0-based month
    return value - 1;
  }

  var name = value.toLowerCase();
  var fullIndex = MONTHS.indexOf(name);

  if (fullIndex !== -1) {
    return fullIndex; // 0 for january, ...
  }

  var abbrIndex = SHORT_MONTHS.indexOf(name.slice(0, 3));

  if (abbrIndex !== -1) {
    return abbrIndex;
  }

  // Invalid month
  throw new Error(invalidTimeUnit('month', value));
}
|
|
|
|
/** Normalize a day-of-week (number, numeric string, or name/abbreviation) to 0 = sunday. */
function normalizeDay(d) {
  var value = isNumeric(d) ? +d : d;

  if (isNumber(value)) {
    // mod so that this can be both 0-based where 0 = sunday
    // and 1-based where 7 = sunday
    return value % 7;
  }

  var name = value.toLowerCase();
  var fullIndex = DAYS.indexOf(name);

  if (fullIndex !== -1) {
    return fullIndex; // 0 for sunday, 1 for monday, ...
  }

  var abbrIndex = SHORT_DAYS.indexOf(name.slice(0, 3));

  if (abbrIndex !== -1) {
    return abbrIndex;
  }

  // Invalid day
  throw new Error(invalidTimeUnit('day', value));
}
|
|
/**
|
|
* @param d the date.
|
|
* @param normalize whether to normalize quarter, month, day. This should probably be true if d is a DateTime.
|
|
* @returns array of date time parts [year, month, day, hours, minutes, seconds, milliseconds]
|
|
*/
|
|
|
|
|
|
/**
 * @param d the date.
 * @param normalize whether to normalize quarter, month, day. This should probably be true if d is a DateTime.
 * @returns array of date time parts [year, month, day, hours, minutes, seconds, milliseconds]
 */
function dateTimeParts(d, normalize) {
  var parts = [];

  // 'day' (day-of-week) cannot be combined with other units; drop it from a
  // copy (so the caller's object is not mutated) and warn.
  if (normalize && d.day !== undefined) {
    if (keys(d).length > 1) {
      warn(droppedDay(d));
      d = duplicate(d);
      delete d.day;
    }
  }

  if (d.year !== undefined) {
    parts.push(d.year);
  } else {
    // Just like Vega's timeunit transform, set default year to 2012, so domain conversion will be compatible with Vega
    // Note: 2012 is a leap year (and so the date February 29 is respected) that begins on a Sunday (and so days of the week will order properly at the beginning of the year).
    parts.push(2012);
  }

  if (d.month !== undefined) {
    var month = normalize ? normalizeMonth(d.month) : d.month;
    parts.push(month);
  } else if (d.quarter !== undefined) {
    var quarter = normalize ? normalizeQuarter(d.quarter) : d.quarter;
    // A quarter expands to its first month; non-number quarters stay as an expression string.
    parts.push(isNumber(quarter) ? quarter * 3 : quarter + '*3');
  } else {
    parts.push(0); // months start at zero in JS
  }

  if (d.date !== undefined) {
    parts.push(d.date);
  } else if (d.day !== undefined) {
    // HACK: Day only works as a standalone unit
    // This is only correct because we always set year to 2006 for day
    var day = normalize ? normalizeDay(d.day) : d.day;
    parts.push(isNumber(day) ? day + 1 : day + '+1');
  } else {
    parts.push(1); // Date starts at 1 in JS
  } // Note: can't use TimeUnit enum here as importing it will create
  // circular dependency problem!

  // Remaining sub-day parts default to 0 when absent.
  for (var _i4 = 0, _arr2 = ['hours', 'minutes', 'seconds', 'milliseconds']; _i4 < _arr2.length; _i4++) {
    var timeUnit = _arr2[_i4];
    var unit = d[timeUnit];
    parts.push(typeof unit === 'undefined' ? 0 : unit);
  }

  return parts;
}
|
|
/**
|
|
* Return Vega expression for a date time.
|
|
*
|
|
* @param d the date time.
|
|
* @returns the Vega expression.
|
|
*/
|
|
|
|
|
|
function dateTimeToExpr(d) {
  // Normalize the parts (quarter/month/day names become numbers).
  var argList = dateTimeParts(d, true).join(', ');
  return (d.utc ? 'utc(' : 'datetime(') + argList + ')';
}
/**
 * Return Vega expression for a date time expression.
 *
 * @param d the internal date time object with expression.
 * @returns the Vega expression.
 */
function dateTimeExprToExpr(d) {
  // Parts are already expressions, so no normalization here.
  var argList = dateTimeParts(d, false).join(', ');
  return (d.utc ? 'utc(' : 'datetime(') + argList + ')';
}
|
|
/**
|
|
* @param d the date time.
|
|
* @returns the timestamp.
|
|
*/
|
|
|
|
|
|
/**
 * @param d the date time.
 * @returns the timestamp.
 */
function dateTimeToTimestamp(d) {
  var parts = dateTimeParts(d, true);
  if (d.utc) {
    // Date.UTC(...) yields a UTC timestamp directly.
    return +new Date(Date.UTC.apply(Date, parts));
  }
  // Construct a local Date from the parts and coerce to its timestamp.
  return +_construct(Date, parts);
}
|
|
/** Time Unit that only corresponds to only one part of Date objects. */
|
|
|
|
|
|
var LOCAL_SINGLE_TIMEUNIT_INDEX = {
  year: 1,
  quarter: 1,
  month: 1,
  week: 1,
  day: 1,
  dayofyear: 1,
  date: 1,
  hours: 1,
  minutes: 1,
  seconds: 1,
  milliseconds: 1
};
// Ordered list of the single time-unit part names. NOTE: the insertion order
// above (coarsest to finest) matters — it drives iteration order in
// getTimeUnitParts and fieldExpr.
var TIMEUNIT_PARTS = keys(LOCAL_SINGLE_TIMEUNIT_INDEX);
|
|
|
|
function isLocalSingleTimeUnit(timeUnit) {
  return Boolean(LOCAL_SINGLE_TIMEUNIT_INDEX[timeUnit]);
}

/** True when the time unit name carries the 'utc' prefix. */
function isUTCTimeUnit(t) {
  return t.slice(0, 3) === 'utc';
}

/** Strip the 'utc' prefix, yielding the local time unit name. */
function getLocalTimeUnit(t) {
  return t.slice(3);
} // In order of increasing specificity
|
|
|
|
|
|
// d3-time-format templates used by timeUnitSpecifierExpression for axis labels.
// (Each format string deliberately ends with a trailing space.)
var VEGALITE_TIMEFORMAT = {
  'year-month': '%b %Y ',
  'year-month-date': '%b %d, %Y '
};
|
|
|
|
/** List the single time-unit parts contained in a (possibly multi-part) time unit, coarsest first. */
function getTimeUnitParts(timeUnit) {
  return TIMEUNIT_PARTS.filter(function (part) {
    return containsTimeUnit(timeUnit, part);
  });
}
|
|
/** Returns true if fullTimeUnit contains the timeUnit, false otherwise. */
|
|
|
|
|
|
function containsTimeUnit(fullTimeUnit, timeUnit) {
  var index = fullTimeUnit.indexOf(timeUnit);

  if (index < 0) {
    return false;
  }

  // 'seconds' found inside 'milliseconds' is not a real match.
  if (timeUnit === 'seconds' && index > 0 && fullTimeUnit.charAt(index - 1) === 'i') {
    return false;
  }

  // 'day' found at the start of 'dayofyear' is not a real match.
  if (timeUnit === 'day' && fullTimeUnit.length > index + 3 && fullTimeUnit.charAt(index + 3) === 'o') {
    return false;
  }

  // 'year' found inside 'dayofyear' is not a real match.
  if (timeUnit === 'year' && index > 0 && fullTimeUnit.charAt(index - 1) === 'f') {
    return false;
  }

  return true;
}
|
|
/**
|
|
* Returns Vega expression for a given timeUnit and fieldRef
|
|
*/
|
|
|
|
|
|
function fieldExpr(fullTimeUnit, field) {
  // Optional third argument; `end: true` asks for the (exclusive) end of the
  // time-unit interval instead of its start.
  var _ref = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {
    end: false
  },
      end = _ref.end;

  var fieldRef = accessPathWithDatum(field);
  var utc = isUTCTimeUnit(fullTimeUnit) ? 'utc' : '';

  // Vega expression extracting one date part from the field.
  function func(timeUnit) {
    if (timeUnit === 'quarter') {
      // quarter starting at 0 (0,3,6,9).
      return "(".concat(utc, "quarter(").concat(fieldRef, ")-1)");
    } else {
      return "".concat(utc).concat(timeUnit, "(").concat(fieldRef, ")");
    }
  }

  // Build a date-expression object with one entry per contained part.
  // TIMEUNIT_PARTS is ordered coarsest-to-finest, so lastTimeUnit ends up as
  // the finest part present.
  var lastTimeUnit;
  var dateExpr = {};

  var _iterator15 = _createForOfIteratorHelper(TIMEUNIT_PARTS),
      _step15;

  try {
    for (_iterator15.s(); !(_step15 = _iterator15.n()).done;) {
      var part = _step15.value;

      if (containsTimeUnit(fullTimeUnit, part)) {
        dateExpr[part] = func(part);
        lastTimeUnit = part;
      }
    }
  } catch (err) {
    _iterator15.e(err);
  } finally {
    _iterator15.f();
  }

  // For the interval end, bump the finest contained unit by one.
  if (end) {
    dateExpr[lastTimeUnit] += '+1';
  }

  return dateTimeExprToExpr(dateExpr);
}
|
|
|
|
/** Vega `timeUnitSpecifier(...)` expression for a time unit, or undefined when there is none. */
function timeUnitSpecifierExpression(timeUnit) {
  if (!timeUnit) {
    return undefined;
  }
  var parts = getTimeUnitParts(timeUnit);
  return 'timeUnitSpecifier(' + fastJsonStableStringify(parts) + ', ' + fastJsonStableStringify(VEGALITE_TIMEFORMAT) + ')';
}
|
|
/**
|
|
* Returns the signal expression used for axis labels for a time unit.
|
|
*/
|
|
|
|
|
|
/**
 * Returns the signal expression used for axis labels for a time unit.
 */
function formatExpression(timeUnit, field, isUTCScale) {
  if (!timeUnit) {
    return undefined;
  }

  var specifier = timeUnitSpecifierExpression(timeUnit);

  // We only use utcFormat for utc scale. For utc time units, the data is
  // already converted as a part of the timeUnit transform, so those use
  // timeFormat to avoid shifting the time twice.
  var prefix = isUTCScale || isUTCTimeUnit(timeUnit) ? 'utc' : 'time';
  return prefix + 'Format(' + field + ', ' + specifier + ')';
}
|
|
|
|
/** Normalize a time unit (string shorthand or params object) into a params object with a local unit + utc flag. */
function normalizeTimeUnit(timeUnit) {
  if (!timeUnit) {
    return undefined;
  }

  var params;
  if (isString(timeUnit)) {
    // String shorthand becomes {unit: ...}.
    params = {
      unit: timeUnit
    };
  } else if (isObject(timeUnit)) {
    // Copy so the caller's object is not mutated below.
    params = Object.assign({}, timeUnit, timeUnit.unit ? {
      unit: timeUnit.unit
    } : {});
  }

  // Fold a 'utc' prefix into the explicit utc flag.
  if (isUTCTimeUnit(params.unit)) {
    params.utc = true;
    params.unit = getLocalTimeUnit(params.unit);
  }

  return params;
}
|
|
|
|
// Build a deterministic name fragment for a time unit (used in field names).
function timeUnitToString(tu) {
  // Split the normalized time unit into its utc flag and the remaining params.
  var _a = normalizeTimeUnit(tu),
      utc = _a.utc,
      rest = __rest(_a, ["utc"]);

  if (rest.unit) {
    // Unit-based key: optional 'utc' prefix + unit name + any extra params
    // (non-unit params get an "_<name>_<value>" infix).
    return (utc ? 'utc' : '') + keys(rest).map(function (p) {
      return varName("".concat(p === 'unit' ? '' : "_".concat(p, "_")).concat(rest[p]));
    }).join('');
  } else {
    // when maxbins is specified instead of units
    return (utc ? 'utc' : '') + 'timeunit' + keys(rest).map(function (p) {
      return varName("_".concat(p, "_").concat(rest[p]));
    }).join('');
  }
}
|
|
|
|
function isSignalRef(o) {
  // Falsy inputs are returned as-is (matching the original short-circuit).
  return o ? Boolean(o['signal']) : o;
}

function isVgRangeStep(range) {
  return Boolean(range['step']);
}
|
|
|
|
/** Domain union over fields without a shared data reference. */
function isDataRefUnionedDomain(domain) {
  if (isArray(domain)) {
    return false;
  }
  return 'fields' in domain && !('data' in domain);
}

/** Domain union over fields of a single data reference. */
function isFieldRefUnionDomain(domain) {
  if (isArray(domain)) {
    return false;
  }
  return 'fields' in domain && 'data' in domain;
}

/** Plain single-field, single-data domain reference. */
function isDataRefDomain(domain) {
  if (isArray(domain)) {
    return false;
  }
  return 'field' in domain && 'data' in domain;
}
|
|
|
|
// Index of the Vega mark-config properties that Vega-Lite passes through.
// Only the keys matter (the 1 values are placeholders); see keys() below.
var VG_MARK_CONFIG_INDEX = {
  aria: 1,
  description: 1,
  ariaRole: 1,
  ariaRoleDescription: 1,
  blend: 1,
  opacity: 1,
  fill: 1,
  fillOpacity: 1,
  stroke: 1,
  strokeCap: 1,
  strokeWidth: 1,
  strokeOpacity: 1,
  strokeDash: 1,
  strokeDashOffset: 1,
  strokeJoin: 1,
  strokeOffset: 1,
  strokeMiterLimit: 1,
  startAngle: 1,
  endAngle: 1,
  padAngle: 1,
  innerRadius: 1,
  outerRadius: 1,
  size: 1,
  shape: 1,
  interpolate: 1,
  tension: 1,
  orient: 1,
  align: 1,
  baseline: 1,
  text: 1,
  dir: 1,
  dx: 1,
  dy: 1,
  ellipsis: 1,
  limit: 1,
  radius: 1,
  theta: 1,
  angle: 1,
  font: 1,
  fontSize: 1,
  fontWeight: 1,
  fontStyle: 1,
  lineBreak: 1,
  lineHeight: 1,
  cursor: 1,
  href: 1,
  tooltip: 1,
  cornerRadius: 1,
  cornerRadiusTopLeft: 1,
  cornerRadiusTopRight: 1,
  cornerRadiusBottomLeft: 1,
  cornerRadiusBottomRight: 1,
  aspect: 1,
  width: 1,
  height: 1 // commented below are vg channel that do not have mark config.
  // x: 1,
  // y: 1,
  // x2: 1,
  // y2: 1,
  // xc'|'yc'
  // clip: 1,
  // path: 1,
  // url: 1,

};
// Array form of the index above (insertion order preserved by keys()).
var VG_MARK_CONFIGS = keys(VG_MARK_CONFIG_INDEX);
// Index of all Vega mark types.
var VG_MARK_INDEX = {
  arc: 1,
  area: 1,
  group: 1,
  image: 1,
  line: 1,
  path: 1,
  rect: 1,
  rule: 1,
  shape: 1,
  symbol: 1,
  text: 1,
  trail: 1
}; // Vega's cornerRadius channels.

var VG_CORNERRADIUS_CHANNELS = ['cornerRadius', 'cornerRadiusTopLeft', 'cornerRadiusTopRight', 'cornerRadiusBottomLeft', 'cornerRadiusBottomRight'];
|
|
|
|
function isSelectionPredicate(predicate) {
  // Nullish input yields undefined (optional-chaining semantics).
  if (predicate === null || predicate === undefined) {
    return undefined;
  }
  return predicate['selection'];
}

function isFieldEqualPredicate(p) {
  return p && Boolean(p.field) && p.equal !== undefined;
}

function isFieldLTPredicate(p) {
  return p && Boolean(p.field) && p.lt !== undefined;
}

function isFieldLTEPredicate(p) {
  return p && Boolean(p.field) && p.lte !== undefined;
}

function isFieldGTPredicate(p) {
  return p && Boolean(p.field) && p.gt !== undefined;
}

function isFieldGTEPredicate(p) {
  return p && Boolean(p.field) && p.gte !== undefined;
}
|
|
|
|
function isFieldRangePredicate(p) {
  if (!(p && p.field)) {
    return false;
  }
  var range = p.range;
  // Either a two-element array or a signal reference qualifies.
  if (isArray(range) && range.length === 2) {
    return true;
  }
  return isSignalRef(range) ? true : false;
}

function isFieldOneOfPredicate(p) {
  // `in` is accepted as a backward-compatible alias of `oneOf`.
  return p && !!p.field && (isArray(p.oneOf) || isArray(p.in));
}

function isFieldValidPredicate(p) {
  return p && !!p.field && p.valid !== undefined;
}

function isFieldPredicate(p) {
  return (
    isFieldOneOfPredicate(p) ||
    isFieldEqualPredicate(p) ||
    isFieldRangePredicate(p) ||
    isFieldLTPredicate(p) ||
    isFieldGTPredicate(p) ||
    isFieldLTEPredicate(p) ||
    isFieldGTEPredicate(p)
  );
}
|
|
|
|
/** Vega expression for one predicate value, time-wrapped for time-unit fields. */
function predicateValueExpr(v, timeUnit) {
  var options = {
    timeUnit: timeUnit,
    wrapTime: true
  };
  return valueExpr(v, options);
}

/** Map predicateValueExpr over a list of values. */
function predicateValuesExpr(vals, timeUnit) {
  return vals.map(function (value) {
    return predicateValueExpr(value, timeUnit);
  });
} // This method is used by Voyager. Do not change its behavior without changing Voyager.
|
|
|
|
|
|
// Compile a field predicate into a Vega filter expression string.
function fieldFilterExpression(predicate) {
  // When true (default), closed ranges compile to inrange(); otherwise to
  // explicit >= / <= comparisons.
  var useInRange = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;

  var _a;

  var field = predicate.field;
  var timeUnit = (_a = normalizeTimeUnit(predicate.timeUnit)) === null || _a === void 0 ? void 0 : _a.unit;
  var fieldExpr$1 = timeUnit ? // For timeUnit, cast into integer with time() so we can use ===, inrange, indexOf to compare values directly.
  // TODO: We calculate timeUnit on the fly here. Consider if we would like to consolidate this with timeUnit pipeline
  // TODO: support utc
  'time(' + fieldExpr(timeUnit, field) + ')' : _vgField(predicate, {
    expr: 'datum'
  });

  // Dispatch on the kind of comparison the predicate encodes.
  if (isFieldEqualPredicate(predicate)) {
    return fieldExpr$1 + '===' + predicateValueExpr(predicate.equal, timeUnit);
  } else if (isFieldLTPredicate(predicate)) {
    var upper = predicate.lt;
    return "".concat(fieldExpr$1, "<").concat(predicateValueExpr(upper, timeUnit));
  } else if (isFieldGTPredicate(predicate)) {
    var lower = predicate.gt;
    return "".concat(fieldExpr$1, ">").concat(predicateValueExpr(lower, timeUnit));
  } else if (isFieldLTEPredicate(predicate)) {
    var _upper = predicate.lte;
    return "".concat(fieldExpr$1, "<=").concat(predicateValueExpr(_upper, timeUnit));
  } else if (isFieldGTEPredicate(predicate)) {
    var _lower = predicate.gte;
    return "".concat(fieldExpr$1, ">=").concat(predicateValueExpr(_lower, timeUnit));
  } else if (isFieldOneOfPredicate(predicate)) {
    // NOTE(review): only `oneOf` is read here even though isFieldOneOfPredicate
    // also accepts the legacy `in` alias — confirm against upstream behavior.
    return "indexof([".concat(predicateValuesExpr(predicate.oneOf, timeUnit).join(','), "], ").concat(fieldExpr$1, ") !== -1");
  } else if (isFieldValidPredicate(predicate)) {
    return fieldValidPredicate(fieldExpr$1, predicate.valid);
  } else if (isFieldRangePredicate(predicate)) {
    var range = predicate.range;

    // A signal-valued range is indexed at runtime; otherwise take the array ends.
    var _lower2 = isSignalRef(range) ? {
      signal: "".concat(range.signal, "[0]")
    } : range[0];

    var _upper2 = isSignalRef(range) ? {
      signal: "".concat(range.signal, "[1]")
    } : range[1];

    if (_lower2 !== null && _upper2 !== null && useInRange) {
      return 'inrange(' + fieldExpr$1 + ', [' + predicateValueExpr(_lower2, timeUnit) + ', ' + predicateValueExpr(_upper2, timeUnit) + '])';
    }

    // Open-ended range: emit only the bound(s) that are present.
    var exprs = [];

    if (_lower2 !== null) {
      exprs.push("".concat(fieldExpr$1, " >= ").concat(predicateValueExpr(_lower2, timeUnit)));
    }

    if (_upper2 !== null) {
      exprs.push("".concat(fieldExpr$1, " <= ").concat(predicateValueExpr(_upper2, timeUnit)));
    }

    return exprs.length > 0 ? exprs.join(' && ') : 'true';
  }
  /* istanbul ignore next: it should never reach here */

  throw new Error("Invalid field predicate: ".concat(JSON.stringify(predicate)));
}
|
|
|
|
// Expression testing that a field expression is a valid, finite number
// (or the negation of that test when `valid` is false).
function fieldValidPredicate(fieldExpr) {
  var valid = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
  return valid
    ? "isValid(" + fieldExpr + ") && isFinite(+" + fieldExpr + ")"
    : "!isValid(" + fieldExpr + ") || !isFinite(+" + fieldExpr + ")";
}
|
|
|
|
/**
 * Normalize a predicate: for field predicates that carry a timeUnit, replace
 * the timeUnit with its normalized unit; any other predicate passes through.
 */
function normalizePredicate(f) {
  var _a;

  if (!(isFieldPredicate(f) && f.timeUnit)) {
    return f;
  }

  return Object.assign(Object.assign({}, f), {
    timeUnit: (_a = normalizeTimeUnit(f.timeUnit)) === null || _a === void 0 ? void 0 : _a.unit
  });
}
|
|
/**
 * Data type based on level of measurement
 */
// Index of all Vega-Lite measurement types (value equals the type name).
var Type = {
  quantitative: 'quantitative',
  ordinal: 'ordinal',
  temporal: 'temporal',
  nominal: 'nominal',
  geojson: 'geojson'
};
// Convenience aliases for the individual type names.
var QUANTITATIVE = Type.quantitative;
var ORDINAL = Type.ordinal;
var TEMPORAL = Type.temporal;
var NOMINAL = Type.nominal;
var GEOJSON = Type.geojson;
|
|
/**
 * Get full, lowercase type name for a given type.
 * Accepts either a one-letter shorthand ('q', 't', 'o', 'n') or a full name,
 * case-insensitively.
 * @param type
 * @return Full type name, or undefined for invalid input.
 */
function getFullName(type) {
  if (!type) {
    // Invalid input (undefined/null/empty) -> undefined type.
    return undefined;
  }

  var lower = type.toLowerCase();

  switch (lower) {
    case 'q':
    case QUANTITATIVE:
      return 'quantitative';

    case 't':
    case TEMPORAL:
      return 'temporal';

    case 'o':
    case ORDINAL:
      return 'ordinal';

    case 'n':
    case NOMINAL:
      return 'nominal';

    case GEOJSON:
      return 'geojson';
  }

  // Unrecognized (but truthy) input also yields undefined.
  return undefined;
}
|
|
|
|
// Index of all supported Vega scale types, grouped by category.
var ScaleType = {
  // Continuous - Quantitative
  LINEAR: 'linear',
  LOG: 'log',
  POW: 'pow',
  SQRT: 'sqrt',
  SYMLOG: 'symlog',
  IDENTITY: 'identity',
  SEQUENTIAL: 'sequential',
  // Continuous - Time
  TIME: 'time',
  UTC: 'utc',
  // Discretizing scales
  QUANTILE: 'quantile',
  QUANTIZE: 'quantize',
  THRESHOLD: 'threshold',
  BIN_ORDINAL: 'bin-ordinal',
  // Discrete scales
  ORDINAL: 'ordinal',
  POINT: 'point',
  BAND: 'band'
};
|
|
/**
 * Index for scale categories -- only scale of the same categories can be merged together.
 * Current implementation is trying to be conservative and avoid merging scale type that might not work together
 */
var SCALE_CATEGORY_INDEX = {
  linear: 'numeric',
  log: 'numeric',
  pow: 'numeric',
  sqrt: 'numeric',
  symlog: 'numeric',
  identity: 'numeric',
  sequential: 'numeric',
  time: 'time',
  utc: 'time',
  // ordinal and bin-ordinal are each their own category (never merged with others).
  ordinal: 'ordinal',
  'bin-ordinal': 'bin-ordinal',
  point: 'ordinal-position',
  band: 'ordinal-position',
  quantile: 'discretizing',
  quantize: 'discretizing',
  threshold: 'discretizing'
};
|
|
/**
 * Whether the two given scale types can be merged together.
 * Scales merge when they share a category; the 'ordinal-position' and 'time'
 * categories are additionally compatible with each other.
 */
function scaleCompatible(scaleType1, scaleType2) {
  var cat1 = SCALE_CATEGORY_INDEX[scaleType1];
  var cat2 = SCALE_CATEGORY_INDEX[scaleType2];

  if (cat1 === cat2) {
    return true;
  }

  return cat1 === 'ordinal-position' && cat2 === 'time' || cat2 === 'ordinal-position' && cat1 === 'time';
}
|
|
/**
 * Index for scale precedence -- high score = higher priority for merging.
 */
var SCALE_PRECEDENCE_INDEX = {
  // numeric
  linear: 0,
  log: 1,
  pow: 1,
  sqrt: 1,
  symlog: 1,
  identity: 1,
  sequential: 1,
  // time
  time: 0,
  utc: 0,
  // ordinal-position -- these have higher precedence than continuous scales as they support more types of data
  point: 10,
  band: 11,
  // non grouped types
  ordinal: 0,
  'bin-ordinal': 0,
  quantile: 0,
  quantize: 0,
  threshold: 0
};
|
|
/**
 * Return the merge precedence score for a scale type
 * (higher score = higher priority when merging scales).
 */
function scaleTypePrecedence(scaleType) {
  var precedence = SCALE_PRECEDENCE_INDEX[scaleType];
  return precedence;
}
|
|
|
|
// Scale types whose domain and range are both continuous.
var CONTINUOUS_TO_CONTINUOUS_SCALES = ['linear', 'log', 'pow', 'sqrt', 'symlog', 'time', 'utc'];
var CONTINUOUS_TO_CONTINUOUS_INDEX = toSet(CONTINUOUS_TO_CONTINUOUS_SCALES);
// Continuous scale types over quantitative (numeric) data only.
var QUANTITATIVE_SCALES = ['linear', 'log', 'pow', 'sqrt', 'symlog'];
var QUANTITATIVE_SCALES_INDEX = toSet(QUANTITATIVE_SCALES);

// Whether the scale type is a quantitative (numeric continuous) scale.
function isQuantitative(type) {
  return type in QUANTITATIVE_SCALES_INDEX;
}

// Discretizing scale types: continuous domain, discrete range.
var CONTINUOUS_TO_DISCRETE_SCALES = ['quantile', 'quantize', 'threshold'];
var CONTINUOUS_TO_DISCRETE_INDEX = toSet(CONTINUOUS_TO_DISCRETE_SCALES);
// All scale types with a continuous domain.
var CONTINUOUS_DOMAIN_SCALES = CONTINUOUS_TO_CONTINUOUS_SCALES.concat(['quantile', 'quantize', 'threshold', 'sequential', 'identity']);
var CONTINUOUS_DOMAIN_INDEX = toSet(CONTINUOUS_DOMAIN_SCALES);
// All scale types with a discrete domain.
var DISCRETE_DOMAIN_SCALES = ['ordinal', 'bin-ordinal', 'point', 'band'];
var DISCRETE_DOMAIN_INDEX = toSet(DISCRETE_DOMAIN_SCALES);

// Whether the scale type has a discrete domain.
function hasDiscreteDomain(type) {
  return type in DISCRETE_DOMAIN_INDEX;
}

// Whether the scale type has a continuous domain.
function hasContinuousDomain(type) {
  return type in CONTINUOUS_DOMAIN_INDEX;
}

// Whether both domain and range of the scale type are continuous.
function isContinuousToContinuous(type) {
  return type in CONTINUOUS_TO_CONTINUOUS_INDEX;
}

// Whether the scale type discretizes a continuous domain.
function isContinuousToDiscrete(type) {
  return type in CONTINUOUS_TO_DISCRETE_INDEX;
}
|
|
|
|
// Default values for the top-level scale config.
var defaultScaleConfig = {
  pointPadding: 0.5,
  barBandPaddingInner: 0.1,
  rectBandPaddingInner: 0,
  minBandSize: 2,
  minFontSize: 8,
  maxFontSize: 40,
  minOpacity: 0.3,
  maxOpacity: 0.8,
  // FIXME: revise if these *can* become ratios of width/height step
  minSize: 9,
  minStrokeWidth: 1,
  maxStrokeWidth: 4,
  quantileCount: 4,
  quantizeCount: 4
};
|
|
|
|
// Whether a scheme is an extended scheme object (has a `name`) rather than a plain string.
function isExtendedScheme(scheme) {
  if (isString(scheme)) {
    return false;
  }

  return !!scheme['name'];
}

// Whether a scale domain is a selection domain; returns its raw `selection` value.
function isSelectionDomain(domain) {
  return domain === null || domain === void 0 ? void 0 : domain['selection'];
}

// Whether a scale domain is a `unionWith` domain definition; returns its raw `unionWith` value.
function isDomainUnionWith(domain) {
  return domain ? domain['unionWith'] : domain;
}
|
|
|
|
// Index of all Vega-Lite scale properties (value 1 marks membership).
var SCALE_PROPERTY_INDEX = {
  type: 1,
  domain: 1,
  domainMax: 1,
  domainMin: 1,
  domainMid: 1,
  align: 1,
  range: 1,
  rangeMax: 1,
  rangeMin: 1,
  scheme: 1,
  bins: 1,
  // Other properties
  reverse: 1,
  round: 1,
  // quantitative / time
  clamp: 1,
  nice: 1,
  // quantitative
  base: 1,
  exponent: 1,
  constant: 1,
  interpolate: 1,
  zero: 1,
  // band/point
  padding: 1,
  paddingInner: 1,
  paddingOuter: 1
};

// The same index with type/domain/range/rangeMax/rangeMin/scheme removed.
var NON_TYPE_DOMAIN_RANGE_VEGA_SCALE_PROPERTY_INDEX = __rest(SCALE_PROPERTY_INDEX, ["type", "domain", "range", "rangeMax", "rangeMin", "scheme"]);

// Key list of the filtered index above.
var NON_TYPE_DOMAIN_RANGE_VEGA_SCALE_PROPERTIES = keys(NON_TYPE_DOMAIN_RANGE_VEGA_SCALE_PROPERTY_INDEX);
|
|
|
|
// Whether the given Vega scale type supports the given scale property.
// Property names not listed fall through the switch and yield undefined (falsy).
function scaleTypeSupportProperty(scaleType, propName) {
  switch (propName) {
    case 'type':
    case 'domain':
    case 'reverse':
    case 'range':
      // Supported by every scale type.
      return true;

    case 'scheme':
    case 'interpolate':
      return !contains(['point', 'band', 'identity'], scaleType);

    case 'bins':
      return !contains(['point', 'band', 'identity', 'ordinal'], scaleType);

    case 'round':
      return isContinuousToContinuous(scaleType) || scaleType === 'band' || scaleType === 'point';

    case 'padding':
    case 'rangeMin':
    case 'rangeMax':
      return isContinuousToContinuous(scaleType) || contains(['point', 'band'], scaleType);

    case 'paddingOuter':
    case 'align':
      return contains(['point', 'band'], scaleType);

    case 'paddingInner':
      return scaleType === 'band';

    case 'domainMax':
    case 'domainMid':
    case 'domainMin':
    case 'clamp':
      return isContinuousToContinuous(scaleType);

    case 'nice':
      return isContinuousToContinuous(scaleType) || scaleType === 'quantize' || scaleType === 'threshold';

    case 'exponent':
      return scaleType === 'pow';

    case 'base':
      return scaleType === 'log';

    case 'constant':
      return scaleType === 'symlog';

    case 'zero':
      return hasContinuousDomain(scaleType) && !contains(['log', 'time', 'utc', 'threshold', 'quantile' // quantile depends on distribution so zero does not matter
      ], scaleType);
  }
}
|
|
/**
 * Returns undefined if the input channel supports the input scale property name
 * (otherwise returns a warning message describing the incompatibility).
 */
function channelScalePropertyIncompatability(channel, propName) {
  switch (propName) {
    case 'interpolate':
    case 'scheme':
    case 'domainMid':
      // These properties only make sense for color-like channels.
      if (!isColorChannel(channel)) {
        return cannotUseScalePropertyWithNonColor(channel);
      }

      return undefined;

    case 'align':
    case 'type':
    case 'bins':
    case 'domain':
    case 'domainMax':
    case 'domainMin':
    case 'range':
    case 'base':
    case 'exponent':
    case 'constant':
    case 'nice':
    case 'padding':
    case 'paddingInner':
    case 'paddingOuter':
    case 'rangeMax':
    case 'rangeMin':
    case 'reverse':
    case 'round':
    case 'clamp':
    case 'zero':
      return undefined;
    // GOOD!
  }
}
|
|
|
|
/**
 * Whether `specifiedType` is a valid scale type for a field of the given data
 * type. An undefined scale type (no explicit choice) is always acceptable.
 */
function scaleTypeSupportDataType(specifiedType, fieldDefType) {
  if (contains([ORDINAL, NOMINAL], fieldDefType)) {
    // Discrete data types require discrete-domain scales (or no explicit type).
    return specifiedType === undefined || hasDiscreteDomain(specifiedType);
  }

  if (fieldDefType === TEMPORAL) {
    return contains([ScaleType.TIME, ScaleType.UTC, undefined], specifiedType);
  }

  if (fieldDefType === QUANTITATIVE) {
    return contains([ScaleType.LOG, ScaleType.POW, ScaleType.SQRT, ScaleType.SYMLOG, ScaleType.QUANTILE, ScaleType.QUANTIZE, ScaleType.THRESHOLD, ScaleType.LINEAR, undefined], specifiedType);
  }

  // Other data types (e.g. geojson) accept anything.
  return true;
}
|
|
|
|
// Whether the given encoding channel supports the given scale type.
// Non-scale channels always return false; unknown channels fall through (undefined).
function channelSupportScaleType(channel, scaleType) {
  if (!isScaleChannel(channel)) {
    return false;
  }

  switch (channel) {
    case X:
    case Y:
    case THETA:
    case RADIUS:
      return isContinuousToContinuous(scaleType) || contains(['band', 'point'], scaleType);

    case SIZE: // TODO: size and opacity can support ordinal with more modification

    case STROKEWIDTH:
    case OPACITY:
    case FILLOPACITY:
    case STROKEOPACITY:
    case ANGLE:
      // Although it generally doesn't make sense to use band with size and opacity,
      // it can also work since we use band: 0.5 to get midpoint.
      return isContinuousToContinuous(scaleType) || isContinuousToDiscrete(scaleType) || contains(['band', 'point', 'ordinal'], scaleType);

    case COLOR:
    case FILL:
    case STROKE:
      return scaleType !== 'band';
    // band does not make sense with color

    case STROKEDASH:
      return scaleType === 'ordinal' || isContinuousToDiscrete(scaleType);

    case SHAPE:
      return scaleType === 'ordinal';
    // shape = lookup only
  }
}
|
|
|
|
// Mid-point value ref for a channel, wrapped (for applicable field defs) with a
// test mapping invalid values to a default position when the scale is
// continuous and excludes zero.
function midPointRefWithPositionInvalidTest(params) {
  var channel = params.channel,
      channelDef = params.channelDef,
      markDef = params.markDef,
      scale = params.scale,
      config = params.config;
  var ref = midPoint(params); // Wrap to check if the positional value is invalid, if so, plot the point on the min value

  if ( // Only this for field def without counting aggregate (as count wouldn't be null)
  isFieldDef(channelDef) && !isCountingAggregateOp(channelDef.aggregate) && // and only for continuous scale without zero (otherwise, null / invalid will be interpreted as zero, which doesn't cause layout problem)
  scale && isContinuousToContinuous(scale.get('type')) && scale.get('zero') === false) {
    return wrapPositionInvalidTest({
      fieldDef: channelDef,
      channel: channel,
      markDef: markDef,
      ref: ref,
      config: config
    });
  }

  return ref;
}
|
|
|
|
// Prepend an invalid-value test ref to `ref`, unless the mark is a path mark
// or the `invalid` mark property/config is null (no invalid filtering).
function wrapPositionInvalidTest(_ref2) {
  var fieldDef = _ref2.fieldDef,
      channel = _ref2.channel,
      markDef = _ref2.markDef,
      ref = _ref2.ref,
      config = _ref2.config;

  if (isPathMark(markDef.type)) {
    // path mark already use defined to skip points, no need to do it here.
    return ref;
  }

  var invalid = getMarkPropOrConfig('invalid', markDef, config);

  if (invalid === null) {
    // if there is no invalid filter, don't do the invalid test
    return ref;
  }

  // Vega evaluates ref arrays in order; the test ref takes effect when it matches.
  return [fieldInvalidTestValueRef(fieldDef, channel), ref];
}
|
|
|
|
// Value ref that tests for an invalid field value and, when invalid, places the
// mark at the channel's "zero" position (group height for y; 0 otherwise).
function fieldInvalidTestValueRef(fieldDef, channel) {
  var test = fieldInvalidPredicate(fieldDef, true);
  var mainChannel = getMainRangeChannel(channel); // we can cast here as the output can't be other things.

  var zeroValueRef = mainChannel === 'y' ? {
    field: {
      group: 'height'
    }
  } : // x / angle / radius can all use 0
  {
    value: 0
  };
  return Object.assign({
    test: test
  }, zeroValueRef);
}
|
|
|
|
// Predicate testing whether a datum's field is invalid (default) or valid
// (when `invalid` is false). Accepts a field expression string or a field def.
function fieldInvalidPredicate(field) {
  var invalid = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
  var expr = isString(field) ? field : _vgField(field, {
    expr: 'datum'
  });
  return fieldValidPredicate(expr, !invalid);
}

// Convert a datum definition's value into a Vega expression string.
function datumDefToExpr(datumDef) {
  var datum = datumDef.datum;
  return isDateTime(datum) ? dateTimeToExpr(datum) : "".concat(JSON.stringify(datum));
}
|
|
|
|
// Build a Vega value ref for a field def or datum def, optionally attaching a
// scale name and the encode-time offset/band mixins.
function valueRefForFieldOrDatumDef(fieldDef, scaleName, opt, encode) {
  var ref = {};

  if (scaleName) {
    ref.scale = scaleName;
  }

  if (isDatumDef(fieldDef)) {
    var datum = fieldDef.datum;

    if (isDateTime(datum)) {
      // DateTime datum becomes a signal expression.
      ref.signal = dateTimeToExpr(datum);
    } else if (isSignalRef(datum)) {
      ref.signal = datum.signal;
    } else {
      ref.value = datum;
    }
  } else {
    // Field def: reference the (possibly transformed) field name.
    ref.field = _vgField(fieldDef, opt);
  }

  if (encode) {
    var offset = encode.offset,
        band = encode.band;

    if (offset) {
      ref.offset = offset;
    }

    if (band) {
      ref.band = band;
    }
  }

  return ref;
}
|
|
/**
 * Signal that returns the middle of a bin from start and end field. Should only be used with x and y.
 */
function interpolatedSignalRef(_ref3) {
  var scaleName = _ref3.scaleName,
      fieldOrDatumDef = _ref3.fieldOrDatumDef,
      fieldOrDatumDef2 = _ref3.fieldOrDatumDef2,
      offset = _ref3.offset,
      startSuffix = _ref3.startSuffix,
      _ref3$band = _ref3.band,
      band = _ref3$band === void 0 ? 0.5 : _ref3$band;
  // Use `datum`-scoped expressions only for a strictly interior band position.
  var expr = 0 < band && band < 1 ? 'datum' : undefined;

  var start = _vgField(fieldOrDatumDef, {
    expr: expr,
    suffix: startSuffix
  });

  // End field: explicit secondary def if given, otherwise the `end` suffix of the primary field.
  var end = fieldOrDatumDef2 !== undefined ? _vgField(fieldOrDatumDef2, {
    expr: expr
  }) : _vgField(fieldOrDatumDef, {
    suffix: 'end',
    expr: expr
  });
  var ref = {};

  if (band === 0 || band === 1) {
    // At the extremes, reference the scale and the start/end field directly.
    ref.scale = scaleName;
    var val = band === 0 ? start : end;
    ref.field = val;
  } else {
    // Otherwise interpolate between start and end inside a scale() call.
    var datum = "".concat(band, " * ").concat(start, " + ").concat(1 - band, " * ").concat(end);
    ref.signal = "scale(\"".concat(scaleName, "\", ").concat(datum, ")");
  }

  if (offset) {
    ref.offset = offset;
  }

  return ref;
}
|
|
/**
 * @returns {VgValueRef} Value Ref for xc / yc or mid point for other channels.
 */
function midPoint(_ref4) {
  var channel = _ref4.channel,
      channelDef = _ref4.channelDef,
      channel2Def = _ref4.channel2Def,
      markDef = _ref4.markDef,
      config = _ref4.config,
      scaleName = _ref4.scaleName,
      scale = _ref4.scale,
      stack = _ref4.stack,
      offset = _ref4.offset,
      defaultRef = _ref4.defaultRef,
      band = _ref4.band;

  var _a; // TODO: datum support

  if (channelDef) {
    /* istanbul ignore else */
    if (isFieldOrDatumDef(channelDef)) {
      if (isTypedFieldDef(channelDef)) {
        // Resolve the band position from the field def / mark / stack when not supplied.
        band = band !== null && band !== void 0 ? band : getBand({
          channel: channel,
          fieldDef: channelDef,
          fieldDef2: channel2Def,
          markDef: markDef,
          stack: stack,
          config: config,
          isMidPoint: true
        });
        var bin = channelDef.bin,
            timeUnit = channelDef.timeUnit,
            type = channelDef.type;

        if (isBinning(bin) || band && timeUnit && type === TEMPORAL) {
          // Use middle only for x an y to place marks in the center between start and end of the bin range.
          // We do not use the mid point for other channels (e.g. size) so that properties of legends and marks match.
          if (stack && stack.impute) {
            // For stack, we computed bin_mid so we can impute.
            return valueRefForFieldOrDatumDef(channelDef, scaleName, {
              binSuffix: 'mid'
            }, {
              offset: offset
            });
          }

          if (band) {
            // if band = 0, no need to call interpolation
            // For non-stack, we can just calculate bin mid on the fly using signal.
            return interpolatedSignalRef({
              scaleName: scaleName,
              fieldOrDatumDef: channelDef,
              band: band,
              offset: offset
            });
          }

          return valueRefForFieldOrDatumDef(channelDef, scaleName, binRequiresRange(channelDef, channel) ? {
            binSuffix: 'range'
          } : {}, {
            offset: offset
          });
        } else if (isBinned(bin)) {
          if (isFieldDef(channel2Def)) {
            // Pre-binned data: interpolate between the two bin-boundary fields.
            return interpolatedSignalRef({
              scaleName: scaleName,
              fieldOrDatumDef: channelDef,
              fieldOrDatumDef2: channel2Def,
              band: band,
              offset: offset
            });
          } else {
            // Pre-binned data needs the matching secondary channel (x2/y2).
            var channel2 = channel === X ? X2 : Y2;
            warn(channelRequiredForBinned(channel2));
          }
        }
      }

      var _scaleType = scale === null || scale === void 0 ? void 0 : scale.get('type');

      return valueRefForFieldOrDatumDef(channelDef, scaleName, hasDiscreteDomain(_scaleType) ? {
        binSuffix: 'range'
      } : {}, // no need for bin suffix if there is no scale
      {
        offset: offset,
        // For band, to get mid point, need to offset by half of the band
        band: _scaleType === 'band' ? (_a = band !== null && band !== void 0 ? band : channelDef.band) !== null && _a !== void 0 ? _a : 0.5 : undefined
      });
    } else if (isValueDef(channelDef)) {
      var value = channelDef.value;
      var offsetMixins = offset ? {
        offset: offset
      } : {};
      return Object.assign(Object.assign({}, widthHeightValueOrSignalRef(channel, value)), offsetMixins);
    } // If channelDef is neither field def or value def, it's a condition-only def.
    // In such case, we will use default ref.

  }

  if (isFunction(defaultRef)) {
    // Lazily-computed default ref.
    defaultRef = defaultRef();
  }

  if (defaultRef) {
    // for non-position, ref could be undefined.
    return Object.assign(Object.assign({}, defaultRef), offset ? {
      offset: offset
    } : {});
  }

  return defaultRef;
}
|
|
/**
 * Convert special "width" and "height" values in Vega-Lite into Vega value ref.
 */
function widthHeightValueOrSignalRef(channel, value) {
  if (contains(['x', 'x2'], channel) && value === 'width') {
    // 'width' on an x-channel refers to the group width.
    return {
      field: {
        group: 'width'
      }
    };
  } else if (contains(['y', 'y2'], channel) && value === 'height') {
    // 'height' on a y-channel refers to the group height.
    return {
      field: {
        group: 'height'
      }
    };
  }

  // Any other value passes through as a plain value or signal ref.
  return signalOrValueRef(value);
}
|
|
|
|
// A format type is "custom" when it is truthy and neither of the built-in
// 'number' / 'time' types. Falsy inputs pass through unchanged.
function isCustomFormatType(formatType) {
  return formatType ? formatType !== 'number' && formatType !== 'time' : formatType;
}

// Build an expression invoking a registered custom format function, passing the
// JSON-encoded format argument only when one is given.
function customFormatExpr(formatType, field, format) {
  var formatArg = format ? ", " + JSON.stringify(format) : '';
  return formatType + "(" + field + formatArg + ")";
}

// Delimiter between the two endpoints of a formatted bin range (en dash).
var BIN_RANGE_DELIMITER = " \u2013 ";
|
|
|
|
// Build the text-format signal ref for a field/datum def, dispatching on custom
// format types, time formats, binned ranges, and number formats.
function formatSignalRef(_ref5) {
  var fieldOrDatumDef = _ref5.fieldOrDatumDef,
      format = _ref5.format,
      formatType = _ref5.formatType,
      expr = _ref5.expr,
      normalizeStack = _ref5.normalizeStack,
      config = _ref5.config;

  var _a, _b;

  if (isCustomFormatType(formatType)) {
    // Registered custom formatter: delegate entirely.
    return formatCustomType({
      fieldOrDatumDef: fieldOrDatumDef,
      format: format,
      formatType: formatType,
      expr: expr,
      config: config
    });
  }

  var field = fieldToFormat(fieldOrDatumDef, expr, normalizeStack);

  if (isFieldOrDatumDefForTimeFormat(fieldOrDatumDef)) {
    // Temporal: build a time/utc format expression from the time unit and config.
    var signal = timeFormatExpression(field, isFieldDef(fieldOrDatumDef) ? (_a = normalizeTimeUnit(fieldOrDatumDef.timeUnit)) === null || _a === void 0 ? void 0 : _a.unit : undefined, format, config.timeFormat, isScaleFieldDef(fieldOrDatumDef) && ((_b = fieldOrDatumDef.scale) === null || _b === void 0 ? void 0 : _b.type) === ScaleType.UTC);
    return signal ? {
      signal: signal
    } : undefined;
  }

  format = numberFormat(channelDefType(fieldOrDatumDef), format, config);

  if (isFieldDef(fieldOrDatumDef) && isBinning(fieldOrDatumDef.bin)) {
    // Binned fields format as a "start – end" range.
    var endField = _vgField(fieldOrDatumDef, {
      expr: expr,
      binSuffix: 'end'
    });

    return {
      signal: binFormatExpression(field, endField, format, formatType, config)
    };
  } else if (format || channelDefType(fieldOrDatumDef) === 'quantitative') {
    return {
      signal: "".concat(formatExpr(field, format))
    };
  } else {
    // No format: guard against invalid values and coerce to string.
    return {
      signal: "isValid(".concat(field, ") ? ").concat(field, " : \"\"+").concat(field)
    };
  }
}
|
|
|
|
// Build the expression string used as the "field" argument of a format call.
// For normalized stacks this is the difference `end - start`; datum defs
// format their literal value.
function fieldToFormat(fieldOrDatumDef, expr, normalizeStack) {
  if (!isFieldDef(fieldOrDatumDef)) {
    return datumDefToExpr(fieldOrDatumDef);
  }

  if (normalizeStack) {
    var endExpr = _vgField(fieldOrDatumDef, {
      expr: expr,
      suffix: 'end'
    });
    var startExpr = _vgField(fieldOrDatumDef, {
      expr: expr,
      suffix: 'start'
    });
    return endExpr + "-" + startExpr;
  }

  return _vgField(fieldOrDatumDef, {
    expr: expr
  });
}
|
|
|
|
// Build a signal ref that applies a custom format type, handling the binned
// "start – end" case the same way formatSignalRef does.
function formatCustomType(_ref6) {
  var fieldOrDatumDef = _ref6.fieldOrDatumDef,
      format = _ref6.format,
      formatType = _ref6.formatType,
      expr = _ref6.expr,
      normalizeStack = _ref6.normalizeStack,
      config = _ref6.config,
      field = _ref6.field;
  // The caller may pre-supply the field expression; otherwise derive it.
  field = field !== null && field !== void 0 ? field : fieldToFormat(fieldOrDatumDef, expr, normalizeStack);

  if (isFieldDef(fieldOrDatumDef) && isBinning(fieldOrDatumDef.bin)) {
    var endField = _vgField(fieldOrDatumDef, {
      expr: expr,
      binSuffix: 'end'
    });

    return {
      signal: binFormatExpression(field, endField, format, formatType, config)
    };
  }

  return {
    signal: customFormatExpr(formatType, field, format)
  };
}
|
|
|
|
// Resolve the `format` property for an axis/legend guide: time format for
// temporal defs, number format otherwise; custom format types are handled in
// the encode block instead, so they yield undefined here.
function guideFormat(fieldOrDatumDef, type, format, formatType, config, omitTimeFormatConfig // axis doesn't use config.timeFormat
) {
  var _a;

  if (isCustomFormatType(formatType)) {
    return undefined; // handled in encode block
  }

  if (isFieldOrDatumDefForTimeFormat(fieldOrDatumDef)) {
    var timeUnit = isFieldDef(fieldOrDatumDef) ? (_a = normalizeTimeUnit(fieldOrDatumDef.timeUnit)) === null || _a === void 0 ? void 0 : _a.unit : undefined;
    return timeFormat(format, timeUnit, config, omitTimeFormatConfig);
  }

  return numberFormat(type, format, config);
}
|
|
|
|
// Determine the `formatType` to attach to an axis/legend guide: keep an
// explicit signal/'number'/'time' type; otherwise force 'time' for temporal
// defs on non-time scales.
function guideFormatType(formatType, fieldOrDatumDef, scaleType) {
  var isExplicit = formatType && (isSignalRef(formatType) || formatType === 'number' || formatType === 'time');

  if (isExplicit) {
    return formatType;
  }

  if (isFieldOrDatumDefForTimeFormat(fieldOrDatumDef) && scaleType !== 'time' && scaleType !== 'utc') {
    return 'time';
  }

  return undefined;
}
|
|
/**
 * Returns number format for a fieldDef.
 */
function numberFormat(type, specifiedFormat, config) {
  // Specified format in axis/legend has higher precedence than fieldDef.format
  if (isString(specifiedFormat)) {
    return specifiedFormat;
  }

  // we only apply the config default if the field is quantitative
  return type === QUANTITATIVE ? config.numberFormat : undefined;
}
|
|
/**
 * Returns time format for a fieldDef for use in guides.
 */
function timeFormat(specifiedFormat, timeUnit, config, omitTimeFormatConfig) {
  if (specifiedFormat) {
    return specifiedFormat;
  }

  if (timeUnit) {
    // Derive a format-specifier expression from the time unit.
    var specifierExpr = timeUnitSpecifierExpression(timeUnit);
    return {
      signal: specifierExpr
    };
  }

  if (omitTimeFormatConfig) {
    return undefined;
  }

  return config.timeFormat;
}
|
|
|
|
// Plain Vega format() call with the given (possibly empty) format specifier.
function formatExpr(field, format) {
  return "format(" + field + ", \"" + (format || '') + "\")";
}

// Number-format expression for one side of a bin range: honor custom format
// types, otherwise fall back to the config's default number format.
function binNumberFormatExpr(field, format, formatType, config) {
  if (isCustomFormatType(formatType)) {
    return customFormatExpr(formatType, field, format);
  }

  var explicit = isString(format) ? format : undefined;
  return formatExpr(field, explicit == null ? config.numberFormat : explicit);
}

// Expression formatting a bin as "start – end", or "null" when the start is invalid.
function binFormatExpression(startField, endField, format, formatType, config) {
  var start = binNumberFormatExpr(startField, format, formatType, config);
  var end = binNumberFormatExpr(endField, format, formatType, config);
  return fieldValidPredicate(startField, false) + " ? \"null\" : " + start + " + \"" + BIN_RANGE_DELIMITER + "\" + " + end;
}
|
|
/**
 * Returns the time expression used for axis/legend labels or text mark for a temporal field
 */
function timeFormatExpression(field, timeUnit, format, rawTimeFormat, // should be provided only for actual text and headers, not axis/legend labels
isUTCScale) {
  if (!timeUnit || format) {
    // If there is no time unit, or if user explicitly specifies format for axis/legend/text.
    format = isString(format) ? format : rawTimeFormat; // only use provided timeFormat if there is no timeUnit.

    return "".concat(isUTCScale ? 'utc' : 'time', "Format(").concat(field, ", '").concat(format, "')");
  } else {
    // Otherwise derive the format expression from the time unit.
    return formatExpression(timeUnit, field, isUTCScale);
  }
}
|
|
|
|
// Default aggregate op used when sorting by another encoding channel.
var DEFAULT_SORT_OP = 'min';
// Channels that sort-by-encoding supports (value 1 marks membership).
var SORT_BY_CHANNEL_INDEX = {
  x: 1,
  y: 1,
  color: 1,
  fill: 1,
  stroke: 1,
  strokeWidth: 1,
  size: 1,
  shape: 1,
  fillOpacity: 1,
  strokeOpacity: 1,
  opacity: 1,
  text: 1
};

// Whether `c` names a channel that can be used as a sort target.
function isSortByChannel(c) {
  var supported = c in SORT_BY_CHANNEL_INDEX;
  return supported;
}
|
|
|
|
// Whether a sort definition sorts by another encoding channel.
function isSortByEncoding(sort) {
  return Boolean(sort) && Boolean(sort['encoding']);
}

// Whether a sort definition is a sort-by-field definition (explicit field or count op).
function isSortField(sort) {
  if (!sort) {
    return false;
  }

  return sort['op'] === 'count' || !!sort['field'];
}

// Whether a sort definition is an explicit array of values.
function isSortArray(sort) {
  return !!sort && isArray(sort);
}

// Whether a facet definition uses the row/column mapping form.
function isFacetMapping(f) {
  if ('row' in f) {
    return true;
  }

  return 'column' in f;
}

// Whether a channel def is a facet field def (carries a `header`).
function isFacetFieldDef(channelDef) {
  return Boolean(channelDef) && 'header' in channelDef;
}

// Whether a spec is a facet spec (top-level `facet`).
function isFacetSpec(spec) {
  return 'facet' in spec;
}

// Whether a condition references a selection; returns the raw `selection` value.
function isConditionalSelection(c) {
  return c['selection'];
}

// Whether a field reference is a repeat-operand reference.
function isRepeatRef(field) {
  return field && !isString(field) && 'repeat' in field;
}
|
|
|
|
// Extract only the transform-relevant parts (timeUnit, bin, aggregate, field)
// of a field def, preserving that property order in the result.
function toFieldDefBase(fieldDef) {
  var base = {};

  if (fieldDef.timeUnit) {
    base.timeUnit = fieldDef.timeUnit;
  }

  if (fieldDef.bin) {
    base.bin = fieldDef.bin;
  }

  if (fieldDef.aggregate) {
    base.aggregate = fieldDef.aggregate;
  }

  base.field = fieldDef.field;
  return base;
}

// Whether a field def may carry a `sort` property.
function isSortableFieldDef(fieldDef) {
  return 'sort' in fieldDef;
}
|
|
|
|
// Compute the band position for a channel: an explicit fieldDef.band wins;
// otherwise derive it from timeUnit/bin and the mark type, or from stacking.
function getBand(_ref7) {
  var channel = _ref7.channel,
      fieldDef = _ref7.fieldDef,
      fieldDef2 = _ref7.fieldDef2,
      mark = _ref7.markDef,
      stack = _ref7.stack,
      config = _ref7.config,
      isMidPoint = _ref7.isMidPoint;

  if (isFieldOrDatumDef(fieldDef) && fieldDef.band !== undefined) {
    // Explicitly specified band takes precedence over all derivations.
    return fieldDef.band;
  }

  if (isFieldDef(fieldDef)) {
    var timeUnit = fieldDef.timeUnit,
        bin = fieldDef.bin;

    if (timeUnit && !fieldDef2) {
      if (isMidPoint) {
        return getMarkConfig('timeUnitBandPosition', mark, config);
      } else {
        // Rect-based marks span the time-unit band; other marks sit at its start (0).
        return isRectBasedMark(mark.type) ? getMarkConfig('timeUnitBand', mark, config) : 0;
      }
    } else if (isBinning(bin)) {
      // Binned: rect marks span the bin (1); midpoints and other marks use the center (0.5).
      return isRectBasedMark(mark.type) && !isMidPoint ? 1 : 0.5;
    }
  }

  if ((stack === null || stack === void 0 ? void 0 : stack.fieldChannel) === channel && isMidPoint) {
    // Midpoint of the stacked field channel is the band center.
    return 0.5;
  }

  return undefined;
}
|
|
|
|
// Whether the field def produces a band for the given channel, i.e. it is
// binned or temporal-with-timeUnit AND getBand resolves to a truthy position.
function hasBand(channel, fieldDef, fieldDef2, stack, markDef, config) {
  var banded = isBinning(fieldDef.bin) || fieldDef.timeUnit && isTypedFieldDef(fieldDef) && fieldDef.type === 'temporal';

  if (!banded) {
    return false;
  }

  return !!getBand({
    channel: channel,
    fieldDef: fieldDef,
    fieldDef2: fieldDef2,
    stack: stack,
    markDef: markDef,
    config: config
  });
}
|
|
|
|
// Whether a channel def carries a `condition`.
function isConditionalDef(channelDef) {
  return Boolean(channelDef) && 'condition' in channelDef;
}

/**
 * Return if a channelDef is a ConditionalValueDef with ConditionFieldDef
 */
function hasConditionalFieldDef(channelDef) {
  var condition = channelDef ? channelDef['condition'] : channelDef;
  return !!condition && !isArray(condition) && isFieldDef(condition);
}

// Whether the condition of a channel def is a single field or datum def.
function hasConditionalFieldOrDatumDef(channelDef) {
  var condition = channelDef ? channelDef['condition'] : channelDef;
  return !!condition && !isArray(condition) && isFieldOrDatumDef(condition);
}

// Whether the condition of a channel def is a value def (or an array of them).
function hasConditionalValueDef(channelDef) {
  var condition = channelDef ? channelDef['condition'] : channelDef;
  return !!condition && (isArray(condition) || isValueDef(condition));
}
|
|
|
|
// Whether a channel def references a data field (a count aggregate counts too).
function isFieldDef(channelDef) {
  // TODO: we can't use field in channelDef here as it's somehow failing runtime test
  if (!channelDef) {
    return false;
  }

  return !!channelDef['field'] || channelDef['aggregate'] === 'count';
}

// The declared `type` of a channel def (or the falsy def itself).
function channelDefType(channelDef) {
  return channelDef ? channelDef['type'] : channelDef;
}

// Whether a channel def is a datum def (carries `datum`).
function isDatumDef(channelDef) {
  return Boolean(channelDef) && 'datum' in channelDef;
}
|
|
|
|
// Whether a def is continuous: a typed continuous field def, or a numeric datum def.
function isContinuousFieldOrDatumDef(cd) {
  // TODO: make datum support DateTime object
  var continuousField = isTypedFieldDef(cd) && isContinuous(cd);
  return continuousField || isNumericDataDef(cd);
}

// Whether a channel def is a datum def holding a plain number.
function isNumericDataDef(cd) {
  var numeric = isDatumDef(cd) && isNumber(cd.datum);
  return numeric;
}

// Whether a channel def is either a field def or a datum def.
function isFieldOrDatumDef(channelDef) {
  if (isFieldDef(channelDef)) {
    return true;
  }

  return isDatumDef(channelDef);
}
|
|
|
|
/**
 * Whether a channel def is a typed field def: it references a field (or a
 * count aggregate) AND carries an explicit `type`.
 */
function isTypedFieldDef(channelDef) {
  return !!channelDef && ('field' in channelDef || channelDef['aggregate'] === 'count') && 'type' in channelDef;
}

/**
 * Whether a channel def is a value def (carries a constant `value`).
 * Fix: the original duplicated the `'value' in channelDef` check
 * (`'value' in channelDef && 'value' in channelDef`); a single check is
 * equivalent. A falsy `channelDef` still passes through unchanged.
 */
function isValueDef(channelDef) {
  return channelDef && 'value' in channelDef;
}
|
|
|
|
/**
 * True for defs that can carry scale-related properties (`scale` or `sort`).
 */
function isScaleFieldDef(channelDef) {
  if (!channelDef) {
    return false;
  }

  return 'scale' in channelDef || 'sort' in channelDef;
}
|
|
|
|
/**
 * True for defs that carry position-channel-only properties (`axis`,
 * `stack`, or `impute`). Sibling predicates in this module coerce with `!!`;
 * this one previously leaked the falsy input itself (e.g. `undefined`) —
 * normalized here to always return a boolean, which is backward compatible
 * in every boolean context.
 */
function isPositionFieldOrDatumDef(channelDef) {
  return !!channelDef && ('axis' in channelDef || 'stack' in channelDef || 'impute' in channelDef);
}
|
|
|
|
/**
 * True for defs that may carry a `legend` (mark-property channels such as
 * color/size/shape).
 */
function isMarkPropFieldOrDatumDef(channelDef) {
  if (!channelDef) {
    return false;
  }

  return 'legend' in channelDef;
}
|
|
|
|
/**
 * True for defs that carry text-formatting properties (`format` or
 * `formatType`).
 */
function isStringFieldOrDatumDef(channelDef) {
  if (!channelDef) {
    return false;
  }

  return 'format' in channelDef || 'formatType' in channelDef;
}
|
|
|
|
/**
 * Strip guide/scale properties that string field defs (e.g. for text or
 * tooltip channels) do not support.
 */
function toStringFieldDef(fieldDef) {
  // omit properties that don't exist in string field defs
  var unsupportedProps = ['legend', 'axis', 'header', 'scale'];
  return omit(fieldDef, unsupportedProps);
}
|
|
|
|
/**
 * True for "op" field defs (sort/window-style), which specify an `op`
 * instead of aggregate/bin/timeUnit.
 */
function isOpFieldDef(fieldDef) {
  var hasOp = 'op' in fieldDef;
  return hasOp;
}
|
|
/**
|
|
* Get a Vega field reference from a Vega-Lite field def.
|
|
*/
|
|
|
|
|
|
/**
 * Get a Vega field reference string from a Vega-Lite field def.
 * `opt` may supply: `prefix`/`suffix` decorations, `binSuffix`, `nofn`
 * (skip the function-name prefix), `forAs` (produce an `as` name with path
 * separators removed), or `expr` (produce a datum-access expression).
 */
function _vgField(fieldDef) {
  var opt = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

  var _a, _b, _c;

  var field = fieldDef.field;
  var prefix = opt.prefix;
  var suffix = opt.suffix;
  var argAccessor = ''; // for accessing argmin/argmax field at the end without getting escaped

  if (isCount(fieldDef)) {
    // count has no underlying field; use the internal count field name
    field = internalField('count');
  } else {
    var fn;

    if (!opt.nofn) {
      if (isOpFieldDef(fieldDef)) {
        fn = fieldDef.op;
      } else {
        var bin = fieldDef.bin,
            aggregate = fieldDef.aggregate,
            timeUnit = fieldDef.timeUnit;

        if (isBinning(bin)) {
          fn = binToString(bin);
          suffix = ((_a = opt.binSuffix) !== null && _a !== void 0 ? _a : '') + ((_b = opt.suffix) !== null && _b !== void 0 ? _b : '');
        } else if (aggregate) {
          if (isArgmaxDef(aggregate)) {
            // argmax yields a record; access the original field via bracket syntax appended last
            argAccessor = "[\"".concat(field, "\"]");
            field = "argmax_".concat(aggregate.argmax);
          } else if (isArgminDef(aggregate)) {
            argAccessor = "[\"".concat(field, "\"]");
            field = "argmin_".concat(aggregate.argmin);
          } else {
            fn = String(aggregate);
          }
        } else if (timeUnit) {
          fn = timeUnitToString(timeUnit);
          suffix = (!contains(['range', 'mid'], opt.binSuffix) && opt.binSuffix || '') + ((_c = opt.suffix) !== null && _c !== void 0 ? _c : '');
        }
      }
    }

    if (fn) {
      field = field ? "".concat(fn, "_").concat(field) : fn;
    }
  }

  if (suffix) {
    field = "".concat(field, "_").concat(suffix);
  }

  if (prefix) {
    field = "".concat(prefix, "_").concat(field);
  }

  if (opt.forAs) {
    return removePathFromField(field);
  } else if (opt.expr) {
    // Expression to access flattened field. No need to escape dots.
    return flatAccessWithDatum(field, opt.expr) + argAccessor;
  } else {
    // We flattened all fields so paths should have become dot.
    return replacePathInField(field) + argAccessor;
  }
}
|
|
|
|
/**
 * Whether a typed def is discrete: nominal/ordinal/geojson always are,
 * quantitative only when binned, temporal never. Throws on an unknown type.
 */
function isDiscrete(def) {
  var type = def.type;

  if (type === 'nominal' || type === 'ordinal' || type === 'geojson') {
    return true;
  }

  if (type === 'quantitative') {
    return isFieldDef(def) && !!def.bin;
  }

  if (type === 'temporal') {
    return false;
  }

  throw new Error(invalidFieldType(type));
}
|
|
|
|
/**
 * A typed field def is continuous iff it is not discrete.
 */
function isContinuous(fieldDef) {
  if (isDiscrete(fieldDef)) {
    return false;
  }

  return true;
}
|
|
|
|
/**
 * True for the field-less `count` aggregate.
 */
function isCount(fieldDef) {
  var aggregate = fieldDef.aggregate;
  return aggregate === 'count';
}
|
|
|
|
/**
 * "Verbal" default title, e.g. "Sum of price", "price (binned)",
 * "date (year-month)"; falls back to the bare field name.
 */
function verbalTitleFormatter(fieldDef, config) {
  var _a;

  var field = fieldDef.field,
      bin = fieldDef.bin,
      timeUnit = fieldDef.timeUnit,
      aggregate = fieldDef.aggregate;

  if (aggregate === 'count') {
    return config.countTitle;
  } else if (isBinning(bin)) {
    return "".concat(field, " (binned)");
  } else if (timeUnit) {
    var unit = (_a = normalizeTimeUnit(timeUnit)) === null || _a === void 0 ? void 0 : _a.unit;

    if (unit) {
      return "".concat(field, " (").concat(getTimeUnitParts(unit).join('-'), ")");
    }
    // no single unit (e.g. maxbins-only timeUnit) -- fall through to the bare field name
  } else if (aggregate) {
    if (isArgmaxDef(aggregate)) {
      return "".concat(field, " for max ").concat(aggregate.argmax);
    } else if (isArgminDef(aggregate)) {
      return "".concat(field, " for min ").concat(aggregate.argmin);
    } else {
      return "".concat(titleCase(aggregate), " of ").concat(field);
    }
  }

  return field;
}
|
|
|
|
/**
 * "Functional" title, e.g. "SUM(price)", "YEAR(date)", "BIN(price)";
 * argmin/argmax get a verbal form since they reference two fields.
 */
function functionalTitleFormatter(fieldDef) {
  var aggregate = fieldDef.aggregate,
      bin = fieldDef.bin,
      timeUnit = fieldDef.timeUnit,
      field = fieldDef.field;

  if (isArgmaxDef(aggregate)) {
    return "".concat(field, " for argmax(").concat(aggregate.argmax, ")");
  } else if (isArgminDef(aggregate)) {
    return "".concat(field, " for argmin(").concat(aggregate.argmin, ")");
  }

  var timeUnitParams = normalizeTimeUnit(timeUnit);
  // First applicable "function": aggregate op, a single time unit,
  // 'timeunit' when only maxbins is set, or 'bin' for binning.
  var fn = aggregate || (timeUnitParams === null || timeUnitParams === void 0 ? void 0 : timeUnitParams.unit) || (timeUnitParams === null || timeUnitParams === void 0 ? void 0 : timeUnitParams.maxbins) && 'timeunit' || isBinning(bin) && 'bin';

  if (fn) {
    return fn.toUpperCase() + '(' + field + ')';
  } else {
    return field;
  }
}
|
|
|
|
/**
 * Dispatch to the configured title style:
 * 'plain' (bare field name) | 'functional' | verbal (default).
 */
var defaultTitleFormatter = function defaultTitleFormatter(fieldDef, config) {
  var mode = config.fieldTitle;

  if (mode === 'plain') {
    return fieldDef.field;
  }

  if (mode === 'functional') {
    return functionalTitleFormatter(fieldDef);
  }

  return verbalTitleFormatter(fieldDef, config);
};
|
|
|
|
// Currently active title formatter; module-level mutable state that
// downstream APIs may swap out.
var titleFormatter = defaultTitleFormatter;

// Install a custom title formatter.
function setTitleFormatter(formatter) {
  titleFormatter = formatter;
}

// Restore the built-in default title formatter.
function resetTitleFormatter() {
  setTitleFormatter(defaultTitleFormatter);
}
|
|
|
|
/**
 * Resolve the title of a field/datum def: the guide (axis/legend/header)
 * title wins, then the field def's own `title`, then the computed default
 * (when `includeDefault` is true). With `allowDisabling`, the first *defined*
 * value wins so an explicit null can suppress the title entirely.
 */
function _title3(fieldOrDatumDef, config, _ref8) {
  var allowDisabling = _ref8.allowDisabling,
      _ref8$includeDefault = _ref8.includeDefault,
      includeDefault = _ref8$includeDefault === void 0 ? true : _ref8$includeDefault;

  var _a, _b;

  var guideTitle = (_a = getGuide(fieldOrDatumDef)) === null || _a === void 0 ? void 0 : _a.title;

  if (!isFieldDef(fieldOrDatumDef)) {
    // datum defs have no default title; only the guide title applies
    return guideTitle;
  }

  var fieldDef = fieldOrDatumDef;
  var def = includeDefault ? defaultTitle(fieldDef, config) : undefined;

  if (allowDisabling) {
    return getFirstDefined(guideTitle, fieldDef.title, def);
  } else {
    // nullish chain: guide title, then def title, then default
    return (_b = guideTitle !== null && guideTitle !== void 0 ? guideTitle : fieldDef.title) !== null && _b !== void 0 ? _b : def;
  }
}
|
|
|
|
/**
 * Return the guide object (axis, legend, or header) attached to a field def,
 * if any.
 */
function getGuide(fieldDef) {
  if (isPositionFieldOrDatumDef(fieldDef) && fieldDef.axis) {
    return fieldDef.axis;
  }

  if (isMarkPropFieldOrDatumDef(fieldDef) && fieldDef.legend) {
    return fieldDef.legend;
  }

  if (isFacetFieldDef(fieldDef) && fieldDef.header) {
    return fieldDef.header;
  }

  return undefined;
}
|
|
|
|
// Compute the default title via the currently installed (swappable)
// title formatter.
function defaultTitle(fieldDef, config) {
  return titleFormatter(fieldDef, config);
}
|
|
|
|
/**
 * Extract `format`/`formatType`, either from the def itself or, failing
 * that, from its guide (axis/legend/header).
 */
function getFormatMixins(fieldDef) {
  if (isStringFieldOrDatumDef(fieldDef)) {
    return {
      format: fieldDef.format,
      formatType: fieldDef.formatType
    };
  }

  var guide = getGuide(fieldDef);

  if (guide === null || guide === undefined) {
    guide = {};
  }

  return {
    format: guide.format,
    formatType: guide.formatType
  };
}
|
|
|
|
/**
 * Infer a default measurement type for a field def on the given channel
 * when none was declared: geo channels are quantitative; facet/shape/
 * strokeDash are nominal; otherwise derived from sort, timeUnit,
 * bin/aggregate, or the declared scale type; fall back to nominal.
 */
function defaultType(fieldDef, channel) {
  var _a;

  switch (channel) {
    case 'latitude':
    case 'longitude':
      return 'quantitative';

    case 'row':
    case 'column':
    case 'facet':
    case 'shape':
    case 'strokeDash':
      return 'nominal';
  }

  if (isSortableFieldDef(fieldDef) && isArray(fieldDef.sort)) {
    // an explicit sort array implies an ordered (ordinal) domain
    return 'ordinal';
  }

  var aggregate = fieldDef.aggregate,
      bin = fieldDef.bin,
      timeUnit = fieldDef.timeUnit;

  if (timeUnit) {
    return 'temporal';
  }

  if (bin || aggregate && !isArgmaxDef(aggregate) && !isArgminDef(aggregate)) {
    // argmin/argmax return whole records, so they do not imply a numeric type
    return 'quantitative';
  }

  if (isScaleFieldDef(fieldDef) && ((_a = fieldDef.scale) === null || _a === void 0 ? void 0 : _a.type)) {
    switch (SCALE_CATEGORY_INDEX[fieldDef.scale.type]) {
      case 'numeric':
      case 'discretizing':
        return 'quantitative';

      case 'time':
        return 'temporal';
    }
  }

  return 'nominal';
}
|
|
/**
|
|
* Returns the fieldDef -- either from the outer channelDef or from the condition of channelDef.
|
|
* @param channelDef
|
|
*/
|
|
|
|
|
|
/**
 * Returns the fieldDef -- either from the outer channelDef or from the
 * condition of channelDef.
 * @param channelDef
 */
function getFieldDef(channelDef) {
  if (isFieldDef(channelDef)) {
    return channelDef;
  }

  if (hasConditionalFieldDef(channelDef)) {
    return channelDef.condition;
  }

  return undefined;
}
|
|
|
|
/**
 * Returns the field/datum def -- either the channelDef itself or its
 * condition.
 */
function getFieldOrDatumDef(channelDef) {
  if (isFieldOrDatumDef(channelDef)) {
    return channelDef;
  }

  if (hasConditionalFieldOrDatumDef(channelDef)) {
    return channelDef.condition;
  }

  return undefined;
}
|
|
/**
|
|
* Convert type to full, lowercase type, or augment the fieldDef with a default type if missing.
|
|
*/
|
|
|
|
|
|
/**
 * Convert type to full, lowercase type, or augment the fieldDef with a
 * default type if missing; bare primitives become value defs (with a
 * warning), and conditional defs have their condition normalized in place.
 */
function initChannelDef(channelDef, channel, config) {
  var opt = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};

  if (isString(channelDef) || isNumber(channelDef) || isBoolean(channelDef)) {
    // a bare primitive is shorthand for a value def; warn and wrap it
    var primitiveType = isString(channelDef) ? 'string' : isNumber(channelDef) ? 'number' : 'boolean';
    warn(primitiveChannelDef(channel, primitiveType, channelDef));
    return {
      value: channelDef
    };
  } // If a fieldDef contains a field, we need type.

  if (isFieldOrDatumDef(channelDef)) {
    return initFieldOrDatumDef(channelDef, channel, config, opt);
  } else if (hasConditionalFieldOrDatumDef(channelDef)) {
    return Object.assign(Object.assign({}, channelDef), {
      // Need to cast as normalizeFieldDef normally return FieldDef, but here we know that it is definitely Condition<FieldDef>
      condition: initFieldOrDatumDef(channelDef.condition, channel, config, opt)
    });
  }

  return channelDef;
}
|
|
|
|
/**
 * Normalize a field or datum def: strip disallowed custom format types
 * (declared on the def itself or on its guide) and retry, then delegate to
 * initFieldDef / initDatumDef.
 */
function initFieldOrDatumDef(fd, channel, config, opt) {
  if (isStringFieldOrDatumDef(fd)) {
    var format = fd.format,
        formatType = fd.formatType,
        rest = __rest(fd, ["format", "formatType"]);

    if (isCustomFormatType(formatType) && !config.customFormatTypes) {
      // custom format types are opt-in; recurse without the format props
      warn(customFormatTypeNotAllowed(channel));
      return initFieldOrDatumDef(rest, channel, config, opt);
    }
  } else {
    // format props may also live on the guide object for this channel kind
    var guideType = isPositionFieldOrDatumDef(fd) ? 'axis' : isMarkPropFieldOrDatumDef(fd) ? 'legend' : isFacetFieldDef(fd) ? 'header' : null;

    if (guideType && fd[guideType]) {
      var _a = fd[guideType],
          _format2 = _a.format,
          _formatType2 = _a.formatType,
          newGuide = __rest(_a, ["format", "formatType"]);

      if (isCustomFormatType(_formatType2) && !config.customFormatTypes) {
        warn(customFormatTypeNotAllowed(channel));
        return initFieldOrDatumDef(Object.assign(Object.assign({}, fd), _defineProperty({}, guideType, newGuide)), channel, config, opt);
      }
    }
  }

  if (isFieldDef(fd)) {
    return initFieldDef(fd, channel, opt);
  }

  return initDatumDef(fd);
}
|
|
|
|
/**
 * Infer a measurement type for a datum def from the datum's JS type
 * (number -> quantitative, string -> nominal, DateTime -> temporal) when no
 * explicit type is given.
 */
function initDatumDef(datumDef) {
  var type = datumDef['type'];

  if (type) {
    return datumDef;
  }

  var datum = datumDef.datum;
  type = isNumber(datum) ? 'quantitative' : isString(datum) ? 'nominal' : isDateTime(datum) ? 'temporal' : undefined;
  return Object.assign(Object.assign({}, datumDef), {
    type: type
  });
}
|
|
|
|
/**
 * Normalize a raw field def for a channel:
 * - drops aggregates that are not valid ops (unless used in a composite mark),
 * - normalizes timeUnit, bin, and coerces the field name to a string,
 * - fills in or fixes the measurement type and warns on incompatibilities,
 * - expands string sort shorthands ("x", "-color", ...) into sort objects,
 * - propagates a facet header `orient` into labelOrient/titleOrient.
 *
 * Fixes over the previous revision: uses `slice` instead of the deprecated
 * `String.prototype.substr`, and guards against a facet field def without a
 * `header` object (previously crashed destructuring `undefined`).
 */
function initFieldDef(fd, channel) {
  var _ref9 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {},
      _ref9$compositeMark = _ref9.compositeMark,
      compositeMark = _ref9$compositeMark === void 0 ? false : _ref9$compositeMark;

  var aggregate = fd.aggregate,
      timeUnit = fd.timeUnit,
      bin = fd.bin,
      field = fd.field;
  var fieldDef = Object.assign({}, fd); // Drop invalid aggregate

  if (!compositeMark && aggregate && !isAggregateOp(aggregate) && !isArgmaxDef(aggregate) && !isArgminDef(aggregate)) {
    warn(invalidAggregate(aggregate));
    delete fieldDef.aggregate;
  } // Normalize Time Unit

  if (timeUnit) {
    fieldDef.timeUnit = normalizeTimeUnit(timeUnit);
  }

  if (field) {
    // field names may arrive as numbers; coerce to string
    fieldDef.field = "".concat(field);
  } // Normalize bin

  if (isBinning(bin)) {
    fieldDef.bin = normalizeBin(bin, channel);
  }

  if (isBinned(bin) && !isXorY(channel)) {
    warn(channelShouldNotBeUsedForBinned(channel));
  } // Normalize Type

  if (isTypedFieldDef(fieldDef)) {
    var type = fieldDef.type;
    var fullType = getFullName(type);

    if (type !== fullType) {
      // convert short type to full type
      fieldDef.type = fullType;
    }

    if (type !== 'quantitative') {
      if (isCountingAggregateOp(aggregate)) {
        // counting aggregates always produce numbers
        warn(invalidFieldTypeForCountAggregate(type, aggregate));
        fieldDef.type = 'quantitative';
      }
    }
  } else if (!isSecondaryRangeChannel(channel)) {
    // If type is empty / invalid, then augment with default type
    var newType = defaultType(fieldDef, channel);
    fieldDef['type'] = newType;
  }

  if (isTypedFieldDef(fieldDef)) {
    var _ref10 = channelCompatibility(fieldDef, channel) || {},
        compatible = _ref10.compatible,
        warning = _ref10.warning;

    if (compatible === false) {
      warn(warning);
    }
  }

  if (isSortableFieldDef(fieldDef) && isString(fieldDef.sort)) {
    var sort = fieldDef.sort;

    if (isSortByChannel(sort)) {
      // "x", "color", ...: sort by another encoding channel
      return Object.assign(Object.assign({}, fieldDef), {
        sort: {
          encoding: sort
        }
      });
    }

    var sub = sort.slice(1);

    if (sort.charAt(0) === '-' && isSortByChannel(sub)) {
      // "-x", "-color", ...: descending sort by another channel
      return Object.assign(Object.assign({}, fieldDef), {
        sort: {
          encoding: sub,
          order: 'descending'
        }
      });
    }
  }

  if (isFacetFieldDef(fieldDef)) {
    var header = fieldDef.header;

    if (header) {
      var _orient = header.orient,
          rest = __rest(header, ["orient"]);

      if (_orient) {
        // expand the shorthand `orient` into label/title orients
        return Object.assign(Object.assign({}, fieldDef), {
          header: Object.assign(Object.assign({}, rest), {
            labelOrient: header.labelOrient || _orient,
            titleOrient: header.titleOrient || _orient
          })
        });
      }
    }
  }

  return fieldDef;
}
|
|
|
|
/**
 * Expand bin shorthands (`true`, `'binned'`) into bin parameter objects and
 * fill in a channel-appropriate default `maxbins` when neither `maxbins`
 * nor `step` is given.
 */
function normalizeBin(bin, channel) {
  if (isBoolean(bin)) {
    return {
      maxbins: autoMaxBins(channel)
    };
  }

  if (bin === 'binned') {
    return {
      binned: true
    };
  }

  if (!bin.maxbins && !bin.step) {
    return Object.assign(Object.assign({}, bin), {
      maxbins: autoMaxBins(channel)
    });
  }

  return bin;
}
|
|
|
|
// Shared "no complaints" result object returned by channelCompatibility.
var COMPATIBLE = {
  compatible: true
};
|
|
|
|
/**
 * Check whether a typed field def may be used on the given channel.
 * Returns {compatible: true} or {compatible: false, warning} describing
 * the incompatibility.
 */
function channelCompatibility(fieldDef, channel) {
  var type = fieldDef.type;

  if (type === 'geojson' && channel !== 'shape') {
    return {
      compatible: false,
      warning: "Channel ".concat(channel, " should not be used with a geojson data.")
    };
  }

  switch (channel) {
    // facet channels require discrete data
    case ROW:
    case COLUMN:
    case FACET:
      if (isContinuous(fieldDef)) {
        return {
          compatible: false,
          warning: facetChannelShouldBeDiscrete(channel)
        };
      }

      return COMPATIBLE;

    // these channels accept any data type
    case X:
    case Y:
    case COLOR:
    case FILL:
    case STROKE:
    case TEXT$1:
    case DETAIL:
    case KEY:
    case TOOLTIP:
    case HREF:
    case URL:
    case ANGLE:
    case THETA:
    case RADIUS:
    case DESCRIPTION:
      return COMPATIBLE;

    // geo position channels must be quantitative
    case LONGITUDE:
    case LONGITUDE2:
    case LATITUDE:
    case LATITUDE2:
      if (type !== QUANTITATIVE) {
        return {
          compatible: false,
          warning: "Channel ".concat(channel, " should be used with a quantitative field only, not ").concat(fieldDef.type, " field.")
        };
      }

      return COMPATIBLE;

    // magnitude-like channels need an ordered domain
    case OPACITY:
    case FILLOPACITY:
    case STROKEOPACITY:
    case STROKEWIDTH:
    case SIZE:
    case THETA2:
    case RADIUS2:
    case X2:
    case Y2:
      if (type === 'nominal' && !fieldDef['sort']) {
        return {
          compatible: false,
          warning: "Channel ".concat(channel, " should not be used with an unsorted discrete field.")
        };
      }

      return COMPATIBLE;

    case STROKEDASH:
      if (!contains(['ordinal', 'nominal'], fieldDef.type)) {
        return {
          compatible: false,
          warning: 'StrokeDash channel should be used with only discrete data.'
        };
      }

      return COMPATIBLE;

    case SHAPE:
      if (!contains(['ordinal', 'nominal', 'geojson'], fieldDef.type)) {
        return {
          compatible: false,
          warning: 'Shape channel should be used with only either discrete or geojson data.'
        };
      }

      return COMPATIBLE;

    case ORDER:
      if (fieldDef.type === 'nominal' && !('sort' in fieldDef)) {
        return {
          compatible: false,
          warning: "Channel order is inappropriate for nominal field, which has no inherent order."
        };
      }

      return COMPATIBLE;
  }
}
|
|
/**
|
|
* Check if the field def uses a time format or does not use any format but is temporal
|
|
* (this does not cover field defs that are temporal but use a number format).
|
|
*/
|
|
|
|
|
|
/**
 * Check if the field def uses a time format or does not use any format but
 * is temporal (this does not cover field defs that are temporal but use a
 * number format).
 */
function isFieldOrDatumDefForTimeFormat(fieldOrDatumDef) {
  var formatType = getFormatMixins(fieldOrDatumDef).formatType;

  if (formatType === 'time') {
    return true;
  }

  return !formatType && isTimeFieldDef(fieldOrDatumDef);
}
|
|
/**
|
|
* Check if field def has type `temporal`. If you want to also cover field defs that use a time format, use `isTimeFormatFieldDef`.
|
|
*/
|
|
|
|
|
|
/**
 * Check if the def has type `temporal` or is a field def with a timeUnit.
 * If you want to also cover field defs that use a time format, use
 * `isFieldOrDatumDefForTimeFormat`. Normalized to always return a boolean
 * (previously a falsy input leaked through unchanged), matching the other
 * predicates in this module.
 */
function isTimeFieldDef(def) {
  return !!def && (def['type'] === 'temporal' || isFieldDef(def) && !!def.timeUnit);
}
|
|
/**
|
|
* Getting a value associated with a fielddef.
|
|
* Convert the value to Vega expression if applicable (for datetime object, or string if the field def is temporal or has timeUnit)
|
|
*/
|
|
|
|
|
|
/**
 * Getting a value associated with a fielddef.
 * Convert the value to Vega expression if applicable (for datetime object,
 * or string if the field def is temporal or has timeUnit). With
 * `undefinedIfExprNotRequired`, plain values yield `undefined` instead of a
 * stringified literal.
 */
function valueExpr(v, _ref11) {
  var timeUnit = _ref11.timeUnit,
      type = _ref11.type,
      wrapTime = _ref11.wrapTime,
      undefinedIfExprNotRequired = _ref11.undefinedIfExprNotRequired;

  var _a;

  var unit = timeUnit && ((_a = normalizeTimeUnit(timeUnit)) === null || _a === void 0 ? void 0 : _a.unit);
  var isTime = unit || type === 'temporal';
  var expr;

  if (isSignalRef(v)) {
    expr = v.signal;
  } else if (isDateTime(v)) {
    isTime = true;
    expr = dateTimeToExpr(v);
  } else if (isString(v) || isNumber(v)) {
    if (isTime) {
      expr = "datetime(".concat(JSON.stringify(v), ")");

      if (isLocalSingleTimeUnit(unit)) {
        // for single timeUnit, we will use dateTimeToExpr to convert number/string to match the timeUnit
        if (isNumber(v) && v < 10000 || isString(v) && isNaN(Date.parse(v))) {
          expr = dateTimeToExpr(_defineProperty({}, unit, v));
        }
      }
    }
  }

  if (expr) {
    return wrapTime && isTime ? "time(".concat(expr, ")") : expr;
  } // number or boolean or normal string

  return undefinedIfExprNotRequired ? undefined : JSON.stringify(v);
}
|
|
/**
|
|
* Standardize value array -- convert each value to Vega expression if applicable
|
|
*/
|
|
|
|
|
|
/**
 * Standardize value array -- convert each value to Vega expression if
 * applicable (wrapping it as a signal ref); other values pass through
 * unchanged.
 */
function valueArray(fieldOrDatumDef, values) {
  var type = fieldOrDatumDef.type;
  return values.map(function (v) {
    var expr = valueExpr(v, {
      timeUnit: isFieldDef(fieldOrDatumDef) ? fieldOrDatumDef.timeUnit : undefined,
      type: type,
      undefinedIfExprNotRequired: true
    }); // return signal for the expression if we need an expression

    if (expr !== undefined) {
      return {
        signal: expr
      };
    } // otherwise just return the original value

    return v;
  });
}
|
|
/**
|
|
* Checks whether a fieldDef for a particular channel requires a computed bin range.
|
|
*/
|
|
|
|
|
|
/**
 * Checks whether a binned fieldDef on a particular channel requires a
 * computed bin range (used for axis/legend labels when the binned field is
 * forced onto a discrete scale).
 */
function binRequiresRange(fieldDef, channel) {
  if (!isBinning(fieldDef.bin)) {
    console.warn('Only call this method for binned field defs.');
    return false;
  } // We need the range only when the user explicitly forces a binned field to be use discrete scale. In this case, bin range is used in axis and legend labels.
  // We could check whether the axis or legend exists (not disabled) but that seems overkill.

  var onScaleChannel = isScaleChannel(channel);
  var hasDiscreteType = contains(['ordinal', 'nominal'], fieldDef.type);
  return onScaleChannel && hasDiscreteType;
}
|
|
|
|
/**
 * Split a title config into the pieces consumed by different parts of the
 * output spec: mark config for the title text, mark config for the
 * subtitle, non-mark layout properties, and subtitle-only properties.
 */
function extractTitleConfig(titleConfig) {
  var anchor = titleConfig.anchor,
      frame = titleConfig.frame,
      offset = titleConfig.offset,
      orient = titleConfig.orient,
      color = titleConfig.color,
      subtitleColor = titleConfig.subtitleColor,
      subtitleFont = titleConfig.subtitleFont,
      subtitleFontSize = titleConfig.subtitleFontSize,
      subtitleFontStyle = titleConfig.subtitleFontStyle,
      subtitleFontWeight = titleConfig.subtitleFontWeight,
      subtitleLineHeight = titleConfig.subtitleLineHeight,
      subtitlePadding = titleConfig.subtitlePadding,
      rest = __rest(titleConfig, ["anchor", "frame", "offset", "orient", "color", "subtitleColor", "subtitleFont", "subtitleFontSize", "subtitleFontStyle", "subtitleFontWeight", "subtitleLineHeight", "subtitlePadding"]);

  // `color` maps to the mark's `fill`
  var titleMarkConfig = Object.assign(Object.assign({}, rest), color ? {
    fill: color
  } : {}); // These are non-mark title config that need to be hardcoded

  var nonMark = Object.assign(Object.assign(Object.assign(Object.assign({}, anchor ? {
    anchor: anchor
  } : {}), frame ? {
    frame: frame
  } : {}), offset ? {
    offset: offset
  } : {}), orient ? {
    orient: orient
  } : {}); // subtitle part can stay in config.title since header titles do not use subtitle

  var subtitle = Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({}, subtitleColor ? {
    subtitleColor: subtitleColor
  } : {}), subtitleFont ? {
    subtitleFont: subtitleFont
  } : {}), subtitleFontSize ? {
    subtitleFontSize: subtitleFontSize
  } : {}), subtitleFontStyle ? {
    subtitleFontStyle: subtitleFontStyle
  } : {}), subtitleFontWeight ? {
    subtitleFontWeight: subtitleFontWeight
  } : {}), subtitleLineHeight ? {
    subtitleLineHeight: subtitleLineHeight
  } : {}), subtitlePadding ? {
    subtitlePadding: subtitlePadding
  } : {});
  // subtitle marks inherit only these layout props from the title mark config
  var subtitleMarkConfig = pick(titleMarkConfig, ['align', 'baseline', 'dx', 'dy', 'limit']);
  return {
    titleMarkConfig: titleMarkConfig,
    subtitleMarkConfig: subtitleMarkConfig,
    nonMark: nonMark,
    subtitle: subtitle
  };
}
|
|
|
|
/**
 * True for a string or a (multi-line) text array whose first element is a
 * string.
 */
function isText(v) {
  if (isString(v)) {
    return true;
  }

  return isArray(v) && isString(v[0]);
}
|
|
|
|
/**
 * Wrap a plain value as a Vega value ref ({value: ...}); signal refs pass
 * through untouched and undefined stays undefined.
 */
function signalOrValueRef(value) {
  if (isSignalRef(value)) {
    return value;
  }

  if (value === undefined) {
    return undefined;
  }

  return {
    value: value
  };
}
|
|
|
|
/**
 * Turn a value ref or signal ref into a Vega expression string.
 */
function exprFromValueOrSignalRef(ref) {
  if (isSignalRef(ref)) {
    return ref.signal;
  }

  var literal = ref.value;
  return $(literal);
}
|
|
|
|
/**
 * Expression string for a signal ref or a stringified literal; null and
 * undefined both map to null.
 */
function signalOrStringValue(v) {
  if (isSignalRef(v)) {
    return v.signal;
  }

  if (v == null) {
    return null;
  }

  return $(v);
}
|
|
|
|
/**
 * Apply config values for the listed properties onto the encode entry `e`
 * (the model's mark def takes precedence over its config); returns `e` for
 * chaining. The try/catch/finally scaffolding is the transpiler's for-of
 * iterator protocol.
 */
function applyMarkConfig(e, model, propsList) {
  var _iterator16 = _createForOfIteratorHelper(propsList),
      _step16;

  try {
    for (_iterator16.s(); !(_step16 = _iterator16.n()).done;) {
      var property = _step16.value;
      var value = getMarkConfig(property, model.markDef, model.config);

      if (value !== undefined) {
        e[property] = signalOrValueRef(value);
      }
    }
  } catch (err) {
    _iterator16.e(err);
  } finally {
    _iterator16.f();
  }

  return e;
}
|
|
|
|
/**
 * List of style names applying to a mark: its type followed by any explicit
 * `style` entries (string or array).
 */
function getStyles(mark) {
  var style = mark.style;

  if (style === null || style === undefined) {
    style = [];
  }

  return [].concat(mark.type, style);
}
|
|
|
|
/**
 * Resolve a property for a mark: explicit mark-def value first (the vega
 * channel alias winning over the vega-lite channel), then — unless vega
 * config is ignored — the style/mark config chain.
 */
function getMarkPropOrConfig(channel, mark, config) {
  var opt = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
  var vgChannel = opt.vgChannel;
  var ignoreVgConfig = opt.ignoreVgConfig;

  if (vgChannel && mark[vgChannel] !== undefined) {
    return mark[vgChannel];
  }

  if (mark[channel] !== undefined) {
    return mark[channel];
  }

  if (ignoreVgConfig && (!vgChannel || vgChannel === channel)) {
    return undefined;
  }

  return getMarkConfig(channel, mark, config, opt);
}
|
|
/**
|
|
* Return property value from style or mark specific config property if exists.
|
|
* Otherwise, return general mark specific config.
|
|
*/
|
|
|
|
|
|
/**
 * Return property value from style or mark specific config property if
 * exists. Otherwise, return general mark specific config.
 *
 * Fix: the first argument previously duplicated the second
 * (`getMarkStyleConfig(channel, ...)` twice), making it dead code; per the
 * parallel structure of the remaining arguments, the vgChannel-aliased
 * style lookup is intended to take precedence.
 */
function getMarkConfig(channel, mark, config) {
  var _ref12 = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {},
      vgChannel = _ref12.vgChannel;

  return getFirstDefined( // style config has highest precedence
  vgChannel ? getMarkStyleConfig(vgChannel, mark, config.style) : undefined, getMarkStyleConfig(channel, mark, config.style), // then mark-specific config
  vgChannel ? config[mark.type][vgChannel] : undefined, config[mark.type][channel], // Need to cast because MarkDef doesn't perfectly match with AnyMarkConfig, but if the type isn't available, we'll get nothing here, which is fine
  // If there is vgChannel, skip vl channel.
  // For example, vl size for text is vg fontSize, but config.mark.size is only for point size.
  vgChannel ? config.mark[vgChannel] : config.mark[channel] // Need to cast for the same reason as above
  );
}
|
|
|
|
/**
 * Look up a property in the style config index across all styles that apply
 * to the mark (later styles win).
 */
function getMarkStyleConfig(prop, mark, styleConfigIndex) {
  var applicableStyles = getStyles(mark);
  return getStyleConfig(prop, applicableStyles, styleConfigIndex);
}
|
|
|
|
/**
 * Resolve property `p` across a list of style names; the last style that
 * defines the property wins. The try/catch/finally scaffolding is the
 * transpiler's for-of iterator protocol.
 */
function getStyleConfig(p, styles, styleConfigIndex) {
  styles = array(styles);
  var value;

  var _iterator17 = _createForOfIteratorHelper(styles),
      _step17;

  try {
    for (_iterator17.s(); !(_step17 = _iterator17.n()).done;) {
      var style = _step17.value;
      var styleConfig = styleConfigIndex[style];

      if (styleConfig && styleConfig[p] !== undefined) {
        value = styleConfig[p];
      }
    }
  } catch (err) {
    _iterator17.e(err);
  } finally {
    _iterator17.f();
  }

  return value;
}
|
|
/**
|
|
* Return Vega sort parameters (tuple of field and order).
|
|
*/
|
|
|
|
|
|
/**
 * Return Vega sort parameters (tuple of field and order arrays) for one or
 * more order channel defs; missing sort order defaults to 'ascending'.
 */
function sortParams(orderDef, fieldRefOption) {
  var result = {
    field: [],
    order: []
  };
  var defs = array(orderDef);

  for (var i = 0; i < defs.length; i++) {
    var orderChannelDef = defs[i];
    var sortOrder = orderChannelDef.sort;
    result.field.push(_vgField(orderChannelDef, fieldRefOption));
    result.order.push(sortOrder !== null && sortOrder !== void 0 ? sortOrder : 'ascending');
  }

  return result;
}
|
|
|
|
/**
 * Merge two lists of title field defs, appending from `f2` only those defs
 * not already (deep-)present in `f1`.
 */
function mergeTitleFieldDefs(f1, f2) {
  var merged = _toConsumableArray(f1);

  f2.forEach(function (fdToMerge) {
    // If already exists, no need to append to merged array
    for (var i = 0; i < merged.length; i++) {
      if (deepEqual(merged[i], fdToMerge)) {
        return;
      }
    }

    merged.push(fdToMerge);
  });
  return merged;
}
|
|
|
|
/**
 * Merge two titles: identical or missing titles collapse to the other;
 * otherwise the two are joined with a comma.
 */
function mergeTitle(title1, title2) {
  if (deepEqual(title1, title2) || !title2) {
    // if titles are the same or title2 is falsy
    return title1;
  }

  if (!title1) {
    // if title1 is falsy
    return title2;
  }

  var parts = _toConsumableArray(array(title1)).concat(_toConsumableArray(array(title2)));
  return parts.join(', ');
}
|
|
|
|
/**
 * Merge two explicit/implicit title components (always keeping v1's
 * explicitness): null values win, text/signal titles are merged textually,
 * and field-def title lists are merged structurally.
 */
function mergeTitleComponent(v1, v2) {
  var v1Val = v1.value;
  var v2Val = v2.value;

  // NOTE(review): v1 uses a loose null check (null or undefined) while v2
  // uses strict === null only — confirm this asymmetry is intentional.
  if (v1Val == null || v2Val === null) {
    return {
      explicit: v1.explicit,
      value: null
    };
  } else if ((isText(v1Val) || isSignalRef(v1Val)) && (isText(v2Val) || isSignalRef(v2Val))) {
    return {
      explicit: v1.explicit,
      value: mergeTitle(v1Val, v2Val)
    };
  } else if (isText(v1Val) || isSignalRef(v1Val)) {
    return {
      explicit: v1.explicit,
      value: v1Val
    };
  } else if (isText(v2Val) || isSignalRef(v2Val)) {
    return {
      explicit: v1.explicit,
      value: v2Val
    };
  } else if (!isText(v1Val) && !isSignalRef(v1Val) && !isText(v2Val) && !isSignalRef(v2Val)) {
    // both sides are field-def lists
    return {
      explicit: v1.explicit,
      value: mergeTitleFieldDefs(v1Val, v2Val)
    };
  }
  /* istanbul ignore next: Condition should not happen -- only for warning in development. */

  throw new Error('It should never reach here');
}
|
|
|
|
/**
 * Whether the encoding has a field (or conditional field) def on the given
 * channel.
 */
function _channelHasField(encoding, channel) {
  if (!encoding) {
    return false;
  }

  var channelDef = encoding[channel];

  if (!channelDef) {
    return false;
  }

  if (isArray(channelDef)) {
    return some(channelDef, function (fieldDef) {
      return !!fieldDef.field;
    });
  }

  return isFieldDef(channelDef) || hasConditionalFieldDef(channelDef);
}
|
|
|
|
/**
 * Whether any channel in the encoding carries an aggregate.
 */
function isAggregate(encoding) {
  return some(CHANNELS, function (channel) {
    if (!_channelHasField(encoding, channel)) {
      return false;
    }

    var channelDef = encoding[channel];

    if (isArray(channelDef)) {
      return some(channelDef, function (fieldDef) {
        return !!fieldDef.aggregate;
      });
    }

    var fieldDef = getFieldDef(channelDef);
    return fieldDef && !!fieldDef.aggregate;
  });
}
|
|
|
|
/**
 * Pull implicit transforms (bin, timeUnit, aggregate) out of an encoding
 * into explicit transform arrays, rewriting each field def to reference the
 * post-transform field; value/datum/signal defs pass through unchanged.
 * Returns {bins, timeUnits, aggregate, groupby, encoding}.
 */
function extractTransformsFromEncoding(oldEncoding, config) {
  var groupby = [];
  var bins = [];
  var timeUnits = [];
  var aggregate = [];
  var encoding = {};
  forEach(oldEncoding, function (channelDef, channel) {
    // Extract potential embedded transformations along with remaining properties
    if (isFieldDef(channelDef)) {
      var _field2 = channelDef.field,
          aggOp = channelDef.aggregate,
          bin = channelDef.bin,
          timeUnit = channelDef.timeUnit,
          remaining = __rest(channelDef, ["field", "aggregate", "bin", "timeUnit"]);

      if (aggOp || timeUnit || bin) {
        var guide = getGuide(channelDef);
        var isTitleDefined = guide && guide.title;

        var newField = _vgField(channelDef, {
          forAs: true
        });

        // preserve the original (pre-transform) title unless one is set
        var newFieldDef = Object.assign(Object.assign(Object.assign({}, isTitleDefined ? [] : {
          title: _title3(channelDef, config, {
            allowDisabling: true
          })
        }), remaining), {
          // Always overwrite field
          field: newField
        });

        if (aggOp) {
          var op;

          if (isArgmaxDef(aggOp)) {
            // argmax produces a record; the def must drill into it
            op = 'argmax';
            newField = _vgField({
              op: 'argmax',
              field: aggOp.argmax
            }, {
              forAs: true
            });
            newFieldDef.field = "".concat(newField, ".").concat(_field2);
          } else if (isArgminDef(aggOp)) {
            op = 'argmin';
            newField = _vgField({
              op: 'argmin',
              field: aggOp.argmin
            }, {
              forAs: true
            });
            newFieldDef.field = "".concat(newField, ".").concat(_field2);
          } else if (aggOp !== 'boxplot' && aggOp !== 'errorbar' && aggOp !== 'errorband') {
            // composite-mark pseudo-aggregates are handled elsewhere
            op = aggOp;
          }

          if (op) {
            var aggregateEntry = {
              op: op,
              as: newField
            };

            if (_field2) {
              aggregateEntry.field = _field2;
            }

            aggregate.push(aggregateEntry);
          }
        } else {
          // non-aggregated fields become groupby keys
          groupby.push(newField);

          if (isTypedFieldDef(channelDef) && isBinning(bin)) {
            bins.push({
              bin: bin,
              field: _field2,
              as: newField
            }); // Add additional groupbys for range and end of bins

            groupby.push(_vgField(channelDef, {
              binSuffix: 'end'
            }));

            if (binRequiresRange(channelDef, channel)) {
              groupby.push(_vgField(channelDef, {
                binSuffix: 'range'
              }));
            } // Create accompanying 'x2' or 'y2' field if channel is 'x' or 'y' respectively

            if (isXorY(channel)) {
              var secondaryChannel = {
                field: newField + '_end'
              };
              encoding[channel + '2'] = secondaryChannel;
            }

            newFieldDef.bin = 'binned';

            if (!isSecondaryRangeChannel(channel)) {
              newFieldDef['type'] = QUANTITATIVE;
            }
          } else if (timeUnit) {
            timeUnits.push({
              timeUnit: timeUnit,
              field: _field2,
              as: newField
            }); // define the format type for later compilation

            var formatType = isTypedFieldDef(channelDef) && channelDef.type !== TEMPORAL && 'time';

            if (formatType) {
              if (channel === TEXT$1 || channel === TOOLTIP) {
                newFieldDef['formatType'] = formatType;
              } else if (isNonPositionScaleChannel(channel)) {
                newFieldDef['legend'] = Object.assign({
                  formatType: formatType
                }, newFieldDef['legend']);
              } else if (isXorY(channel)) {
                newFieldDef['axis'] = Object.assign({
                  formatType: formatType
                }, newFieldDef['axis']);
              }
            }
          }
        } // now the field should refer to post-transformed field instead

        encoding[channel] = newFieldDef;
      } else {
        groupby.push(_field2);
        encoding[channel] = oldEncoding[channel];
      }
    } else {
      // For value def / signal ref / datum def, just copy
      encoding[channel] = oldEncoding[channel];
    }
  });
  return {
    bins: bins,
    timeUnits: timeUnits,
    aggregate: aggregate,
    groupby: groupby,
    encoding: encoding
  };
}
|
|
|
|
// Decides whether `channel` may be used with the given `mark` in `encoding`.
// Returns false for unsupported channels; for the special 'binned' support
// level, the secondary channel is only allowed when the primary field is
// pre-binned data.
function markChannelCompatible(encoding, channel, mark) {
  var support = supportMark(channel, mark);

  if (!support) {
    return false;
  }

  if (support === 'binned') {
    // circle, point, square and tick only support x2/y2 when their
    // corresponding x/y fieldDef has "binned" data and thus need x2/y2 to
    // specify the bin-end field.
    var primaryFieldDef = encoding[channel === X2 ? X : Y];
    return isFieldDef(primaryFieldDef) && isFieldDef(encoding[channel]) && isBinned(primaryFieldDef.bin);
  }

  return true;
}
|
|
|
|
// Normalizes a raw encoding object for a primitive mark: drops invalid or
// incompatible channels (with warnings), rewrites arc `angle` to `theta`,
// and runs every remaining channel def through initChannelDef /
// initFieldDef. Returns a fresh normalized encoding object.
function initEncoding(encoding, mark, filled, config) {
  var normalizedEncoding = {};
  var channels = keys(encoding);

  for (var i = 0; i < channels.length; i++) {
    var channel = channels[i];

    if (!isChannel(channel)) {
      // Drop invalid channel
      warn(invalidEncodingChannel(channel));
      continue;
    }

    var channelDef = encoding[channel];

    // Arc marks position with theta; treat a lone `angle` as `theta`.
    if (channel === 'angle' && mark === 'arc' && !encoding.theta) {
      warn(REPLACE_ANGLE_WITH_THETA);
      channel = THETA;
    }

    if (!markChannelCompatible(encoding, channel, mark)) {
      // Drop unsupported channel
      warn(incompatibleChannel(channel, mark));
      continue;
    }

    // Drop line's size if the field is aggregated.
    if (channel === SIZE && mark === 'line') {
      var fieldDef = getFieldDef(encoding[channel]);

      if (fieldDef && fieldDef.aggregate) {
        warn(LINE_WITH_VARYING_SIZE);
        continue;
      }
    }

    // Drop color if either fill or stroke is specified.
    if (channel === COLOR && (filled ? 'fill' in encoding : 'stroke' in encoding)) {
      warn(droppingColor('encoding', {
        fill: 'fill' in encoding,
        stroke: 'stroke' in encoding
      }));
      continue;
    }

    // detail, non-array/value order, and array-valued tooltip all hold a
    // list of field defs rather than a single channel def.
    var holdsFieldDefList =
      channel === DETAIL ||
      (channel === ORDER && !isArray(channelDef) && !isValueDef(channelDef)) ||
      (channel === TOOLTIP && isArray(channelDef));

    if (holdsFieldDefList) {
      if (channelDef) {
        // Array of fieldDefs for detail channel (or production rule)
        var initializedDefs = [];
        var rawDefs = array(channelDef);

        for (var j = 0; j < rawDefs.length; j++) {
          var rawDef = rawDefs[j];

          if (!isFieldDef(rawDef)) {
            warn(emptyFieldDef(rawDef, channel));
          } else {
            initializedDefs.push(initFieldDef(rawDef, channel));
          }
        }

        normalizedEncoding[channel] = initializedDefs;
      }
    } else {
      if (channel === TOOLTIP && channelDef === null) {
        // Preserve null so we can use it to disable tooltip
        normalizedEncoding[channel] = null;
      } else if (!isFieldDef(channelDef) && !isDatumDef(channelDef) && !isValueDef(channelDef) && !isConditionalDef(channelDef) && !isSignalRef(channelDef)) {
        warn(emptyFieldDef(channelDef, channel));
        continue;
      }

      normalizedEncoding[channel] = initChannelDef(channelDef, channel, config);
    }
  }

  return normalizedEncoding;
}
|
|
/**
 * For composite marks, we have to call initChannelDef during init so we can infer types earlier.
 */
function normalizeEncoding(encoding, config) {
  var normalized = {};
  var channels = keys(encoding);

  for (var i = 0; i < channels.length; i++) {
    var channel = channels[i];
    normalized[channel] = initChannelDef(encoding[channel], channel, config, {
      compositeMark: true
    });
  }

  return normalized;
}
|
|
|
|
// Collects every field definition present in `encoding`, flattening
// array-valued channels and unwrapping conditional field defs.
function fieldDefs(encoding) {
  var collected = [];
  var channels = keys(encoding);

  for (var i = 0; i < channels.length; i++) {
    var channel = channels[i];

    if (!_channelHasField(encoding, channel)) {
      continue;
    }

    var defList = array(encoding[channel]);

    for (var j = 0; j < defList.length; j++) {
      var def = defList[j];

      if (isFieldDef(def)) {
        collected.push(def);
      } else if (hasConditionalFieldDef(def)) {
        // Only the condition branch carries the field def.
        collected.push(def.condition);
      }
    }
  }

  return collected;
}
|
|
|
|
// Invokes `f(channelDef, channel)` for every channel def in `mapping`,
// visiting each element of array-valued channels individually. A falsy
// mapping is a no-op.
function forEach(mapping, f, thisArg) {
  if (!mapping) {
    return;
  }

  var channels = keys(mapping);

  for (var i = 0; i < channels.length; i++) {
    var channel = channels[i];
    var el = mapping[channel];

    if (isArray(el)) {
      for (var j = 0; j < el.length; j++) {
        f.call(thisArg, el[j], channel);
      }
    } else {
      f.call(thisArg, el, channel);
    }
  }
}
|
|
|
|
// Folds `f(acc, channelDef, channel)` over every channel def in `mapping`,
// flattening array-valued channels. Returns `init` unchanged for a falsy
// mapping.
function reduce(mapping, f, init, thisArg) {
  if (!mapping) {
    return init;
  }

  var acc = init;
  var channels = keys(mapping);

  for (var i = 0; i < channels.length; i++) {
    var channel = channels[i];
    var el = mapping[channel];

    if (isArray(el)) {
      for (var j = 0; j < el.length; j++) {
        acc = f.call(thisArg, acc, el[j], channel);
      }
    } else {
      acc = f.call(thisArg, acc, el, channel);
    }
  }

  return acc;
}
|
|
/**
 * Returns list of path grouping fields for the given encoding
 */
function pathGroupingFields(mark, encoding) {
  return keys(encoding).reduce(function (details, channel) {
    switch (channel) {
      // Position, link, and geographic channels never cause lines to group.
      case X:
      case Y:
      case HREF:
      case DESCRIPTION:
      case URL:
      case X2:
      case Y2:
      case THETA:
      case THETA2:
      case RADIUS:
      case RADIUS2: // falls through

      case LATITUDE:
      case LONGITUDE:
      case LATITUDE2:
      case LONGITUDE2: // TODO: case 'cursor':
      // text and shape are not part of line/trail/area [falls through]

      case TEXT$1:
      case SHAPE:
      case ANGLE: // falls through
      // tooltip fields should not be added to group by [falls through]

      case TOOLTIP:
        return details;

      case ORDER:
        // order should not group line / trail
        if (mark === 'line' || mark === 'trail') {
          return details;
        }

      // but order should group area for stacking (falls through)

      case DETAIL:
      case KEY:
        {
          var channelDef = encoding[channel];

          if (isArray(channelDef) || isFieldDef(channelDef)) {
            // Group by every non-aggregated field def on this channel.
            var defList = array(channelDef);

            for (var i = 0; i < defList.length; i++) {
              if (!defList[i].aggregate) {
                details.push(_vgField(defList[i], {}));
              }
            }
          }

          return details;
        }

      case SIZE:
        if (mark === 'trail') {
          // For trail, size should not group trail lines.
          return details;
        }

      // For line, size should group lines. (falls through)

      case COLOR:
      case FILL:
      case STROKE:
      case OPACITY:
      case FILLOPACITY:
      case STROKEOPACITY:
      case STROKEDASH:
      case STROKEWIDTH:
        {
          // TODO strokeDashOffset:
          var markPropDef = getFieldDef(encoding[channel]);

          if (markPropDef && !markPropDef.aggregate) {
            details.push(_vgField(markPropDef, {}));
          }

          return details;
        }
    }
  }, []);
}
|
|
|
|
// Splits the tooltip channel out of `oldEncoding` and partitions any custom
// tooltip entries by whether they carry an aggregate. Aggregated entries are
// kept on the filtered encoding; the non-aggregated remainder is returned
// separately (e.g. for a boxplot's outlier layer).
function filterTooltipWithAggregatedField(oldEncoding) {
  var tooltip = oldEncoding.tooltip;

  var filteredEncoding = __rest(oldEncoding, ["tooltip"]);

  if (!tooltip) {
    return {
      filteredEncoding: filteredEncoding
    };
  }

  var customTooltipWithAggregatedField;
  var customTooltipWithoutAggregatedField;

  if (isArray(tooltip)) {
    for (var i = 0; i < tooltip.length; i++) {
      var entry = tooltip[i];

      if (entry.aggregate) {
        if (!customTooltipWithAggregatedField) {
          customTooltipWithAggregatedField = [];
        }

        customTooltipWithAggregatedField.push(entry);
      } else {
        if (!customTooltipWithoutAggregatedField) {
          customTooltipWithoutAggregatedField = [];
        }

        customTooltipWithoutAggregatedField.push(entry);
      }
    }

    if (customTooltipWithAggregatedField) {
      filteredEncoding.tooltip = customTooltipWithAggregatedField;
    }
  } else if (tooltip['aggregate']) {
    filteredEncoding.tooltip = tooltip;
  } else {
    customTooltipWithoutAggregatedField = tooltip;
  }

  // Collapse a single-element array to its only entry.
  if (isArray(customTooltipWithoutAggregatedField) && customTooltipWithoutAggregatedField.length === 1) {
    customTooltipWithoutAggregatedField = customTooltipWithoutAggregatedField[0];
  }

  return {
    customTooltipWithoutAggregatedField: customTooltipWithoutAggregatedField,
    filteredEncoding: filteredEncoding
  };
}
|
|
|
|
// Builds the tooltip encoding for a composite mark. A user-supplied tooltip
// in `encodingWithoutContinuousAxis` always wins; otherwise the summary
// entries (e.g. the five-number summary) are combined with the remaining
// field defs. The optional 4th argument (default true) appends
// " of <field title>" to each summary title.
function getCompositeMarkTooltip(tooltipSummary, continuousAxisChannelDef, encodingWithoutContinuousAxis) {
  var withFieldName = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : true;

  if ('tooltip' in encodingWithoutContinuousAxis) {
    return {
      tooltip: encodingWithoutContinuousAxis.tooltip
    };
  }

  var summaryTooltip = tooltipSummary.map(function (entry) {
    var mainTitle = withFieldName ? " of ".concat(getTitle(continuousAxisChannelDef)) : '';
    var title;

    if (isSignalRef(entry.titlePrefix)) {
      title = {
        signal: entry.titlePrefix + '"' + escape(mainTitle) + '"'
      };
    } else {
      title = entry.titlePrefix + mainTitle;
    }

    return {
      field: entry.fieldPrefix + continuousAxisChannelDef.field,
      type: continuousAxisChannelDef.type,
      title: title
    };
  });
  var tooltipFieldDefs = fieldDefs(encodingWithoutContinuousAxis).map(toStringFieldDef);
  return {
    tooltip: summaryTooltip.concat(unique(tooltipFieldDefs, _hash))
  };
}
|
|
|
|
// Title for a continuous-axis channel def: the explicit title when defined,
// otherwise the field name.
function getTitle(continuousAxisChannelDef) {
  return getFirstDefined(continuousAxisChannelDef.title, continuousAxisChannelDef.field);
}
|
|
|
|
// Returns a factory that builds one layer ("part") of a composite mark —
// e.g. a whisker rule or a median tick — positioned on the shared
// continuous axis. The part descriptor supplies the mark, the position
// prefix(es), and any extra encoding (typically a tooltip).
function makeCompositeAggregatePartFactory(compositeMarkDef, continuousAxis, continuousAxisChannelDef, sharedEncoding, compositeMarkConfig) {
  var scale = continuousAxisChannelDef.scale;
  var axis = continuousAxisChannelDef.axis;
  return function (part) {
    var partName = part.partName;
    var mark = part.mark;
    var positionPrefix = part.positionPrefix;
    var endPositionPrefix = part.endPositionPrefix;
    var extraEncoding = part.extraEncoding === undefined ? {} : part.extraEncoding;
    var title = getTitle(continuousAxisChannelDef);

    // Position channel: `<prefix>_<field>`, carrying over title/scale/axis
    // from the original channel def when present.
    var positionDef = {
      field: positionPrefix + '_' + continuousAxisChannelDef.field,
      type: continuousAxisChannelDef.type
    };

    if (title !== undefined) {
      positionDef.title = title;
    }

    if (scale !== undefined) {
      positionDef.scale = scale;
    }

    if (axis !== undefined) {
      positionDef.axis = axis;
    }

    var partEncoding = {};
    partEncoding[continuousAxis] = positionDef;

    if (isString(endPositionPrefix)) {
      // Ranged parts (rules, the box) also get an x2/y2 end position.
      partEncoding[continuousAxis + '2'] = {
        field: endPositionPrefix + '_' + continuousAxisChannelDef.field
      };
    }

    Object.assign(partEncoding, sharedEncoding, extraEncoding);
    return partLayerMixins(compositeMarkDef, partName, compositeMarkConfig, {
      mark: mark,
      encoding: partEncoding
    });
  };
}
|
|
|
|
// Produces the layer spec for one named part of a composite mark, or [] when
// that part is disabled (explicitly falsy on the mark def and absent from
// config). Merge precedence, lowest to highest: part config, shared
// clip/color/opacity, the part's base mark, the derived style, then explicit
// per-part overrides from the mark def.
function partLayerMixins(markDef, part, compositeMarkConfig, partBaseSpec) {
  var clip = markDef.clip;
  var color = markDef.color;
  var opacity = markDef.opacity;
  var mark = markDef.type;
  var partEnabled = markDef[part] || (markDef[part] === undefined && compositeMarkConfig[part]);

  if (!partEnabled) {
    return [];
  }

  var partMark = Object.assign({}, compositeMarkConfig[part]);

  if (clip) {
    partMark.clip = clip;
  }

  if (color) {
    partMark.color = color;
  }

  if (opacity) {
    partMark.opacity = opacity;
  }

  Object.assign(partMark, isMarkDef(partBaseSpec.mark) ? partBaseSpec.mark : {
    type: partBaseSpec.mark
  });
  partMark.style = mark + '-' + part;

  if (!isBoolean(markDef[part])) {
    // A plain `true` enables the part without overrides; an object merges in.
    Object.assign(partMark, markDef[part]);
  }

  return [Object.assign({}, partBaseSpec, {
    mark: partMark
  })];
}
|
|
|
|
// Resolves the continuous axis ('y' for vertical, 'x' for horizontal) of a
// composite mark and returns the (aggregate-stripped) channel defs for that
// axis plus its 2/Error/Error2 companions.
function compositeMarkContinuousAxis(spec, orient, compositeMark) {
  var encoding = spec.encoding;
  var continuousAxis = orient === 'vertical' ? 'y' : 'x';
  return {
    continuousAxisChannelDef: filterAggregateFromChannelDef(encoding[continuousAxis], compositeMark),
    continuousAxisChannelDef2: filterAggregateFromChannelDef(encoding[continuousAxis + '2'], compositeMark),
    continuousAxisChannelDefError: filterAggregateFromChannelDef(encoding[continuousAxis + 'Error'], compositeMark),
    continuousAxisChannelDefError2: filterAggregateFromChannelDef(encoding[continuousAxis + 'Error2'], compositeMark),
    continuousAxis: continuousAxis
  };
}
|
|
|
|
// Strips the `aggregate` property from a continuous-axis channel def,
// warning when the aggregate is not the composite mark itself. Defs without
// an aggregate (or missing entirely) pass through unchanged.
function filterAggregateFromChannelDef(continuousAxisChannelDef, compositeMark) {
  if (!(continuousAxisChannelDef && continuousAxisChannelDef.aggregate)) {
    return continuousAxisChannelDef;
  }

  var aggregate = continuousAxisChannelDef.aggregate;

  var continuousAxisWithoutAggregate = __rest(continuousAxisChannelDef, ["aggregate"]);

  if (aggregate !== compositeMark) {
    warn(errorBarContinuousAxisHasCustomizedAggregate(aggregate, compositeMark));
  }

  return continuousAxisWithoutAggregate;
}
|
|
|
|
// Determines the orientation of a composite mark. An explicit mark orient
// wins; otherwise the orientation follows which of x/y is continuous, which
// carries the composite-mark aggregate, and (as a tiebreaker) which looks
// temporal. Throws when neither axis is continuous or both carry the
// aggregate.
function compositeMarkOrient(spec, compositeMark) {
  var mark = spec.mark;
  var encoding = spec.encoding;
  var x = encoding.x;
  var y = encoding.y;

  if (isMarkDef(mark) && mark.orient) {
    return mark.orient;
  }

  if (!isContinuousFieldOrDatumDef(x)) {
    if (isContinuousFieldOrDatumDef(y)) {
      // y is continuous but x is not
      return 'vertical';
    }

    // Neither x nor y is continuous.
    throw new Error('Need a valid continuous axis for ' + compositeMark + 's');
  }

  if (!isContinuousFieldOrDatumDef(y)) {
    // x is continuous but y is not
    return 'horizontal';
  }

  // Both x and y are continuous.
  var xAggregate = isFieldDef(x) && x.aggregate;
  var yAggregate = isFieldDef(y) && y.aggregate;

  if (!xAggregate && yAggregate === compositeMark) {
    return 'vertical';
  }

  if (!yAggregate && xAggregate === compositeMark) {
    return 'horizontal';
  }

  if (xAggregate === compositeMark && yAggregate === compositeMark) {
    throw new Error('Both x and y cannot have aggregate');
  }

  if (isFieldOrDatumDefForTimeFormat(y) && !isFieldOrDatumDefForTimeFormat(x)) {
    // y is temporal but x is not
    return 'horizontal';
  }

  // default orientation for two continuous
  return 'vertical';
}
|
|
|
|
// Composite-mark name for box plots and the named sub-parts it is built from.
var BOXPLOT = 'boxplot';
var BOXPLOT_PARTS = ['box', 'median', 'outliers', 'rule', 'ticks'];
// Normalizer that expands a "boxplot" spec via normalizeBoxPlot.
var boxPlotNormalizer = new CompositeMarkNormalizer(BOXPLOT, normalizeBoxPlot);
|
|
|
|
// Maps a whisker extent setting to a box-plot type: a numeric extent means
// k * IQR (Tukey) whiskers; otherwise the extent string (e.g. 'min-max') is
// itself the type. (A `{kIQR: number}` syntax could expose the original
// [Q1-k*IQR, Q3+k*IQR] variant, but it is not exposed for now.)
function getBoxPlotType(extent) {
  return isNumber(extent) ? 'tukey' : extent;
}
|
|
|
|
// Normalizer for the "boxplot" composite mark: expands a boxplot spec into a
// layered spec of rules/ticks/bars (whiskers, box, median and — for non
// min-max plots — outlier points) plus the aggregate/calculate transforms
// that produce the five-number summary.
function normalizeBoxPlot(spec, _ref16) {
  var config = _ref16.config;

  var _a, _b; // Need to initEncoding first so we can infer type

  spec = Object.assign(Object.assign({}, spec), {
    encoding: normalizeEncoding(spec.encoding, config)
  });

  var _spec = spec,
      mark = _spec.mark,
      _encoding = _spec.encoding,
      selection = _spec.selection,
      _p = _spec.projection,
      outerSpec = __rest(spec, ["mark", "encoding", "selection", "projection"]);

  var markDef = isMarkDef(mark) ? mark : {
    type: mark
  }; // TODO(https://github.com/vega/vega-lite/issues/3702): add selection support

  if (selection) {
    warn(selectionNotSupported('boxplot'));
  }

  // Whisker extent: the mark-level value wins over the boxplot config default.
  var extent = (_a = markDef.extent) !== null && _a !== void 0 ? _a : config.boxplot.extent;
  var sizeValue = getMarkPropOrConfig('size', markDef, // TODO: https://github.com/vega/vega-lite/issues/6245
  config);
  var boxPlotType = getBoxPlotType(extent);

  var _boxParams = boxParams(spec, extent, config),
      transform = _boxParams.transform,
      continuousAxisChannelDef = _boxParams.continuousAxisChannelDef,
      continuousAxis = _boxParams.continuousAxis,
      groupby = _boxParams.groupby,
      aggregate = _boxParams.aggregate,
      encodingWithoutContinuousAxis = _boxParams.encodingWithoutContinuousAxis,
      ticksOrient = _boxParams.ticksOrient,
      boxOrient = _boxParams.boxOrient,
      customTooltipWithoutAggregatedField = _boxParams.customTooltipWithoutAggregatedField;

  var color = encodingWithoutContinuousAxis.color,
      size = encodingWithoutContinuousAxis.size,
      encodingWithoutSizeColorAndContinuousAxis = __rest(encodingWithoutContinuousAxis, ["color", "size"]);

  // Each part builder shares the continuous-axis channel def but may use a
  // different subset of the shared encoding.
  var makeBoxPlotPart = function makeBoxPlotPart(sharedEncoding) {
    return makeCompositeAggregatePartFactory(markDef, continuousAxis, continuousAxisChannelDef, sharedEncoding, config.boxplot);
  };

  var makeBoxPlotExtent = makeBoxPlotPart(encodingWithoutSizeColorAndContinuousAxis);
  var makeBoxPlotBox = makeBoxPlotPart(encodingWithoutContinuousAxis);
  var makeBoxPlotMidTick = makeBoxPlotPart(Object.assign(Object.assign({}, encodingWithoutSizeColorAndContinuousAxis), size ? {
    size: size
  } : {}));
  // Five-number-summary tooltip (Max/Q3/Median/Q1/Min). For min-max plots the
  // whisker ends ARE the min/max, hence the different field prefixes.
  var fiveSummaryTooltipEncoding = getCompositeMarkTooltip([{
    fieldPrefix: boxPlotType === 'min-max' ? 'upper_whisker_' : 'max_',
    titlePrefix: 'Max'
  }, {
    fieldPrefix: 'upper_box_',
    titlePrefix: 'Q3'
  }, {
    fieldPrefix: 'mid_box_',
    titlePrefix: 'Median'
  }, {
    fieldPrefix: 'lower_box_',
    titlePrefix: 'Q1'
  }, {
    fieldPrefix: boxPlotType === 'min-max' ? 'lower_whisker_' : 'min_',
    titlePrefix: 'Min'
  }], continuousAxisChannelDef, encodingWithoutContinuousAxis); // ## Whisker Layers

  var endTick = {
    type: 'tick',
    color: 'black',
    opacity: 1,
    orient: ticksOrient,
    invalid: null,
    aria: false
  };
  var whiskerTooltipEncoding = boxPlotType === 'min-max' ? fiveSummaryTooltipEncoding // for min-max, show five-summary tooltip for whisker
  : // for tukey / k-IQR, just show upper/lower-whisker
  getCompositeMarkTooltip([{
    fieldPrefix: 'upper_whisker_',
    titlePrefix: 'Upper Whisker'
  }, {
    fieldPrefix: 'lower_whisker_',
    titlePrefix: 'Lower Whisker'
  }], continuousAxisChannelDef, encodingWithoutContinuousAxis);
  // Two whisker rules (lower whisker→lower box, upper box→upper whisker)
  // plus a tick at each whisker end.
  var whiskerLayers = [].concat(_toConsumableArray(makeBoxPlotExtent({
    partName: 'rule',
    mark: {
      type: 'rule',
      invalid: null,
      aria: false
    },
    positionPrefix: 'lower_whisker',
    endPositionPrefix: 'lower_box',
    extraEncoding: whiskerTooltipEncoding
  })), _toConsumableArray(makeBoxPlotExtent({
    partName: 'rule',
    mark: {
      type: 'rule',
      invalid: null,
      aria: false
    },
    positionPrefix: 'upper_box',
    endPositionPrefix: 'upper_whisker',
    extraEncoding: whiskerTooltipEncoding
  })), _toConsumableArray(makeBoxPlotExtent({
    partName: 'ticks',
    mark: endTick,
    positionPrefix: 'lower_whisker',
    extraEncoding: whiskerTooltipEncoding
  })), _toConsumableArray(makeBoxPlotExtent({
    partName: 'ticks',
    mark: endTick,
    positionPrefix: 'upper_whisker',
    extraEncoding: whiskerTooltipEncoding
  }))); // ## Box Layers
  // TODO: support hiding certain mark parts

  var boxLayers = [].concat(_toConsumableArray(boxPlotType !== 'tukey' ? whiskerLayers : []), _toConsumableArray(makeBoxPlotBox({
    partName: 'box',
    mark: Object.assign(Object.assign({
      type: 'bar'
    }, sizeValue ? {
      size: sizeValue
    } : {}), {
      orient: boxOrient,
      invalid: null,
      ariaRoleDescription: 'box'
    }),
    positionPrefix: 'lower_box',
    endPositionPrefix: 'upper_box',
    extraEncoding: fiveSummaryTooltipEncoding
  })), _toConsumableArray(makeBoxPlotMidTick({
    partName: 'median',
    mark: Object.assign(Object.assign(Object.assign({
      type: 'tick',
      invalid: null
    }, isObject(config.boxplot.median) && config.boxplot.median.color ? {
      color: config.boxplot.median.color
    } : {}), sizeValue ? {
      size: sizeValue
    } : {}), {
      orient: ticksOrient,
      aria: false
    }),
    positionPrefix: 'mid_box',
    extraEncoding: fiveSummaryTooltipEncoding
  }))); // ## Filtered Layers

  var filteredLayersMixins;

  if (boxPlotType !== 'min-max') {
    // Vega expression snippets used by the outlier filter and, for tukey
    // plots, by the whisker re-aggregation over non-outlier points.
    var lowerBoxExpr = "datum[\"lower_box_".concat(continuousAxisChannelDef.field, "\"]");
    var upperBoxExpr = "datum[\"upper_box_".concat(continuousAxisChannelDef.field, "\"]");
    var iqrExpr = "(".concat(upperBoxExpr, " - ").concat(lowerBoxExpr, ")");
    var lowerWhiskerExpr = "".concat(lowerBoxExpr, " - ").concat(extent, " * ").concat(iqrExpr);
    var upperWhiskerExpr = "".concat(upperBoxExpr, " + ").concat(extent, " * ").concat(iqrExpr);

    var _fieldExpr = "datum[\"".concat(continuousAxisChannelDef.field, "\"]");

    var joinaggregateTransform = {
      joinaggregate: boxParamsQuartiles(continuousAxisChannelDef.field),
      groupby: groupby
    };
    var filteredWhiskerSpec = undefined;

    if (boxPlotType === 'tukey') {
      // Tukey whiskers are recomputed over only the in-range points.
      filteredWhiskerSpec = {
        transform: [{
          filter: "(".concat(lowerWhiskerExpr, " <= ").concat(_fieldExpr, ") && (").concat(_fieldExpr, " <= ").concat(upperWhiskerExpr, ")")
        }, {
          aggregate: [{
            op: 'min',
            field: continuousAxisChannelDef.field,
            as: 'lower_whisker_' + continuousAxisChannelDef.field
          }, {
            op: 'max',
            field: continuousAxisChannelDef.field,
            as: 'upper_whisker_' + continuousAxisChannelDef.field
          }, // preserve lower_box / upper_box
          {
            op: 'min',
            field: 'lower_box_' + continuousAxisChannelDef.field,
            as: 'lower_box_' + continuousAxisChannelDef.field
          }, {
            op: 'max',
            field: 'upper_box_' + continuousAxisChannelDef.field,
            as: 'upper_box_' + continuousAxisChannelDef.field
          }].concat(_toConsumableArray(aggregate)),
          groupby: groupby
        }],
        layer: whiskerLayers
      };
    }

    var encodingWithoutSizeColorContinuousAxisAndTooltip = __rest(encodingWithoutSizeColorAndContinuousAxis, ["tooltip"]);

    var scale = continuousAxisChannelDef.scale,
        axis = continuousAxisChannelDef.axis;

    var _title = getTitle(continuousAxisChannelDef);

    var axisWithoutTitle = omit(axis, ['title']);
    // Point layer for values outside the whisker range.
    var outlierLayersMixins = partLayerMixins(markDef, 'outliers', config.boxplot, {
      transform: [{
        filter: "(".concat(_fieldExpr, " < ").concat(lowerWhiskerExpr, ") || (").concat(_fieldExpr, " > ").concat(upperWhiskerExpr, ")")
      }],
      mark: 'point',
      encoding: Object.assign(Object.assign(_defineProperty({}, continuousAxis, Object.assign(Object.assign(Object.assign({
        field: continuousAxisChannelDef.field,
        type: continuousAxisChannelDef.type
      }, _title !== undefined ? {
        title: _title
      } : {}), scale !== undefined ? {
        scale: scale
      } : {}), isEmpty(axisWithoutTitle) ? {} : {
        axis: axisWithoutTitle
      })), encodingWithoutSizeColorContinuousAxisAndTooltip), customTooltipWithoutAggregatedField ? {
        tooltip: customTooltipWithoutAggregatedField
      } : {})
    })[0];

    // Combine the outlier layer and/or the filtered whisker spec, always
    // prepending the quartile joinaggregate so the filter exprs can run.
    if (outlierLayersMixins && filteredWhiskerSpec) {
      filteredLayersMixins = {
        transform: [joinaggregateTransform],
        layer: [outlierLayersMixins, filteredWhiskerSpec]
      };
    } else if (outlierLayersMixins) {
      filteredLayersMixins = outlierLayersMixins;
      filteredLayersMixins.transform.unshift(joinaggregateTransform);
    } else if (filteredWhiskerSpec) {
      filteredLayersMixins = filteredWhiskerSpec;
      filteredLayersMixins.transform.unshift(joinaggregateTransform);
    }
  }

  if (filteredLayersMixins) {
    // tukey box plot with outliers included
    return Object.assign(Object.assign({}, outerSpec), {
      layer: [filteredLayersMixins, {
        // boxplot
        transform: transform,
        layer: boxLayers
      }]
    });
  }

  return Object.assign(Object.assign({}, outerSpec), {
    transform: ((_b = outerSpec.transform) !== null && _b !== void 0 ? _b : []).concat(transform),
    layer: boxLayers
  });
}
|
|
|
|
// Aggregate field defs for the lower (q1) and upper (q3) box edges of a
// boxplot over the given continuous-axis field.
function boxParamsQuartiles(continousAxisField) {
  function quartile(op, prefix) {
    return {
      op: op,
      field: continousAxisField,
      as: prefix + continousAxisField
    };
  }

  return [quartile('q1', 'lower_box_'), quartile('q3', 'upper_box_')];
}
|
|
|
|
// Computes everything normalizeBoxPlot needs that depends on the data
// transforms: the continuous axis and its channel def, the boxplot-specific
// aggregates (quartiles, median, min/max or whisker ends), the k-IQR
// post-aggregate calculates, the merged transform pipeline, and the encoding
// with the continuous axis removed.
function boxParams(spec, extent, config) {
  var orient = compositeMarkOrient(spec, BOXPLOT);

  var _compositeMarkContinu = compositeMarkContinuousAxis(spec, orient, BOXPLOT),
      continuousAxisChannelDef = _compositeMarkContinu.continuousAxisChannelDef,
      continuousAxis = _compositeMarkContinu.continuousAxis;

  var continuousFieldName = continuousAxisChannelDef.field;
  var boxPlotType = getBoxPlotType(extent);
  // Quartiles + median + min/max. For min-max plots the min/max directly
  // become the whisker ends; otherwise they keep min_/max_ prefixes.
  var boxplotSpecificAggregate = [].concat(_toConsumableArray(boxParamsQuartiles(continuousFieldName)), [{
    op: 'median',
    field: continuousFieldName,
    as: 'mid_box_' + continuousFieldName
  }, {
    op: 'min',
    field: continuousFieldName,
    as: (boxPlotType === 'min-max' ? 'lower_whisker_' : 'min_') + continuousFieldName
  }, {
    op: 'max',
    field: continuousFieldName,
    as: (boxPlotType === 'min-max' ? 'upper_whisker_' : 'max_') + continuousFieldName
  }]);
  var postAggregateCalculates = boxPlotType === 'min-max' || boxPlotType === 'tukey' ? [] : [// This is for the original k-IQR, which we do not expose
  {
    calculate: "datum[\"upper_box_".concat(continuousFieldName, "\"] - datum[\"lower_box_").concat(continuousFieldName, "\"]"),
    as: 'iqr_' + continuousFieldName
  }, {
    calculate: "min(datum[\"upper_box_".concat(continuousFieldName, "\"] + datum[\"iqr_").concat(continuousFieldName, "\"] * ").concat(extent, ", datum[\"max_").concat(continuousFieldName, "\"])"),
    as: 'upper_whisker_' + continuousFieldName
  }, {
    calculate: "max(datum[\"lower_box_".concat(continuousFieldName, "\"] - datum[\"iqr_").concat(continuousFieldName, "\"] * ").concat(extent, ", datum[\"min_").concat(continuousFieldName, "\"])"),
    as: 'lower_whisker_' + continuousFieldName
  }];

  // Remove the continuous-axis channel from the encoding (transpiled
  // object-rest over a computed key).
  var _a = spec.encoding,
      _b = continuousAxis,
      oldContinuousAxisChannelDef = _a[_b],
      oldEncodingWithoutContinuousAxis = __rest(_a, [_typeof(_b) === "symbol" ? _b : _b + ""]);

  var _filterTooltipWithAgg = filterTooltipWithAggregatedField(oldEncodingWithoutContinuousAxis),
      customTooltipWithoutAggregatedField = _filterTooltipWithAgg.customTooltipWithoutAggregatedField,
      filteredEncoding = _filterTooltipWithAgg.filteredEncoding;

  var _extractTransformsFro = extractTransformsFromEncoding(filteredEncoding, config),
      bins = _extractTransformsFro.bins,
      timeUnits = _extractTransformsFro.timeUnits,
      aggregate = _extractTransformsFro.aggregate,
      groupby = _extractTransformsFro.groupby,
      encodingWithoutContinuousAxis = _extractTransformsFro.encoding;

  // Ticks run perpendicular to the box orientation.
  var ticksOrient = orient === 'vertical' ? 'horizontal' : 'vertical';
  var boxOrient = orient;
  // Pipeline order: bins, timeUnits, the combined aggregate, then any k-IQR
  // whisker calculates.
  var transform = [].concat(_toConsumableArray(bins), _toConsumableArray(timeUnits), [{
    aggregate: [].concat(_toConsumableArray(aggregate), _toConsumableArray(boxplotSpecificAggregate)),
    groupby: groupby
  }], postAggregateCalculates);
  return {
    transform: transform,
    groupby: groupby,
    aggregate: aggregate,
    continuousAxisChannelDef: continuousAxisChannelDef,
    continuousAxis: continuousAxis,
    encodingWithoutContinuousAxis: encodingWithoutContinuousAxis,
    ticksOrient: ticksOrient,
    boxOrient: boxOrient,
    customTooltipWithoutAggregatedField: customTooltipWithoutAggregatedField
  };
}
|
|
|
|
// Composite-mark name for error bars and the sub-parts it is built from.
var ERRORBAR = 'errorbar';
var ERRORBAR_PARTS = ['ticks', 'rule'];
// Normalizer that expands an "errorbar" spec via normalizeErrorBar.
var errorBarNormalizer = new CompositeMarkNormalizer(ERRORBAR, normalizeErrorBar);
|
|
|
|
// Normalizer for the "errorbar" composite mark: expands an errorbar spec
// into lower/upper end ticks plus a connecting rule, with the transforms
// computed by errorBarParams.
function normalizeErrorBar(spec, _ref17) {
  var config = _ref17.config;
  // Need to initEncoding first so we can infer type
  spec = Object.assign(Object.assign({}, spec), {
    encoding: normalizeEncoding(spec.encoding, config)
  });

  var _errorBarParams = errorBarParams(spec, ERRORBAR, config),
      transform = _errorBarParams.transform,
      continuousAxisChannelDef = _errorBarParams.continuousAxisChannelDef,
      continuousAxis = _errorBarParams.continuousAxis,
      encodingWithoutContinuousAxis = _errorBarParams.encodingWithoutContinuousAxis,
      ticksOrient = _errorBarParams.ticksOrient,
      markDef = _errorBarParams.markDef,
      outerSpec = _errorBarParams.outerSpec,
      tooltipEncoding = _errorBarParams.tooltipEncoding;

  var makeErrorBarPart = makeCompositeAggregatePartFactory(markDef, continuousAxis, continuousAxisChannelDef, encodingWithoutContinuousAxis, config.errorbar);
  var tick = {
    type: 'tick',
    orient: ticksOrient,
    aria: false
  };
  // End ticks at the lower/upper values plus a rule spanning them.
  var layer = [].concat(_toConsumableArray(makeErrorBarPart({
    partName: 'ticks',
    mark: tick,
    positionPrefix: 'lower',
    extraEncoding: tooltipEncoding
  })), _toConsumableArray(makeErrorBarPart({
    partName: 'ticks',
    mark: tick,
    positionPrefix: 'upper',
    extraEncoding: tooltipEncoding
  })), _toConsumableArray(makeErrorBarPart({
    partName: 'rule',
    mark: {
      type: 'rule',
      ariaRoleDescription: 'errorbar'
    },
    positionPrefix: 'lower',
    endPositionPrefix: 'upper',
    extraEncoding: tooltipEncoding
  })));
  // With a single enabled part, inline it instead of emitting a layer array.
  return Object.assign(Object.assign(Object.assign({}, outerSpec), {
    transform: transform
  }), layer.length > 1 ? {
    layer: layer
  } : Object.assign({}, layer[0]));
}
|
|
|
|
/**
 * Determines the orientation ('horizontal'/'vertical') and input type of an
 * errorbar/errorband spec from its encoding. The three input types are:
 *  - 'raw': only x and/or y given; extents are computed by aggregation.
 *  - 'aggregated-upper-lower': x/x2 or y/y2 give pre-computed bounds.
 *  - 'aggregated-error': xError(2)/yError(2) give pre-computed error offsets.
 * Throws for inconsistent/ambiguous channel combinations.
 */
function errorBarOrientAndInputType(spec, compositeMark) {
  var encoding = spec.encoding;

  if (errorBarIsInputTypeRaw(encoding)) {
    return {
      // Orientation is inferred from which position channel is continuous.
      orient: compositeMarkOrient(spec, compositeMark),
      inputType: 'raw'
    };
  }

  var isTypeAggregatedUpperLower = errorBarIsInputTypeAggregatedUpperLower(encoding);
  var isTypeAggregatedError = errorBarIsInputTypeAggregatedError(encoding);
  var x = encoding.x;
  var y = encoding.y;

  if (isTypeAggregatedUpperLower) {
    // type is aggregated-upper-lower
    if (isTypeAggregatedError) {
      // Mixing x2/y2 with xError/yError channels is ambiguous.
      throw new Error("".concat(compositeMark, " cannot be both type aggregated-upper-lower and aggregated-error"));
    }

    var x2 = encoding.x2;
    var y2 = encoding.y2;

    if (isFieldOrDatumDef(x2) && isFieldOrDatumDef(y2)) {
      // having both x, x2 and y, y2
      throw new Error("".concat(compositeMark, " cannot have both x2 and y2"));
    } else if (isFieldOrDatumDef(x2)) {
      if (isContinuousFieldOrDatumDef(x)) {
        // having x, x2 quantitative and field y, y2 are not specified
        return {
          orient: 'horizontal',
          inputType: 'aggregated-upper-lower'
        };
      } else {
        // having x, x2 that are not both quantitative
        throw new Error("Both x and x2 have to be quantitative in ".concat(compositeMark));
      }
    } else if (isFieldOrDatumDef(y2)) {
      // y2 is a FieldDef
      if (isContinuousFieldOrDatumDef(y)) {
        // having y, y2 quantitative and field x, x2 are not specified
        return {
          orient: 'vertical',
          inputType: 'aggregated-upper-lower'
        };
      } else {
        // having y, y2 that are not both quantitative
        throw new Error("Both y and y2 have to be quantitative in ".concat(compositeMark));
      }
    }

    // Unreachable unless neither x2 nor y2 was a field/datum def.
    throw new Error('No ranged axis');
  } else {
    // type is aggregated-error
    var xError = encoding.xError;
    var xError2 = encoding.xError2;
    var yError = encoding.yError;
    var yError2 = encoding.yError2;

    if (isFieldOrDatumDef(xError2) && !isFieldOrDatumDef(xError)) {
      // having xError2 without xError
      throw new Error("".concat(compositeMark, " cannot have xError2 without xError"));
    }

    if (isFieldOrDatumDef(yError2) && !isFieldOrDatumDef(yError)) {
      // having yError2 without yError
      throw new Error("".concat(compositeMark, " cannot have yError2 without yError"));
    }

    if (isFieldOrDatumDef(xError) && isFieldOrDatumDef(yError)) {
      // having both xError and yError
      throw new Error("".concat(compositeMark, " cannot have both xError and yError with both are quantiative"));
    } else if (isFieldOrDatumDef(xError)) {
      if (isContinuousFieldOrDatumDef(x)) {
        // having x and xError that are all quantitative
        return {
          orient: 'horizontal',
          inputType: 'aggregated-error'
        };
      } else {
        // having x, xError, and xError2 that are not all quantitative
        throw new Error('All x, xError, and xError2 (if exist) have to be quantitative');
      }
    } else if (isFieldOrDatumDef(yError)) {
      if (isContinuousFieldOrDatumDef(y)) {
        // having y and yError that are all quantitative
        return {
          orient: 'vertical',
          inputType: 'aggregated-error'
        };
      } else {
        // having y, yError, and yError2 that are not all quantitative
        throw new Error('All y, yError, and yError2 (if exist) have to be quantitative');
      }
    }

    // Unreachable unless no error channel was a field/datum def.
    throw new Error('No ranged axis');
  }
}
|
|
|
|
// 'raw' input: a position channel (x and/or y) is present, and none of the
// pre-aggregated bound (x2/y2) or error (xError/xError2/yError/yError2) channels are.
function errorBarIsInputTypeRaw(encoding) {
  if (!isFieldOrDatumDef(encoding.x) && !isFieldOrDatumDef(encoding.y)) {
    return false;
  }
  return !isFieldOrDatumDef(encoding.x2) &&
    !isFieldOrDatumDef(encoding.y2) &&
    !isFieldOrDatumDef(encoding.xError) &&
    !isFieldOrDatumDef(encoding.xError2) &&
    !isFieldOrDatumDef(encoding.yError) &&
    !isFieldOrDatumDef(encoding.yError2);
}
|
|
|
|
// 'aggregated-upper-lower' input: pre-computed bounds are supplied via x2 or y2.
function errorBarIsInputTypeAggregatedUpperLower(encoding) {
  if (isFieldOrDatumDef(encoding.x2)) {
    return true;
  }
  return isFieldOrDatumDef(encoding.y2);
}
|
|
|
|
// 'aggregated-error' input: pre-computed error offsets are supplied via any *Error channel.
function errorBarIsInputTypeAggregatedError(encoding) {
  var errorChannels = [encoding.xError, encoding.xError2, encoding.yError, encoding.yError2];
  return errorChannels.some(function (channelDef) {
    return isFieldOrDatumDef(channelDef);
  });
}
|
|
|
|
/**
 * Shared parameter extraction for errorbar and errorband normalization.
 * Computes the transforms (bin/timeUnit/aggregate/calculate) needed to derive
 * lower/center/upper values, splits the continuous axis channels out of the
 * encoding, and assembles the tooltip encoding.
 */
function errorBarParams(spec, compositeMark, config) {
  var _a; // TODO: use selection

  var mark = spec.mark,
      encoding = spec.encoding,
      selection = spec.selection,
      _p = spec.projection,
      // outerSpec keeps everything except mark/encoding/selection/projection
      // (e.g. data, width, height, transform) for the output layer spec.
      outerSpec = __rest(spec, ["mark", "encoding", "selection", "projection"]);

  var markDef = isMarkDef(mark) ? mark : {
    type: mark
  }; // TODO(https://github.com/vega/vega-lite/issues/3702): add selection support

  if (selection) {
    warn(selectionNotSupported(compositeMark));
  }

  var _errorBarOrientAndInp = errorBarOrientAndInputType(spec, compositeMark),
      orient = _errorBarOrientAndInp.orient,
      inputType = _errorBarOrientAndInp.inputType;

  // Identify the continuous axis channel (x or y) plus its companion bound/error channels.
  var _compositeMarkContinu2 = compositeMarkContinuousAxis(spec, orient, compositeMark),
      continuousAxisChannelDef = _compositeMarkContinu2.continuousAxisChannelDef,
      continuousAxisChannelDef2 = _compositeMarkContinu2.continuousAxisChannelDef2,
      continuousAxisChannelDefError = _compositeMarkContinu2.continuousAxisChannelDefError,
      continuousAxisChannelDefError2 = _compositeMarkContinu2.continuousAxisChannelDefError2,
      continuousAxis = _compositeMarkContinu2.continuousAxis;

  var _errorBarAggregationA = errorBarAggregationAndCalculation(markDef, continuousAxisChannelDef, continuousAxisChannelDef2, continuousAxisChannelDefError, continuousAxisChannelDefError2, inputType, compositeMark, config),
      errorBarSpecificAggregate = _errorBarAggregationA.errorBarSpecificAggregate,
      postAggregateCalculates = _errorBarAggregationA.postAggregateCalculates,
      tooltipSummary = _errorBarAggregationA.tooltipSummary,
      tooltipTitleWithFieldName = _errorBarAggregationA.tooltipTitleWithFieldName;

  // Strip the continuous axis channel and its x2/y2 + xError*/yError* companions
  // from the encoding (Babel-compiled object rest destructuring).
  var _b = encoding,
      _c = continuousAxis,
      oldContinuousAxisChannelDef = _b[_c],
      _d = continuousAxis === 'x' ? 'x2' : 'y2',
      oldContinuousAxisChannelDef2 = _b[_d],
      _e = continuousAxis === 'x' ? 'xError' : 'yError',
      oldContinuousAxisChannelDefError = _b[_e],
      _f = continuousAxis === 'x' ? 'xError2' : 'yError2',
      oldContinuousAxisChannelDefError2 = _b[_f],
      oldEncodingWithoutContinuousAxis = __rest(_b, [_typeof(_c) === "symbol" ? _c : _c + "", _typeof(_d) === "symbol" ? _d : _d + "", _typeof(_e) === "symbol" ? _e : _e + "", _typeof(_f) === "symbol" ? _f : _f + ""]);

  // Pull bin/timeUnit/aggregate out of the remaining encoding into explicit transforms.
  var _extractTransformsFro2 = extractTransformsFromEncoding(oldEncodingWithoutContinuousAxis, config),
      bins = _extractTransformsFro2.bins,
      timeUnits = _extractTransformsFro2.timeUnits,
      oldAggregate = _extractTransformsFro2.aggregate,
      oldGroupBy = _extractTransformsFro2.groupby,
      encodingWithoutContinuousAxis = _extractTransformsFro2.encoding;

  var aggregate = [].concat(_toConsumableArray(oldAggregate), _toConsumableArray(errorBarSpecificAggregate));
  // Only raw input needs grouping; pre-aggregated input is used as-is.
  var groupby = inputType !== 'raw' ? [] : oldGroupBy;
  var tooltipEncoding = getCompositeMarkTooltip(tooltipSummary, continuousAxisChannelDef, encodingWithoutContinuousAxis, tooltipTitleWithFieldName);
  return {
    // Transform order: user transforms, then bin/timeUnit, then aggregation,
    // then the calculates that derive upper_/lower_ fields.
    transform: [].concat(_toConsumableArray((_a = outerSpec.transform) !== null && _a !== void 0 ? _a : []), _toConsumableArray(bins), _toConsumableArray(timeUnits), _toConsumableArray(aggregate.length === 0 ? [] : [{
      aggregate: aggregate,
      groupby: groupby
    }]), _toConsumableArray(postAggregateCalculates)),
    groupby: groupby,
    continuousAxisChannelDef: continuousAxisChannelDef,
    continuousAxis: continuousAxis,
    encodingWithoutContinuousAxis: encodingWithoutContinuousAxis,
    // Tick caps run perpendicular to the bar's orientation.
    ticksOrient: orient === 'vertical' ? 'horizontal' : 'vertical',
    markDef: markDef,
    outerSpec: outerSpec,
    tooltipEncoding: tooltipEncoding
  };
}
|
|
|
|
/**
 * Builds the aggregate ops and post-aggregate calculate transforms that derive
 * the `lower_<field>`, `center_<field>`, and `upper_<field>` fields for an
 * errorbar/errorband, plus tooltip metadata for those derived fields.
 * Behavior depends on inputType ('raw' | 'aggregated-upper-lower' | 'aggregated-error').
 */
function errorBarAggregationAndCalculation(markDef, continuousAxisChannelDef, continuousAxisChannelDef2, continuousAxisChannelDefError, continuousAxisChannelDefError2, inputType, compositeMark, config) {
  var errorBarSpecificAggregate = [];
  var postAggregateCalculates = [];
  var continuousFieldName = continuousAxisChannelDef.field;
  var tooltipSummary;
  var tooltipTitleWithFieldName = false;

  if (inputType === 'raw') {
    // Resolve center/extent defaults: an explicit center wins; otherwise derive
    // center from the extent ('iqr' -> 'median', else 'mean'), falling back to config.
    var center = markDef.center ? markDef.center : markDef.extent ? markDef.extent === 'iqr' ? 'median' : 'mean' : config.errorbar.center;
    var extent = markDef.extent ? markDef.extent : center === 'mean' ? 'stderr' : 'iqr';

    // XOR check: 'median' pairs with 'iqr', 'mean' pairs with 'stderr'/'stdev'/'ci'.
    if (center === 'median' !== (extent === 'iqr')) {
      warn(errorBarCenterIsUsedWithWrongExtent(center, extent, compositeMark));
    }

    if (extent === 'stderr' || extent === 'stdev') {
      // Symmetric extent: aggregate the spread and center, then compute
      // upper/lower as center +/- extent.
      errorBarSpecificAggregate = [{
        op: extent,
        field: continuousFieldName,
        as: 'extent_' + continuousFieldName
      }, {
        op: center,
        field: continuousFieldName,
        as: 'center_' + continuousFieldName
      }];
      postAggregateCalculates = [{
        calculate: "datum[\"center_".concat(continuousFieldName, "\"] + datum[\"extent_").concat(continuousFieldName, "\"]"),
        as: 'upper_' + continuousFieldName
      }, {
        calculate: "datum[\"center_".concat(continuousFieldName, "\"] - datum[\"extent_").concat(continuousFieldName, "\"]"),
        as: 'lower_' + continuousFieldName
      }];
      tooltipSummary = [{
        fieldPrefix: 'center_',
        titlePrefix: titleCase(center)
      }, {
        fieldPrefix: 'upper_',
        titlePrefix: getTitlePrefix(center, extent, '+')
      }, {
        fieldPrefix: 'lower_',
        titlePrefix: getTitlePrefix(center, extent, '-')
      }];
      tooltipTitleWithFieldName = true;
    } else {
      // Asymmetric extents ('ci' or 'iqr'): aggregate bounds directly.
      var centerOp;
      var lowerExtentOp;
      var upperExtentOp;

      if (extent === 'ci') {
        centerOp = 'mean';
        lowerExtentOp = 'ci0';
        upperExtentOp = 'ci1';
      } else {
        centerOp = 'median';
        lowerExtentOp = 'q1';
        upperExtentOp = 'q3';
      }

      errorBarSpecificAggregate = [{
        op: lowerExtentOp,
        field: continuousFieldName,
        as: 'lower_' + continuousFieldName
      }, {
        op: upperExtentOp,
        field: continuousFieldName,
        as: 'upper_' + continuousFieldName
      }, {
        op: centerOp,
        field: continuousFieldName,
        as: 'center_' + continuousFieldName
      }];
      // Tooltip titles come from the standard field-title helper, so
      // tooltipTitleWithFieldName stays false in this branch.
      tooltipSummary = [{
        fieldPrefix: 'upper_',
        titlePrefix: _title3({
          field: continuousFieldName,
          aggregate: upperExtentOp,
          type: 'quantitative'
        }, config, {
          allowDisabling: false
        })
      }, {
        fieldPrefix: 'lower_',
        titlePrefix: _title3({
          field: continuousFieldName,
          aggregate: lowerExtentOp,
          type: 'quantitative'
        }, config, {
          allowDisabling: false
        })
      }, {
        fieldPrefix: 'center_',
        titlePrefix: _title3({
          field: continuousFieldName,
          aggregate: centerOp,
          type: 'quantitative'
        }, config, {
          allowDisabling: false
        })
      }];
    }
  } else {
    // Pre-aggregated input: center/extent settings have no effect.
    if (markDef.center || markDef.extent) {
      warn(errorBarCenterAndExtentAreNotNeeded(markDef.center, markDef.extent));
    }

    if (inputType === 'aggregated-upper-lower') {
      // Bounds come from x2/y2; just alias them to upper_/lower_ fields.
      tooltipSummary = [];
      postAggregateCalculates = [{
        calculate: "datum[\"".concat(continuousAxisChannelDef2.field, "\"]"),
        as: 'upper_' + continuousFieldName
      }, {
        calculate: "datum[\"".concat(continuousFieldName, "\"]"),
        as: 'lower_' + continuousFieldName
      }];
    } else if (inputType === 'aggregated-error') {
      // Bounds are value +/- error; a second error channel, if given, supplies
      // the (typically negative) lower offset.
      tooltipSummary = [{
        fieldPrefix: '',
        titlePrefix: continuousFieldName
      }];
      postAggregateCalculates = [{
        calculate: "datum[\"".concat(continuousFieldName, "\"] + datum[\"").concat(continuousAxisChannelDefError.field, "\"]"),
        as: 'upper_' + continuousFieldName
      }];

      if (continuousAxisChannelDefError2) {
        postAggregateCalculates.push({
          calculate: "datum[\"".concat(continuousFieldName, "\"] + datum[\"").concat(continuousAxisChannelDefError2.field, "\"]"),
          as: 'lower_' + continuousFieldName
        });
      } else {
        postAggregateCalculates.push({
          calculate: "datum[\"".concat(continuousFieldName, "\"] - datum[\"").concat(continuousAxisChannelDefError.field, "\"]"),
          as: 'lower_' + continuousFieldName
        });
      }
    }

    // Add a tooltip entry for each derived field; the prefix is the first 6
    // chars of the calculated name ('upper_' / 'lower_'), and the title is the
    // calculate expression with the datum[...] wrappers stripped.
    var _iterator25 = _createForOfIteratorHelper(postAggregateCalculates),
        _step25;

    try {
      for (_iterator25.s(); !(_step25 = _iterator25.n()).done;) {
        var postAggregateCalculate = _step25.value;
        tooltipSummary.push({
          fieldPrefix: postAggregateCalculate.as.substring(0, 6),
          titlePrefix: replaceAll(replaceAll(postAggregateCalculate.calculate, 'datum["', ''), '"]', '')
        });
      }
    } catch (err) {
      _iterator25.e(err);
    } finally {
      _iterator25.f();
    }
  }

  return {
    postAggregateCalculates: postAggregateCalculates,
    errorBarSpecificAggregate: errorBarSpecificAggregate,
    tooltipSummary: tooltipSummary,
    tooltipTitleWithFieldName: tooltipTitleWithFieldName
  };
}
|
|
|
|
// Builds a tooltip title prefix such as "Mean + stderr" from the center
// statistic, the extent name, and the '+'/'-' operation symbol.
function getTitlePrefix(center, extent, operation) {
  return [titleCase(center), operation, extent].join(' ');
}
|
|
|
|
// Composite-mark name for error bands.
var ERRORBAND = 'errorband';
// The primitive parts an errorband expands into; each can be styled or disabled via config.
var ERRORBAND_PARTS = ['band', 'borders'];
// Normalizer instance that rewrites `errorband` specs using normalizeErrorBand below.
var errorBandNormalizer = new CompositeMarkNormalizer(ERRORBAND, normalizeErrorBand);
|
|
|
|
/**
 * Normalizes an `errorband` composite-mark spec into a layered spec: a filled
 * `band` part plus two `borders` parts at the lower and upper bounds.
 * 2D bands (both x and y encoded) use area/line marks; 1D bands use rect/rule.
 */
function normalizeErrorBand(spec, _ref18) {
  var config = _ref18.config;
  // Need to initEncoding first so we can infer type
  spec = Object.assign(Object.assign({}, spec), {
    encoding: normalizeEncoding(spec.encoding, config)
  });

  // Reuses the errorbar parameter extraction with the errorband mark name.
  var _errorBarParams2 = errorBarParams(spec, ERRORBAND, config),
      transform = _errorBarParams2.transform,
      continuousAxisChannelDef = _errorBarParams2.continuousAxisChannelDef,
      continuousAxis = _errorBarParams2.continuousAxis,
      encodingWithoutContinuousAxis = _errorBarParams2.encodingWithoutContinuousAxis,
      markDef = _errorBarParams2.markDef,
      outerSpec = _errorBarParams2.outerSpec,
      tooltipEncoding = _errorBarParams2.tooltipEncoding;

  var errorBandDef = markDef;
  var makeErrorBandPart = makeCompositeAggregatePartFactory(errorBandDef, continuousAxis, continuousAxisChannelDef, encodingWithoutContinuousAxis, config.errorband);
  var is2D = spec.encoding.x !== undefined && spec.encoding.y !== undefined;
  // 2D: area band with line borders; 1D: rect band with rule borders.
  var bandMark = {
    type: is2D ? 'area' : 'rect'
  };
  var bordersMark = {
    type: is2D ? 'line' : 'rule'
  };
  // interpolate/tension only apply to the 2D (area/line) marks; tension is
  // only honored when interpolate is also set.
  var interpolate = Object.assign(Object.assign({}, errorBandDef.interpolate ? {
    interpolate: errorBandDef.interpolate
  } : {}), errorBandDef.tension && errorBandDef.interpolate ? {
    tension: errorBandDef.tension
  } : {});

  if (is2D) {
    // Only the band carries the accessible role; borders are aria-hidden.
    bandMark = Object.assign(Object.assign(Object.assign({}, bandMark), interpolate), {
      ariaRoleDescription: 'errorband'
    });
    bordersMark = Object.assign(Object.assign(Object.assign({}, bordersMark), interpolate), {
      aria: false
    });
  } else if (errorBandDef.interpolate) {
    warn(errorBand1DNotSupport('interpolate'));
  } else if (errorBandDef.tension) {
    warn(errorBand1DNotSupport('tension'));
  }

  return Object.assign(Object.assign({}, outerSpec), {
    transform: transform,
    layer: [].concat(_toConsumableArray(makeErrorBandPart({
      partName: 'band',
      mark: bandMark,
      positionPrefix: 'lower',
      endPositionPrefix: 'upper',
      extraEncoding: tooltipEncoding
    })), _toConsumableArray(makeErrorBandPart({
      partName: 'borders',
      mark: bordersMark,
      positionPrefix: 'lower',
      extraEncoding: tooltipEncoding
    })), _toConsumableArray(makeErrorBandPart({
      partName: 'borders',
      mark: bordersMark,
      positionPrefix: 'upper',
      extraEncoding: tooltipEncoding
    })))
  });
}
|
|
/**
|
|
* Registry index for all composite mark's normalizer
|
|
*/
|
|
|
|
|
|
// Registry mapping composite-mark name -> { normalizer, parts }.
var compositeMarkRegistry = {};

// Registers a composite mark's normalizer function and its part names.
function add(mark, run, parts) {
  var normalizer = new CompositeMarkNormalizer(mark, run);
  compositeMarkRegistry[mark] = {
    normalizer: normalizer,
    parts: parts
  };
}

// Returns the names of all registered composite marks.
function getAllCompositeMarks() {
  return keys(compositeMarkRegistry);
}

// Register the three built-in composite marks.
add(BOXPLOT, normalizeBoxPlot, BOXPLOT_PARTS);
add(ERRORBAR, normalizeErrorBar, ERRORBAR_PARTS);
add(ERRORBAND, normalizeErrorBand, ERRORBAND_PARTS);
|
|
// Legend config properties that exist only in Vega-Lite (stripped before Vega output).
var VL_ONLY_LEGEND_CONFIG = ['gradientHorizontalMaxLength', 'gradientHorizontalMinLength', 'gradientVerticalMaxLength', 'gradientVerticalMinLength', 'unselectedOpacity'];
// Encoding channels whose scales can produce a legend.
var LEGEND_SCALE_CHANNELS = ['size', 'shape', 'fill', 'stroke', 'strokeDash', 'strokeWidth', 'opacity'];
// Maps Vega-Lite signal-valued legend properties to the Vega legend encode
// part and Vega property they should be written to.
var SIGNAL_LEGEND_PROP_INDEX = {
  fillColor: {
    part: 'legend',
    vgProp: 'fill'
  },
  gradientStrokeColor: {
    part: 'gradient',
    vgProp: 'stroke'
  },
  labelColor: {
    part: 'labels',
    vgProp: 'fill'
  },
  strokeColor: {
    part: 'legend',
    vgProp: 'stroke'
  },
  symbolFillColor: {
    part: 'symbols',
    vgProp: 'fill'
  },
  symbolStrokeColor: {
    part: 'symbols',
    vgProp: 'stroke'
  },
  titleColor: {
    part: 'title',
    vgProp: 'fill'
  }
};
// Defaults for the Vega-Lite-only legend config properties listed above.
var defaultLegendConfig = {
  gradientHorizontalMaxLength: 200,
  gradientHorizontalMinLength: 100,
  gradientVerticalMaxLength: 200,
  gradientVerticalMinLength: 64,
  unselectedOpacity: 0.35
};
// Index (property-name -> 1) of legend properties shared by all legend types;
// used for fast membership checks.
var COMMON_LEGEND_PROPERTY_INDEX = {
  aria: 1,
  clipHeight: 1,
  columnPadding: 1,
  columns: 1,
  cornerRadius: 1,
  description: 1,
  direction: 1,
  fillColor: 1,
  format: 1,
  formatType: 1,
  gradientLength: 1,
  gradientOpacity: 1,
  gradientStrokeColor: 1,
  gradientStrokeWidth: 1,
  gradientThickness: 1,
  gridAlign: 1,
  labelAlign: 1,
  labelBaseline: 1,
  labelColor: 1,
  labelFont: 1,
  labelFontSize: 1,
  labelFontStyle: 1,
  labelFontWeight: 1,
  labelLimit: 1,
  labelOffset: 1,
  labelOpacity: 1,
  labelOverlap: 1,
  labelPadding: 1,
  labelSeparation: 1,
  legendX: 1,
  legendY: 1,
  offset: 1,
  orient: 1,
  padding: 1,
  rowPadding: 1,
  strokeColor: 1,
  symbolDash: 1,
  symbolDashOffset: 1,
  symbolFillColor: 1,
  symbolLimit: 1,
  symbolOffset: 1,
  symbolOpacity: 1,
  symbolSize: 1,
  symbolStrokeColor: 1,
  symbolStrokeWidth: 1,
  symbolType: 1,
  tickCount: 1,
  tickMinStep: 1,
  title: 1,
  titleAlign: 1,
  titleAnchor: 1,
  titleBaseline: 1,
  titleColor: 1,
  titleFont: 1,
  titleFontSize: 1,
  titleFontStyle: 1,
  titleFontWeight: 1,
  titleLimit: 1,
  titleLineHeight: 1,
  titleOpacity: 1,
  titleOrient: 1,
  titlePadding: 1,
  type: 1,
  values: 1,
  zindex: 1
};
|
|
// Field name Vega-Lite injects to identify data points in selections.
var SELECTION_ID = '_vgsid_';
// Default configuration for the three selection types (single, multi, interval).
var defaultConfig = {
  single: {
    on: 'click',
    fields: [SELECTION_ID],
    resolve: 'global',
    empty: 'all',
    clear: 'dblclick'
  },
  multi: {
    on: 'click',
    fields: [SELECTION_ID],
    // Hold shift to add/remove points from the selection.
    toggle: 'event.shiftKey',
    resolve: 'global',
    empty: 'all',
    clear: 'dblclick'
  },
  interval: {
    // Drag to create the interval brush.
    on: '[mousedown, window:mouseup] > window:mousemove!',
    encodings: ['x', 'y'],
    translate: '[mousedown, window:mouseup] > window:mousemove!',
    zoom: 'wheel!',
    // Style of the brush rectangle.
    mark: {
      fill: '#333',
      fillOpacity: 0.125,
      stroke: 'white'
    },
    resolve: 'global',
    clear: 'dblclick'
  }
};
|
|
|
|
// True when a selection bind targets the legend: either the literal string
// 'legend' or an object with a truthy `legend` property.
function isLegendBinding(bind) {
  if (!bind) {
    return false;
  }
  return bind === 'legend' || Boolean(bind.legend);
}
|
|
|
|
// True for the object form of a legend binding ({legend: <stream>}), as
// opposed to the plain 'legend' string shorthand.
function isLegendStreamBinding(bind) {
  return isLegendBinding(bind) ? isObject(bind) : false;
}
|
|
|
|
// True when the spec is any concatenation flavor (vertical, horizontal, or general).
function isAnyConcatSpec(spec) {
  if (isVConcatSpec(spec)) {
    return true;
  }
  return isHConcatSpec(spec) || isConcatSpec(spec);
}
|
|
|
|
// True when the spec declares a general (wrappable) concatenation via `concat`.
function isConcatSpec(spec) {
  return Reflect.has(spec, 'concat');
}
|
|
|
|
// True when the spec declares a vertical concatenation via `vconcat`.
function isVConcatSpec(spec) {
  return Reflect.has(spec, 'vconcat');
}
|
|
|
|
// True when the spec declares a horizontal concatenation via `hconcat`.
function isHConcatSpec(spec) {
  return Reflect.has(spec, 'hconcat');
}
|
|
|
|
// True when a size value is a step object ({step: number}) rather than a fixed size.
function isStep(size) {
  if (!isObject(size)) {
    return false;
  }
  return size.step !== undefined;
}
|
|
|
|
// True-ish when the object carries frame properties (view/width/height).
// Note: returns the first truthy property value, not a coerced boolean.
function isFrameMixins(o) {
  return o.view || o.width || o.height;
}
|
|
|
|
// Default spacing (px) between views in facet/concat compositions.
var DEFAULT_SPACING = 20;
// Index of layout properties applicable to composition specs (facet/concat/repeat).
var COMPOSITION_LAYOUT_INDEX = {
  align: 1,
  bounds: 1,
  center: 1,
  columns: 1,
  spacing: 1
};
// Array form of the index above, for iteration.
var COMPOSITION_LAYOUT_PROPERTIES = keys(COMPOSITION_LAYOUT_INDEX);
|
|
|
|
/**
 * Extracts the layout properties (align/bounds/center/columns/spacing) for a
 * composition spec, applying config defaults first and then spec-level values.
 * `specType` selects the config section (e.g. 'facet' or 'concat').
 */
function extractCompositionLayout(spec, specType, config) {
  var _a, _b;

  var compositionConfig = config[specType];
  var layout = {}; // Apply config first

  var spacingConfig = compositionConfig.spacing,
      columns = compositionConfig.columns;

  if (spacingConfig !== undefined) {
    layout.spacing = spacingConfig;
  }

  if (columns !== undefined) {
    // `columns` only applies to wrappable compositions: non-row/column facet
    // and general concat.
    if (isFacetSpec(spec) && !isFacetMapping(spec.facet) || isConcatSpec(spec)) {
      layout.columns = columns;
    }
  }

  if (isVConcatSpec(spec)) {
    // vconcat is a single column by definition.
    layout.columns = 1;
  } // Then copy properties from the spec

  var _iterator26 = _createForOfIteratorHelper(COMPOSITION_LAYOUT_PROPERTIES),
      _step26;

  try {
    for (_iterator26.s(); !(_step26 = _iterator26.n()).done;) {
      var prop = _step26.value;

      if (spec[prop] !== undefined) {
        if (prop === 'spacing') {
          // Spacing may be a number or a {row, column} object; fill missing
          // row/column entries from the config spacing.
          var spacing = spec[prop];
          layout[prop] = isNumber(spacing) ? spacing : {
            row: (_a = spacing.row) !== null && _a !== void 0 ? _a : spacingConfig,
            column: (_b = spacing.column) !== null && _b !== void 0 ? _b : spacingConfig
          };
        } else {
          layout[prop] = spec[prop];
        }
      }
    }
  } catch (err) {
    _iterator26.e(err);
  } finally {
    _iterator26.f();
  }

  return layout;
}
|
|
|
|
// Returns the configured continuous size for 'width' or 'height', preferring an
// explicit view.width/view.height and falling back to
// continuousWidth/continuousHeight (backwards compatibility).
function getViewConfigContinuousSize(viewConfig, channel) {
  var explicitSize = viewConfig[channel];
  if (explicitSize !== null && explicitSize !== undefined) {
    return explicitSize;
  }
  return viewConfig[channel === 'width' ? 'continuousWidth' : 'continuousHeight'];
}
|
|
|
|
// Returns the step size for a discrete scale on the given channel; falls back
// to the global default step when the configured size is not a step object.
function getViewConfigDiscreteStep(viewConfig, channel) {
  var discreteSize = getViewConfigDiscreteSize(viewConfig, channel);
  if (isStep(discreteSize)) {
    return discreteSize.step;
  }
  return DEFAULT_STEP;
}
|
|
|
|
// Returns the configured discrete size for 'width' or 'height': an explicit
// view.width/view.height wins, then discreteWidth/discreteHeight (backwards
// compatibility), then a step object built from viewConfig.step.
function getViewConfigDiscreteSize(viewConfig, channel) {
  var configured = viewConfig[channel];
  if (configured === null || configured === undefined) {
    configured = viewConfig[channel === 'width' ? 'discreteWidth' : 'discreteHeight'];
  }
  if (configured !== undefined) {
    return configured;
  }
  return {
    step: viewConfig.step
  };
}
|
|
|
|
// Default step size (px) for discrete scales.
var DEFAULT_STEP = 20;
// Default view sizing: fixed sizes for continuous scales, step-based for discrete.
var defaultViewConfig = {
  continuousWidth: 200,
  continuousHeight: 200,
  step: DEFAULT_STEP
};
|
|
// Default Vega-Lite configuration; merged underneath the user config by initConfig.
var defaultConfig$1 = {
  background: 'white',
  padding: 5,
  timeFormat: '%b %d, %Y',
  countTitle: 'Count of Records',
  view: defaultViewConfig,
  mark: defaultMarkConfig,
  // Per-mark-type configs (empty objects accept user overrides verbatim).
  arc: {},
  area: {},
  bar: defaultBarConfig,
  circle: {},
  geoshape: {},
  image: {},
  line: {},
  point: {},
  rect: defaultRectConfig,
  rule: {
    color: 'black'
  },
  square: {},
  text: {
    color: 'black'
  },
  tick: defaultTickConfig,
  trail: {},
  // Composite-mark defaults (stripped from the final Vega output).
  boxplot: {
    size: 14,
    extent: 1.5,
    box: {},
    median: {
      color: 'white'
    },
    outliers: {},
    rule: {},
    ticks: null
  },
  errorbar: {
    center: 'mean',
    rule: true,
    ticks: false
  },
  errorband: {
    band: {
      opacity: 0.3
    },
    borders: false
  },
  scale: defaultScaleConfig,
  projection: {},
  legend: defaultLegendConfig,
  header: {
    titlePadding: 10,
    labelPadding: 10
  },
  headerColumn: {},
  headerRow: {},
  headerFacet: {},
  selection: defaultConfig,
  style: {},
  title: {},
  facet: {
    spacing: DEFAULT_SPACING
  },
  concat: {
    spacing: DEFAULT_SPACING
  }
}; // Tableau10 color palette, copied from `vegaScale.scheme('tableau10')`
|
|
|
|
// Tableau10 categorical palette (see comment above).
var tab10 = ['#4c78a8', '#f58518', '#e45756', '#72b7b2', '#54a24b', '#eeca3b', '#b279a2', '#ff9da6', '#9d755d', '#bab0ac'];
// Default font sizes exposed through the `fontSize` theme signal.
var DEFAULT_FONT_SIZE = {
  text: 11,
  guideLabel: 10,
  guideTitle: 11,
  groupTitle: 13,
  groupSubtitle: 12
};
// Default named colors exposed through the `color` theme signal: the Tableau10
// hues plus a 16-step gray ramp (gray0 = black ... gray15 = white).
var DEFAULT_COLOR = {
  blue: tab10[0],
  orange: tab10[1],
  red: tab10[2],
  teal: tab10[3],
  green: tab10[4],
  yellow: tab10[5],
  purple: tab10[6],
  pink: tab10[7],
  brown: tab10[8],
  gray0: '#000',
  gray1: '#111',
  gray2: '#222',
  gray3: '#333',
  gray4: '#444',
  gray5: '#555',
  gray6: '#666',
  gray7: '#777',
  gray8: '#888',
  gray9: '#999',
  gray10: '#aaa',
  gray11: '#bbb',
  gray12: '#ccc',
  gray13: '#ddd',
  gray14: '#eee',
  gray15: '#fff'
};
|
|
|
|
/**
 * Builds a config fragment that routes all colors through a single `color`
 * signal, so themes can recolor a chart at runtime. An optional `color` object
 * overrides entries of DEFAULT_COLOR.
 *
 * Fix: the last entry of the categorical range referenced `color.grey8`, but
 * the color signal object only defines `gray8` (see DEFAULT_COLOR), so the
 * 10th categorical color resolved to undefined. It now uses `color.gray8`,
 * matching the spelling used everywhere else in this function.
 */
function colorSignalConfig() {
  var color = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
  return {
    signals: [{
      name: 'color',
      // Merge user overrides over the defaults when an object is provided.
      value: isObject(color) ? Object.assign(Object.assign({}, DEFAULT_COLOR), color) : DEFAULT_COLOR
    }],
    mark: {
      color: {
        signal: 'color.blue'
      }
    },
    rule: {
      color: {
        signal: 'color.gray0'
      }
    },
    text: {
      color: {
        signal: 'color.gray0'
      }
    },
    style: {
      'guide-label': {
        fill: {
          signal: 'color.gray0'
        }
      },
      'guide-title': {
        fill: {
          signal: 'color.gray0'
        }
      },
      'group-title': {
        fill: {
          signal: 'color.gray0'
        }
      },
      'group-subtitle': {
        fill: {
          signal: 'color.gray0'
        }
      },
      cell: {
        stroke: {
          signal: 'color.gray8'
        }
      }
    },
    axis: {
      domainColor: {
        signal: 'color.gray13'
      },
      gridColor: {
        signal: 'color.gray8'
      },
      tickColor: {
        signal: 'color.gray13'
      }
    },
    range: {
      // Categorical range mirrors tab10, with the final (gray-ish) slot taken
      // from the gray ramp of the color signal.
      category: [{
        signal: 'color.blue'
      }, {
        signal: 'color.orange'
      }, {
        signal: 'color.red'
      }, {
        signal: 'color.teal'
      }, {
        signal: 'color.green'
      }, {
        signal: 'color.yellow'
      }, {
        signal: 'color.purple'
      }, {
        signal: 'color.pink'
      }, {
        signal: 'color.brown'
      }, {
        signal: 'color.gray8'
      }]
    }
  };
}
|
|
|
|
/**
 * Builds a config fragment that routes all font sizes through a single
 * `fontSize` signal so themes can resize text at runtime. An object argument
 * overrides entries of DEFAULT_FONT_SIZE.
 */
function fontSizeSignalConfig(fontSize) {
  var signalValue = isObject(fontSize)
    ? Object.assign(Object.assign({}, DEFAULT_FONT_SIZE), fontSize)
    : DEFAULT_FONT_SIZE;
  // Helper producing a {fontSize: {signal}} reference into the signal object.
  var sized = function sized(name) {
    return {
      fontSize: {
        signal: 'fontSize.' + name
      }
    };
  };
  return {
    signals: [{
      name: 'fontSize',
      value: signalValue
    }],
    text: sized('text'),
    style: {
      'guide-label': sized('guideLabel'),
      'guide-title': sized('guideTitle'),
      'group-title': sized('groupTitle'),
      'group-subtitle': sized('groupSubtitle')
    }
  };
}
|
|
|
|
/**
 * Builds a config fragment applying one font family to text marks and to all
 * guide/group label and title styles. Each style receives its own object
 * literal so later config merging cannot alias entries.
 */
function fontConfig(font) {
  var styled = {
    'guide-label': {
      font: font
    },
    'guide-title': {
      font: font
    },
    'group-title': {
      font: font
    },
    'group-subtitle': {
      font: font
    }
  };
  return {
    text: {
      font: font
    },
    style: styled
  };
}
|
|
|
|
/**
 * Merges a user config over the Vega-Lite defaults. The `color`, `font`, and
 * `fontSize` shorthands are expanded into signal-based config fragments first;
 * the remaining user config is merged last so it has the highest precedence.
 */
function initConfig() {
  var config = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};

  var color = config.color,
      font = config.font,
      fontSize = config.fontSize,
      restConfig = __rest(config, ["color", "font", "fontSize"]);

  // Merge order (later wins): defaults < font < color < fontSize < user config.
  return mergeConfig({}, defaultConfig$1, font ? fontConfig(font) : {}, color ? colorSignalConfig(color) : {}, fontSize ? fontSizeSignalConfig(fontSize) : {}, restConfig || {});
}
|
|
|
|
// All config sections that get redirected to config.style: 'view' plus every primitive mark.
var MARK_STYLES = ['view'].concat(_toConsumableArray(PRIMITIVE_MARKS));
// Top-level config properties that exist only in Vega-Lite (removed before Vega output).
var VL_ONLY_CONFIG_PROPERTIES = ['color', 'fontSize', 'background', 'padding', 'facet', 'concat', 'numberFormat', 'timeFormat', 'countTitle', 'header', 'axisQuantitative', 'axisTemporal', 'axisDiscrete', 'axisPoint', 'axisXBand', 'axisXPoint', 'axisXDiscrete', 'axisXQuantitative', 'axisXTemporal', 'axisYBand', 'axisYPoint', 'axisYDiscrete', 'axisYQuantitative', 'axisYTemporal', 'scale', 'selection', 'overlay' // FIXME: Redesign and unhide this
];
// Vega-Lite-only properties per mark-style section, including view sizing props.
var VL_ONLY_ALL_MARK_SPECIFIC_CONFIG_PROPERTY_INDEX = Object.assign({
  view: ['continuousWidth', 'continuousHeight', 'discreteWidth', 'discreteHeight', 'step']
}, VL_ONLY_MARK_SPECIFIC_CONFIG_PROPERTY_INDEX);
|
|
|
|
/**
 * Converts a resolved Vega-Lite config into a Vega config: strips all
 * Vega-Lite-only properties, redirects mark/view configs into config.style,
 * redirects title config, and drops empty sections. Works on a deep copy;
 * returns undefined when nothing remains.
 * NOTE(review): assumes config contains every MARK_STYLES section (true for
 * initConfig output, which merges defaultConfig$1) — confirm for other callers.
 */
function stripAndRedirectConfig(config) {
  // Deep-copy so the caller's config is never mutated.
  config = duplicate(config);

  // Drop top-level Vega-Lite-only properties.
  var _iterator27 = _createForOfIteratorHelper(VL_ONLY_CONFIG_PROPERTIES),
      _step27;

  try {
    for (_iterator27.s(); !(_step27 = _iterator27.n()).done;) {
      var _prop4 = _step27.value;
      delete config[_prop4];
    }
  } catch (err) {
    _iterator27.e(err);
  } finally {
    _iterator27.f();
  }

  if (config.axis) {
    // delete condition axis config
    for (var prop in config.axis) {
      if (isConditionalAxisValue(config.axis[prop])) {
        delete config.axis[prop];
      }
    }
  }

  if (config.legend) {
    // Drop Vega-Lite-only legend properties.
    var _iterator28 = _createForOfIteratorHelper(VL_ONLY_LEGEND_CONFIG),
        _step28;

    try {
      for (_iterator28.s(); !(_step28 = _iterator28.n()).done;) {
        var _prop = _step28.value;
        delete config.legend[_prop];
      }
    } catch (err) {
      _iterator28.e(err);
    } finally {
      _iterator28.f();
    }
  } // Remove Vega-Lite only generic mark config

  if (config.mark) {
    var _iterator29 = _createForOfIteratorHelper(VL_ONLY_MARK_CONFIG_PROPERTIES),
        _step29;

    try {
      for (_iterator29.s(); !(_step29 = _iterator29.n()).done;) {
        var _prop2 = _step29.value;
        delete config.mark[_prop2];
      }
    } catch (err) {
      _iterator29.e(err);
    } finally {
      _iterator29.f();
    }

    // Object-valued tooltip config is a Vega-Lite construct; drop it.
    if (config.mark.tooltip && isObject(config.mark.tooltip)) {
      delete config.mark.tooltip;
    }
  }

  // For each per-mark (and 'view') section: strip VL-only properties, then
  // redirect the remainder into config.style.
  var _iterator30 = _createForOfIteratorHelper(MARK_STYLES),
      _step30;

  try {
    for (_iterator30.s(); !(_step30 = _iterator30.n()).done;) {
      var markType = _step30.value;

      // Remove Vega-Lite-only mark config
      var _iterator32 = _createForOfIteratorHelper(VL_ONLY_MARK_CONFIG_PROPERTIES),
          _step32;

      try {
        for (_iterator32.s(); !(_step32 = _iterator32.n()).done;) {
          var _prop6 = _step32.value;
          delete config[markType][_prop6];
        } // Remove Vega-Lite only mark-specific config

      } catch (err) {
        _iterator32.e(err);
      } finally {
        _iterator32.f();
      }

      var vlOnlyMarkSpecificConfigs = VL_ONLY_ALL_MARK_SPECIFIC_CONFIG_PROPERTY_INDEX[markType];

      if (vlOnlyMarkSpecificConfigs) {
        var _iterator33 = _createForOfIteratorHelper(vlOnlyMarkSpecificConfigs),
            _step33;

        try {
          for (_iterator33.s(); !(_step33 = _iterator33.n()).done;) {
            var _prop5 = _step33.value;
            delete config[markType][_prop5];
          }
        } catch (err) {
          _iterator33.e(err);
        } finally {
          _iterator33.f();
        }
      } // Redirect mark config to config.style so that mark config only affect its own mark type
      // without affecting other marks that share the same underlying Vega marks.
      // For example, config.rect should not affect bar marks.

      redirectConfigToStyleConfig(config, markType);
    }
  } catch (err) {
    _iterator30.e(err);
  } finally {
    _iterator30.f();
  }

  var _iterator31 = _createForOfIteratorHelper(getAllCompositeMarks()),
      _step31;

  try {
    for (_iterator31.s(); !(_step31 = _iterator31.n()).done;) {
      var m = _step31.value;
      // Clean up the composite mark config as we don't need them in the output specs anymore
      delete config[m];
    }
  } catch (err) {
    _iterator31.e(err);
  } finally {
    _iterator31.f();
  }

  redirectTitleConfig(config); // Remove empty config objects.

  for (var _prop3 in config) {
    if (isObject(config[_prop3]) && isEmpty(config[_prop3])) {
      delete config[_prop3];
    }
  }

  return isEmpty(config) ? undefined : config;
}
|
|
/**
 * Redirect config.title -- so that title config do not affect header labels,
 * which also uses `title` directive to implement.
 *
 * For subtitle configs in config.title, keep them in config.title as header titles never have subtitles.
 *
 * Mutates `config` in place: title mark properties move into
 * config.style['group-title'] / config.style['group-subtitle'], and
 * config.title is either reduced to subtitle-only properties or deleted.
 */
function redirectTitleConfig(config) {
  // extractTitleConfig splits config.title into mark-level title props,
  // mark-level subtitle props, and subtitle-only props.
  var _extractTitleConfig = extractTitleConfig(config.title),
      titleMarkConfig = _extractTitleConfig.titleMarkConfig,
      subtitleMarkConfig = _extractTitleConfig.subtitleMarkConfig,
      subtitle = _extractTitleConfig.subtitle; // set config.style if title/subtitleMarkConfig is not an empty object


  if (!isEmpty(titleMarkConfig)) {
    config.style['group-title'] = Object.assign(Object.assign({}, config.style['group-title']), titleMarkConfig // config.title has higher precedence than config.style.group-title in Vega
    );
  }

  if (!isEmpty(subtitleMarkConfig)) {
    config.style['group-subtitle'] = Object.assign(Object.assign({}, config.style['group-subtitle']), subtitleMarkConfig);
  } // subtitle part can stay in config.title since header titles do not use subtitle


  if (!isEmpty(subtitle)) {
    config.title = subtitle;
  } else {
    // Everything was redirected into config.style; drop the empty title config.
    delete config.title;
  }
}
|
|
|
|
/**
 * Move `config[prop]` (or `config[prop][compositeMarkPart]`) into
 * `config.style[toProp ?? prop]` so a mark config only affects its own mark
 * type, not every mark sharing the same underlying Vega mark
 * (e.g. config.rect should not affect bar marks). Mutates `config` in place.
 *
 * @param config            The top-level config object (mutated).
 * @param prop              Config property to redirect (mark type, 'view', or a composite mark name).
 * @param toProp            Optional target style name; defaults to `prop`.
 * @param compositeMarkPart Optional part of a composite mark; when given, only
 *                          that part is redirected and `config[prop]` is kept
 *                          because further redirections still need it.
 */
function redirectConfigToStyleConfig(config, prop, // string = composite mark
toProp, compositeMarkPart) {
  var propConfig = compositeMarkPart ? config[prop][compositeMarkPart] : config[prop];

  if (prop === 'view') {
    toProp = 'cell'; // View's default style is "cell"
  }

  // Existing entries in config.style win over the redirected mark config.
  var style = Object.assign(Object.assign({}, propConfig), config.style[toProp !== null && toProp !== void 0 ? toProp : prop]); // set config.style if it is not an empty object

  if (!isEmpty(style)) {
    config.style[toProp !== null && toProp !== void 0 ? toProp : prop] = style;
  }

  if (!compositeMarkPart) {
    // For composite mark, so don't delete the whole config yet as we have to do multiple redirections.
    delete config[prop];
  }
}
|
|
|
|
// A spec is a layer spec exactly when it carries a `layer` property
// (own or inherited, per the `in` operator).
function isLayerSpec(spec) {
  var hasLayerProp = 'layer' in spec;
  return hasLayerProp;
}
|
|
|
|
// A spec is a repeat spec exactly when it carries a `repeat` property
// (own or inherited, per the `in` operator).
function isRepeatSpec(spec) {
  var hasRepeatProp = 'repeat' in spec;
  return hasRepeatProp;
}
|
|
|
|
// A repeat spec repeats layers when its `repeat` is an object (not an array
// of repeated values) that has a `layer` entry. Returns `false` for the array
// form, otherwise the (possibly undefined) `layer` value itself.
function isLayerRepeatSpec(spec) {
  if (isArray(spec.repeat)) {
    return false;
  }
  return spec.repeat['layer'];
}
|
|
|
|
/**
 * Generic depth-first walker over Vega-Lite spec trees. `map` dispatches on
 * the spec's shape (facet / repeat / concat variants / layer / unit) and
 * rebuilds each composite spec around its mapped children. Subclasses
 * override `mapUnit` (and optionally the other map* methods) to transform
 * leaf specs.
 */
var SpecMapper = /*#__PURE__*/function () {
  function SpecMapper() {
    _classCallCheck(this, SpecMapper);
  }

  _createClass(SpecMapper, [{
    key: "map",
    // Dispatch a spec to the handler matching its shape.
    value: function map(spec, params) {
      if (isFacetSpec(spec)) {
        return this.mapFacet(spec, params);
      } else if (isRepeatSpec(spec)) {
        return this.mapRepeat(spec, params);
      } else if (isHConcatSpec(spec)) {
        return this.mapHConcat(spec, params);
      } else if (isVConcatSpec(spec)) {
        return this.mapVConcat(spec, params);
      } else if (isConcatSpec(spec)) {
        return this.mapConcat(spec, params);
      } else {
        return this.mapLayerOrUnit(spec, params);
      }
    }
  }, {
    key: "mapLayerOrUnit",
    // A non-composite spec must be either a layer or a unit; anything else is invalid.
    value: function mapLayerOrUnit(spec, params) {
      if (isLayerSpec(spec)) {
        return this.mapLayer(spec, params);
      } else if (isUnitSpec(spec)) {
        return this.mapUnit(spec, params);
      }

      throw new Error(invalidSpec(spec));
    }
  }, {
    key: "mapLayer",
    // Rebuild a layer spec with each sub-layer mapped (layers may only contain layer/unit specs).
    value: function mapLayer(spec, params) {
      var _this = this;

      return Object.assign(Object.assign({}, spec), {
        layer: spec.layer.map(function (subspec) {
          return _this.mapLayerOrUnit(subspec, params);
        })
      });
    }
  }, {
    key: "mapHConcat",
    // Rebuild an hconcat spec with each child mapped via the full dispatcher.
    value: function mapHConcat(spec, params) {
      var _this2 = this;

      return Object.assign(Object.assign({}, spec), {
        hconcat: spec.hconcat.map(function (subspec) {
          return _this2.map(subspec, params);
        })
      });
    }
  }, {
    key: "mapVConcat",
    // Rebuild a vconcat spec with each child mapped via the full dispatcher.
    value: function mapVConcat(spec, params) {
      var _this3 = this;

      return Object.assign(Object.assign({}, spec), {
        vconcat: spec.vconcat.map(function (subspec) {
          return _this3.map(subspec, params);
        })
      });
    }
  }, {
    key: "mapConcat",
    // Rebuild a general concat spec; `concat` is destructured out so the
    // mapped children replace it rather than merge with it.
    value: function mapConcat(spec, params) {
      var _this4 = this;

      var concat = spec.concat,
          rest = __rest(spec, ["concat"]);

      return Object.assign(Object.assign({}, rest), {
        concat: concat.map(function (subspec) {
          return _this4.map(subspec, params);
        })
      });
    }
  }, {
    key: "mapFacet",
    // Rebuild a facet spec with its inner `spec` mapped.
    value: function mapFacet(spec, params) {
      return Object.assign(Object.assign({}, spec), {
        // TODO: remove "any" once we support all facet listed in https://github.com/vega/vega-lite/issues/2760
        spec: this.map(spec.spec, params)
      });
    }
  }, {
    key: "mapRepeat",
    // Rebuild a repeat spec with its inner `spec` mapped.
    value: function mapRepeat(spec, params) {
      return Object.assign(Object.assign({}, spec), {
        // as any is required here since TS cannot infer that the output type satisfies the input type
        spec: this.map(spec.spec, params)
      });
    }
  }]);

  return SpecMapper;
}();
|
|
|
|
// Set (encoded as an object index) of the supported stack offset modes;
// the `1` values are meaningless — only key membership is checked.
var STACK_OFFSET_INDEX = {
  zero: 1,
  center: 1,
  normalize: 1
};
|
|
|
|
// True when `s` is one of the supported stack offsets
// ('zero' | 'center' | 'normalize'), per STACK_OFFSET_INDEX membership.
function isStackOffset(s) {
  var isKnownOffset = s in STACK_OFFSET_INDEX;
  return isKnownOffset;
}
|
|
|
|
// Mark types that are allowed to be stacked at all.
var STACKABLE_MARKS = new Set([ARC, BAR, AREA, RULE, POINT, CIRCLE, SQUARE, LINE, TEXT, TICK]);
// Mark types that are stacked by default (without an explicit `stack` property).
var STACK_BY_DEFAULT_MARKS = new Set([BAR, AREA, ARC]);
|
|
|
|
/**
 * Determine which of a pair of position channels could be stacked.
 * `x` is either 'x' (paired with 'y') or 'theta' (paired with 'radius').
 * Returns the channel name to stack, or undefined when stacking does not apply.
 */
function potentialStackedChannel(encoding, x) {
  var _a, _b;

  // The paired channel: x↔y for Cartesian, theta↔radius for polar.
  var y = x === 'x' ? 'y' : 'radius';
  var xDef = encoding[x];
  var yDef = encoding[y];

  if (isFieldDef(xDef) && isFieldDef(yDef)) {
    if (channelDefType(xDef) === 'quantitative' && channelDefType(yDef) === 'quantitative') {
      // Both quantitative: an explicit `stack` property wins.
      if (xDef.stack) {
        return x;
      } else if (yDef.stack) {
        return y;
      }

      var xAggregate = isFieldDef(xDef) && !!xDef.aggregate;
      var yAggregate = isFieldDef(yDef) && !!yDef.aggregate; // if there is no explicit stacking, only apply stack if there is only one aggregate for x or y

      if (xAggregate !== yAggregate) {
        return xAggregate ? x : y;
      } else {
        // Both (or neither) aggregated: prefer the side whose scale is linear,
        // since stacking on a non-linear scale is not meaningful.
        var xScale = (_a = xDef.scale) === null || _a === void 0 ? void 0 : _a.type;
        var yScale = (_b = yDef.scale) === null || _b === void 0 ? void 0 : _b.type;

        if (xScale && xScale !== 'linear') {
          return y;
        } else if (yScale && yScale !== 'linear') {
          return x;
        }
      }
    } else if (channelDefType(xDef) === 'quantitative') {
      // Only one side is quantitative: stack that side.
      return x;
    } else if (channelDefType(yDef) === 'quantitative') {
      return y;
    }
  } else if (channelDefType(xDef) === 'quantitative') {
    // At most one side is a field def: stack the quantitative one.
    return x;
  } else if (channelDefType(yDef) === 'quantitative') {
    return y;
  }

  return undefined;
}
|
|
|
|
/**
 * Return the complementary (dimension) channel of a stacked field channel:
 * x↔y for Cartesian, theta↔radius for polar. Any other input yields undefined.
 */
function getDimensionChannel(channel) {
  if (channel === 'x') {
    return 'y';
  } else if (channel === 'y') {
    return 'x';
  } else if (channel === 'theta') {
    return 'radius';
  } else if (channel === 'radius') {
    return 'theta';
  }
}
|
|
} // Note: CompassQL uses this method and only pass in required properties of each argument object.
|
|
// If required properties change, make sure to update CompassQL.
|
|
|
|
|
|
/**
 * Compute stacking properties for a mark + encoding pair.
 *
 * @param m        Mark type string or mark def object.
 * @param encoding The (normalized) encoding mapping.
 * @param opt      Options; `opt.disallowNonLinearStack` makes a non-linear
 *                 stacked scale return null instead of only warning.
 * @returns null when the chart should not be stacked, otherwise an object with
 *          the stacked field channel, the group-by channel/field, the extra
 *          `stackBy` level-of-detail entries, the `offset` mode, and `impute`.
 */
function stack(m, encoding) {
  var opt = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  var mark = isMarkDef(m) ? m.type : m; // Should have stackable mark

  if (!STACKABLE_MARKS.has(mark)) {
    return null;
  } // Run potential stacked twice, one for Cartesian and another for Polar,
  // so text marks can be stacked in any of the coordinates.
  // Note: The logic here is not perfectly correct. If we want to support stacked dot plots where each dot is a pie chart with label, we have to change the stack logic here to separate Cartesian stacking for polar stacking.
  // However, since we probably never want to do that, let's just note the limitation here.


  var fieldChannel = potentialStackedChannel(encoding, 'x') || potentialStackedChannel(encoding, 'theta');

  if (!fieldChannel) {
    return null;
  }

  var stackedFieldDef = encoding[fieldChannel];
  var stackedField = isFieldDef(stackedFieldDef) ? _vgField(stackedFieldDef, {}) : undefined;
  var dimensionChannel = getDimensionChannel(fieldChannel);
  var dimensionDef = encoding[dimensionChannel];
  var dimensionField = isFieldDef(dimensionDef) ? _vgField(dimensionDef, {}) : undefined; // avoid grouping by the stacked field

  if (dimensionField === stackedField) {
    dimensionField = undefined;
    dimensionDef = undefined;
    dimensionChannel = undefined;
  } // Should have grouping level of detail that is different from the dimension field
  // Collect non-position channels whose (non-aggregated) fields provide the
  // level of detail that splits the stack into segments.


  var stackBy = NONPOSITION_CHANNELS.reduce(function (sc, channel) {
    // Ignore tooltip in stackBy (https://github.com/vega/vega-lite/issues/4001)
    if (channel !== 'tooltip' && _channelHasField(encoding, channel)) {
      var channelDef = encoding[channel];

      var _iterator34 = _createForOfIteratorHelper(array(channelDef)),
          _step34;

      try {
        for (_iterator34.s(); !(_step34 = _iterator34.n()).done;) {
          var cDef = _step34.value;
          var fieldDef = getFieldDef(cDef);

          if (fieldDef.aggregate) {
            continue;
          } // Check whether the channel's field is identical to x/y's field or if the channel is a repeat


          var f = _vgField(fieldDef, {});

          if ( // if fielddef is a repeat, just include it in the stack by
          !f || // otherwise, the field must be different from x and y fields.
          f !== dimensionField) {
            sc.push({
              channel: channel,
              fieldDef: fieldDef
            });
          }
        }
      } catch (err) {
        _iterator34.e(err);
      } finally {
        _iterator34.f();
      }
    }

    return sc;
  }, []); // Automatically determine offset


  var offset;

  if (stackedFieldDef.stack !== undefined) {
    // Explicit stack: booleans map to 'zero' / disabled, strings pass through.
    if (isBoolean(stackedFieldDef.stack)) {
      offset = stackedFieldDef.stack ? 'zero' : null;
    } else {
      offset = stackedFieldDef.stack;
    }
  } else if (stackBy.length > 0 && STACK_BY_DEFAULT_MARKS.has(mark)) {
    // Bar and Area with sum ops are automatically stacked by default
    offset = 'zero';
  }

  if (!offset || !isStackOffset(offset)) {
    return null;
  }

  // Aggregated encodings with no extra detail have nothing to stack by.
  if (isAggregate(encoding) && stackBy.length === 0) {
    return null;
  } // warn when stacking non-linear


  if (stackedFieldDef.scale && stackedFieldDef.scale.type && stackedFieldDef.scale.type !== ScaleType.LINEAR) {
    if (opt.disallowNonLinearStack) {
      return null;
    } else {
      warn(cannotStackNonLinearScale(stackedFieldDef.scale.type));
    }
  } // Check if it is a ranged mark


  if (isFieldOrDatumDef(encoding[getSecondaryRangeChannel(fieldChannel)])) {
    // Ranged marks (x2/y2/theta2/radius2 present) cannot be stacked.
    if (stackedFieldDef.stack !== undefined) {
      warn(cannotStackRangedMark(fieldChannel));
    }

    return null;
  } // Warn if stacking non-summative aggregate


  if (isFieldDef(stackedFieldDef) && stackedFieldDef.aggregate && !contains(SUM_OPS, stackedFieldDef.aggregate)) {
    warn(stackNonSummativeAggregate(stackedFieldDef.aggregate));
  }

  return {
    groupbyChannel: dimensionDef ? dimensionChannel : undefined,
    groupbyField: dimensionField,
    fieldChannel: fieldChannel,
    // Path marks (line/area/trail) impute missing values unless explicitly disabled with null.
    impute: stackedFieldDef.impute === null ? false : isPathMark(mark),
    stackBy: stackBy,
    offset: offset
  };
}
|
|
|
|
// Strip the `point` / `line` overlay flags from a mark def. When only `type`
// (or less) remains, collapse the def back to the bare mark-type value.
function dropLineAndPoint(markDef) {
  var stripped = __rest(markDef, ["point", "line"]);
  if (keys(stripped).length > 1) {
    return stripped;
  }
  return stripped.type;
}
|
|
|
|
// Remove the `point` / `line` overlay flags from every path-mark config entry
// (line, area, rule, trail). Returns a shallowly-updated copy; the caller's
// config object itself is never mutated.
function dropLineAndPointFromConfig(config) {
  var pathMarkTypes = ['line', 'area', 'rule', 'trail'];
  for (var idx = 0; idx < pathMarkTypes.length; idx++) {
    var markName = pathMarkTypes[idx];
    if (config[markName]) {
      config = Object.assign(Object.assign({}, config), _defineProperty({}, markName, omit(config[markName], ['point', 'line'])));
    }
  }
  return config;
}
|
|
|
|
// Resolve the point-overlay config for a path mark.
// Returns {opacity: 0} for 'transparent', the overlay props (or {}) when
// enabled, null when explicitly disabled (false/null), the config-level
// overlay when the mark def is silent but config or a shape encoding enables
// it, and undefined otherwise.
function getPointOverlay(markDef) {
  var markConfig = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
  var encoding = arguments.length > 2 ? arguments[2] : undefined;

  if (markDef.point === 'transparent') {
    // Keep the point marks but make them invisible.
    return {
      opacity: 0
    };
  }

  if (markDef.point) {
    // truthy: `true` means defaults ({}); otherwise it is a point-props object.
    return isObject(markDef.point) ? markDef.point : {};
  }

  if (markDef.point !== undefined) {
    // explicitly disabled with false or null
    return null;
  }

  // markDef.point is undefined: fall back to the mark config, or enable the
  // overlay when a shape encoding is present.
  if (markConfig.point || encoding.shape) {
    return isObject(markConfig.point) ? markConfig.point : {};
  }

  return undefined;
}
|
|
|
|
// Resolve the line-overlay config for an area mark.
// Returns the overlay props (or {} for `true`) when enabled on the mark def,
// null when explicitly disabled (false/null), the config-level overlay when
// the mark def is silent, and undefined otherwise.
function getLineOverlay(markDef) {
  var markConfig = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

  if (markDef.line) {
    // truthy: `true` means defaults ({}); otherwise it is a line-props object.
    return markDef.line === true ? {} : markDef.line;
  }

  if (markDef.line !== undefined) {
    // explicitly disabled with false or null
    return null;
  }

  // markDef.line is undefined: fall back to the mark config.
  if (markConfig.line) {
    return markConfig.line === true ? {} : markConfig.line;
  }

  return undefined;
}
|
|
|
|
/**
 * Normalizer that expands a path mark (line/area/rule/trail) with a point
 * and/or line overlay into an equivalent layer spec: the base path mark plus
 * extra point/line layers sharing the (stack-adjusted) encoding.
 */
var PathOverlayNormalizer = /*#__PURE__*/function () {
  function PathOverlayNormalizer() {
    _classCallCheck(this, PathOverlayNormalizer);

    this.name = 'path-overlay';
  }

  _createClass(PathOverlayNormalizer, [{
    key: "hasMatchingType",
    // Matches unit specs whose mark (or its config) requests a point/line overlay.
    value: function hasMatchingType(spec, config) {
      if (isUnitSpec(spec)) {
        var mark = spec.mark,
            encoding = spec.encoding;
        var markDef = isMarkDef(mark) ? mark : {
          type: mark
        };

        switch (markDef.type) {
          case 'line':
          case 'rule':
          case 'trail':
            return !!getPointOverlay(markDef, config[markDef.type], encoding);

          case 'area':
            return (// false / null are also included as we want to remove the properties
              !!getPointOverlay(markDef, config[markDef.type], encoding) || !!getLineOverlay(markDef, config[markDef.type])
            );
        }
      }

      return false;
    }
  }, {
    key: "run",
    // Rewrite the unit spec into a layer spec and recurse via `normalize`.
    value: function run(spec, params, normalize) {
      var config = params.config;

      var selection = spec.selection,
          projection = spec.projection,
          mark = spec.mark,
          e = spec.encoding,
          outerSpec = __rest(spec, ["selection", "projection", "mark", "encoding"]); // Need to call normalizeEncoding because we need the inferred types to correctly determine stack


      var encoding = normalizeEncoding(e, config);
      var markDef = isMarkDef(mark) ? mark : {
        type: mark
      };
      var pointOverlay = getPointOverlay(markDef, config[markDef.type], encoding);
      // Line overlays only apply to area marks.
      var lineOverlay = markDef.type === 'area' && getLineOverlay(markDef, config[markDef.type]);
      // Base layer: the original mark with overlay flags stripped; areas get a
      // default 0.7 opacity unless the user set opacity/fillOpacity.
      var layer = [Object.assign(Object.assign({}, selection ? {
        selection: selection
      } : {}), {
        mark: dropLineAndPoint(Object.assign(Object.assign({}, markDef.type === 'area' && markDef.opacity === undefined && markDef.fillOpacity === undefined ? {
          opacity: 0.7
        } : {}), markDef)),
        // drop shape from encoding as this might be used to trigger point overlay
        encoding: omit(encoding, ['shape'])
      })]; // FIXME: determine rules for applying selections.
      // Need to copy stack config to overlayed layer


      var stackProps = stack(markDef, encoding);
      var overlayEncoding = encoding;

      if (stackProps) {
        // Make the implicit stack offset explicit so overlay layers stack the same way.
        var stackFieldChannel = stackProps.fieldChannel,
            offset = stackProps.offset;
        overlayEncoding = Object.assign(Object.assign({}, encoding), _defineProperty({}, stackFieldChannel, Object.assign(Object.assign({}, encoding[stackFieldChannel]), offset ? {
          stack: offset
        } : {})));
      }

      if (lineOverlay) {
        // Line overlay inherits relevant path properties from the base mark.
        layer.push(Object.assign(Object.assign({}, projection ? {
          projection: projection
        } : {}), {
          mark: Object.assign(Object.assign({
            type: 'line'
          }, pick(markDef, ['clip', 'interpolate', 'tension', 'tooltip'])), lineOverlay),
          encoding: overlayEncoding
        }));
      }

      if (pointOverlay) {
        // Point overlay defaults to opaque, filled points.
        layer.push(Object.assign(Object.assign({}, projection ? {
          projection: projection
        } : {}), {
          mark: Object.assign(Object.assign({
            type: 'point',
            opacity: 1,
            filled: true
          }, pick(markDef, ['clip', 'tooltip'])), pointOverlay),
          encoding: overlayEncoding
        }));
      }

      // Recurse with the overlay flags removed from the config so they are not applied twice.
      return normalize(Object.assign(Object.assign({}, outerSpec), {
        layer: layer
      }), Object.assign(Object.assign({}, params), {
        config: dropLineAndPointFromConfig(config)
      }));
    }
  }]);

  return PathOverlayNormalizer;
}();
|
|
|
|
/**
 * Normalizer that converts the deprecated `scale.rangeStep` property on
 * position channels into the equivalent `width`/`height` `{step: ...}`
 * size mixin, emitting a deprecation warning.
 */
var RangeStepNormalizer = /*#__PURE__*/function () {
  function RangeStepNormalizer() {
    _classCallCheck(this, RangeStepNormalizer);

    this.name = 'RangeStep';
  }

  _createClass(RangeStepNormalizer, [{
    key: "hasMatchingType",
    // Matches unit specs with a `rangeStep` on any position-scale channel.
    value: function hasMatchingType(spec) {
      var _a;

      if (isUnitSpec(spec) && spec.encoding) {
        var _iterator35 = _createForOfIteratorHelper(POSITION_SCALE_CHANNELS),
            _step35;

        try {
          for (_iterator35.s(); !(_step35 = _iterator35.n()).done;) {
            var channel = _step35.value;
            var def = spec.encoding[channel];

            if (def && isFieldOrDatumDef(def)) {
              if ((_a = def === null || def === void 0 ? void 0 : def.scale) === null || _a === void 0 ? void 0 : _a['rangeStep']) {
                return true;
              }
            }
          }
        } catch (err) {
          _iterator35.e(err);
        } finally {
          _iterator35.f();
        }
      }

      return false;
    }
  }, {
    key: "run",
    // Move each `rangeStep` into a width/height step mixin and strip it from the scale.
    value: function run(spec) {
      var _a;

      var sizeMixins = {};
      var encoding = Object.assign({}, spec.encoding);

      var _iterator36 = _createForOfIteratorHelper(POSITION_SCALE_CHANNELS),
          _step36;

      try {
        for (_iterator36.s(); !(_step36 = _iterator36.n()).done;) {
          var channel = _step36.value;
          // 'x' sizes 'width', 'y' sizes 'height'.
          var sizeType = getSizeChannel(channel);
          var def = encoding[channel];

          if (def && isFieldOrDatumDef(def)) {
            if ((_a = def === null || def === void 0 ? void 0 : def.scale) === null || _a === void 0 ? void 0 : _a['rangeStep']) {
              var scale = def.scale,
                  defWithoutScale = __rest(def, ["scale"]);

              var _b = scale,
                  scaleWithoutRangeStep = __rest(_b, ["rangeStep"]);

              sizeMixins[sizeType] = {
                step: scale['rangeStep']
              };
              warn(RANGE_STEP_DEPRECATED);
              // Rebuild the channel def, dropping the scale entirely when nothing remains.
              encoding = Object.assign(Object.assign({}, encoding), _defineProperty({}, channel, Object.assign(Object.assign({}, defWithoutScale), isEmpty(scaleWithoutRangeStep) ? {} : {
                scale: scaleWithoutRangeStep
              })));
            }
          }
        }
      } catch (err) {
        _iterator36.e(err);
      } finally {
        _iterator36.f();
      }

      // Spread order lets an explicit width/height on the spec win over the mixins.
      return Object.assign(Object.assign(Object.assign({}, sizeMixins), spec), {
        encoding: encoding
      });
    }
  }]);

  return RangeStepNormalizer;
}();
|
|
|
|
// Substitute repeater values into a facet definition, which is either a
// row/column mapping or a single facet field def. No-op without a repeater.
function replaceRepeaterInFacet(facet, repeater) {
  if (!repeater) {
    return facet;
  }
  return isFacetMapping(facet) ? replaceRepeaterInMapping(facet, repeater) : replaceRepeaterInFieldDef(facet, repeater);
}
|
|
|
|
// Substitute repeater values into an encoding mapping; returns the encoding
// unchanged when there is no repeater.
function replaceRepeaterInEncoding(encoding, repeater) {
  return repeater ? replaceRepeaterInMapping(encoding, repeater) : encoding;
}
|
|
/**
 * Replace a `{repeat: ...}` reference stored at `o[prop]` with the concrete
 * value from `repeater`.
 *
 * @returns a shallow copy of `o` with the substitution applied; `o` unchanged
 *          when `o[prop]` is not a repeat reference; or `undefined` (after
 *          warning) when the referenced repeat key does not exist.
 */
function replaceRepeatInProp(prop, o, repeater) {
  var val = o[prop];

  if (!isRepeatRef(val)) {
    return o;
  }

  if (val.repeat in repeater) {
    return Object.assign(Object.assign({}, o), _defineProperty({}, prop, repeater[val.repeat]));
  }

  warn(noSuchRepeatedValue(val.repeat));
  return undefined;
}
|
|
/**
 * Replace repeater values in a field def with the concrete field name.
 *
 * @returns the substituted field def; `undefined` when the repeat reference is
 *          invalid (the def should be dropped); `null` when the def is null.
 */
function replaceRepeaterInFieldDef(fieldDef, repeater) {
  fieldDef = replaceRepeatInProp('field', fieldDef, repeater);

  if (fieldDef === undefined) {
    // the field def should be ignored
    return undefined;
  } else if (fieldDef === null) {
    return null;
  }

  // A sort-by-field may itself contain a repeat reference; substitute it too.
  if (isSortableFieldDef(fieldDef) && isSortField(fieldDef.sort)) {
    var sort = replaceRepeatInProp('field', fieldDef.sort, repeater);
    fieldDef = Object.assign(Object.assign({}, fieldDef), sort ? {
      sort: sort
    } : {});
  }

  return fieldDef;
}
|
|
|
|
// Replace repeater references in either a field def or a datum def.
// A substituted datum def without an explicit type defaults to 'nominal'.
function replaceRepeaterInFieldOrDatumDef(def, repeater) {
  if (isFieldDef(def)) {
    return replaceRepeaterInFieldDef(def, repeater);
  } else {
    var datumDef = replaceRepeatInProp('datum', def, repeater);

    // A new object means a substitution happened; fill in a default type.
    if (datumDef !== def && !datumDef.type) {
      datumDef.type = 'nominal';
    }

    return datumDef;
  }
}
|
|
|
|
/**
 * Replace repeater references in a channel def (field/datum def, possibly
 * conditional, or a value def).
 *
 * @returns the substituted def; a condition-only def when the main field was
 *          dropped; the def unchanged for value defs; or `undefined` when the
 *          repeat reference was invalid and nothing remains.
 */
function replaceRepeaterInChannelDef(channelDef, repeater) {
  if (isFieldOrDatumDef(channelDef)) {
    var fd = replaceRepeaterInFieldOrDatumDef(channelDef, repeater);

    if (fd) {
      return fd;
    } else if (isConditionalDef(channelDef)) {
      // The main field was dropped, but a condition may still apply.
      return {
        condition: channelDef.condition
      };
    }
  } else {
    if (hasConditionalFieldOrDatumDef(channelDef)) {
      // Value def with a conditional field/datum: substitute inside the condition.
      var _fd = replaceRepeaterInFieldOrDatumDef(channelDef.condition, repeater);

      if (_fd) {
        return Object.assign(Object.assign({}, channelDef), {
          condition: _fd
        });
      } else {
        // Invalid repeat in the condition: keep the def without it.
        var channelDefWithoutCondition = __rest(channelDef, ["condition"]);

        return channelDefWithoutCondition;
      }
    }

    // Plain value def: nothing to substitute.
    return channelDef;
  }

  return undefined;
}
|
|
|
|
/**
 * Replace repeater references across a whole channel mapping (encoding or
 * facet mapping). Channel defs that become undefined after substitution are
 * dropped; array-valued channels are filtered instead.
 */
function replaceRepeaterInMapping(mapping, repeater) {
  var out = {};

  for (var channel in mapping) {
    if (hasOwnProperty(mapping, channel)) {
      var channelDef = mapping[channel];

      if (isArray(channelDef)) {
        // array cannot have condition
        out[channel] = channelDef // somehow we need to cast it here
        .map(function (cd) {
          return replaceRepeaterInChannelDef(cd, repeater);
        }).filter(function (cd) {
          return cd;
        });
      } else {
        var cd = replaceRepeaterInChannelDef(channelDef, repeater);

        if (cd !== undefined) {
          out[channel] = cd;
        }
      }
    }
  }

  return out;
}
|
|
|
|
/**
 * Normalizer that converts a ranged `line` mark (one with x2/y2 against an
 * unbinned main channel) into a `rule` mark, warning about the conversion.
 */
var RuleForRangedLineNormalizer = /*#__PURE__*/function () {
  function RuleForRangedLineNormalizer() {
    _classCallCheck(this, RuleForRangedLineNormalizer);

    this.name = 'RuleForRangedLine';
  }

  _createClass(RuleForRangedLineNormalizer, [{
    key: "hasMatchingType",
    // Matches line unit specs where a secondary range channel (x2/y2/...) is
    // used with an unbinned field def or a datum def on the main channel.
    value: function hasMatchingType(spec) {
      if (isUnitSpec(spec)) {
        var encoding = spec.encoding,
            mark = spec.mark;

        if (mark === 'line') {
          var _iterator37 = _createForOfIteratorHelper(SECONDARY_RANGE_CHANNEL),
              _step37;

          try {
            for (_iterator37.s(); !(_step37 = _iterator37.n()).done;) {
              var channel = _step37.value;
              var mainChannel = getMainRangeChannel(channel);
              var mainChannelDef = encoding[mainChannel];

              if (encoding[channel]) {
                if (isFieldDef(mainChannelDef) && !isBinned(mainChannelDef.bin) || isDatumDef(mainChannelDef)) {
                  return true;
                }
              }
            }
          } catch (err) {
            _iterator37.e(err);
          } finally {
            _iterator37.f();
          }
        }
      }

      return false;
    }
  }, {
    key: "run",
    // Warn and re-normalize the same spec with its mark changed to 'rule'.
    value: function run(spec, params, normalize) {
      var encoding = spec.encoding;
      warn(lineWithRange(!!encoding.x2, !!encoding.y2));
      return normalize(Object.assign(Object.assign({}, spec), {
        mark: 'rule'
      }), params);
    }
  }]);

  return RuleForRangedLineNormalizer;
}();
|
|
|
|
var CoreNormalizer = /*#__PURE__*/function (_SpecMapper) {
|
|
_inherits(CoreNormalizer, _SpecMapper);
|
|
|
|
var _super = _createSuper(CoreNormalizer);
|
|
|
|
function CoreNormalizer() {
|
|
var _this5;
|
|
|
|
_classCallCheck(this, CoreNormalizer);
|
|
|
|
_this5 = _super.apply(this, arguments);
|
|
_this5.nonFacetUnitNormalizers = [boxPlotNormalizer, errorBarNormalizer, errorBandNormalizer, new PathOverlayNormalizer(), new RuleForRangedLineNormalizer(), new RangeStepNormalizer()];
|
|
return _this5;
|
|
}
|
|
|
|
_createClass(CoreNormalizer, [{
|
|
key: "map",
|
|
value: function map(spec, params) {
|
|
// Special handling for a faceted unit spec as it can return a facet spec, not just a layer or unit spec like a normal unit spec.
|
|
if (isUnitSpec(spec)) {
|
|
var hasRow = _channelHasField(spec.encoding, ROW);
|
|
|
|
var hasColumn = _channelHasField(spec.encoding, COLUMN);
|
|
|
|
var hasFacet = _channelHasField(spec.encoding, FACET);
|
|
|
|
if (hasRow || hasColumn || hasFacet) {
|
|
return this.mapFacetedUnit(spec, params);
|
|
}
|
|
}
|
|
|
|
return _get(_getPrototypeOf(CoreNormalizer.prototype), "map", this).call(this, spec, params);
|
|
} // This is for normalizing non-facet unit
|
|
|
|
}, {
|
|
key: "mapUnit",
|
|
value: function mapUnit(spec, params) {
|
|
var parentEncoding = params.parentEncoding,
|
|
parentProjection = params.parentProjection;
|
|
var encoding = replaceRepeaterInEncoding(spec.encoding, params.repeater);
|
|
var specWithReplacedEncoding = Object.assign(Object.assign({}, spec), encoding ? {
|
|
encoding: encoding
|
|
} : {});
|
|
|
|
if (parentEncoding || parentProjection) {
|
|
return this.mapUnitWithParentEncodingOrProjection(specWithReplacedEncoding, params);
|
|
}
|
|
|
|
var normalizeLayerOrUnit = this.mapLayerOrUnit.bind(this);
|
|
|
|
var _iterator38 = _createForOfIteratorHelper(this.nonFacetUnitNormalizers),
|
|
_step38;
|
|
|
|
try {
|
|
for (_iterator38.s(); !(_step38 = _iterator38.n()).done;) {
|
|
var unitNormalizer = _step38.value;
|
|
|
|
if (unitNormalizer.hasMatchingType(specWithReplacedEncoding, params.config)) {
|
|
return unitNormalizer.run(specWithReplacedEncoding, params, normalizeLayerOrUnit);
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator38.e(err);
|
|
} finally {
|
|
_iterator38.f();
|
|
}
|
|
|
|
return specWithReplacedEncoding;
|
|
}
|
|
}, {
|
|
key: "mapRepeat",
|
|
value: function mapRepeat(spec, params) {
|
|
if (isLayerRepeatSpec(spec)) {
|
|
return this.mapLayerRepeat(spec, params);
|
|
} else {
|
|
return this.mapNonLayerRepeat(spec, params);
|
|
}
|
|
}
|
|
}, {
|
|
key: "mapLayerRepeat",
|
|
value: function mapLayerRepeat(spec, params) {
|
|
var _this6 = this;
|
|
|
|
var repeat = spec.repeat,
|
|
childSpec = spec.spec,
|
|
rest = __rest(spec, ["repeat", "spec"]);
|
|
|
|
var row = repeat.row,
|
|
column = repeat.column,
|
|
layer = repeat.layer;
|
|
var _params$repeater = params.repeater,
|
|
repeater = _params$repeater === void 0 ? {} : _params$repeater,
|
|
_params$repeaterPrefi = params.repeaterPrefix,
|
|
repeaterPrefix = _params$repeaterPrefi === void 0 ? '' : _params$repeaterPrefi;
|
|
|
|
if (row || column) {
|
|
return this.mapRepeat(Object.assign(Object.assign({}, spec), {
|
|
repeat: Object.assign(Object.assign({}, row ? {
|
|
row: row
|
|
} : {}), column ? {
|
|
column: column
|
|
} : {}),
|
|
spec: {
|
|
repeat: {
|
|
layer: layer
|
|
},
|
|
spec: childSpec
|
|
}
|
|
}), params);
|
|
} else {
|
|
return Object.assign(Object.assign({}, rest), {
|
|
layer: layer.map(function (layerValue) {
|
|
var childRepeater = Object.assign(Object.assign({}, repeater), {
|
|
layer: layerValue
|
|
});
|
|
var childName = (childSpec.name || '') + repeaterPrefix + "child__layer_".concat(varName(layerValue));
|
|
|
|
var child = _this6.mapLayerOrUnit(childSpec, Object.assign(Object.assign({}, params), {
|
|
repeater: childRepeater,
|
|
repeaterPrefix: childName
|
|
}));
|
|
|
|
child.name = childName;
|
|
return child;
|
|
})
|
|
});
|
|
}
|
|
}
|
|
}, {
|
|
key: "mapNonLayerRepeat",
|
|
value: function mapNonLayerRepeat(spec, params) {
|
|
var _a;
|
|
|
|
var _spec2 = spec,
|
|
repeat = _spec2.repeat,
|
|
childSpec = _spec2.spec,
|
|
data = _spec2.data,
|
|
remainingProperties = __rest(spec, ["repeat", "spec", "data"]);
|
|
|
|
if (!isArray(repeat) && spec.columns) {
|
|
// is repeat with row/column
|
|
spec = omit(spec, ['columns']);
|
|
warn(columnsNotSupportByRowCol('repeat'));
|
|
}
|
|
|
|
var concat = [];
|
|
var _params$repeater2 = params.repeater,
|
|
repeater = _params$repeater2 === void 0 ? {} : _params$repeater2,
|
|
_params$repeaterPrefi2 = params.repeaterPrefix,
|
|
repeaterPrefix = _params$repeaterPrefi2 === void 0 ? '' : _params$repeaterPrefi2;
|
|
var row = !isArray(repeat) && repeat.row || [repeater ? repeater.row : null];
|
|
var column = !isArray(repeat) && repeat.column || [repeater ? repeater.column : null];
|
|
var repeatValues = isArray(repeat) && repeat || [repeater ? repeater.repeat : null]; // cross product
|
|
|
|
var _iterator39 = _createForOfIteratorHelper(repeatValues),
|
|
_step39;
|
|
|
|
try {
|
|
for (_iterator39.s(); !(_step39 = _iterator39.n()).done;) {
|
|
var repeatValue = _step39.value;
|
|
|
|
var _iterator40 = _createForOfIteratorHelper(row),
|
|
_step40;
|
|
|
|
try {
|
|
for (_iterator40.s(); !(_step40 = _iterator40.n()).done;) {
|
|
var rowValue = _step40.value;
|
|
|
|
var _iterator41 = _createForOfIteratorHelper(column),
|
|
_step41;
|
|
|
|
try {
|
|
for (_iterator41.s(); !(_step41 = _iterator41.n()).done;) {
|
|
var columnValue = _step41.value;
|
|
var childRepeater = {
|
|
repeat: repeatValue,
|
|
row: rowValue,
|
|
column: columnValue,
|
|
layer: repeater.layer
|
|
};
|
|
var childName = (childSpec.name || '') + repeaterPrefix + 'child__' + (isArray(repeat) ? "".concat(varName(repeatValue)) : (repeat.row ? "row_".concat(varName(rowValue)) : '') + (repeat.column ? "column_".concat(varName(columnValue)) : ''));
|
|
var child = this.map(childSpec, Object.assign(Object.assign({}, params), {
|
|
repeater: childRepeater,
|
|
repeaterPrefix: childName
|
|
}));
|
|
child.name = childName; // we move data up
|
|
|
|
concat.push(omit(child, ['data']));
|
|
}
|
|
} catch (err) {
|
|
_iterator41.e(err);
|
|
} finally {
|
|
_iterator41.f();
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator40.e(err);
|
|
} finally {
|
|
_iterator40.f();
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator39.e(err);
|
|
} finally {
|
|
_iterator39.f();
|
|
}
|
|
|
|
var columns = isArray(repeat) ? spec.columns : repeat.column ? repeat.column.length : 1;
|
|
return Object.assign(Object.assign({
|
|
data: (_a = childSpec.data) !== null && _a !== void 0 ? _a : data,
|
|
align: 'all'
|
|
}, remainingProperties), {
|
|
columns: columns,
|
|
concat: concat
|
|
});
|
|
}
|
|
}, {
|
|
key: "mapFacet",
|
|
value: function mapFacet(spec, params) {
|
|
var _spec3 = spec,
|
|
facet = _spec3.facet;
|
|
|
|
if (isFacetMapping(facet) && spec.columns) {
|
|
// is facet with row/column
|
|
spec = omit(spec, ['columns']);
|
|
warn(columnsNotSupportByRowCol('facet'));
|
|
}
|
|
|
|
return _get(_getPrototypeOf(CoreNormalizer.prototype), "mapFacet", this).call(this, spec, params);
|
|
}
|
|
}, {
|
|
key: "mapUnitWithParentEncodingOrProjection",
|
|
value: function mapUnitWithParentEncodingOrProjection(spec, params) {
|
|
var encoding = spec.encoding,
|
|
projection = spec.projection;
|
|
var parentEncoding = params.parentEncoding,
|
|
parentProjection = params.parentProjection,
|
|
config = params.config;
|
|
var mergedProjection = mergeProjection({
|
|
parentProjection: parentProjection,
|
|
projection: projection
|
|
});
|
|
var mergedEncoding = mergeEncoding({
|
|
parentEncoding: parentEncoding,
|
|
encoding: replaceRepeaterInEncoding(encoding, params.repeater)
|
|
});
|
|
return this.mapUnit(Object.assign(Object.assign(Object.assign({}, spec), mergedProjection ? {
|
|
projection: mergedProjection
|
|
} : {}), mergedEncoding ? {
|
|
encoding: mergedEncoding
|
|
} : {}), {
|
|
config: config
|
|
});
|
|
}
|
|
}, {
  key: "mapFacetedUnit",
  // Convert a unit spec that carries row/column/facet encoding channels into
  // an explicit facet spec: the facet channels move to the outer facet spec,
  // while mark/encoding/size/view/projection/selection move to the inner spec.
  value: function mapFacetedUnit(spec, params) {
    // New encoding in the inside spec should not contain row / column
    // as row/column should be moved to facet
    var _a = spec.encoding,
        row = _a.row,
        column = _a.column,
        facet = _a.facet,
        encoding = __rest(_a, ["row", "column", "facet"]); // Mark and encoding should be moved into the inner spec

    var mark = spec.mark,
        width = spec.width,
        projection = spec.projection,
        height = spec.height,
        view = spec.view,
        selection = spec.selection,
        _ = spec.encoding,
        outerSpec = __rest(spec, ["mark", "width", "projection", "height", "view", "selection", "encoding"]);

    // Split the facet channels into the facet field mapping and the layout
    // properties (align/center/spacing/columns) they may carry.
    var _this$getFacetMapping = this.getFacetMappingAndLayout({
      row: row,
      column: column,
      facet: facet
    }, params),
        facetMapping = _this$getFacetMapping.facetMapping,
        layout = _this$getFacetMapping.layout;

    var newEncoding = replaceRepeaterInEncoding(encoding, params.repeater);
    return this.mapFacet(Object.assign(Object.assign(Object.assign({}, outerSpec), layout), {
      // row / column has higher precedence than facet
      facet: facetMapping,
      // Inner spec: only include size/view/projection/selection keys that
      // were actually present on the original unit spec.
      spec: Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({}, width ? {
        width: width
      } : {}), height ? {
        height: height
      } : {}), view ? {
        view: view
      } : {}), projection ? {
        projection: projection
      } : {}), {
        mark: mark,
        encoding: newEncoding
      }), selection ? {
        selection: selection
      } : {})
    }), params);
  }
|
|
}, {
  key: "getFacetMappingAndLayout",
  // Given the row/column/facet channels of a faceted unit, produce:
  //  - facetMapping: the per-channel facet field definitions, and
  //  - layout: headerless layout properties (align/center/spacing/columns)
  //    extracted from those definitions.
  // row/column take precedence over facet; if both are given, facet is
  // dropped with a warning.
  value: function getFacetMappingAndLayout(facets, params) {
    var _a;

    var row = facets.row,
        column = facets.column,
        facet = facets.facet;

    if (row || column) {
      if (facet) {
        // row/column win; tell the user which channels caused facet to drop.
        warn(facetChannelDropped([].concat(_toConsumableArray(row ? [ROW] : []), _toConsumableArray(column ? [COLUMN] : []))));
      }

      var facetMapping = {};
      var layout = {};

      for (var _i6 = 0, _arr4 = [ROW, COLUMN]; _i6 < _arr4.length; _i6++) {
        var channel = _arr4[_i6];
        var def = facets[channel];

        if (def) {
          // Strip layout props from the field def before storing it.
          var defWithoutLayout = __rest(def, ["align", "center", "spacing", "columns"]);

          facetMapping[channel] = defWithoutLayout;

          // Layout props become per-channel entries, e.g.
          // layout.spacing = {row: ..., column: ...}.
          for (var _i7 = 0, _arr5 = ['align', 'center', 'spacing']; _i7 < _arr5.length; _i7++) {
            var prop = _arr5[_i7];

            if (def[prop] !== undefined) {
              layout[prop] = (_a = layout[prop]) !== null && _a !== void 0 ? _a : {};
              layout[prop][channel] = def[prop];
            }
          }
        }
      }

      return {
        facetMapping: facetMapping,
        layout: layout
      };
    } else {
      // Plain facet channel: pull the layout props off it and resolve any
      // repeater variables in what remains.
      var _align = facet.align,
          center = facet.center,
          spacing = facet.spacing,
          columns = facet.columns,
          _facetMapping = __rest(facet, ["align", "center", "spacing", "columns"]);

      return {
        facetMapping: replaceRepeaterInFacet(_facetMapping, params.repeater),
        layout: Object.assign(Object.assign(Object.assign(Object.assign({}, _align ? {
          align: _align
        } : {}), center ? {
          center: center
        } : {}), spacing ? {
          spacing: spacing
        } : {}), columns ? {
          columns: columns
        } : {})
      };
    }
  }
|
|
}, {
  key: "mapLayer",
  // Normalize a layer spec. The layer's own encoding/projection are merged
  // into the params as parentEncoding/parentProjection so children inherit
  // them, and are removed from the spec passed to the superclass.
  value: function mapLayer(spec, _a) {
    // Special handling for extended layer spec
    var parentEncoding = _a.parentEncoding,
        parentProjection = _a.parentProjection,
        otherParams = __rest(_a, ["parentEncoding", "parentProjection"]);

    var encoding = spec.encoding,
        projection = spec.projection,
        rest = __rest(spec, ["encoding", "projection"]);

    var params = Object.assign(Object.assign({}, otherParams), {
      // `layer: true` makes mergeEncoding keep inheritable parent channel defs.
      parentEncoding: mergeEncoding({
        parentEncoding: parentEncoding,
        encoding: encoding,
        layer: true
      }),
      parentProjection: mergeProjection({
        parentProjection: parentProjection,
        projection: projection
      })
    });
    // Transpiled `super.mapLayer(rest, params)`.
    return _get(_getPrototypeOf(CoreNormalizer.prototype), "mapLayer", this).call(this, rest, params);
  }
|
|
}]);
|
|
|
|
return CoreNormalizer;
|
|
}(SpecMapper);
|
|
|
|
/**
 * Merge a parent (layer/facet-level) encoding into a child encoding.
 * Per-channel precedence: the child's definition wins; field/datum defs
 * inherit missing properties from the parent; conditional defs inherit into
 * their `condition`. When the child has no definition for a channel, the
 * parent's is kept only for layers or for value/signal/field-datum/array
 * parent defs. Returns undefined when the merged encoding is empty.
 */
function mergeEncoding(_ref19) {
  var parentEncoding = _ref19.parentEncoding,
      _ref19$encoding = _ref19.encoding,
      encoding = _ref19$encoding === void 0 ? {} : _ref19$encoding,
      layer = _ref19.layer;
  var merged = {};

  if (parentEncoding) {
    // Union of channels present in either encoding.
    var channels = new Set([].concat(_toConsumableArray(keys(parentEncoding)), _toConsumableArray(keys(encoding))));

    var _iterator42 = _createForOfIteratorHelper(channels),
        _step42;

    try {
      for (_iterator42.s(); !(_step42 = _iterator42.n()).done;) {
        var channel = _step42.value;
        var channelDef = encoding[channel];
        var parentChannelDef = parentEncoding[channel];

        if (isFieldOrDatumDef(channelDef)) {
          // Field/Datum Def can inherit properties from its parent
          // Note that parentChannelDef doesn't have to be a field/datum def if the channelDef is already one.
          var mergedChannelDef = Object.assign(Object.assign({}, parentChannelDef), channelDef);
          merged[channel] = mergedChannelDef;
        } else if (hasConditionalFieldOrDatumDef(channelDef)) {
          // Inherit the parent's properties into the condition part.
          merged[channel] = Object.assign(Object.assign({}, channelDef), {
            condition: Object.assign(Object.assign({}, parentChannelDef), channelDef.condition)
          });
        } else if (channelDef) {
          // Any other child definition simply overrides the parent.
          merged[channel] = channelDef;
        } else if (layer || isValueDef(parentChannelDef) || isSignalRef(parentChannelDef) || isFieldOrDatumDef(parentChannelDef) || isArray(parentChannelDef)) {
          // No child def: keep the parent's def in the cases listed above.
          merged[channel] = parentChannelDef;
        }
      }
    } catch (err) {
      _iterator42.e(err);
    } finally {
      _iterator42.f();
    }
  } else {
    merged = encoding;
  }

  return !merged || isEmpty(merged) ? undefined : merged;
}
|
|
|
|
/**
 * Resolve the effective projection for a unit spec: the unit's own
 * `projection` wins over the `parentProjection` inherited from an enclosing
 * layer. Warns when both are present (the parent's is overridden).
 */
function mergeProjection(opt) {
  var parentProjection = opt.parentProjection;
  var projection = opt.projection;

  if (parentProjection && projection) {
    // Both levels define a projection: keep the child's, but tell the user.
    warn(projectionOverridden({
      parentProjection: parentProjection,
      projection: projection
    }));
  }

  // Nullish fallback: only null/undefined fall through to the parent.
  if (projection !== null && projection !== void 0) {
    return projection;
  }

  return parentProjection;
}
|
|
|
|
/**
 * Normalize a top-level Vega-Lite spec: decompose extended specs into pure
 * unit/composition specs and resolve the top-level `autosize` setting.
 * When no config is supplied, one is initialized from `spec.config`.
 */
function normalize(spec, config) {
  if (config === undefined) {
    config = initConfig(spec.config);
  }

  var normalizedSpec = normalizeGenericSpec(spec, config);

  var autosize = normalizeAutoSize(normalizedSpec, {
    width: spec.width,
    height: spec.height,
    autosize: spec.autosize
  }, config);

  // Only attach `autosize` when normalizeAutoSize produced a non-default value.
  var result = Object.assign({}, normalizedSpec);

  if (autosize) {
    result.autosize = autosize;
  }

  return result;
}
|
|
|
|
// Module-level singleton normalizer shared by normalizeGenericSpec.
var normalizer = new CoreNormalizer();
/**
 * Decompose extended unit specs into composition of pure unit specs.
 */

function normalizeGenericSpec(spec) {
  // Optional second argument: the (already-initialized) config; defaults to {}.
  var config = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
  return normalizer.map(spec, {
    config: config
  });
}
|
|
|
|
/**
 * Normalize an `autosize` value to object form: a string becomes
 * `{type: <string>}`; null/undefined become {}; objects pass through.
 */
function _normalizeAutoSize(autosize) {
  if (isString(autosize)) {
    return {
      type: autosize
    };
  }

  // Only null/undefined fall back to the empty object.
  return autosize != null ? autosize : {};
}
|
|
/**
 * Normalize autosize and deal with width or height == "container".
 *
 * Returns the resolved autosize object, or undefined when it equals Vega's
 * default ({type: 'pad'}). Only unit and layer specs support "fit" sizing;
 * for other specs, container sizing is discarded with a warning.
 */
function normalizeAutoSize(spec, sizeInfo, config) {
  var width = sizeInfo.width,
      height = sizeInfo.height;
  var isFitCompatible = isUnitSpec(spec) || isLayerSpec(spec);
  var autosizeDefault = {};

  if (!isFitCompatible) {
    // If spec is not compatible with autosize == "fit", discard width/height == container
    if (width == 'container') {
      warn(containerSizeNonSingle('width'));
      width = undefined;
    }

    if (height == 'container') {
      warn(containerSizeNonSingle('height'));
      height = undefined;
    }
  } else {
    // Default autosize parameters to fit when width/height is "container"
    if (width == 'container' && height == 'container') {
      autosizeDefault.type = 'fit';
      autosizeDefault.contains = 'padding';
    } else if (width == 'container') {
      autosizeDefault.type = 'fit-x';
      autosizeDefault.contains = 'padding';
    } else if (height == 'container') {
      autosizeDefault.type = 'fit-y';
      autosizeDefault.contains = 'padding';
    }
  }

  // Precedence (low to high): 'pad' default < container-derived default <
  // config.autosize < spec.autosize.
  var autosize = Object.assign(Object.assign(Object.assign({
    type: 'pad'
  }, autosizeDefault), config ? _normalizeAutoSize(config.autosize) : {}), _normalizeAutoSize(spec.autosize));

  if (autosize.type === 'fit' && !isFitCompatible) {
    warn(FIT_NON_SINGLE);
    autosize.type = 'pad';
  }

  // Container sizing requires a matching fit type on that axis; warn if not.
  if (width == 'container' && !(autosize.type == 'fit' || autosize.type == 'fit-x')) {
    warn(containerSizeNotCompatibleWithAutosize('width'));
  }

  if (height == 'container' && !(autosize.type == 'fit' || autosize.type == 'fit-y')) {
    warn(containerSizeNotCompatibleWithAutosize('height'));
  } // Delete autosize property if it's Vega's default


  if (deepEqual(autosize, {
    type: 'pad'
  })) {
    return undefined;
  }

  return autosize;
}
|
|
|
|
/** True when the autosize type is one of the "fit" family. */
function isFitType(autoSizeType) {
  switch (autoSizeType) {
    case 'fit':
    case 'fit-x':
    case 'fit-y':
      return true;

    default:
      return false;
  }
}
|
|
|
|
/**
 * Map a size type ('width'/'height') to the matching axis-specific fit type
 * ('fit-x'/'fit-y'); a falsy size type means plain 'fit'.
 */
function getFitType(sizeType) {
  if (!sizeType) {
    return 'fit';
  }

  return "fit-" + getPositionScaleChannel(sizeType);
}
|
|
|
|
// We do not include "autosize" here as it is supported by only unit and layer
// specs and thus needs to be normalized separately.
var TOP_LEVEL_PROPERTIES = ['background', 'padding'];

/**
 * Pick the defined top-level properties (background, padding) off `t`.
 * Returns an empty object when `t` is null/undefined.
 */
function extractTopLevelProperties(t) {
  var picked = {};

  for (var i = 0; i < TOP_LEVEL_PROPERTIES.length; i++) {
    var prop = TOP_LEVEL_PROPERTIES[i];

    if (t && t[prop] !== undefined) {
      picked[prop] = t[prop];
    }
  }

  return picked;
}
|
|
/**
|
|
* Generic class for storing properties that are explicitly specified
|
|
* and implicitly determined by the compiler.
|
|
* This is important for scale/axis/legend merging as
|
|
* we want to prioritize properties that users explicitly specified.
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
|
|
|
|
|
// Container for properties that are either explicitly specified by the user
// (`explicit`) or implicitly determined by the compiler (`implicit`).
// Explicit values always take precedence when reading.
var Split = /*#__PURE__*/function () {
  function Split() {
    var explicit = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
    var implicit = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    _classCallCheck(this, Split);

    this.explicit = explicit;
    this.implicit = implicit;
  }

  _createClass(Split, [{
    key: "clone",
    // Deep-copy both property stores into a new Split.
    value: function clone() {
      return new Split(duplicate(this.explicit), duplicate(this.implicit));
    }
  }, {
    key: "combine",
    // Flatten both stores into a single plain object (implicit keys overwrite
    // explicit ones here only for keys present in both; set() keeps a key in
    // at most one store, so in practice there is no overlap).
    value: function combine() {
      // FIXME remove "as any".
      // Add "as any" to avoid an error "Spread types may only be created from object types".
      return Object.assign(Object.assign({}, this.explicit), this.implicit);
    }
  }, {
    key: "get",
    // Read a property value; explicit wins over implicit.
    value: function get(key) {
      // Explicit has higher precedence
      return getFirstDefined(this.explicit[key], this.implicit[key]);
    }
  }, {
    key: "getWithExplicit",
    // Read a property value together with a flag telling whether it came
    // from the explicit store.
    value: function getWithExplicit(key) {
      // Explicit has higher precedence
      if (this.explicit[key] !== undefined) {
        return {
          explicit: true,
          value: this.explicit[key]
        };
      } else if (this.implicit[key] !== undefined) {
        return {
          explicit: false,
          value: this.implicit[key]
        };
      }

      return {
        explicit: false,
        value: undefined
      };
    }
  }, {
    key: "setWithExplicit",
    // Store a {value, explicit} pair; undefined values are ignored.
    value: function setWithExplicit(key, value) {
      if (value.value !== undefined) {
        this.set(key, value.value, value.explicit);
      }
    }
  }, {
    key: "set",
    // Store a value in the explicit or implicit store, removing it from the
    // other store so each key lives in exactly one of them.
    value: function set(key, value, explicit) {
      delete this[explicit ? 'implicit' : 'explicit'][key];
      this[explicit ? 'explicit' : 'implicit'][key] = value;
      return this;
    }
  }, {
    key: "copyKeyFromSplit",
    // Copy one key from another Split, preserving its explicitness.
    value: function copyKeyFromSplit(key, s) {
      // Explicit has higher precedence
      if (s.explicit[key] !== undefined) {
        this.set(key, s.explicit[key], true);
      } else if (s.implicit[key] !== undefined) {
        this.set(key, s.implicit[key], false);
      }
    }
  }, {
    key: "copyKeyFromObject",
    // Copy one key from a plain object; values from objects count as explicit.
    value: function copyKeyFromObject(key, s) {
      // Explicit has higher precedence
      if (s[key] !== undefined) {
        this.set(key, s[key], true);
      }
    }
    /**
     * Merge split object into this split object. Properties from the other split
     * overwrite properties from this split.
     */

  }, {
    key: "copyAll",
    value: function copyAll(other) {
      var _iterator43 = _createForOfIteratorHelper(keys(other.combine())),
          _step43;

      try {
        for (_iterator43.s(); !(_step43 = _iterator43.n()).done;) {
          var key = _step43.value;
          var val = other.getWithExplicit(key);
          this.setWithExplicit(key, val);
        }
      } catch (err) {
        _iterator43.e(err);
      } finally {
        _iterator43.f();
      }
    }
  }]);

  return Split;
}();
|
|
|
|
/** Wrap a value as an explicit (user-specified) split entry. */
function makeExplicit(value) {
  var entry = {
    explicit: true,
    value: value
  };
  return entry;
}

/** Wrap a value as an implicit (compiler-determined) split entry. */
function makeImplicit(value) {
  var entry = {
    explicit: false,
    value: value
  };
  return entry;
}
|
|
|
|
/**
 * Build a tie-breaker that prefers the entry whose value compares greater
 * under `compare`; on an exact tie it falls back to defaultTieBreaker.
 */
function tieBreakByComparing(compare) {
  return function (v1, v2, property, propertyOf) {
    var diff = compare(v1.value, v2.value);

    if (diff > 0) {
      return v1;
    }

    if (diff < 0) {
      return v2;
    }

    return defaultTieBreaker(v1, v2, property, propertyOf);
  };
}

/**
 * Default tie-breaker: keep v1. Warns when both entries are explicit, since
 * the user specified two conflicting values.
 */
function defaultTieBreaker(v1, v2, property, propertyOf) {
  if (v1.explicit && v2.explicit) {
    warn(mergeConflictingProperty(property, propertyOf, v1.value, v2.value));
  } // If equal score, prefer v1.


  return v1;
}

/**
 * Merge two {value, explicit} entries. An explicit entry beats an implicit
 * one; equal values keep v1; otherwise the (optional) tie-breaker decides.
 */
function mergeValuesWithExplicit(v1, v2, property, propertyOf) {
  var tieBreaker = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : defaultTieBreaker;

  // First run: nothing accumulated yet, take the incoming entry.
  if (v1 === undefined || v1.value === undefined) {
    return v2;
  }

  // Explicitness dominates.
  if (v1.explicit && !v2.explicit) {
    return v1;
  }

  if (v2.explicit && !v1.explicit) {
    return v2;
  }

  // Same explicitness: identical values keep v1, otherwise break the tie.
  if (deepEqual(v1.value, v2.value)) {
    return v1;
  }

  return tieBreaker(v1, v2, property, propertyOf);
}
|
|
/**
|
|
* Class to track interesting properties (see https://15721.courses.cs.cmu.edu/spring2016/papers/graefe-ieee1995.pdf)
|
|
* about how fields have been parsed or whether they have been derived in a transform. We use this to not parse the
|
|
* same field again (or differently).
|
|
*/
|
|
|
|
|
|
// Split subclass that additionally tracks `parseNothing`: whether field
// parsing should be skipped entirely for this branch of the data flow.
var AncestorParse = /*#__PURE__*/function (_Split) {
  _inherits(AncestorParse, _Split);

  var _super2 = _createSuper(AncestorParse);

  function AncestorParse() {
    var _this7;

    var explicit = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
    var implicit = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
    var parseNothing = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;

    _classCallCheck(this, AncestorParse);

    _this7 = _super2.call(this, explicit, implicit);
    _this7.explicit = explicit;
    _this7.implicit = implicit;
    _this7.parseNothing = parseNothing;
    return _this7;
  }

  _createClass(AncestorParse, [{
    key: "clone",
    // Clone via Split.clone, then carry over the parseNothing flag.
    value: function clone() {
      var clone = _get(_getPrototypeOf(AncestorParse.prototype), "clone", this).call(this);

      clone.parseNothing = this.parseNothing;
      return clone;
    }
  }]);

  return AncestorParse;
}(Split);
|
|
|
|
/** Shared helper for the data-source type guards: property-presence test. */
function _hasDataSourceProp(data, prop) {
  return prop in data;
}

/** True when `data` is a URL data source (has a `url` property). */
function isUrlData(data) {
  return _hasDataSourceProp(data, 'url');
}

/** True when `data` is an inline data source (has a `values` property). */
function isInlineData(data) {
  return _hasDataSourceProp(data, 'values');
}

/**
 * True when `data` is a named data source: it has a `name` and is not any of
 * the other concrete source kinds.
 */
function isNamedData(data) {
  return _hasDataSourceProp(data, 'name') && !isUrlData(data) && !isInlineData(data) && !isGenerator(data);
}

/**
 * True when `data` is a generator source (sequence/sphere/graticule).
 * Passes falsy inputs through unchanged, like the `data && (...)` idiom.
 */
function isGenerator(data) {
  return data ? isSequenceGenerator(data) || isSphereGenerator(data) || isGraticuleGenerator(data) : data;
}

/** True when `data` is a sequence generator. */
function isSequenceGenerator(data) {
  return _hasDataSourceProp(data, 'sequence');
}

/** True when `data` is a sphere generator. */
function isSphereGenerator(data) {
  return _hasDataSourceProp(data, 'sphere');
}

/** True when `data` is a graticule generator. */
function isGraticuleGenerator(data) {
  return _hasDataSourceProp(data, 'graticule');
}
|
|
|
|
var DataSourceType;

// TypeScript-style numeric enum: builds both the forward (name -> ordinal)
// and reverse (ordinal -> name) mappings on the same object.
(function (DataSourceType) {
  var members = ['Raw', 'Main', 'Row', 'Column', 'Lookup'];

  for (var ord = 0; ord < members.length; ord++) {
    DataSourceType[DataSourceType[members[ord]] = ord] = members[ord];
  }
})(DataSourceType || (DataSourceType = {}));
|
|
|
|
/** Shared helper for the transform type guards: property-presence test. */
function _hasTransformProp(t, prop) {
  return prop in t;
}

/** True when `t` is a filter transform. */
function isFilter(t) {
  return _hasTransformProp(t, 'filter');
}

/**
 * True when `t` is an impute sequence (an object with a defined `stop`).
 * Safe on null/undefined inputs.
 */
function isImputeSequence(t) {
  return t != null && t['stop'] !== undefined;
}

/** True when `t` is a lookup transform. */
function isLookup(t) {
  return _hasTransformProp(t, 'lookup');
}

/** True when a lookup's `from` references a data source. */
function isLookupData(from) {
  return _hasTransformProp(from, 'data');
}

/** True when a lookup's `from` references a selection. */
function isLookupSelection(from) {
  return _hasTransformProp(from, 'selection');
}

/** True when `t` is a pivot transform. */
function isPivot(t) {
  return _hasTransformProp(t, 'pivot');
}

/** True when `t` is a density transform. */
function isDensity(t) {
  return _hasTransformProp(t, 'density');
}

/** True when `t` is a quantile transform. */
function isQuantile(t) {
  return _hasTransformProp(t, 'quantile');
}

/** True when `t` is a regression transform. */
function isRegression(t) {
  return _hasTransformProp(t, 'regression');
}

/** True when `t` is a loess transform. */
function isLoess(t) {
  return _hasTransformProp(t, 'loess');
}

/** True when `t` is a sample transform. */
function isSample(t) {
  return _hasTransformProp(t, 'sample');
}

/** True when `t` is a window transform. */
function isWindow(t) {
  return _hasTransformProp(t, 'window');
}

/** True when `t` is a joinaggregate transform. */
function isJoinAggregate(t) {
  return _hasTransformProp(t, 'joinaggregate');
}

/** True when `t` is a flatten transform. */
function isFlatten(t) {
  return _hasTransformProp(t, 'flatten');
}

/** True when `t` is a calculate transform. */
function isCalculate(t) {
  return _hasTransformProp(t, 'calculate');
}

/** True when `t` is a bin transform. */
function isBin(t) {
  return _hasTransformProp(t, 'bin');
}

/** True when `t` is an impute transform. */
function isImpute(t) {
  return _hasTransformProp(t, 'impute');
}

/** True when `t` is a timeUnit transform. */
function isTimeUnit(t) {
  return _hasTransformProp(t, 'timeUnit');
}

/** True when `t` is an aggregate transform. */
function isAggregate$1(t) {
  return _hasTransformProp(t, 'aggregate');
}

/** True when `t` is a stack transform. */
function isStack(t) {
  return _hasTransformProp(t, 'stack');
}

/** True when `t` is a fold transform. */
function isFold(t) {
  return _hasTransformProp(t, 'fold');
}
|
|
|
|
/**
 * Normalize an array of transforms: filter predicates are rewritten via
 * normalizeLogicalComposition; every other transform passes through as-is.
 */
function normalizeTransform(transform) {
  return transform.map(function (t) {
    if (!isFilter(t)) {
      return t;
    }

    return {
      filter: normalizeLogicalComposition(t.filter, normalizePredicate)
    };
  });
}
|
|
/**
 * Parse an event selector string.
 * Returns an array of event stream definitions.
 *
 * NOTE(review): assigns the module-level mutable variables DEFAULT_SOURCE and
 * MARKS that the helper parsers below read — parsing is not reentrant.
 */


function parseSelector(selector, source, marks) {
  DEFAULT_SOURCE = source || VIEW; // fall back to the 'view' source
  MARKS = marks || DEFAULT_MARKS; // fall back to the built-in mark-type table
  return parseMerge(selector.trim()).map(parseSelector$1);
}
|
|
|
|
// Tokens and shared mutable state for the event-selector parser.
// DEFAULT_SOURCE and MARKS are (re)assigned by parseSelector on each call.
var VIEW = 'view',
    LBRACK = '[',
    RBRACK = ']',
    LBRACE = '{',
    RBRACE = '}',
    COLON = ':',
    COMMA = ',',
    NAME = '@',
    GT = '>',
    ILLEGAL = /[[\]{}]/,
    // matches any stray bracket/brace in a parsed token
    DEFAULT_SOURCE,
    MARKS,
    // Mark types recognized in selectors; values are truthy markers (1).
    DEFAULT_MARKS = {
  '*': 1,
  arc: 1,
  area: 1,
  group: 1,
  image: 1,
  line: 1,
  path: 1,
  rect: 1,
  rule: 1,
  shape: 1,
  symbol: 1,
  text: 1,
  trail: 1
};
|
|
|
|
// Returns a truthy marker (1) when `type` is a mark type in the current
// MARKS table (set by parseSelector), otherwise undefined — callers use it
// only in boolean context.
function isMarkType(type) {
  return MARKS[type];
}
|
|
|
|
/**
 * Scan `s` from index `i` for the first occurrence of `endChar` at nesting
 * depth zero. Characters in `pushChar` open a nesting level and characters
 * in `popChar` close one (pop is checked before push, so a character in both
 * sets pops). Returns s.length when no match is found.
 */
function find(s, i, endChar, pushChar, popChar) {
  var depth = 0;
  var len = s.length;

  while (i < len) {
    var ch = s[i];

    if (!depth && ch === endChar) {
      return i;
    } else if (popChar && popChar.indexOf(ch) >= 0) {
      --depth;
    } else if (pushChar && pushChar.indexOf(ch) >= 0) {
      ++depth;
    }

    ++i;
  }

  return i;
}
|
|
|
|
/**
 * Split a selector string on top-level commas (commas inside [..] or {..}
 * are ignored) and trim each piece. Throws on an empty selector.
 */
function parseMerge(s) {
  var output = [];
  var n = s.length;
  var start = 0;
  var i = 0;

  while (i < n) {
    i = find(s, i, COMMA, LBRACK + LBRACE, RBRACK + RBRACE);
    output.push(s.substring(start, i).trim());
    start = ++i;
  }

  if (!output.length) {
    throw 'Empty event selector: ' + s;
  }

  return output;
}
|
|
|
|
/**
 * Parse a single selector: a leading '[' marks a between selector,
 * anything else is a plain stream selector.
 */
function parseSelector$1(s) {
  if (s[0] === '[') {
    return parseBetween(s);
  }

  return parseStream(s);
}
|
|
|
|
/**
 * Parse a between selector of the form `[a, b] > stream`: events of `stream`
 * are consumed only between an occurrence of `a` and one of `b`. The two
 * bracketed selectors are parsed recursively; the result is the stream
 * definition with a `between` pair attached (wrapped one level deeper when
 * the stream itself already has a `between`).
 */
function parseBetween(s) {
  var n = s.length,
      i = 1,
      b,
      stream;
  // Find the matching ']' of the leading '[' (nesting-aware).
  i = find(s, i, RBRACK, LBRACK, RBRACK);

  if (i === n) {
    throw 'Empty between selector: ' + s;
  }

  b = parseMerge(s.substring(1, i));

  if (b.length !== 2) {
    throw 'Between selector must have two elements: ' + s;
  }

  s = s.slice(i + 1).trim();

  if (s[0] !== GT) {
    throw 'Expected \'>\' after between selector: ' + s;
  }

  b = b.map(parseSelector$1);
  stream = parseSelector$1(s.slice(1).trim());

  if (stream.between) {
    // The inner stream is itself bounded: nest it instead of overwriting.
    return {
      between: b,
      stream: stream
    };
  } else {
    stream.between = b;
  }

  return stream;
}
|
|
|
|
/**
 * Parse a single stream selector, e.g. `@name:mousedown[filter]{throttle}`.
 * Peels off, in order: an optional `{throttle[,debounce]}` suffix, an
 * optional `@markname` or `source:`/`marktype:` prefix, and any number of
 * `[filter]` clauses; what remains is the event type. Throws plain strings
 * on malformed input (matching the other parse helpers in this module).
 */
function parseStream(s) {
  var stream = {
    source: DEFAULT_SOURCE
  },
      source = [],
      throttle = [0, 0],
      markname = 0,
      start = 0,
      n = s.length,
      i = 0,
      j,
      filter; // extract throttle from end

  if (s[n - 1] === RBRACE) {
    i = s.lastIndexOf(LBRACE);

    if (i >= 0) {
      try {
        throttle = parseThrottle(s.substring(i + 1, n - 1));
      } catch (e) {
        throw 'Invalid throttle specification: ' + s;
      }

      s = s.slice(0, i).trim();
      n = s.length;
    } else throw 'Unmatched right brace: ' + s;

    i = 0;
  }

  if (!n) throw s; // set name flag based on first char

  if (s[0] === NAME) markname = ++i; // extract first part of multi-part stream selector

  j = find(s, i, COLON);

  if (j < n) {
    source.push(s.substring(start, j).trim());
    start = i = ++j;
  } // extract remaining part of stream selector


  i = find(s, i, LBRACK);

  if (i === n) {
    source.push(s.substring(start, n).trim());
  } else {
    source.push(s.substring(start, i).trim());
    filter = [];
    start = ++i;
    if (start === n) throw 'Unmatched left bracket: ' + s;
  } // extract filters


  while (i < n) {
    i = find(s, i, RBRACK);
    if (i === n) throw 'Unmatched left bracket: ' + s;
    filter.push(s.substring(start, i).trim());
    if (i < n - 1 && s[++i] !== LBRACK) throw 'Expected left bracket: ' + s;
    start = ++i;
  } // marshall event stream specification


  if (!(n = source.length) || ILLEGAL.test(source[n - 1])) {
    throw 'Invalid event selector: ' + s;
  }

  if (n > 1) {
    // Two-part selector: first token is a mark name, mark type, or source.
    stream.type = source[1];

    if (markname) {
      stream.markname = source[0].slice(1); // drop the leading '@'
    } else if (isMarkType(source[0])) {
      stream.marktype = source[0];
    } else {
      stream.source = source[0];
    }
  } else {
    stream.type = source[0];
  }

  // A trailing '!' marks the event as consumed.
  if (stream.type.slice(-1) === '!') {
    stream.consume = true;
    stream.type = stream.type.slice(0, -1);
  }

  if (filter != null) stream.filter = filter;
  if (throttle[0]) stream.throttle = throttle[0];
  if (throttle[1]) stream.debounce = throttle[1];
  return stream;
}
|
|
|
|
/**
 * Parse the body of a `{throttle[,debounce]}` clause into one or two
 * numbers. Throws (the raw string) on an empty clause, more than two
 * entries, or a non-numeric entry.
 */
function parseThrottle(s) {
  var parts = s.split(COMMA);

  if (!s.length || parts.length > 2) {
    throw s;
  }

  return parts.map(function (token) {
    var num = +token;

    if (num !== num) {
      // NaN is the only value not equal to itself.
      throw s;
    }

    return num;
  });
}
|
|
/**
 * Return a mixin that includes a Vega production rule for a Vega-Lite conditional channel definition
 * or a simple mixin if channel def has no condition.
 *
 * `refFn` converts a channel def (or one of its condition entries) into a
 * Vega value reference; the result is keyed under `vgChannel`.
 */


function wrapCondition(model, channelDef, vgChannel, refFn) {
  var condition = isConditionalDef(channelDef) && channelDef.condition;
  var valueRef = refFn(channelDef);

  if (condition) {
    // One production-rule entry per condition, each with its predicate test;
    // the unconditional valueRef (if any) becomes the trailing default.
    var conditions = array(condition);
    var vgConditions = conditions.map(function (c) {
      var conditionValueRef = refFn(c);
      var test = isConditionalSelection(c) ? parseSelectionPredicate(model, c.selection) // FIXME: remove casting once TS is no longer dumb about it
      : expression(model, c.test); // FIXME: remove casting once TS is no longer dumb about it

      return Object.assign({
        test: test
      }, conditionValueRef);
    });
    return _defineProperty({}, vgChannel, [].concat(_toConsumableArray(vgConditions), _toConsumableArray(valueRef !== undefined ? [valueRef] : [])));
  } else {
    return valueRef !== undefined ? _defineProperty({}, vgChannel, valueRef) : {};
  }
}
|
|
|
|
/**
 * Build the encode mixin for a text-like channel (default: 'text'),
 * including any conditional production rules.
 */
function text(model) {
  var channel = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'text';
  var channelDef = model.encoding[channel];

  var toRef = function (def) {
    return textRef(def, model.config);
  };

  return wrapCondition(model, channelDef, channel, toRef);
}
|
|
|
|
/**
 * Convert a text channel definition into a Vega value reference:
 * value defs become a signal/value ref, field/datum defs become a
 * formatted signal ref, anything else yields undefined.
 * `expr` names the datum expression context (default 'datum').
 */
function textRef(channelDef, config) {
  var expr = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 'datum';

  if (!channelDef) {
    return undefined;
  }

  if (isValueDef(channelDef)) {
    return signalOrValueRef(channelDef.value);
  }

  if (isFieldOrDatumDef(channelDef)) {
    var mixins = getFormatMixins(channelDef);
    return formatSignalRef({
      fieldOrDatumDef: channelDef,
      format: mixins.format,
      formatType: mixins.formatType,
      expr: expr,
      config: config
    });
  }

  return undefined;
}
|
|
|
|
/**
 * Build the encode mixin for the tooltip channel. An array tooltip encoding
 * becomes a single multi-field signal; otherwise the channel def (possibly
 * conditional) is resolved, falling back to the mark/config `tooltip`
 * setting when the encoding itself yields nothing.
 */
function tooltip(model) {
  var opt = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
  var encoding = model.encoding,
      markDef = model.markDef,
      config = model.config,
      stack = model.stack;
  var channelDef = encoding.tooltip;

  if (isArray(channelDef)) {
    // Multiple tooltip fields: emit one combined signal object.
    return {
      tooltip: tooltipRefForEncoding({
        tooltip: channelDef
      }, stack, config, opt)
    };
  } else {
    // Reactive geometry (e.g. line/area overlays) nests the datum.
    var datum = opt.reactiveGeom ? 'datum.datum' : 'datum';
    return wrapCondition(model, channelDef, 'tooltip', function (cDef) {
      // use valueRef based on channelDef first
      var tooltipRefFromChannelDef = textRef(cDef, config, datum);

      if (tooltipRefFromChannelDef) {
        return tooltipRefFromChannelDef;
      }

      if (cDef === null) {
        // Allow using encoding.tooltip = null to disable tooltip
        return undefined;
      }

      var markTooltip = getMarkPropOrConfig('tooltip', markDef, config);

      if (markTooltip === true) {
        // `tooltip: true` is shorthand for showing the encoded fields.
        markTooltip = {
          content: 'encoding'
        };
      }

      if (isString(markTooltip)) {
        return {
          value: markTooltip
        };
      } else if (isObject(markTooltip)) {
        // `tooltip` is `{fields: 'encodings' | 'fields'}`
        if (isSignalRef(markTooltip)) {
          return markTooltip;
        } else if (markTooltip.content === 'encoding') {
          return tooltipRefForEncoding(encoding, stack, config, opt);
        } else {
          // content === 'data': expose the raw datum.
          return {
            signal: datum
          };
        }
      }

      return undefined;
    });
  }
}
|
|
|
|
/**
 * Collect the tooltip key/value expressions for every field-backed channel
 * of an encoding. Returns a map from display title to a Vega expression
 * string. Special cases: binned x/y pairs are rendered as a single range
 * (suppressing the x2/y2 entry), and normalized stacks are formatted with
 * normalizeStack. Duplicate titles keep the first value encountered.
 */
function tooltipData(encoding, stack, config) {
  var _ref22 = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {},
      reactiveGeom = _ref22.reactiveGeom;

  // Channels whose entries are folded into another channel's output.
  var toSkip = {};
  var expr = reactiveGeom ? 'datum.datum' : 'datum';
  var tuples = [];

  // Append one {channel, key, value} tuple for a field def on `channel`.
  function add(fDef, channel) {
    var mainChannel = getMainRangeChannel(channel);
    var fieldDef = isTypedFieldDef(fDef) ? fDef : Object.assign(Object.assign({}, fDef), {
      type: encoding[mainChannel].type // for secondary field def, copy type from main channel

    });
    var title = fieldDef.title || defaultTitle(fieldDef, config);
    var key = array(title).join(', ');
    var value;

    if (isXorY(channel)) {
      var channel2 = channel === 'x' ? 'x2' : 'y2';
      var fieldDef2 = getFieldDef(encoding[channel2]);

      if (isBinned(fieldDef.bin) && fieldDef2) {
        // Binned range: show "start – end" and skip the secondary channel.
        var startField = _vgField(fieldDef, {
          expr: expr
        });

        var endField = _vgField(fieldDef2, {
          expr: expr
        });

        var _getFormatMixins3 = getFormatMixins(fieldDef),
            format = _getFormatMixins3.format,
            formatType = _getFormatMixins3.formatType;

        value = binFormatExpression(startField, endField, format, formatType, config);
        toSkip[channel2] = true;
      } else if (stack && stack.fieldChannel === channel && stack.offset === 'normalize') {
        // Normalized stack: format the stacked proportion instead of the raw value.
        var _getFormatMixins4 = getFormatMixins(fieldDef),
            _format3 = _getFormatMixins4.format,
            _formatType3 = _getFormatMixins4.formatType;

        value = formatSignalRef({
          fieldOrDatumDef: fieldDef,
          format: _format3,
          formatType: _formatType3,
          expr: expr,
          config: config,
          normalizeStack: true
        }).signal;
      }
    }

    // Default: the channel's standard text expression.
    value = value !== null && value !== void 0 ? value : textRef(fieldDef, config, expr).signal;
    tuples.push({
      channel: channel,
      key: key,
      value: value
    });
  }

  forEach(encoding, function (channelDef, channel) {
    if (isFieldDef(channelDef)) {
      add(channelDef, channel);
    } else if (hasConditionalFieldDef(channelDef)) {
      add(channelDef.condition, channel);
    }
  });
  var out = {};

  // De-duplicate by title, dropping entries from skipped channels.
  for (var _i8 = 0, _tuples = tuples; _i8 < _tuples.length; _i8++) {
    var _tuples$_i = _tuples[_i8],
        channel = _tuples$_i.channel,
        key = _tuples$_i.key,
        value = _tuples$_i.value;

    if (!toSkip[channel] && !out[key]) {
      out[key] = value;
    }
  }

  return out;
}
|
|
|
|
/**
 * Turn an encoding's tooltip data into a single Vega signal reference of the
 * form {"title": expr, ...}; returns undefined when there is nothing to show.
 */
function tooltipRefForEncoding(encoding, stack, config) {
  var opts = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};

  var data = tooltipData(encoding, stack, config, {
    reactiveGeom: opts.reactiveGeom
  });

  var keyValues = entries(data).map(function (entry) {
    return "\"".concat(entry.key, "\": ").concat(entry.value);
  });

  if (keyValues.length === 0) {
    return undefined;
  }

  return {
    signal: "{".concat(keyValues.join(', '), "}")
  };
}
|
|
|
|
/**
 * Build the aria-related encode mixins for a mark: the `aria` flag itself
 * plus the role description and textual description mixins.
 */
function aria(model) {
  var markDef = model.markDef;
  var config = model.config;
  var enableAria = getMarkPropOrConfig('aria', markDef, config); // We can ignore other aria properties if ariaHidden is true.

  if (enableAria === false) {
    // getMarkGroups sets aria to false already so we don't have to set it in the encode block
    return {};
  }

  var mixins = {};

  if (enableAria) {
    mixins.aria = enableAria;
  }

  return Object.assign(mixins, ariaRoleDescription(model), description(model));
}
|
|
|
|
/**
 * Build the ariaRoleDescription encode mixin. Uses the explicit mark/config
 * value when set; otherwise defaults to the mark name for marks that are not
 * native Vega mark types. Empty when aria is globally disabled.
 */
function ariaRoleDescription(model) {
  var mark = model.mark;
  var markDef = model.markDef;
  var config = model.config;

  if (config.aria === false) {
    return {};
  }

  var ariaRoleDesc = getMarkPropOrConfig('ariaRoleDescription', markDef, config);

  if (ariaRoleDesc != null) {
    return {
      ariaRoleDescription: {
        value: ariaRoleDesc
      }
    };
  }

  // Native Vega marks already carry an implicit role; only composite
  // Vega-Lite marks need an explicit description.
  if (mark in VG_MARK_INDEX) {
    return {};
  }

  return {
    ariaRoleDescription: {
      value: mark
    }
  };
}
|
|
|
|
/**
 * Build the `description` encode mixin for a mark. Priority: the
 * description encoding channel, then an explicit mark/config value, then
 * (unless aria is disabled) an auto-generated summary assembled from the
 * tooltip data; undefined when there is nothing to describe.
 */
function description(model) {
  var encoding = model.encoding,
      markDef = model.markDef,
      config = model.config,
      stack = model.stack;
  var channelDef = encoding.description;

  if (channelDef) {
    return wrapCondition(model, channelDef, 'description', function (cDef) {
      return textRef(cDef, model.config);
    });
  } // Use default from mark def or config if defined.
  // Functions in encode usually just return undefined but since we are defining a default below, we need to check the default here.


  var descriptionValue = getMarkPropOrConfig('description', markDef, config);

  if (descriptionValue != null) {
    return {
      description: signalOrValueRef(descriptionValue)
    };
  }

  if (config.aria === false) {
    return {};
  }

  var data = tooltipData(encoding, stack, config);

  if (isEmpty(data)) {
    return undefined;
  }

  // Concatenate "key: value" pairs, separated by "; ", into one signal.
  return {
    description: {
      signal: entries(data).map(function (_ref25, index) {
        var key = _ref25.key,
            value = _ref25.value;
        return "\"".concat(index > 0 ? '; ' : '').concat(key, ": \" + (").concat(value, ")");
      }).join(' + ')
    }
  };
}
|
|
/**
 * Return encode for non-positional channels with scales. (Text doesn't have scale.)
 *
 * Resolves a default reference for the channel (opt.defaultRef, or one
 * derived from opt.defaultValue / the mark-and-config value) and wraps the
 * channel definition, conditions included, via wrapCondition/midPoint.
 */


function nonPosition(channel, model) {
  var opt = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  var markDef = model.markDef,
      encoding = model.encoding,
      config = model.config;
  var vgChannel = opt.vgChannel;
  var defaultRef = opt.defaultRef,
      defaultValue = opt.defaultValue;

  if (defaultRef === undefined) {
    // No explicit default ref: derive one from the default value, falling
    // back to the mark/config property for this channel.
    // prettier-ignore
    defaultValue = defaultValue !== null && defaultValue !== void 0 ? defaultValue : getMarkPropOrConfig(channel, markDef, config, {
      vgChannel: vgChannel,
      ignoreVgConfig: true
    });

    if (defaultValue !== undefined) {
      defaultRef = signalOrValueRef(defaultValue);
    }
  }

  var channelDef = encoding[channel];
  // Output under vgChannel when given, otherwise under the channel itself.
  return wrapCondition(model, channelDef, vgChannel !== null && vgChannel !== void 0 ? vgChannel : channel, function (cDef) {
    return midPoint({
      channel: channel,
      channelDef: cDef,
      markDef: markDef,
      config: config,
      scaleName: model.scaleName(channel),
      scale: model.getScaleComponent(channel),
      stack: null,
      // No need to provide stack for non-position as it does not affect mid point
      defaultRef: defaultRef
    });
  });
}
|
|
|
|
// Build fill/stroke encode entries from the color/fill/stroke channels plus defaults.
// `opt.filled` may override whether the mark is treated as filled (used by trail).
function color(model) {
  var opt = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {
    filled: undefined
  };

  var _a, _b, _c, _d;

  var markDef = model.markDef,
      encoding = model.encoding,
      config = model.config;
  var markType = markDef.type; // Allow filled to be overridden (for trail's "filled")

  var filled = (_a = opt.filled) !== null && _a !== void 0 ? _a : getMarkPropOrConfig('filled', markDef, config);
  // Symbol-like marks get a transparent fill so they stay interactive even when unfilled.
  var transparentIfNeeded = contains(['bar', 'point', 'circle', 'square', 'geoshape'], markType) ? 'transparent' : undefined;
  var defaultFill = (_c = (_b = getMarkPropOrConfig(filled === true ? 'color' : undefined, markDef, config, {
    vgChannel: 'fill'
  })) !== null && _b !== void 0 ? _b : // need to add this manually as getMarkConfig normally drops config.mark[channel] if vgChannel is specified
  config.mark[filled === true && 'color']) !== null && _c !== void 0 ? _c : // If there is no fill, always fill symbols, bar, geoshape
  // with transparent fills https://github.com/vega/vega-lite/issues/1316
  transparentIfNeeded;
  var defaultStroke = (_d = getMarkPropOrConfig(filled === false ? 'color' : undefined, markDef, config, {
    vgChannel: 'stroke'
  })) !== null && _d !== void 0 ? _d : // need to add this manually as getMarkConfig normally drops config.mark[channel] if vgChannel is specified
  config.mark[filled === false && 'color'];
  var colorVgChannel = filled ? 'fill' : 'stroke';
  var fillStrokeMarkDefAndConfig = Object.assign(Object.assign({}, defaultFill ? {
    fill: signalOrValueRef(defaultFill)
  } : {}), defaultStroke ? {
    stroke: signalOrValueRef(defaultStroke)
  } : {});

  // Warn when an explicit fill/stroke in the mark definition shadows the color property.
  if (markDef.color && (filled ? markDef.fill : markDef.stroke)) {
    warn(droppingColor('property', {
      fill: 'fill' in markDef,
      stroke: 'stroke' in markDef
    }));
  }

  // Merge: defaults, then color channel (mapped to fill or stroke), then explicit fill/stroke channels.
  return Object.assign(Object.assign(Object.assign(Object.assign({}, fillStrokeMarkDefAndConfig), nonPosition('color', model, {
    vgChannel: colorVgChannel,
    defaultValue: filled ? defaultFill : defaultStroke
  })), nonPosition('fill', model, {
    // if there is encoding.fill, include default fill just in case we have conditional-only fill encoding
    defaultValue: encoding.fill ? defaultFill : undefined
  })), nonPosition('stroke', model, {
    // if there is encoding.stroke, include default fill just in case we have conditional-only stroke encoding
    defaultValue: encoding.stroke ? defaultStroke : undefined
  }));
}
|
|
|
|
// Return a "zindex" encode entry when a constant `order` encoding is used
// to control z-ordering. Path marks use order for sorting instead, so they
// never get a zindex here.
function zindex(model) {
  var mark = model.mark;
  var orderDef = model.encoding.order;

  // Only non-path marks with a constant (value) order definition qualify.
  if (isPathMark(mark) || !isValueDef(orderDef)) {
    return {};
  }

  return wrapCondition(model, orderDef, 'zindex', function (conditionDef) {
    return conditionDef;
  });
}
|
|
|
|
// Read the pixel offset for a position channel (xOffset/yOffset/...) from the
// mark definition. Falsy values (including 0) yield undefined, matching the
// original behavior.
function getOffset(channel, markDef) {
  // TODO: in the future read from encoding channel too
  var offsetValue = markDef[getOffsetChannel(channel)];
  return offsetValue ? offsetValue : undefined;
}
|
|
/**
 * Return encode for point (non-band) position channels.
 */
function pointPosition(channel, model, _ref26) {
  var defaultPos = _ref26.defaultPos,
      vgChannel = _ref26.vgChannel,
      isMidPoint = _ref26.isMidPoint;
  var encoding = model.encoding,
      markDef = model.markDef,
      config = model.config,
      stack = model.stack;
  var channelDef = encoding[channel];
  var channel2Def = encoding[getSecondaryRangeChannel(channel)];
  var scaleName = model.scaleName(channel);
  var scale = model.getScaleComponent(channel);
  var offset = getOffset(channel, markDef); // Get default position or position from mark def

  // Lazy default ref; only evaluated when positionRef needs a fallback.
  var defaultRef = pointPositionDefaultRef({
    model: model,
    defaultPos: defaultPos,
    channel: channel,
    scaleName: scaleName,
    scale: scale
  });
  var valueRef = !channelDef && isXorY(channel) && (encoding.latitude || encoding.longitude) ? // use geopoint output if there are lat/long and there is no point position overriding lat/long.
  {
    field: model.getName(channel)
  } : positionRef({
    channel: channel,
    channelDef: channelDef,
    channel2Def: channel2Def,
    markDef: markDef,
    config: config,
    isMidPoint: isMidPoint,
    scaleName: scaleName,
    scale: scale,
    stack: stack,
    offset: offset,
    defaultRef: defaultRef
  });
  return valueRef ? _defineProperty({}, vgChannel || channel, valueRef) : undefined;
} // TODO: we need to find a way to refactor these so that scaleName is a part of scale
// but that's complicated. For now, this is a huge step moving forward.
|
|
|
|
/**
 * @return Vega ValueRef for normal x- or y-position without projection
 */
function positionRef(params) {
  var channel = params.channel,
      channelDef = params.channelDef,
      isMidPoint = params.isMidPoint,
      scaleName = params.scaleName,
      stack = params.stack,
      offset = params.offset,
      markDef = params.markDef,
      config = params.config; // This isn't a part of midPoint because we use midPoint for non-position too

  // Stacked field channel: position at the stack end, or interpolated within the stack band.
  if (isFieldOrDatumDef(channelDef) && stack && channel === stack.fieldChannel) {
    if (isFieldDef(channelDef)) {
      var band = getBand({
        channel: channel,
        fieldDef: channelDef,
        isMidPoint: isMidPoint,
        markDef: markDef,
        stack: stack,
        config: config
      });

      if (band !== undefined) {
        return interpolatedSignalRef({
          scaleName: scaleName,
          fieldOrDatumDef: channelDef,
          startSuffix: 'start',
          band: band,
          offset: offset
        });
      }
    } // x or y use stack_end so that stacked line's point mark use stack_end too.


    return valueRefForFieldOrDatumDef(channelDef, scaleName, {
      suffix: 'end'
    }, {
      offset: offset
    });
  }

  // Non-stacked: regular mid-point ref guarded against invalid values.
  return midPointRefWithPositionInvalidTest(params);
}
|
|
|
|
// Returns a thunk producing the default position ValueRef for a channel,
// according to defaultPos ('zeroOrMin' | 'zeroOrMax' | 'mid' | null).
function pointPositionDefaultRef(_ref28) {
  var model = _ref28.model,
      defaultPos = _ref28.defaultPos,
      channel = _ref28.channel,
      scaleName = _ref28.scaleName,
      scale = _ref28.scale;
  var markDef = model.markDef,
      config = model.config;
  return function () {
    var mainChannel = getMainRangeChannel(channel);
    var vgChannel = getVgPositionChannel(channel);
    var definedValueOrConfig = getMarkPropOrConfig(channel, markDef, config, {
      vgChannel: vgChannel
    });

    // A value from the mark definition / config takes precedence over computed defaults.
    if (definedValueOrConfig !== undefined) {
      return widthHeightValueOrSignalRef(channel, definedValueOrConfig);
    }

    switch (defaultPos) {
      case 'zeroOrMin':
      case 'zeroOrMax':
        if (scaleName) {
          var _scaleType2 = scale.get('type');

          // Log/time/UTC scales have no meaningful zero (empty statement is the
          // transpiler's rendering of a no-op "then" branch); otherwise anchor
          // at 0 when zero is definitely in the domain.
          if (contains([ScaleType.LOG, ScaleType.TIME, ScaleType.UTC], _scaleType2)) ;else {
            if (scale.domainDefinitelyIncludesZero()) {
              return {
                scale: scaleName,
                value: 0
              };
            }
          }
        }

        if (defaultPos === 'zeroOrMin') {
          // Min end of the range: bottom for y, 0 for everything else.
          return mainChannel === 'y' ? {
            field: {
              group: 'height'
            }
          } : {
            value: 0
          };
        } else {
          // zeroOrMax
          switch (mainChannel) {
            case 'radius':
              // max of radius is min(width, height) / 2
              return {
                signal: "min(".concat(model.width.signal, ",").concat(model.height.signal, ")/2")
              };

            case 'theta':
              return {
                signal: '2*PI'
              };

            case 'x':
              return {
                field: {
                  group: 'width'
                }
              };

            case 'y':
              return {
                value: 0
              };
          }
        }

        break;

      case 'mid':
        {
          // Center of the plot: half of the corresponding size signal.
          var sizeRef = model[getSizeChannel(channel)];
          return Object.assign(Object.assign({}, sizeRef), {
            mult: 0.5
          });
        }
    } // defaultPos === null


    return undefined;
  };
}
|
|
|
|
// Map of horizontal alignment -> Vega x position channel.
var ALIGNED_X_CHANNEL = {
  left: 'x',
  center: 'xc',
  right: 'x2'
};
// Map of vertical baseline -> Vega y position channel.
var BASELINED_Y_CHANNEL = {
  top: 'y',
  middle: 'yc',
  bottom: 'y2'
};
|
|
|
|
// Resolve the Vega position channel (x/xc/x2 or y/yc/y2) taking the mark's
// align/baseline into account; polar channels pass through unchanged.
function vgAlignedPositionChannel(channel, markDef, config) {
  var defaultAlign = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 'middle';

  // Polar position channels have no alignment variants.
  if (channel === 'radius' || channel === 'theta') {
    return getVgPositionChannel(channel);
  }

  var alignChannel = channel === 'x' ? 'align' : 'baseline';
  var align = getMarkPropOrConfig(alignChannel, markDef, config);

  if (channel === 'x') {
    return ALIGNED_X_CHANNEL[align || (defaultAlign === 'top' ? 'left' : 'center')];
  } else {
    return BASELINED_Y_CHANNEL[align || defaultAlign];
  }
}
|
|
/**
 * Utility for area/rule position, which can be either point or range. (One of the axes should be point and the other should be range.)
 */
function pointOrRangePosition(channel, model, _ref29) {
  var defaultPos = _ref29.defaultPos,
      defaultPos2 = _ref29.defaultPos2,
      range = _ref29.range;
  // The ranged variant needs both default positions; the point variant only the first.
  return range ? rangePosition(channel, model, {
    defaultPos: defaultPos,
    defaultPos2: defaultPos2
  }) : pointPosition(channel, model, {
    defaultPos: defaultPos
  });
}
|
|
|
|
// Return encode for a ranged position channel: main position plus position2/size mixins.
function rangePosition(channel, model, _ref30) {
  var defaultPos = _ref30.defaultPos,
      defaultPos2 = _ref30.defaultPos2;
  var markDef = model.markDef,
      config = model.config;
  var channel2 = getSecondaryRangeChannel(channel);
  var sizeChannel = getSizeChannel(channel);
  var pos2Mixins = pointPosition2OrSize(model, defaultPos2, channel2);
  var vgChannel = pos2Mixins[sizeChannel] ? // If there is width/height, we need to position the marks based on the alignment.
  vgAlignedPositionChannel(channel, markDef, config) : // Otherwise, make sure to apply to the right Vg Channel (for arc mark)
  getVgPositionChannel(channel);
  return Object.assign(Object.assign({}, pointPosition(channel, model, {
    defaultPos: defaultPos,
    vgChannel: vgChannel
  })), pos2Mixins);
}
|
|
/**
 * Return encode for x2, y2.
 * If channel is not specified, return one channel based on orientation.
 */
function pointPosition2OrSize(model, defaultPos, channel) {
  var _position2orSize;

  var encoding = model.encoding,
      mark = model.mark,
      markDef = model.markDef,
      stack = model.stack,
      config = model.config;
  var baseChannel = getMainRangeChannel(channel);
  var sizeChannel = getSizeChannel(channel);
  var vgChannel = getVgPositionChannel(channel);
  var channelDef = encoding[baseChannel];
  var scaleName = model.scaleName(baseChannel);
  var scale = model.getScaleComponent(baseChannel);
  // Offset comes from the secondary channel if present, otherwise falls back to the main channel.
  var offset = channel in encoding || channel in markDef ? getOffset(channel, model.markDef) : getOffset(baseChannel, model.markDef);

  if (!channelDef && (channel === 'x2' || channel === 'y2') && (encoding.latitude || encoding.longitude)) {
    // use geopoint output if there are lat2/long2 and there is no point position2 overriding lat2/long2.
    return _defineProperty({}, vgChannel, {
      field: model.getName(channel)
    });
  }

  var valueRef = position2Ref({
    channel: channel,
    channelDef: channelDef,
    channel2Def: encoding[channel],
    markDef: markDef,
    config: config,
    scaleName: scaleName,
    scale: scale,
    stack: stack,
    offset: offset,
    defaultRef: undefined
  });

  if (valueRef !== undefined) {
    return _defineProperty({}, vgChannel, valueRef);
  } // TODO: check width/height encoding here once we add them
  // no x2/y2 encoding, then try to read x2/y2 or width/height based on precedence:
  // markDef > config.style > mark-specific config (config[mark]) > general mark config (config.mark)


  return position2orSize(channel, markDef) || position2orSize(channel, (_position2orSize = {}, _defineProperty(_position2orSize, channel, getMarkStyleConfig(channel, markDef, config.style)), _defineProperty(_position2orSize, sizeChannel, getMarkStyleConfig(sizeChannel, markDef, config.style)), _position2orSize)) || position2orSize(channel, config[mark]) || position2orSize(channel, config.mark) || _defineProperty({}, vgChannel, pointPositionDefaultRef({
    model: model,
    defaultPos: defaultPos,
    channel: channel,
    scaleName: scaleName,
    scale: scale
  })());
}
|
|
|
|
// ValueRef for the secondary position channel (x2/y2/theta2/radius2).
function position2Ref(_ref34) {
  var channel = _ref34.channel,
      channelDef = _ref34.channelDef,
      channel2Def = _ref34.channel2Def,
      markDef = _ref34.markDef,
      config = _ref34.config,
      scaleName = _ref34.scaleName,
      scale = _ref34.scale,
      stack = _ref34.stack,
      offset = _ref34.offset,
      defaultRef = _ref34.defaultRef;

  if (isFieldOrDatumDef(channelDef) && stack && // If fieldChannel is X and channel is X2 (or Y and Y2)
  channel.charAt(0) === stack.fieldChannel.charAt(0)) {
    // Stacked field: the secondary position is the stack start.
    return valueRefForFieldOrDatumDef(channelDef, scaleName, {
      suffix: 'start'
    }, {
      offset: offset
    });
  }

  // Otherwise resolve the explicit channel2 definition (with invalid-value guard).
  return midPointRefWithPositionInvalidTest({
    channel: channel,
    channelDef: channel2Def,
    scaleName: scaleName,
    scale: scale,
    stack: stack,
    markDef: markDef,
    config: config,
    offset: offset,
    defaultRef: defaultRef
  });
}
|
|
|
|
// Read an explicit x2/y2 (or width/height) value for the channel from a
// mark-definition-like object. Precedence: Vega position channel, then the
// Vega-Lite channel, then the size channel. Returns undefined when none is set.
function position2orSize(channel, markDef) {
  var sizeChannel = getSizeChannel(channel);
  var vgPosChannel = getVgPositionChannel(channel);

  if (markDef[vgPosChannel] !== undefined) {
    return _defineProperty({}, vgPosChannel, widthHeightValueOrSignalRef(channel, markDef[vgPosChannel]));
  }

  if (markDef[channel] !== undefined) {
    return _defineProperty({}, vgPosChannel, widthHeightValueOrSignalRef(channel, markDef[channel]));
  }

  // Size uses a truthiness check (0 width/height is treated as unset here).
  if (markDef[sizeChannel]) {
    return _defineProperty({}, sizeChannel, widthHeightValueOrSignalRef(channel, markDef[sizeChannel]));
  }

  return undefined;
}
|
|
|
|
// Return position encode for rect-like marks (bar/rect/image): binned,
// discrete (position + size), or ranged position.
function rectPosition(model, channel, mark) {
  var _a, _b, _c, _d;

  var config = model.config,
      encoding = model.encoding,
      markDef = model.markDef,
      stack = model.stack;
  var channel2 = getSecondaryRangeChannel(channel);
  var sizeChannel = getSizeChannel(channel);
  var channelDef = encoding[channel];
  var channelDef2 = encoding[channel2];
  var scale = model.getScaleComponent(channel);
  var scaleType = scale ? scale.get('type') : undefined;
  var scaleName = model.scaleName(channel);
  var orient = markDef.orient;
  // Size is "defined" if encoded (width/height or size channel) or set via mark def / config.
  var hasSizeDef = (_b = (_a = encoding[sizeChannel]) !== null && _a !== void 0 ? _a : encoding.size) !== null && _b !== void 0 ? _b : getMarkPropOrConfig('size', markDef, config, {
    vgChannel: sizeChannel
  });
  var isBarBand = mark === 'bar' && (channel === 'x' ? orient === 'vertical' : orient === 'horizontal'); // x, x2, and width -- we must specify two of these in all conditions

  if (isFieldDef(channelDef) && (isBinning(channelDef.bin) || isBinned(channelDef.bin) || channelDef.timeUnit && !channelDef2) && !hasSizeDef && !hasDiscreteDomain(scaleType)) {
    // Binned / timeUnit field on a continuous scale: derive both ends from the bin.
    var band = getBand({
      channel: channel,
      fieldDef: channelDef,
      stack: stack,
      markDef: markDef,
      config: config
    });
    var axis = (_c = model.component.axes[channel]) === null || _c === void 0 ? void 0 : _c[0];
    var axisTranslate = (_d = axis === null || axis === void 0 ? void 0 : axis.get('translate')) !== null && _d !== void 0 ? _d : 0.5; // vega default is 0.5

    return rectBinPosition({
      fieldDef: channelDef,
      fieldDef2: channelDef2,
      channel: channel,
      markDef: markDef,
      scaleName: scaleName,
      band: band,
      axisTranslate: axisTranslate,
      spacing: isXorY(channel) ? getMarkPropOrConfig('binSpacing', markDef, config) : undefined,
      reverse: scale.get('reverse'),
      config: config
    });
  } else if ((isFieldOrDatumDef(channelDef) && hasDiscreteDomain(scaleType) || isBarBand) && !channelDef2) {
    // Discrete domain (or oriented bar band): emit position + size.
    return positionAndSize(mark, channelDef, channel, model);
  } else {
    // Otherwise: ranged position between zeroOrMax and zeroOrMin.
    return rangePosition(channel, model, {
      defaultPos: 'zeroOrMax',
      defaultPos2: 'zeroOrMin'
    });
  }
}
|
|
|
|
// Default size ValueRef for rect-like marks, depending on the scale type
// (point, band, continuous, or no scale at all).
function defaultSizeRef(mark, sizeChannel, scaleName, scale, config, band) {
  if (scale) {
    var _scaleType3 = scale.get('type');

    if (_scaleType3 === 'point' || _scaleType3 === 'band') {
      // Explicit discrete band size from the mark config wins.
      if (config[mark].discreteBandSize !== undefined) {
        return {
          value: config[mark].discreteBandSize
        };
      }

      if (_scaleType3 === ScaleType.POINT) {
        var scaleRange = scale.get('range');

        // Leave 2px of padding around each point-scale step.
        if (isVgRangeStep(scaleRange) && isNumber(scaleRange.step)) {
          return {
            value: scaleRange.step - 2
          };
        }

        return {
          value: DEFAULT_STEP - 2
        };
      } else {
        // BAND
        return {
          scale: scaleName,
          band: band
        };
      }
    } else {
      // continuous scale
      return {
        value: config[mark].continuousBandSize
      };
    }
  } // No Scale


  var step = getViewConfigDiscreteStep(config.view, sizeChannel);
  var value = getFirstDefined( // No scale is like discrete bar (with one item)
  config[mark].discreteBandSize, step - 2);
  return value !== undefined ? {
    value: value
  } : undefined;
}
|
|
/**
 * Output position encoding and its size encoding for continuous, point, and band scales.
 */
function positionAndSize(mark, fieldDef, channel, model) {
  var _a;

  var markDef = model.markDef,
      encoding = model.encoding,
      config = model.config,
      stack = model.stack;
  var orient = markDef.orient;
  var scaleName = model.scaleName(channel);
  var scale = model.getScaleComponent(channel);
  var vgSizeChannel = getSizeChannel(channel);
  var channel2 = getSecondaryRangeChannel(channel); // use "size" channel for bars, if there is orient and the channel matches the right orientation

  var useVlSizeChannel = orient === 'horizontal' && channel === 'y' || orient === 'vertical' && channel === 'x';
  var sizeFromMarkOrConfig = getMarkPropOrConfig(useVlSizeChannel ? 'size' : vgSizeChannel, markDef, config, {
    vgChannel: vgSizeChannel
  }); // Use size encoding / mark property / config if it exists

  var sizeMixins;

  if (encoding.size || sizeFromMarkOrConfig !== undefined) {
    if (useVlSizeChannel) {
      sizeMixins = nonPosition('size', model, {
        vgChannel: vgSizeChannel,
        defaultValue: sizeFromMarkOrConfig
      });
    } else {
      // Size only applies along the mark's orientation; otherwise it is dropped with a warning.
      warn(cannotApplySizeToNonOrientedMark(markDef.type));
    }
  } // Otherwise, apply default value


  var band = (_a = isFieldOrDatumDef(fieldDef) ? getBand({
    channel: channel,
    fieldDef: fieldDef,
    markDef: markDef,
    stack: stack,
    config: config
  }) : undefined) !== null && _a !== void 0 ? _a : 1;
  sizeMixins = sizeMixins || _defineProperty({}, vgSizeChannel, defaultSizeRef(mark, vgSizeChannel, scaleName, scale, config, band));
  /*
    Band scales with size value and all point scales, use xc/yc + band=0.5
    Otherwise (band scales that has size based on a band ref), use x/y with position band = (1 - size_band) / 2.
    In this case, size_band is the band specified in the x/y-encoding.
    By default band is 1, so `(1 - band) / 2` = 0.
    If band is 0.6, the the x/y position in such case should be `(1 - band) / 2` = 0.2
  */

  var center = (scale === null || scale === void 0 ? void 0 : scale.get('type')) !== 'band' || !('band' in sizeMixins[vgSizeChannel]);
  var vgChannel = vgAlignedPositionChannel(channel, markDef, config, center ? 'middle' : 'top');
  var offset = getOffset(channel, markDef);
  var posRef = midPointRefWithPositionInvalidTest({
    channel: channel,
    channelDef: fieldDef,
    markDef: markDef,
    config: config,
    scaleName: scaleName,
    scale: scale,
    stack: stack,
    offset: offset,
    defaultRef: pointPositionDefaultRef({
      model: model,
      defaultPos: 'mid',
      channel: channel,
      scaleName: scaleName,
      scale: scale
    }),
    band: center ? 0.5 : (1 - band) / 2
  });

  if (vgSizeChannel) {
    return Object.assign(_defineProperty({}, vgChannel, posRef), sizeMixins);
  } else {
    var _ref39;

    // otherwise, we must simulate size by setting position2 = position + size
    // (for theta/radius since Vega doesn't have thetaWidth/radiusWidth)
    var vgChannel2 = getVgPositionChannel(channel2);
    var sizeRef = sizeMixins[vgSizeChannel];
    var sizeOffset = offset ? Object.assign(Object.assign({}, sizeRef), {
      offset: offset
    }) : sizeRef;
    return _ref39 = {}, _defineProperty(_ref39, vgChannel, posRef), _defineProperty(_ref39, vgChannel2, isArray(posRef) ? [posRef[0], Object.assign(Object.assign({}, posRef[1]), {
      offset: sizeOffset
    })] : Object.assign(Object.assign({}, posRef), {
      offset: sizeOffset
    })), _ref39;
  }
}
|
|
|
|
// Compute the pixel (or signal) offset that applies bin spacing, axis translate,
// scale reversal, and an extra offset to one end of a binned rect.
// Returns 0 for polar channels, a number when all inputs are static, or a
// { signal } expression when any input is a Vega signal ref.
function getBinSpacing(channel, spacing, reverse, translate, offset) {
  // Polar position channels never get bin spacing.
  if (isPolarPositionChannel(channel)) {
    return 0;
  }

  // Start-side channels (x, y2) shift backwards; end-side channels shift forwards.
  var halfSpacing = channel === 'x' || channel === 'y2' ? -spacing / 2 : spacing / 2;
  var needsSignal = isSignalRef(reverse) || isSignalRef(offset) || isSignalRef(translate);

  if (!needsSignal) {
    // Pure numeric path: translate +/- (offset + halfSpacing), flipped when reversed.
    var numericOffset = offset || 0;
    return translate + (reverse ? -numericOffset - halfSpacing : +numericOffset + halfSpacing);
  }

  // At least one input is a signal: build an equivalent signal expression.
  var reverseExpr = signalOrStringValue(reverse);
  var offsetExpr = signalOrStringValue(offset);
  var translateExpr = signalOrStringValue(translate);
  var translatePart = translateExpr ? "".concat(translateExpr, " + ") : '';
  var signPart = reverseExpr ? "(".concat(reverseExpr, " ? -1 : 1) * ") : '';
  var offsetPart = offsetExpr ? "(".concat(offsetExpr, " + ").concat(halfSpacing, ")") : halfSpacing;
  return {
    signal: translatePart + signPart + offsetPart
  };
}
|
|
|
|
// Position encode for binned fields on rect-like marks: produce refs for both
// bin ends, handling VL-binned, pre-binned, and step-only bin definitions.
function rectBinPosition(_ref40) {
  var fieldDef = _ref40.fieldDef,
      fieldDef2 = _ref40.fieldDef2,
      channel = _ref40.channel,
      band = _ref40.band,
      scaleName = _ref40.scaleName,
      markDef = _ref40.markDef,
      _ref40$spacing = _ref40.spacing,
      spacing = _ref40$spacing === void 0 ? 0 : _ref40$spacing,
      axisTranslate = _ref40.axisTranslate,
      reverse = _ref40.reverse,
      config = _ref40.config;
  var channel2 = getSecondaryRangeChannel(channel);
  var vgChannel = getVgPositionChannel(channel);
  var vgChannel2 = getVgPositionChannel(channel2);
  var offset = getOffset(channel, markDef);

  if (isBinning(fieldDef.bin) || fieldDef.timeUnit) {
    var _ref41;

    // Vega-Lite does the binning: interpolate between bin start and bin end.
    return _ref41 = {}, _defineProperty(_ref41, vgChannel2, rectBinRef({
      channel: channel,
      fieldDef: fieldDef,
      scaleName: scaleName,
      markDef: markDef,
      band: (1 - band) / 2,
      offset: getBinSpacing(channel2, spacing, reverse, axisTranslate, offset),
      config: config
    })), _defineProperty(_ref41, vgChannel, rectBinRef({
      channel: channel,
      fieldDef: fieldDef,
      scaleName: scaleName,
      markDef: markDef,
      band: 1 - (1 - band) / 2,
      offset: getBinSpacing(channel, spacing, reverse, axisTranslate, offset),
      config: config
    })), _ref41;
  } else if (isBinned(fieldDef.bin)) {
    // Data is pre-binned: bin start comes directly from the field.
    var startRef = valueRefForFieldOrDatumDef(fieldDef, scaleName, {}, {
      offset: getBinSpacing(channel2, spacing, reverse, axisTranslate, offset)
    });

    if (isFieldDef(fieldDef2)) {
      var _ref42;

      // Explicit end field provided on the secondary channel.
      return _ref42 = {}, _defineProperty(_ref42, vgChannel2, startRef), _defineProperty(_ref42, vgChannel, valueRefForFieldOrDatumDef(fieldDef2, scaleName, {}, {
        offset: getBinSpacing(channel, spacing, reverse, axisTranslate, offset)
      })), _ref42;
    } else if (isBinParams(fieldDef.bin) && fieldDef.bin.step) {
      var _ref43;

      // No end field: compute the end as start + bin step inside a signal.
      return _ref43 = {}, _defineProperty(_ref43, vgChannel2, startRef), _defineProperty(_ref43, vgChannel, {
        signal: "scale(\"".concat(scaleName, "\", ").concat(_vgField(fieldDef, {
          expr: 'datum'
        }), " + ").concat(fieldDef.bin.step, ")"),
        offset: getBinSpacing(channel, spacing, reverse, axisTranslate, offset)
      }), _ref43;
    }
  }

  // Pre-binned data without an end field or a step cannot be positioned.
  warn(channelRequiredForBinned(channel2));
  return undefined;
}
|
|
/**
 * Value Ref for binned fields
 */
function rectBinRef(_ref44) {
  var channel = _ref44.channel,
      fieldDef = _ref44.fieldDef,
      scaleName = _ref44.scaleName,
      markDef = _ref44.markDef,
      band = _ref44.band,
      offset = _ref44.offset,
      config = _ref44.config;
  // Interpolate within the bin extent by `band`, then guard against invalid values.
  var r = interpolatedSignalRef({
    scaleName: scaleName,
    fieldOrDatumDef: fieldDef,
    band: band,
    offset: offset
  });
  return wrapPositionInvalidTest({
    fieldDef: fieldDef,
    channel: channel,
    markDef: markDef,
    ref: r,
    config: config
  });
}
|
|
|
|
// Mark properties that are never copied straight from the mark definition into encode.
var ALWAYS_IGNORE = new Set(['aria']);
|
|
|
|
// Assemble the shared (non-positional) encode entry for all marks:
// mark-def properties, fill/stroke (with invalid-value guards), opacities,
// stroke styling, zindex, tooltip, href, and ARIA attributes.
function baseEncodeEntry(model, ignore) {
  var _ref45 = ignore.color === 'include' ? color(model) : {},
      _ref45$fill = _ref45.fill,
      fill = _ref45$fill === void 0 ? undefined : _ref45$fill,
      _ref45$stroke = _ref45.stroke,
      stroke = _ref45$stroke === void 0 ? undefined : _ref45$stroke;

  return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({}, markDefProperties(model.markDef, ignore)), wrapAllFieldsInvalid(model, 'fill', fill)), wrapAllFieldsInvalid(model, 'stroke', stroke)), nonPosition('opacity', model)), nonPosition('fillOpacity', model)), nonPosition('strokeOpacity', model)), nonPosition('strokeWidth', model)), nonPosition('strokeDash', model)), zindex(model)), tooltip(model)), text(model, 'href')), aria(model));
} // TODO: mark VgValueRef[] as readonly after https://github.com/vega/vega/pull/1987
|
|
|
|
|
|
// When invalid values must be hidden, prepend a null-value test case to the
// fill/stroke ValueRef so marks with invalid data render as null.
function wrapAllFieldsInvalid(model, channel, valueRef) {
  var config = model.config,
      mark = model.mark,
      markDef = model.markDef;
  var invalid = getMarkPropOrConfig('invalid', markDef, config);

  if (invalid === 'hide' && valueRef && !isPathMark(mark)) {
    // For non-path marks, we have to exclude invalid values (null and NaN) for scales with continuous domains.
    // For path marks, we will use "defined" property and skip these values instead.
    var test = allFieldsInvalidPredicate(model, {
      invalid: true,
      channels: SCALE_CHANNELS
    });

    if (test) {
      return _defineProperty({}, channel, [// prepend the invalid case
      // TODO: support custom value
      {
        test: test,
        value: null
      }].concat(_toConsumableArray(array(valueRef))));
    }
  }

  return valueRef ? _defineProperty({}, channel, valueRef) : {};
}
|
|
|
|
// Copy Vega mark-config properties set directly on the mark definition into an
// encode object, skipping always-ignored properties and anything the caller
// marked as 'ignore'.
function markDefProperties(mark, ignore) {
  var result = {};
  VG_MARK_CONFIGS.forEach(function (prop) {
    var include = !ALWAYS_IGNORE.has(prop) && mark[prop] !== undefined && ignore[prop] !== 'ignore';

    if (include) {
      result[prop] = signalOrValueRef(mark[prop]);
    }
  });
  return result;
}
|
|
|
|
// Build a predicate expression over every continuous-scale field of the given
// channels, testing (in)validity of each field and joining the tests with
// `||` (any invalid) or `&&` (all valid). Returns undefined when no
// continuous-scale field exists.
function allFieldsInvalidPredicate(model, _ref48) {
  var _ref48$invalid = _ref48.invalid,
      invalid = _ref48$invalid === void 0 ? false : _ref48$invalid,
      channels = _ref48.channels;
  // Collect unique datum field expressions keyed by field name.
  var filterIndex = channels.reduce(function (aggregator, channel) {
    var scaleComponent = model.getScaleComponent(channel);

    if (scaleComponent) {
      var _scaleType4 = scaleComponent.get('type');

      var _field3 = model.vgField(channel, {
        expr: 'datum'
      }); // While discrete domain scales can handle invalid values, continuous scales can't.


      if (_field3 && hasContinuousDomain(_scaleType4)) {
        aggregator[_field3] = true;
      }
    }

    return aggregator;
  }, {});
  var fields = keys(filterIndex);

  if (fields.length > 0) {
    var op = invalid ? '||' : '&&';
    return fields.map(function (field) {
      return fieldInvalidPredicate(field, invalid);
    }).join(" ".concat(op, " "));
  }

  return undefined;
}
|
|
|
|
// Build the "defined" encode entry for path marks so Vega skips points whose
// positional fields are invalid (instead of drawing through them).
function defined(model) {
  var config = model.config,
      markDef = model.markDef;
  var invalid = getMarkPropOrConfig('invalid', markDef, config);

  if (invalid) {
    var signal = allFieldsInvalidPredicate$1(model, {
      channels: POSITION_SCALE_CHANNELS
    });

    if (signal) {
      return {
        defined: {
          signal: signal
        }
      };
    }
  }

  return {};
}
|
|
|
|
// NOTE(review): byte-for-byte duplicate of allFieldsInvalidPredicate above —
// an artifact of the bundler inlining two modules that each defined this
// helper. See that function for documentation.
function allFieldsInvalidPredicate$1(model, _ref49) {
  var _ref49$invalid = _ref49.invalid,
      invalid = _ref49$invalid === void 0 ? false : _ref49$invalid,
      channels = _ref49.channels;
  var filterIndex = channels.reduce(function (aggregator, channel) {
    var scaleComponent = model.getScaleComponent(channel);

    if (scaleComponent) {
      var _scaleType5 = scaleComponent.get('type');

      var _field4 = model.vgField(channel, {
        expr: 'datum'
      }); // While discrete domain scales can handle invalid values, continuous scales can't.


      if (_field4 && hasContinuousDomain(_scaleType5)) {
        aggregator[_field4] = true;
      }
    }

    return aggregator;
  }, {});
  var fields = keys(filterIndex);

  if (fields.length > 0) {
    var op = invalid ? '||' : '&&';
    return fields.map(function (field) {
      return fieldInvalidPredicate(field, invalid);
    }).join(" ".concat(op, " "));
  }

  return undefined;
}
|
|
|
|
// Wrap `value` as `{ [prop]: <signal-or-value ref> }` when it is defined;
// otherwise return undefined so the caller can spread/skip it.
function valueIfDefined(prop, value) {
  return value === undefined ? undefined : _defineProperty({}, prop, signalOrValueRef(value));
}
|
|
|
|
// Name suffix for the invisible voronoi overlay mark.
var VORONOI = 'voronoi';
// Selection compiler for `nearest: true` point selections: overlays a
// transparent voronoi path mark so pointer events snap to the nearest datum.
var nearest = {
  has: function has(selCmpt) {
    return selCmpt.type !== 'interval' && selCmpt.nearest;
  },
  parse: function parse(model, selCmpt) {
    // Scope selection events to the voronoi mark to prevent capturing
    // events that occur on the group mark (https://github.com/vega/vega/issues/2112).
    if (selCmpt.events) {
      var _iterator44 = _createForOfIteratorHelper(selCmpt.events),
          _step44;

      try {
        for (_iterator44.s(); !(_step44 = _iterator44.n()).done;) {
          var s = _step44.value;
          s.markname = model.getName(VORONOI);
        }
      } catch (err) {
        _iterator44.e(err);
      } finally {
        _iterator44.f();
      }
    }
  },
  marks: function marks(model, selCmpt, _marks) {
    var _selCmpt$project$hasC = selCmpt.project.hasChannel,
        x = _selCmpt$project$hasC.x,
        y = _selCmpt$project$hasC.y;
    var markType = model.mark;

    // Continuous (path) marks cannot host a voronoi overlay.
    if (isPathMark(markType)) {
      warn(nearestNotSupportForContinuous(markType));
      return _marks;
    }

    // Transparent path mark carrying the voronoi transform and tooltip encode.
    var cellDef = {
      name: model.getName(VORONOI),
      type: 'path',
      interactive: true,
      from: {
        data: model.getName('marks')
      },
      encode: {
        update: Object.assign({
          fill: {
            value: 'transparent'
          },
          strokeWidth: {
            value: 0.35
          },
          stroke: {
            value: 'transparent'
          },
          isVoronoi: {
            value: true
          }
        }, tooltip(model, {
          reactiveGeom: true
        }))
      },
      transform: [{
        type: 'voronoi',
        x: {
          // When only one of x/y is projected, collapse the other axis to 0.
          expr: x || !y ? 'datum.datum.x || 0' : '0'
        },
        y: {
          expr: y || !x ? 'datum.datum.y || 0' : '0'
        },
        size: [model.getSizeSignalRef('width'), model.getSizeSignalRef('height')]
      }]
    };
    // Insert the voronoi cell right after the model's main mark, at most once.
    var index = 0;
    var exists = false;

    _marks.forEach(function (mark, i) {
      var _a;

      var name = (_a = mark.name) !== null && _a !== void 0 ? _a : '';

      if (name === model.component.mark[0].name) {
        index = i;
      } else if (name.indexOf(VORONOI) >= 0) {
        exists = true;
      }
    });

    if (!exists) {
      _marks.splice(index + 1, 0, cellDef);
    }

    return _marks;
  }
};
|
|
/**
 * A node in the dataflow tree.
 *
 * Nodes are doubly linked: `_parent` points up and `_children` down; the
 * `parent` setter keeps both sides of the link in sync.
 */
var DataFlowNode = /*#__PURE__*/function () {
  // parent: optional parent node — when given, this node is appended to it.
  // debugName: human-readable label used for debugging only.
  function DataFlowNode(parent, debugName) {
    _classCallCheck(this, DataFlowNode);

    this.debugName = debugName;
    this._children = [];
    this._parent = null;

    if (parent) {
      // Assign through the setter so the parent's child list is updated too.
      this.parent = parent;
    }
  }
  /**
   * Clone this node with a deep copy but don't clone links to children or parents.
   */


  _createClass(DataFlowNode, [{
    key: "clone",
    value: function clone() {
      // Subclasses that support cloning override this.
      throw new Error('Cannot clone node');
    }
  }, {
    key: "numChildren",
    value: function numChildren() {
      return this._children.length;
    }
  }, {
    key: "addChild",
    value: function addChild(child, loc) {
      // do not add the same child twice
      if (this._children.indexOf(child) > -1) {
        warn(ADD_SAME_CHILD_TWICE);
        return;
      }

      if (loc !== undefined) {
        this._children.splice(loc, 0, child);
      } else {
        this._children.push(child);
      }
    }
  }, {
    key: "removeChild",
    value: function removeChild(oldChild) {
      // Returns the index the child occupied so callers can re-insert at the
      // same position (see remove() below).
      var loc = this._children.indexOf(oldChild);

      this._children.splice(loc, 1);

      return loc;
    }
    /**
     * Remove node from the dataflow.
     */

  }, {
    key: "remove",
    value: function remove() {
      // Splice this node out of the tree: each child is reattached to this
      // node's parent at the slot this node occupied, preserving sibling order.
      var loc = this._parent.removeChild(this);

      var _iterator45 = _createForOfIteratorHelper(this._children),
          _step45;

      try {
        for (_iterator45.s(); !(_step45 = _iterator45.n()).done;) {
          var child = _step45.value;
          // do not use the set method because we want to insert at a particular location
          child._parent = this._parent;

          this._parent.addChild(child, loc++);
        }
      } catch (err) {
        _iterator45.e(err);
      } finally {
        _iterator45.f();
      }
    }
    /**
     * Insert another node as a parent of this node.
     */

  }, {
    key: "insertAsParentOf",
    value: function insertAsParentOf(other) {
      var parent = other.parent;
      parent.removeChild(this);
      this.parent = parent;
      other.parent = this;
    }
  }, {
    key: "swapWithParent",
    value: function swapWithParent() {
      // Exchange this node with its parent. This node's children stay with the
      // (old) parent; afterwards old parent hangs below this node.
      var parent = this._parent;
      var newParent = parent.parent; // reconnect the children

      var _iterator46 = _createForOfIteratorHelper(this._children),
          _step46;

      try {
        for (_iterator46.s(); !(_step46 = _iterator46.n()).done;) {
          var child = _step46.value;
          child.parent = parent;
        } // remove old links

      } catch (err) {
        _iterator46.e(err);
      } finally {
        _iterator46.f();
      }

      this._children = []; // equivalent to removing every child link one by one

      parent.removeChild(this);
      parent.parent.removeChild(parent); // swap two nodes

      this.parent = newParent;
      parent.parent = this;
    }
  }, {
    key: "parent",
    get: function get() {
      return this._parent;
    }
    /**
     * Set the parent of the node and also add this node to the parent's children.
     */
    ,
    set: function set(parent) {
      this._parent = parent;

      if (parent) {
        parent.addChild(this);
      }
    }
  }, {
    key: "children",
    get: function get() {
      return this._children;
    }
  }]);

  return DataFlowNode;
}();
|
|
|
|
/**
 * A dataflow node that names a data source and tracks, via a shared ref-count
 * map, whether anything actually references it (unreferenced sources are not
 * instantiated at assemble time).
 */
var OutputNode = /*#__PURE__*/function (_DataFlowNode) {
  _inherits(OutputNode, _DataFlowNode);

  var _super3 = _createSuper(OutputNode);

  /**
   * @param source The name of the source. Will change in assemble.
   * @param type The type of the output node.
   * @param refCounts A global ref counter map.
   */
  function OutputNode(parent, source, type, refCounts) {
    var _this8;

    _classCallCheck(this, OutputNode);

    _this8 = _super3.call(this, parent, source);
    _this8.type = type;
    _this8.refCounts = refCounts;
    _this8._source = _this8._name = source;

    // Register this name in the shared counter map, starting at zero.
    if (_this8.refCounts && !(_this8._name in _this8.refCounts)) {
      _this8.refCounts[_this8._name] = 0;
    }

    return _this8;
  }

  _createClass(OutputNode, [{
    key: "clone",
    value: function clone() {
      // NOTE: invokes the (sub)class constructor with no arguments and then
      // copies state field by field; the clone shares the refCounts map.
      var cloneObj = new this.constructor();
      cloneObj.debugName = 'clone_' + this.debugName;
      cloneObj._source = this._source;
      cloneObj._name = 'clone_' + this._name;
      cloneObj.type = this.type;
      cloneObj.refCounts = this.refCounts;
      cloneObj.refCounts[cloneObj._name] = 0;
      return cloneObj;
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      // Output nodes neither read nor produce any fields.
      return new Set();
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set();
    }
  }, {
    key: "hash",
    value: function hash() {
      // Lazily assigned unique hash; stable for the lifetime of the node.
      if (this._hash === undefined) {
        this._hash = "Output ".concat(uniqueId());
      }

      return this._hash;
    }
    /**
     * Request the datasource name and increase the ref counter.
     *
     * During the parsing phase, this will return the simple name such as 'main' or 'raw'.
     * It is crucial to request the name from an output node to mark it as a required node.
     * If nobody ever requests the name, this datasource will not be instantiated in the assemble phase.
     *
     * In the assemble phase, this will return the correct name.
     */

  }, {
    key: "getSource",
    value: function getSource() {
      this.refCounts[this._name]++;
      return this._source;
    }
  }, {
    key: "isRequired",
    value: function isRequired() {
      // Required iff getSource() has been called at least once for this name.
      return !!this.refCounts[this._name];
    }
  }, {
    key: "setSource",
    value: function setSource(source) {
      this._source = source;
    }
  }]);

  return OutputNode;
}(DataFlowNode);
|
|
|
|
/**
 * A dataflow node holding time-unit formulas ({field, as, timeUnit} records,
 * keyed by their hash) that are assembled into Vega `timeunit` transforms.
 */
var TimeUnitNode = /*#__PURE__*/function (_DataFlowNode2) {
  _inherits(TimeUnitNode, _DataFlowNode2);

  var _super4 = _createSuper(TimeUnitNode);

  // formula: map from _hash({as, field, timeUnit}) to the component itself.
  function TimeUnitNode(parent, formula) {
    var _this9;

    _classCallCheck(this, TimeUnitNode);

    _this9 = _super4.call(this, parent);
    _this9.formula = formula;
    return _this9;
  }

  _createClass(TimeUnitNode, [{
    key: "clone",
    value: function clone() {
      // Deep-copy the formula map; the clone is detached (no parent).
      return new TimeUnitNode(null, duplicate(this.formula));
    }
  }, {
    key: "merge",

    /**
     * Merge together TimeUnitNodes assigning the children of `other` to `this`
     * and removing `other`.
     */
    value: function merge(other) {
      this.formula = Object.assign({}, this.formula); // if the same hash happen twice, merge "band"

      for (var key in other.formula) {
        if (!this.formula[key] || other.formula[key].band) {
          // copy if it's not a duplicate or if we need to include copy band over
          this.formula[key] = other.formula[key];
        }
      }

      // Reparent all of other's children onto this node...
      var _iterator47 = _createForOfIteratorHelper(other.children),
          _step47;

      try {
        for (_iterator47.s(); !(_step47 = _iterator47.n()).done;) {
          var child = _step47.value;
          other.removeChild(child);
          child.parent = this;
        }
      } catch (err) {
        _iterator47.e(err);
      } finally {
        _iterator47.f();
      }

      // ...then detach other from the dataflow entirely.
      other.remove();
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      // The output field names ("as") of every formula.
      return new Set(vals(this.formula).map(function (f) {
        return f.as;
      }));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      // The input field names of every formula.
      return new Set(vals(this.formula).map(function (f) {
        return f.field;
      }));
    }
  }, {
    key: "hash",
    value: function hash() {
      return "TimeUnit ".concat(_hash(this.formula));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Emit one Vega 'timeunit' transform per formula. Each writes the unit
      // start to `as` and the unit end to `<as>_end`.
      var transforms = [];

      var _iterator48 = _createForOfIteratorHelper(vals(this.formula)),
          _step48;

      try {
        for (_iterator48.s(); !(_step48 = _iterator48.n()).done;) {
          var f = _step48.value;
          var _field5 = f.field,
              as = f.as,
              timeUnit = f.timeUnit;

          var _a = normalizeTimeUnit(timeUnit),
              unit = _a.unit,
              utc = _a.utc,
              params = __rest(_a, ["unit", "utc"]);

          transforms.push(Object.assign(Object.assign(Object.assign(Object.assign({
            field: replacePathInField(_field5),
            type: 'timeunit'
          }, unit ? {
            units: getTimeUnitParts(unit)
          } : {}), utc ? {
            timezone: 'utc'
          } : {}), params), {
            as: [as, "".concat(as, "_end")]
          }));
        }
      } catch (err) {
        _iterator48.e(err);
      } finally {
        _iterator48.f();
      }

      return transforms;
    }
  }], [{
    key: "makeFromEncoding",
    // Builds a TimeUnitNode from every encoded field that declares a timeUnit;
    // returns null when no field does.
    value: function makeFromEncoding(parent, model) {
      var formula = model.reduceFieldDef(function (timeUnitComponent, fieldDef, channel) {
        var field = fieldDef.field,
            timeUnit = fieldDef.timeUnit;
        var channelDef2 = isUnitModel(model) ? model.encoding[getSecondaryRangeChannel(channel)] : undefined;
        var band = isUnitModel(model) && hasBand(channel, fieldDef, channelDef2, model.stack, model.markDef, model.config);

        if (timeUnit) {
          var as = _vgField(fieldDef, {
            forAs: true
          });

          // Key by the hash of the component so duplicates collapse.
          timeUnitComponent[_hash({
            as: as,
            field: field,
            timeUnit: timeUnit
          })] = Object.assign({
            as: as,
            field: field,
            timeUnit: timeUnit
          }, band ? {
            band: true
          } : {});
        }

        return timeUnitComponent;
      }, {});

      if (isEmpty(formula)) {
        return null;
      }

      return new TimeUnitNode(parent, formula);
    }
  }, {
    key: "makeFromTransform",
    // Builds a single-formula TimeUnitNode from an explicit timeUnit transform.
    value: function makeFromTransform(parent, t) {
      var _a = Object.assign({}, t),
          timeUnit = _a.timeUnit,
          other = __rest(_a, ["timeUnit"]);

      var normalizedTimeUnit = normalizeTimeUnit(timeUnit);
      var component = Object.assign(Object.assign({}, other), {
        timeUnit: normalizedTimeUnit
      });
      return new TimeUnitNode(parent, _defineProperty({}, _hash(component), component));
    }
  }]);

  return TimeUnitNode;
}(DataFlowNode);
|
|
|
|
// Suffix appended to a selection name to form its "tuple fields" signal name.
var TUPLE_FIELDS = '_tuple_fields';

/**
 * Holds a selection's projections: `items` lists them in order, while
 * `hasChannel` / `hasField` index the same entries by encoding channel and by
 * field name respectively.
 */
var SelectionProjectionComponent = function SelectionProjectionComponent() {
  _classCallCheck(this, SelectionProjectionComponent);

  // Collect all constructor arguments as the initial projection items.
  for (var _len4 = arguments.length, items = new Array(_len4), _key4 = 0; _key4 < _len4; _key4++) {
    items[_key4] = arguments[_key4];
  }

  this.items = items;
  this.hasChannel = {};
  this.hasField = {};
};
|
|
|
|
/**
 * Selection transform that resolves which fields/encodings a selection
 * projects over, allocates per-projection signal names, and emits the
 * `<name>_tuple_fields` signal describing them.
 */
var project = {
  has: function has() {
    return true; // This transform handles its own defaults, so always run parse.
  },
  parse: function parse(model, selCmpt, selDef) {
    var _a, _b, _c;

    var name = selCmpt.name;
    var proj = (_a = selCmpt.project) !== null && _a !== void 0 ? _a : selCmpt.project = new SelectionProjectionComponent();
    var parsed = {};
    var timeUnits = {};
    var signals = new Set();

    // Allocates a unique signal name for projection `p` in the given range
    // ('data' or 'visual'), appending a numeric counter on collisions.
    var signalName = function signalName(p, range) {
      var suffix = range === 'visual' ? p.channel : p.field;
      var sg = varName("".concat(name, "_").concat(suffix));

      for (var counter = 1; signals.has(sg); counter++) {
        sg = varName("".concat(name, "_").concat(suffix, "_").concat(counter));
      }

      signals.add(sg);
      return _defineProperty({}, range, sg);
    }; // If no explicit projection (either fields or encodings) is specified, set some defaults.
    // If an initial value is set, try to infer projections.
    // Otherwise, use the default configuration.


    if (!selDef.fields && !selDef.encodings) {
      var cfg = model.config.selection[selDef.type];

      if (selDef.init) {
        // Infer encodings/fields from the keys of the init value(s).
        var _iterator49 = _createForOfIteratorHelper(array(selDef.init)),
            _step49;

        try {
          for (_iterator49.s(); !(_step49 = _iterator49.n()).done;) {
            var init = _step49.value;

            var _iterator50 = _createForOfIteratorHelper(keys(init)),
                _step50;

            try {
              for (_iterator50.s(); !(_step50 = _iterator50.n()).done;) {
                var key = _step50.value;

                if (isSingleDefUnitChannel(key)) {
                  (selDef.encodings || (selDef.encodings = [])).push(key);
                } else {
                  if (selDef.type === 'interval') {
                    // Interval selections can only be initialized by channel.
                    warn(INTERVAL_INITIALIZED_WITH_X_Y);
                    selDef.encodings = cfg.encodings;
                  } else {
                    (selDef.fields || (selDef.fields = [])).push(key);
                  }
                }
              }
            } catch (err) {
              _iterator50.e(err);
            } finally {
              _iterator50.f();
            }
          }
        } catch (err) {
          _iterator49.e(err);
        } finally {
          _iterator49.f();
        }
      } else {
        selDef.encodings = cfg.encodings;
        selDef.fields = cfg.fields;
      }
    } // TODO: find a possible channel mapping for these fields.


    // Field-only projections: enumerated ('E') type, data signal only.
    var _iterator51 = _createForOfIteratorHelper((_b = selDef.fields) !== null && _b !== void 0 ? _b : []),
        _step51;

    try {
      for (_iterator51.s(); !(_step51 = _iterator51.n()).done;) {
        var _field6 = _step51.value;
        var p = {
          type: 'E',
          field: _field6
        };
        p.signals = Object.assign({}, signalName(p, 'data'));
        proj.items.push(p);
        proj.hasField[_field6] = p;
      }
    } catch (err) {
      _iterator51.e(err);
    } finally {
      _iterator51.f();
    }

    // Encoding-based projections: resolve each channel to its field def.
    var _iterator52 = _createForOfIteratorHelper((_c = selDef.encodings) !== null && _c !== void 0 ? _c : []),
        _step52;

    try {
      for (_iterator52.s(); !(_step52 = _iterator52.n()).done;) {
        var channel = _step52.value;
        var fieldDef = model.fieldDef(channel);

        if (fieldDef) {
          var _field7 = fieldDef.field;

          if (fieldDef.aggregate) {
            warn(cannotProjectAggregate(channel, fieldDef.aggregate));
            continue;
          } else if (!_field7) {
            warn(cannotProjectOnChannelWithoutField(channel));
            continue;
          }

          if (fieldDef.timeUnit) {
            _field7 = model.vgField(channel); // Construct TimeUnitComponents which will be combined into a
            // TimeUnitNode. This node may need to be inserted into the
            // dataflow if the selection is used across views that do not
            // have these time units defined.

            var component = {
              timeUnit: fieldDef.timeUnit,
              as: _field7,
              field: fieldDef.field
            };
            timeUnits[_hash(component)] = component;
          } // Prevent duplicate projections on the same field.
          // TODO: what if the same field is bound to multiple channels (e.g., SPLOM diag).


          if (!parsed[_field7]) {
            // Determine whether the tuple will store enumerated or ranged values.
            // Interval selections store ranges for continuous scales, and enumerations otherwise.
            // Single/multi selections store ranges for binned fields, and enumerations otherwise.
            var type = 'E';

            if (selCmpt.type === 'interval') {
              var _scaleType6 = model.getScaleComponent(channel).get('type');

              if (hasContinuousDomain(_scaleType6)) {
                type = 'R';
              }
            } else if (fieldDef.bin) {
              type = 'R-RE';
            }

            var _p2 = {
              field: _field7,
              channel: channel,
              type: type
            };
            // Channel projections get both a data-space and a visual-space signal.
            _p2.signals = Object.assign(Object.assign({}, signalName(_p2, 'data')), signalName(_p2, 'visual'));
            proj.items.push(parsed[_field7] = _p2);
            proj.hasField[_field7] = proj.hasChannel[channel] = parsed[_field7];
          }
        } else {
          warn(cannotProjectOnChannelWithoutField(channel));
        }
      }
    } catch (err) {
      _iterator52.e(err);
    } finally {
      _iterator52.f();
    }

    if (selDef.init) {
      // Order each init value to match the projection order, preferring the
      // channel key over the field key when both are present.
      var parseInit = function parseInit(i) {
        return proj.items.map(function (p) {
          return i[p.channel] !== undefined ? i[p.channel] : i[p.field];
        });
      };

      if (selDef.type === 'interval') {
        selCmpt.init = parseInit(selDef.init);
      } else {
        var _init = array(selDef.init);

        selCmpt.init = _init.map(parseInit);
      }
    }

    if (!isEmpty(timeUnits)) {
      proj.timeUnit = new TimeUnitNode(null, timeUnits);
    }
  },
  signals: function signals(model, selCmpt, allSignals) {
    // Emit the `<name>_tuple_fields` signal once per selection, listing every
    // projection (minus its internal bookkeeping properties).
    var name = selCmpt.name + TUPLE_FIELDS;
    var hasSignal = allSignals.filter(function (s) {
      return s.name === name;
    });
    return hasSignal.length > 0 ? allSignals : allSignals.concat({
      name: name,
      value: selCmpt.project.items.map(function (proj) {
        var rest = __rest(proj, ["signals", "hasLegend"]);

        rest.field = replacePathInField(rest.field);
        return rest;
      })
    });
  }
};
|
|
/**
 * Selection transform for single selections bound to input widgets (i.e.
 * `bind` that is neither 'scales' nor a legend binding): hoists one signal per
 * projected field to the top level and rewrites the tuple signal to be driven
 * by those widget signals.
 */
var inputBindings = {
  has: function has(selCmpt) {
    return selCmpt.type === 'single' && selCmpt.resolve === 'global' && selCmpt.bind && selCmpt.bind !== 'scales' && !isLegendBinding(selCmpt.bind);
  },
  parse: function parse(model, selCmpt, selDef, origDef) {
    // Binding a selection to input widgets disables default direct manipulation interaction.
    // A user can choose to re-enable it by explicitly specifying triggering input events.
    if (!origDef.on) delete selCmpt.events;
    if (!origDef.clear) delete selCmpt.clear;
  },
  topLevelSignals: function topLevelSignals(model, selCmpt, signals) {
    var name = selCmpt.name;
    var proj = selCmpt.project;
    var bind = selCmpt.bind;
    var init = selCmpt.init && selCmpt.init[0]; // Can only exist on single selections (one initial value).

    // With a "nearest" selection, events are picked on the voronoi overlay, so
    // unwrap to the underlying datum.
    var datum = nearest.has(selCmpt) ? '(item().isVoronoi ? datum.datum : datum)' : 'datum';
    proj.items.forEach(function (p, i) {
      var _a, _b;

      var sgname = varName("".concat(name, "_").concat(p.field));
      var hasSignal = signals.filter(function (s) {
        return s.name === sgname;
      });

      if (!hasSignal.length) {
        // One widget-bound top-level signal per projected field; updates only
        // fire when the picked item is a non-group mark. `bind` can be keyed
        // per field, per channel, or shared for all projections.
        signals.unshift(Object.assign(Object.assign({
          name: sgname
        }, init ? {
          init: assembleInit(init[i])
        } : {
          value: null
        }), {
          on: selCmpt.events ? [{
            events: selCmpt.events,
            update: "datum && item().mark.marktype !== 'group' ? ".concat(datum, "[").concat($(p.field), "] : null")
          }] : [],
          bind: (_b = (_a = bind[p.field]) !== null && _a !== void 0 ? _a : bind[p.channel]) !== null && _b !== void 0 ? _b : bind
        }));
      }
    });
    return signals;
  },
  signals: function signals(model, selCmpt, _signals) {
    var name = selCmpt.name;
    var proj = selCmpt.project;

    var signal = _signals.filter(function (s) {
      return s.name === name + TUPLE;
    })[0];

    var fields = name + TUPLE_FIELDS;
    var values = proj.items.map(function (p) {
      return varName("".concat(name, "_").concat(p.field));
    });
    var valid = values.map(function (v) {
      return "".concat(v, " !== null");
    }).join(' && ');

    if (values.length) {
      // Derive the tuple from the widget signals instead of mark events.
      signal.update = "".concat(valid, " ? {fields: ").concat(fields, ", values: [").concat(values.join(', '), "]} : null");
    }

    // The tuple is now a derived signal: drop its initial value and handlers.
    delete signal.value;
    delete signal.on;
    return _signals;
  }
};
|
|
// Suffix appended to a selection name to form its toggle signal name.
var TOGGLE = '_toggle';
/**
 * Selection transform for the `toggle` behavior of multi selections: a
 * `<name>_toggle` signal records whether the toggle predicate held on the
 * triggering event, and `modifyExpr` uses it to add/remove individual tuples
 * instead of overwriting the selection.
 */
var toggle = {
  has: function has(selCmpt) {
    return selCmpt.type === 'multi' && !!selCmpt.toggle;
  },
  signals: function signals(model, selCmpt, _signals2) {
    return _signals2.concat({
      name: selCmpt.name + TOGGLE,
      value: false,
      on: [{
        events: selCmpt.events,
        update: selCmpt.toggle
      }]
    });
  },
  modifyExpr: function modifyExpr(model, selCmpt) {
    // Produces the (insert, remove-test, toggle-tuple) argument triple: when
    // toggling, insertion is suppressed and the tuple is passed as the toggle
    // argument instead; otherwise the tuple is inserted normally.
    var tpl = selCmpt.name + TUPLE;
    var signal = selCmpt.name + TOGGLE;
    return "".concat(signal, " ? null : ").concat(tpl, ", ") + (selCmpt.resolve === 'global' ? "".concat(signal, " ? null : true, ") : "".concat(signal, " ? null : {unit: ").concat(unitName(model), "}, ")) + "".concat(signal, " ? ").concat(tpl, " : null");
  }
};
|
|
/**
 * Selection transform for `clear`: attaches event triggers that reset the
 * selection's signals back to their empty state.
 */
var clear = {
  has: function has(selCmpt) {
    return selCmpt.clear !== undefined && selCmpt.clear !== false;
  },
  parse: function parse(model, selCmpt, selDef) {
    // Normalize the clear spec: a string is parsed as an event selector.
    if (selDef.clear) {
      selCmpt.clear = isString(selDef.clear) ? parseSelector(selDef.clear, 'scope') : selDef.clear;
    }
  },
  topLevelSignals: function topLevelSignals(model, selCmpt, signals) {
    // For input-widget bindings, null out each hoisted field signal on clear.
    if (inputBindings.has(selCmpt)) {
      var _iterator53 = _createForOfIteratorHelper(selCmpt.project.items),
          _step53;

      try {
        var _loop2 = function _loop2() {
          var proj = _step53.value;
          var idx = signals.findIndex(function (n) {
            return n.name === varName("".concat(selCmpt.name, "_").concat(proj.field));
          });

          if (idx !== -1) {
            signals[idx].on.push({
              events: selCmpt.clear,
              update: 'null'
            });
          }
        };

        for (_iterator53.s(); !(_step53 = _iterator53.n()).done;) {
          _loop2();
        }
      } catch (err) {
        _iterator53.e(err);
      } finally {
        _iterator53.f();
      }
    }

    return signals;
  },
  signals: function signals(model, selCmpt, _signals3) {
    // Appends a clear trigger to the signal at `idx`, if it exists and
    // already has an event-handler array.
    function addClear(idx, update) {
      if (idx !== -1 && _signals3[idx].on) {
        _signals3[idx].on.push({
          events: selCmpt.clear,
          update: update
        });
      }
    } // Be as minimalist as possible when adding clear triggers to minimize dataflow execution.


    if (selCmpt.type === 'interval') {
      // Intervals: reset the visual (pixel-range) signal when present;
      // otherwise fall back to clearing the data signal.
      var _iterator54 = _createForOfIteratorHelper(selCmpt.project.items),
          _step54;

      try {
        var _loop3 = function _loop3() {
          var proj = _step54.value;

          var vIdx = _signals3.findIndex(function (n) {
            return n.name === proj.signals.visual;
          });

          addClear(vIdx, '[0, 0]');

          if (vIdx === -1) {
            var dIdx = _signals3.findIndex(function (n) {
              return n.name === proj.signals.data;
            });

            addClear(dIdx, 'null');
          }
        };

        for (_iterator54.s(); !(_step54 = _iterator54.n()).done;) {
          _loop3();
        }
      } catch (err) {
        _iterator54.e(err);
      } finally {
        _iterator54.f();
      }
    } else {
      // Single/multi: clear the tuple, and reset the toggle signal if any.
      var tIdx = _signals3.findIndex(function (n) {
        return n.name === selCmpt.name + TUPLE;
      });

      addClear(tIdx, 'null');

      if (toggle.has(selCmpt)) {
        tIdx = _signals3.findIndex(function (n) {
          return n.name === selCmpt.name + TOGGLE;
        });
        addClear(tIdx, 'false');
      }
    }

    return _signals3;
  }
};
|
|
/**
 * Selection transform for `bind: "scales"` on interval selections: wires the
 * selection's data extents into the bound scales' domains (pan/zoom) and, in
 * multiview displays, coordinates state through top-level signals.
 */
var scaleBindings = {
  has: function has(selCmpt) {
    return selCmpt.type === 'interval' && selCmpt.resolve === 'global' && selCmpt.bind && selCmpt.bind === 'scales';
  },
  parse: function parse(model, selCmpt) {
    var bound = selCmpt.scales = [];

    var _iterator55 = _createForOfIteratorHelper(selCmpt.project.items),
        _step55;

    try {
      for (_iterator55.s(); !(_step55 = _iterator55.n()).done;) {
        var proj = _step55.value;
        var channel = proj.channel;

        if (!isScaleChannel(channel)) {
          continue;
        }

        var scale = model.getScaleComponent(channel);

        var _scaleType7 = scale ? scale.get('type') : undefined;

        // Only continuous scales can have their domain driven by a selection.
        if (!scale || !hasContinuousDomain(_scaleType7)) {
          warn(SCALE_BINDINGS_CONTINUOUS);
          continue;
        }

        var extent = {
          selection: selCmpt.name,
          field: proj.field
        };
        scale.set('selectionExtent', extent, true);
        bound.push(proj);
      }
    } catch (err) {
      _iterator55.e(err);
    } finally {
      _iterator55.f();
    }
  },
  topLevelSignals: function topLevelSignals(model, selCmpt, signals) {
    var bound = selCmpt.scales.filter(function (proj) {
      return signals.filter(function (s) {
        return s.name === proj.signals.data;
      }).length === 0;
    }); // Top-level signals are only needed for multiview displays and if this
    // view's top-level signals haven't already been generated.


    if (!model.parent || isTopLevelLayer(model) || bound.length === 0) {
      return signals;
    } // vlSelectionResolve does not account for the behavior of bound scales in
    // multiview displays. Each unit view adds a tuple to the store, but the
    // state of the selection is the unit selection most recently updated. This
    // state is captured by the top-level signals that we insert and "push
    // outer" to from within the units. We need to reassemble this state into
    // the top-level named signal, except no single selCmpt has a global view.


    var namedSg = signals.filter(function (s) {
      return s.name === selCmpt.name;
    })[0];
    var update = namedSg.update;

    if (update.indexOf(VL_SELECTION_RESOLVE) >= 0) {
      // Replace the resolve expression wholesale with a field->data-signal map.
      namedSg.update = "{".concat(bound.map(function (proj) {
        return "".concat($(proj.field), ": ").concat(proj.signals.data);
      }).join(', '), "}");
    } else {
      // Append any missing field mappings into the existing object expression.
      var _iterator56 = _createForOfIteratorHelper(bound),
          _step56;

      try {
        for (_iterator56.s(); !(_step56 = _iterator56.n()).done;) {
          var proj = _step56.value;
          var mapping = "".concat($(proj.field), ": ").concat(proj.signals.data);

          if (update.indexOf(mapping) < 0) {
            update = "".concat(update.substring(0, update.length - 1), ", ").concat(mapping, "}");
          }
        }
      } catch (err) {
        _iterator56.e(err);
      } finally {
        _iterator56.f();
      }

      namedSg.update = update;
    }

    return signals.concat(bound.map(function (proj) {
      return {
        name: proj.signals.data
      };
    }));
  },
  signals: function signals(model, selCmpt, _signals4) {
    // Nested signals need only push to top-level signals with multiview displays.
    if (model.parent && !isTopLevelLayer(model)) {
      var _iterator57 = _createForOfIteratorHelper(selCmpt.scales),
          _step57;

      try {
        var _loop4 = function _loop4() {
          var proj = _step57.value;

          var signal = _signals4.filter(function (s) {
            return s.name === proj.signals.data;
          })[0];

          // Convert the local data signal into a pure "push outer" conduit.
          signal.push = 'outer';
          delete signal.value;
          delete signal.update;
        };

        for (_iterator57.s(); !(_step57 = _iterator57.n()).done;) {
          _loop4();
        }
      } catch (err) {
        _iterator57.e(err);
      } finally {
        _iterator57.f();
      }
    }

    return _signals4;
  }
};
|
|
|
|
/**
 * Builds a Vega expression that evaluates to the domain of the scale bound to
 * `channel` on `model`, e.g. `domain("x")`.
 */
function domain(model, channel) {
  var scaleName = model.scaleName(channel);
  return 'domain(' + $(scaleName) + ')';
}
|
|
|
|
/**
 * Reports whether `model`'s parent is a layer model sitting at the top of the
 * model tree.
 *
 * NOTE(review): `!model.parent.parent` is a boolean and can never be
 * null/undefined, so the `?? isTopLevelLayer(model.parent.parent)` fallback is
 * unreachable dead code — in practice this returns truthy only when the parent
 * layer is the root. Confirm the intended recursion upstream before changing.
 */
function isTopLevelLayer(model) {
  var _a;

  return model.parent && isLayerModel(model.parent) && ((_a = !model.parent.parent) !== null && _a !== void 0 ? _a : isTopLevelLayer(model.parent.parent));
}
|
|
|
|
/**
 * Selection transform for legend bindings: selections driven by interacting
 * with legend entries rather than marks. Requires exactly one projection.
 */
var legendBindings = {
  has: function has(selCmpt) {
    var spec = selCmpt.resolve === 'global' && selCmpt.bind && isLegendBinding(selCmpt.bind);
    var projLen = selCmpt.project.items.length === 1 && selCmpt.project.items[0].field !== SELECTION_ID;

    if (spec && !projLen) {
      // A legend binding needs a single projected field/encoding to key on.
      warn(LEGEND_BINDINGS_MUST_HAVE_PROJECTION);
    }

    return spec && projLen;
  },
  parse: function parse(model, selCmpt, selDef, origDef) {
    var _a; // Binding a selection to a legend disables default direct manipulation interaction.
    // A user can choose to re-enable it by explicitly specifying triggering input events.


    if (!origDef.on) delete selCmpt.events;
    if (!origDef.clear) delete selCmpt.clear;

    if (origDef.on || origDef.clear) {
      // Re-enabled direct-manipulation events must not fire on legend marks.
      var legendFilter = 'event.item && indexof(event.item.mark.role, "legend") < 0';

      var _iterator58 = _createForOfIteratorHelper(selCmpt.events),
          _step58;

      try {
        for (_iterator58.s(); !(_step58 = _iterator58.n()).done;) {
          var _evt = _step58.value;
          _evt.filter = array((_a = _evt.filter) !== null && _a !== void 0 ? _a : []);

          if (_evt.filter.indexOf(legendFilter) < 0) {
            _evt.filter.push(legendFilter);
          }
        }
      } catch (err) {
        _iterator58.e(err);
      } finally {
        _iterator58.f();
      }
    }

    // Normalize the binding to the {legend: {merge: [...streams]}} form;
    // `bind: "legend"` defaults to click events.
    var evt = isLegendStreamBinding(selCmpt.bind) ? selCmpt.bind.legend : 'click';
    var stream = isString(evt) ? parseSelector(evt, 'view') : array(evt);
    selCmpt.bind = {
      legend: {
        merge: stream
      }
    };
  },
  topLevelSignals: function topLevelSignals(model, selCmpt, signals) {
    var selName = selCmpt.name;
    var stream = isLegendStreamBinding(selCmpt.bind) && selCmpt.bind.legend;

    // Wraps an event-stream transformer that retargets a copied stream onto
    // the named legend mark.
    var markName = function markName(name) {
      return function (s) {
        var ds = duplicate(s);
        ds.markname = name;
        return ds;
      };
    };

    var _iterator59 = _createForOfIteratorHelper(selCmpt.project.items),
        _step59;

    try {
      var _loop5 = function _loop5() {
        var proj = _step59.value;
        if (!proj.hasLegend) return "continue";
        var prefix = "".concat(varName(proj.field), "_legend");
        var sgName = "".concat(selName, "_").concat(prefix);
        var hasSignal = signals.filter(function (s) {
          return s.name === sgName;
        });

        if (hasSignal.length === 0) {
          // Listen on the legend's symbol, label and entry marks.
          var _events = stream.merge.map(markName("".concat(prefix, "_symbols"))).concat(stream.merge.map(markName("".concat(prefix, "_labels")))).concat(stream.merge.map(markName("".concat(prefix, "_entries"))));

          signals.unshift(Object.assign(Object.assign({
            name: sgName
          }, !selCmpt.init ? {
            value: null
          } : {}), {
            on: [// Legend entries do not store values, so we need to walk the scenegraph to the symbol datum.
            {
              events: _events,
              update: 'datum.value || item().items[0].items[0].datum.value',
              force: true
            }, {
              events: stream.merge,
              update: "!event.item || !datum ? null : ".concat(sgName),
              force: true
            }]
          }));
        }
      };

      for (_iterator59.s(); !(_step59 = _iterator59.n()).done;) {
        var _ret = _loop5();

        if (_ret === "continue") continue;
      }
    } catch (err) {
      _iterator59.e(err);
    } finally {
      _iterator59.f();
    }

    return signals;
  },
  signals: function signals(model, selCmpt, _signals5) {
    var name = selCmpt.name;
    var proj = selCmpt.project;

    var tuple = _signals5.find(function (s) {
      return s.name === name + TUPLE;
    });

    var fields = name + TUPLE_FIELDS;
    var values = proj.items.filter(function (p) {
      return p.hasLegend;
    }).map(function (p) {
      return varName("".concat(name, "_").concat(varName(p.field), "_legend"));
    });
    var valid = values.map(function (v) {
      return "".concat(v, " !== null");
    }).join(' && ');
    var update = "".concat(valid, " ? {fields: ").concat(fields, ", values: [").concat(values.join(', '), "]} : null");

    if (selCmpt.events && values.length > 0) {
      // Direct manipulation is also enabled: add the legend signals as an
      // additional trigger on the tuple.
      tuple.on.push({
        events: values.map(function (signal) {
          return {
            signal: signal
          };
        }),
        update: update
      });
    } else if (values.length > 0) {
      // Legend-only: the tuple becomes a derived signal.
      tuple.update = update;
      delete tuple.value;
      delete tuple.on;
    }

    var toggle = _signals5.find(function (s) {
      return s.name === name + TOGGLE;
    });

    var events = isLegendStreamBinding(selCmpt.bind) && selCmpt.bind.legend;

    if (toggle) {
      // Retarget (or extend) the toggle trigger to the legend event stream.
      if (!selCmpt.events) toggle.on[0].events = events;else toggle.on.push(Object.assign(Object.assign({}, toggle.on[0]), {
        events: events
      }));
    }

    return _signals5;
  }
};
|
|
|
|
/**
 * Registers every legend-bound selection that projects over this legend's
 * field or channel onto the legend component, and flags the matching
 * projection with `hasLegend` so its legend signals get generated.
 */
function parseInteractiveLegend(model, channel, legendCmpt) {
  var _a;

  var field = (_a = model.fieldDef(channel)) === null || _a === void 0 ? void 0 : _a.field;
  forEachSelection(model, function (selCmpt) {
    var _a, _b;

    // Prefer a field match; fall back to a channel match.
    var proj = (_a = selCmpt.project.hasField[field]) !== null && _a !== void 0 ? _a : selCmpt.project.hasChannel[channel];

    if (proj && legendBindings.has(selCmpt)) {
      var legendSelections = (_b = legendCmpt.get('selections')) !== null && _b !== void 0 ? _b : [];
      legendSelections.push(selCmpt.name);
      legendCmpt.set('selections', legendSelections, false);
      proj.hasLegend = true;
    }
  });
}
|
|
|
|
// Signal-name suffixes for the translate (pan) gesture's anchor point and
// pointer delta.
var ANCHOR = '_translate_anchor';
var DELTA = '_translate_delta';
/**
 * Selection transform for the `translate` behavior of interval selections:
 * dragging pans the brush, or — when bound to scales — the scale domains.
 */
var translate = {
  has: function has(selCmpt) {
    return selCmpt.type === 'interval' && selCmpt.translate;
  },
  signals: function signals(model, selCmpt, _signals6) {
    var name = selCmpt.name;
    var hasScales = scaleBindings.has(selCmpt);
    var anchor = name + ANCHOR;
    var _selCmpt$project$hasC2 = selCmpt.project.hasChannel,
        x = _selCmpt$project$hasC2.x,
        y = _selCmpt$project$hasC2.y;
    var events = parseSelector(selCmpt.translate, 'scope');

    if (!hasScales) {
      // Without scale bindings, the drag must start on the brush mark itself.
      events = events.map(function (e) {
        return e.between[0].markname = name + BRUSH, e;
      });
    }

    _signals6.push({
      // Anchor: pointer position and current extents at drag start.
      name: anchor,
      value: {},
      on: [{
        events: events.map(function (e) {
          return e.between[0];
        }),
        update: '{x: x(unit), y: y(unit)' + (x !== undefined ? ', extent_x: ' + (hasScales ? domain(model, X) : "slice(".concat(x.signals.visual, ")")) : '') + (y !== undefined ? ', extent_y: ' + (hasScales ? domain(model, Y) : "slice(".concat(y.signals.visual, ")")) : '') + '}'
      }]
    }, {
      // Delta: pointer displacement from the anchor while dragging.
      name: name + DELTA,
      value: {},
      on: [{
        events: events,
        update: "{x: ".concat(anchor, ".x - x(unit), y: ").concat(anchor, ".y - y(unit)}")
      }]
    });

    if (x !== undefined) {
      onDelta(model, selCmpt, x, 'width', _signals6);
    }

    if (y !== undefined) {
      onDelta(model, selCmpt, y, 'height', _signals6);
    }

    return _signals6;
  }
};
|
|
|
|
// Attaches a pan update to the per-channel selection signal: whenever the
// translate delta fires, shift the channel's extent by the pointer offset
// using the appropriate Vega pan* expression (panLog/panPow for log/pow
// scales when scale-bound, panLinear otherwise).
function onDelta(model, selCmpt, proj, size, signals) {
  var _a;

  var name = selCmpt.name;
  var anchor = name + ANCHOR;
  var delta = name + DELTA;
  var channel = proj.channel;
  var hasScales = scaleBindings.has(selCmpt);
  // The signal to mutate: the data signal when panning scales, else the
  // visual (pixel-space) signal.
  var signal = signals.filter(function (s) {
    return s.name === proj.signals[hasScales ? 'data' : 'visual'];
  })[0];
  var sizeSg = model.getSizeSignalRef(size).signal;
  var scaleCmpt = model.getScaleComponent(channel);
  var scaleType = scaleCmpt.get('type');
  var sign = hasScales && channel === X ? '-' : ''; // Invert delta when panning x-scales.

  var extent = "".concat(anchor, ".extent_").concat(channel);
  // Normalize the pixel delta by the view size (scale-bound) or the
  // anchored extent span (pixel-space brush).
  var offset = "".concat(sign).concat(delta, ".").concat(channel, " / ") + (hasScales ? "".concat(sizeSg) : "span(".concat(extent, ")"));
  var panFn = !hasScales ? 'panLinear' : scaleType === 'log' ? 'panLog' : scaleType === 'pow' ? 'panPow' : 'panLinear';
  // panPow additionally needs the scale exponent (default 1).
  var update = "".concat(panFn, "(").concat(extent, ", ").concat(offset) + (hasScales && scaleType === 'pow' ? ", ".concat((_a = scaleCmpt.get('exponent')) !== null && _a !== void 0 ? _a : 1) : '') + ')';
  signal.on.push({
    events: {
      signal: delta
    },
    // Pixel-space brushes are clamped to the unit's bounds.
    update: hasScales ? update : "clampRange(".concat(update, ", 0, ").concat(sizeSg, ")")
  });
}
|
|
|
|
// Signal-name suffixes for the zoom gesture's anchor point and zoom factor.
var ANCHOR$1 = '_zoom_anchor';
var DELTA$1 = '_zoom_delta';

// Selection-transform compiler for zooming interval selections (or the
// bound scales) around the pointer position.
var zoom = {
  // Applies only to interval selections that enable `zoom`.
  has: function has(selCmpt) {
    return selCmpt.type === 'interval' && selCmpt.zoom;
  },
  // Emits anchor/delta signals for the zoom, then wires per-channel update
  // handlers via onDelta$1. Mutates and returns `_signals7`.
  signals: function signals(model, selCmpt, _signals7) {
    var name = selCmpt.name;
    var hasScales = scaleBindings.has(selCmpt);
    var delta = name + DELTA$1;
    var _selCmpt$project$hasC3 = selCmpt.project.hasChannel,
        x = _selCmpt$project$hasC3.x,
        y = _selCmpt$project$hasC3.y;
    var sx = $(model.scaleName(X));
    var sy = $(model.scaleName(Y));
    var events = parseSelector(selCmpt.zoom, 'scope');

    // Without scale bindings, the zoom must originate on the brush mark.
    if (!hasScales) {
      events = events.map(function (e) {
        return e.markname = name + BRUSH, e;
      });
    }

    _signals7.push({
      // Anchor: pointer position; inverted into data space when scale-bound.
      name: name + ANCHOR$1,
      on: [{
        events: events,
        update: !hasScales ? "{x: x(unit), y: y(unit)}" : '{' + [sx ? "x: invert(".concat(sx, ", x(unit))") : '', sy ? "y: invert(".concat(sy, ", y(unit))") : ''].filter(function (expr) {
          return !!expr;
        }).join(', ') + '}'
      }]
    }, {
      // Delta: multiplicative zoom factor derived from wheel deltaY;
      // `force: true` re-fires even when consecutive factors are equal.
      name: delta,
      on: [{
        events: events,
        force: true,
        update: 'pow(1.001, event.deltaY * pow(16, event.deltaMode))'
      }]
    });

    if (x !== undefined) {
      onDelta$1(model, selCmpt, x, 'width', _signals7);
    }

    if (y !== undefined) {
      onDelta$1(model, selCmpt, y, 'height', _signals7);
    }

    return _signals7;
  }
};
|
|
|
|
// Attaches a zoom update to the per-channel selection signal: whenever the
// zoom factor fires, rescale the channel's extent about the anchor using the
// appropriate Vega zoom* expression (zoomLog/zoomPow for log/pow scales when
// scale-bound, zoomLinear otherwise).
function onDelta$1(model, selCmpt, proj, size, signals) {
  var _a;

  var name = selCmpt.name;
  var channel = proj.channel;
  var hasScales = scaleBindings.has(selCmpt);
  // The signal to mutate: the data signal when zooming scales, else the
  // visual (pixel-space) signal.
  var signal = signals.filter(function (s) {
    return s.name === proj.signals[hasScales ? 'data' : 'visual'];
  })[0];
  var sizeSg = model.getSizeSignalRef(size).signal;
  var scaleCmpt = model.getScaleComponent(channel);
  var scaleType = scaleCmpt.get('type');
  // Base extent: the scale domain when scale-bound, otherwise the signal itself.
  var base = hasScales ? domain(model, channel) : signal.name;
  var delta = name + DELTA$1;
  var anchor = "".concat(name).concat(ANCHOR$1, ".").concat(channel);
  var zoomFn = !hasScales ? 'zoomLinear' : scaleType === 'log' ? 'zoomLog' : scaleType === 'pow' ? 'zoomPow' : 'zoomLinear';
  // zoomPow additionally needs the scale exponent (default 1).
  var update = "".concat(zoomFn, "(").concat(base, ", ").concat(anchor, ", ").concat(delta) + (hasScales && scaleType === 'pow' ? ", ".concat((_a = scaleCmpt.get('exponent')) !== null && _a !== void 0 ? _a : 1) : '') + ')';
  signal.on.push({
    events: {
      signal: delta
    },
    // Pixel-space brushes are clamped to the unit's bounds.
    update: hasScales ? update : "clampRange(".concat(update, ", 0, ").concat(sizeSg, ")")
  });
}
|
|
|
|
// All selection-transform compilers; forEachTransform applies them in this
// array order to each selection they declare support for (via `has`).
var compilers = [project, toggle, scaleBindings, legendBindings, translate, zoom, inputBindings, nearest, clear];
|
|
|
|
// Invokes `cb` once for every transform compiler that applies to the given
// selection component, in the order the compilers are listed.
function forEachTransform(selCmpt, cb) {
  for (var idx = 0; idx < compilers.length; idx++) {
    var compiler = compilers[idx];

    if (compiler.has(selCmpt)) {
      cb(compiler);
    }
  }
}
|
|
|
|
// Serializes a selection's `init` value for use in Vega output.
// - Arrays are assembled recursively (joined into a bracketed expression
//   string when `isExpr`, otherwise returned as an array of parts).
// - DateTime objects become an expression or a timestamp depending on `isExpr`.
// - Everything else is JSON-stringified for expressions, or passed through raw.
// `wrap` (default: identity) post-processes each scalar result, e.g. to wrap
// it in a scale() call.
function assembleInit(init) {
  var isExpr = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
  var wrap = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : identity;

  if (isArray(init)) {
    var assembled = init.map(function (v) {
      return assembleInit(v, isExpr, wrap);
    });
    return isExpr ? "[".concat(assembled.join(', '), "]") : assembled;
  } else if (isDateTime(init)) {
    if (isExpr) {
      return wrap(dateTimeToExpr(init));
    } else {
      return wrap(dateTimeToTimestamp(init));
    }
  }

  return isExpr ? wrap(JSON.stringify(init)) : init;
}
|
|
|
|
// Assembles all unit-level signals for the model's selections: the selection
// compiler's own signals, each applicable transform's signal rewrites, and a
// trailing `<name>_modify` signal that writes the tuple into the backing store.
// Returns the signal array with empty `on` arrays stripped.
function assembleUnitSelectionSignals(model, signals) {
  forEachSelection(model, function (selCmpt, selCompiler) {
    var _signals8;

    var name = selCmpt.name;
    // Base modify expression; transforms may wrap/extend it below.
    var modifyExpr = selCompiler.modifyExpr(model, selCmpt);

    (_signals8 = signals).push.apply(_signals8, _toConsumableArray(selCompiler.signals(model, selCmpt)));

    forEachTransform(selCmpt, function (txCompiler) {
      if (txCompiler.signals) {
        signals = txCompiler.signals(model, selCmpt, signals);
      }

      if (txCompiler.modifyExpr) {
        modifyExpr = txCompiler.modifyExpr(model, selCmpt, modifyExpr);
      }
    });
    // Fires whenever the tuple signal changes; calls Vega's modify() to
    // update the selection's store dataset.
    signals.push({
      name: name + MODIFY,
      on: [{
        events: {
          signal: selCmpt.name + TUPLE
        },
        update: "modify(".concat($(selCmpt.name + STORE), ", ").concat(modifyExpr, ")")
      }]
    });
  });
  return cleanupEmptyOnArray(signals);
}
|
|
|
|
// Adds a `facet` signal to faceted views that contain selections, so unit
// interactions can identify which facet cell the pointer is over.
// Returns the signal array with empty `on` arrays stripped.
function assembleFacetSignals(model, signals) {
  if (model.component.selection && keys(model.component.selection).length) {
    var name = $(model.getName('cell'));
    signals.unshift({
      name: 'facet',
      value: {},
      on: [{
        events: parseSelector('mousemove', 'scope'),
        // Keep the existing facet datum if valid; otherwise read it from
        // the enclosing cell group.
        update: "isTuple(facet) ? facet : group(".concat(name, ").datum")
      }]
    });
  }

  return cleanupEmptyOnArray(signals);
}
|
|
|
|
// Assembles top-level (spec-scope) signals for all selections: one signal per
// selection that resolves its store into a predicate-friendly value via
// vlSelectionResolve, plus any compiler/transform top-level signals, plus a
// shared `unit` signal tracking the unit under the pointer.
// Returns the signal array with empty `on` arrays stripped.
function assembleTopLevelSignals(model, signals) {
  var hasSelections = false;
  forEachSelection(model, function (selCmpt, selCompiler) {
    var name = selCmpt.name;
    var store = $(name + STORE);
    var hasSg = signals.filter(function (s) {
      return s.name === name;
    });

    // Only emit the resolve signal once per selection name.
    if (hasSg.length === 0) {
      // 'global' resolution unions across units.
      var resolve = selCmpt.resolve === 'global' ? 'union' : selCmpt.resolve;
      // Multi selections pass an extra `true` flag to vlSelectionResolve.
      var isMulti = selCmpt.type === 'multi' ? ', true)' : ')';
      signals.push({
        name: selCmpt.name,
        update: "".concat(VL_SELECTION_RESOLVE, "(").concat(store, ", ").concat($(resolve)).concat(isMulti)
      });
    }

    hasSelections = true;

    if (selCompiler.topLevelSignals) {
      signals = selCompiler.topLevelSignals(model, selCmpt, signals);
    }

    forEachTransform(selCmpt, function (txCompiler) {
      if (txCompiler.topLevelSignals) {
        signals = txCompiler.topLevelSignals(model, selCmpt, signals);
      }
    });
  });

  if (hasSelections) {
    var hasUnit = signals.filter(function (s) {
      return s.name === 'unit';
    });

    // Shared signal holding the unit group currently under the pointer.
    if (hasUnit.length === 0) {
      signals.unshift({
        name: 'unit',
        value: {},
        on: [{
          events: 'mousemove',
          update: 'isTuple(group()) ? group() : unit'
        }]
      });
    }
  }

  return cleanupEmptyOnArray(signals);
}
|
|
|
|
// Ensures every selection has a backing store dataset (`<name>_store`) in the
// assembled data array, seeding it with initial tuples when the selection
// declares `init`. Returns a new array; the input `data` is not mutated.
function assembleUnitSelectionData(model, data) {
  var dataCopy = _toConsumableArray(data);

  forEachSelection(model, function (selCmpt) {
    var init = {
      name: selCmpt.name + STORE
    };

    if (selCmpt.init) {
      // Tuple field descriptors, minus the internal `signals` bookkeeping.
      var fields = selCmpt.project.items.map(function (proj) {
        var rest = __rest(proj, ["signals"]);

        return rest;
      });
      // Raw (non-expression) initial values.
      var insert = selCmpt.init.map(function (i) {
        return assembleInit(i, false);
      });
      // Intervals store one tuple holding all extents; single/multi store
      // one tuple per init entry.
      init.values = selCmpt.type === 'interval' ? [{
        unit: unitName(model, {
          escape: false
        }),
        fields: fields,
        values: insert
      }] : insert.map(function (i) {
        return {
          unit: unitName(model, {
            escape: false
          }),
          fields: fields,
          values: i
        };
      });
    }

    // Don't duplicate a store that's already present.
    var contains = dataCopy.filter(function (d) {
      return d.name === selCmpt.name + STORE;
    });

    if (!contains.length) {
      dataCopy.push(init);
    }
  });
  return dataCopy;
}
|
|
|
|
// Lets each selection compiler — and then each applicable transform — rewrite
// the unit's mark array (e.g. to inject brush rects or voronoi overlays).
// Returns the (possibly replaced) mark array.
function assembleUnitSelectionMarks(model, marks) {
  forEachSelection(model, function (selCmpt, selCompiler) {
    if (selCompiler.marks) {
      marks = selCompiler.marks(model, selCmpt, marks);
    }

    forEachTransform(selCmpt, function (txCompiler) {
      if (txCompiler.marks) {
        marks = txCompiler.marks(model, selCmpt, marks);
      }
    });
  });
  return marks;
}
|
|
|
|
// Applies unit-level selection mark assembly to every unit child of a layer
// model, threading the (possibly replaced) mark array through each child.
function assembleLayerSelectionMarks(model, marks) {
  var _iterator61 = _createForOfIteratorHelper(model.children),
      _step61;

  try {
    for (_iterator61.s(); !(_step61 = _iterator61.n()).done;) {
      var child = _step61.value;

      // Only unit children carry selections that affect marks here.
      if (isUnitModel(child)) {
        marks = assembleUnitSelectionMarks(child, marks);
      }
    }
  } catch (err) {
    _iterator61.e(err);
  } finally {
    _iterator61.f();
  }

  return marks;
}
|
|
|
|
// Builds a scale-domain signal reference for a `domain: {selection: ...}`
// binding by resolving the named selection component and delegating the
// expression construction to parseSelectionBinExtent.
function assembleSelectionScaleDomain(model, extent) {
  var name = extent.selection;
  var selCmpt = model.getSelectionComponent(name, varName(name));
  return {
    signal: parseSelectionBinExtent(selCmpt, extent)
  };
}
|
|
|
|
// Removes empty `on` handler arrays from signals in place; a signal with
// `on: []` is noise in the assembled Vega output. Returns a new array of the
// same (mutated) signal objects.
function cleanupEmptyOnArray(signals) {
  return signals.map(function (signal) {
    if (signal.on && !signal.on.length) {
      delete signal.on;
    }

    return signal;
  });
}
|
|
|
|
// Mark-name suffix for the brush rect and signal-name suffix for the
// scale-change trigger used by interval selections.
var BRUSH = '_brush';
var SCALE_TRIGGER = '_scale_trigger';

// Selection compiler for interval (brush) selections.
var interval = {
  // Emits per-channel visual/data signals, an optional scale trigger, and
  // the tuple signal that feeds the selection store.
  signals: function signals(model, selCmpt) {
    var name = selCmpt.name;
    var fieldsSg = name + TUPLE_FIELDS;
    var hasScales = scaleBindings.has(selCmpt);
    var signals = [];
    var dataSignals = [];
    var scaleTriggers = [];

    // When the brush itself is draggable, new brushes must not start on the
    // existing brush mark — add a filter excluding it from each drag stream.
    if (selCmpt.translate && !hasScales) {
      var filterExpr = "!event.item || event.item.mark.name !== ".concat($(name + BRUSH));
      events(selCmpt, function (on, evt) {
        var _a;

        var filters = array((_a = evt.between[0].filter) !== null && _a !== void 0 ? _a : evt.between[0].filter = []);

        if (filters.indexOf(filterExpr) < 0) {
          filters.push(filterExpr);
        }

        return on;
      });
    }

    selCmpt.project.items.forEach(function (proj, i) {
      var channel = proj.channel;

      if (channel !== X && channel !== Y) {
        warn('Interval selections only support x and y encoding channels.');
        return;
      }

      var init = selCmpt.init ? selCmpt.init[i] : null;
      var cs = channelSignals(model, selCmpt, proj, init);
      var dname = proj.signals.data;
      var vname = proj.signals.visual;
      var scaleName = $(model.scaleName(channel));
      var scaleType = model.getScaleComponent(channel).get('type');
      // Coerce inverted values to numbers for continuous domains so the
      // trigger comparison below behaves consistently.
      var toNum = hasContinuousDomain(scaleType) ? '+' : '';
      signals.push.apply(signals, _toConsumableArray(cs));
      dataSignals.push(dname);
      // Trigger expression: true while the data extent still matches the
      // inverted visual extent (i.e., the scale change came from this brush).
      scaleTriggers.push({
        scaleName: model.scaleName(channel),
        expr: "(!isArray(".concat(dname, ") || ") + "(".concat(toNum, "invert(").concat(scaleName, ", ").concat(vname, ")[0] === ").concat(toNum).concat(dname, "[0] && ") + "".concat(toNum, "invert(").concat(scaleName, ", ").concat(vname, ")[1] === ").concat(toNum).concat(dname, "[1]))")
      });
    }); // Proxy scale reactions to ensure that an infinite loop doesn't occur
    // when an interval selection filter touches the scale.

    if (!hasScales) {
      signals.push({
        name: name + SCALE_TRIGGER,
        value: {},
        on: [{
          events: scaleTriggers.map(function (t) {
            return {
              scale: t.scaleName
            };
          }),
          update: scaleTriggers.map(function (t) {
            return t.expr;
          }).join(' && ') + " ? ".concat(name + SCALE_TRIGGER, " : {}")
        }]
      });
    } // Only add an interval to the store if it has valid data extents. Data extents
    // are set to null if pixel extents are equal to account for intervals over
    // ordinal/nominal domains which, when inverted, will still produce a valid datum.


    var init = selCmpt.init;
    var update = "unit: ".concat(unitName(model), ", fields: ").concat(fieldsSg, ", values");
    return signals.concat(Object.assign(Object.assign({
      name: name + TUPLE
    }, init ? {
      init: "{".concat(update, ": ").concat(assembleInit(init), "}")
    } : {}), {
      on: [{
        events: [{
          signal: dataSignals.join(' || ')
        }],
        update: dataSignals.join(' && ') + " ? {".concat(update, ": [").concat(dataSignals, "]} : null")
      }]
    }));
  },
  // Store-modify key: global resolution replaces unconditionally; otherwise
  // the tuple is scoped to this unit.
  modifyExpr: function modifyExpr(model, selCmpt) {
    var tpl = selCmpt.name + TUPLE;
    return tpl + ', ' + (selCmpt.resolve === 'global' ? 'true' : "{unit: ".concat(unitName(model), "}"));
  },
  // Injects the brush rect marks (background + interactive overlay) around
  // the unit's existing marks.
  marks: function marks(model, selCmpt, _marks2) {
    var name = selCmpt.name;
    var _selCmpt$project$hasC4 = selCmpt.project.hasChannel,
        x = _selCmpt$project$hasC4.x,
        y = _selCmpt$project$hasC4.y;
    var xvname = x && x.signals.visual;
    var yvname = y && y.signals.visual;
    var store = "data(".concat($(selCmpt.name + STORE), ")"); // Do not add a brush if we're binding to scales.

    if (scaleBindings.has(selCmpt)) {
      return _marks2;
    }

    // Brush geometry from the visual signals; missing channels span the
    // full width/height of the group.
    var update = {
      x: x !== undefined ? {
        signal: "".concat(xvname, "[0]")
      } : {
        value: 0
      },
      y: y !== undefined ? {
        signal: "".concat(yvname, "[0]")
      } : {
        value: 0
      },
      x2: x !== undefined ? {
        signal: "".concat(xvname, "[1]")
      } : {
        field: {
          group: 'width'
        }
      },
      y2: y !== undefined ? {
        signal: "".concat(yvname, "[1]")
      } : {
        field: {
          group: 'height'
        }
      }
    }; // If the selection is resolved to global, only a single interval is in
    // the store. Wrap brush mark's encodings with a production rule to test
    // this based on the `unit` property. Hide the brush mark if it corresponds
    // to a unit different from the one in the store.

    if (selCmpt.resolve === 'global') {
      var _iterator62 = _createForOfIteratorHelper(keys(update)),
          _step62;

      try {
        for (_iterator62.s(); !(_step62 = _iterator62.n()).done;) {
          var key = _step62.value;
          update[key] = [Object.assign({
            test: "".concat(store, ".length && ").concat(store, "[0].unit === ").concat(unitName(model))
          }, update[key]), {
            value: 0
          }];
        }
      } catch (err) {
        _iterator62.e(err);
      } finally {
        _iterator62.f();
      }
    } // Two brush marks ensure that fill colors and other aesthetic choices do
    // not interefere with the core marks, but that the brushed region can still
    // be interacted with (e.g., dragging it around).


    var _a = selCmpt.mark,
        fill = _a.fill,
        fillOpacity = _a.fillOpacity,
        cursor = _a.cursor,
        stroke = __rest(_a, ["fill", "fillOpacity", "cursor"]);

    // Stroke properties only render while the brush has nonzero extent.
    var vgStroke = keys(stroke).reduce(function (def, k) {
      def[k] = [{
        test: [x !== undefined && "".concat(xvname, "[0] !== ").concat(xvname, "[1]"), y !== undefined && "".concat(yvname, "[0] !== ").concat(yvname, "[1]")].filter(function (t) {
          return t;
        }).join(' && '),
        value: stroke[k]
      }, {
        value: null
      }];
      return def;
    }, {});
    return [{
      name: name + BRUSH + '_bg',
      type: 'rect',
      clip: true,
      encode: {
        enter: {
          fill: {
            value: fill
          },
          fillOpacity: {
            value: fillOpacity
          }
        },
        update: update
      }
    }].concat(_toConsumableArray(_marks2), [{
      name: name + BRUSH,
      type: 'rect',
      clip: true,
      encode: {
        enter: Object.assign(Object.assign({}, cursor ? {
          cursor: {
            value: cursor
          }
        } : {}), {
          fill: {
            value: 'transparent'
          }
        }),
        update: Object.assign(Object.assign({}, update), vgStroke)
      }
    }]);
  }
};
|
|
/**
 * Returns the visual and data signals for an interval selection.
 *
 * The visual signal holds pixel extents updated by the drag gesture; the
 * data signal holds the inverted (data-space) extents. When the selection is
 * bound to scales, only an inert data signal is emitted (the scale domain
 * drives it instead).
 */
function channelSignals(model, selCmpt, proj, init) {
  var channel = proj.channel;
  var vname = proj.signals.visual;
  var dname = proj.signals.data;
  var hasScales = scaleBindings.has(selCmpt);
  var scaleName = $(model.scaleName(channel));
  var scale = model.getScaleComponent(channel);
  var scaleType = scale ? scale.get('type') : undefined;

  // Wraps an expression in a scale() call for this channel's scale.
  var scaled = function scaled(str) {
    return "scale(".concat(scaleName, ", ").concat(str, ")");
  };

  var size = model.getSizeSignalRef(channel === X ? 'width' : 'height').signal;
  var coord = "".concat(channel, "(unit)");
  // Drag start collapses the extent to a point; subsequent drag events
  // extend the second endpoint, clamped to the unit bounds.
  var on = events(selCmpt, function (def, evt) {
    return [].concat(_toConsumableArray(def), [{
      events: evt.between[0],
      update: "[".concat(coord, ", ").concat(coord, "]")
    }, {
      events: evt,
      update: "[".concat(vname, "[0], clamp(").concat(coord, ", 0, ").concat(size, ")]")
    } // Brush End
    ]);
  }); // React to pan/zooms of continuous scales. Non-continuous scales
  // (band, point) cannot be pan/zoomed and any other changes
  // to their domains (e.g., filtering) should clear the brushes.

  on.push({
    events: {
      signal: selCmpt.name + SCALE_TRIGGER
    },
    update: hasContinuousDomain(scaleType) ? "[".concat(scaled("".concat(dname, "[0]")), ", ").concat(scaled("".concat(dname, "[1]")), "]") : "[0, 0]"
  });
  return hasScales ? [{
    name: dname,
    on: []
  }] : [Object.assign(Object.assign({
    name: vname
  }, init ? {
    init: assembleInit(init, true, scaled)
  } : {
    value: []
  }), {
    on: on
  }), Object.assign(Object.assign({
    name: dname
  }, init ? {
    init: assembleInit(init)
  } : {}), {
    on: [{
      events: {
        signal: vname
      },
      // A zero-width pixel extent inverts to null (no valid data extent).
      update: "".concat(vname, "[0] === ").concat(vname, "[1] ? null : invert(").concat(scaleName, ", ").concat(vname, ")")
    }]
  })];
}
|
|
|
|
// Folds `cb` over the selection's event streams, accumulating handler
// definitions. Streams without a `between` clause are not ordered drag
// streams and are skipped with a warning.
function events(selCmpt, cb) {
  var accumulated = [];

  for (var idx = 0; idx < selCmpt.events.length; idx++) {
    var evt = selCmpt.events[idx];

    if (evt.between) {
      accumulated = cb(accumulated, evt);
    } else {
      warn("".concat(evt, " is not an ordered event stream for interval selections."));
    }
  }

  return accumulated;
}
|
|
|
|
function singleOrMultiSignals(model, selCmpt) {
|
|
var name = selCmpt.name;
|
|
var fieldsSg = name + TUPLE_FIELDS;
|
|
var project = selCmpt.project;
|
|
var datum = '(item().isVoronoi ? datum.datum : datum)';
|
|
var values = project.items.map(function (p) {
|
|
var fieldDef = model.fieldDef(p.channel); // Binned fields should capture extents, for a range test against the raw field.
|
|
|
|
return fieldDef && fieldDef.bin ? "[".concat(datum, "[").concat($(model.vgField(p.channel, {})), "], ") + "".concat(datum, "[").concat($(model.vgField(p.channel, {
|
|
binSuffix: 'end'
|
|
})), "]]") : "".concat(datum, "[").concat($(p.field), "]");
|
|
}).join(', '); // Only add a discrete selection to the store if a datum is present _and_
|
|
// the interaction isn't occurring on a group mark. This guards against
|
|
// polluting interactive state with invalid values in faceted displays
|
|
// as the group marks are also data-driven. We force the update to account
|
|
// for constant null states but varying toggles (e.g., shift-click in
|
|
// whitespace followed by a click in whitespace; the store should only
|
|
// be cleared on the second click).
|
|
|
|
var update = "unit: ".concat(unitName(model), ", fields: ").concat(fieldsSg, ", values");
|
|
var events = selCmpt.events;
|
|
return [{
|
|
name: name + TUPLE,
|
|
on: events ? [{
|
|
events: events,
|
|
update: "datum && item().mark.marktype !== 'group' ? {".concat(update, ": [").concat(values, "]} : null"),
|
|
force: true
|
|
}] : []
|
|
}];
|
|
}
|
|
|
|
// Compiler for "multi" point selections. Signal assembly is shared with
// "single"; the modify expression differs only in the global-resolve key:
// `null` lets tuples accumulate in the store across interactions.
var multi = {
  signals: singleOrMultiSignals,
  modifyExpr: function modifyExpr(model, selCmpt) {
    var scope = selCmpt.resolve === 'global' ? 'null' : "{unit: ".concat(unitName(model), "}");
    return selCmpt.name + TUPLE + ', ' + scope;
  }
};

// Compiler for "single" point selections. With global resolve, `true`
// makes modify() replace the store contents with the latest tuple.
var single = {
  signals: singleOrMultiSignals,
  modifyExpr: function modifyExpr(model, selCmpt) {
    var scope = selCmpt.resolve === 'global' ? 'true' : "{unit: ".concat(unitName(model), "}");
    return selCmpt.name + TUPLE + ', ' + scope;
  }
};
|
|
// Suffixes for the per-selection dataset and signal names.
var STORE = '_store';
var TUPLE = '_tuple';
var MODIFY = '_modify';
// Name of the Vega runtime function that resolves a selection store.
var VL_SELECTION_RESOLVE = 'vlSelectionResolve';
// Selection compilers keyed by selection type.
var compilers$1 = {
  single: single,
  multi: multi,
  interval: interval
};
|
|
|
|
// Invokes `cb(selection, compiler)` for every selection component on the
// model. A callback returning exactly `true` stops the iteration early.
function forEachSelection(model, cb) {
  var selections = model.component.selection;

  if (selections) {
    var _iterator63 = _createForOfIteratorHelper(vals(selections)),
        _step63;

    try {
      for (_iterator63.s(); !(_step63 = _iterator63.n()).done;) {
        var sel = _step63.value;
        // Pair each selection with the compiler for its type.
        var success = cb(sel, compilers$1[sel.type]);
        if (success === true) break;
      }
    } catch (err) {
      _iterator63.e(err);
    } finally {
      _iterator63.f();
    }
  }
}
|
|
|
|
// Walks up the model hierarchy and returns the nearest enclosing facet
// model, or a falsy value (the end of the parent chain) if there is none.
function getFacetModel(model) {
  var ancestor = model.parent;

  while (ancestor && !isFacetModel(ancestor)) {
    ancestor = ancestor.parent;
  }

  return ancestor;
}
|
|
|
|
// Builds the (possibly expression-valued) name identifying a unit view for
// selection tuples. Inside a facet, the facet field values are appended so
// each cell gets a distinct unit name. With `escape: true` (default) the
// base name is emitted as a quoted string literal for use in expressions.
function unitName(model) {
  var _ref52 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {
    escape: true
  },
      escape = _ref52.escape;

  var name = escape ? $(model.name) : model.name;
  var facetModel = getFacetModel(model);

  if (facetModel) {
    var facet = facetModel.facet;

    var _iterator64 = _createForOfIteratorHelper(FACET_CHANNELS),
        _step64;

    try {
      for (_iterator64.s(); !(_step64 = _iterator64.n()).done;) {
        var channel = _step64.value;

        // Append each active facet channel's field value to the name.
        if (facet[channel]) {
          name += " + '__facet_".concat(channel, "_' + (facet[").concat($(facetModel.vgField(channel)), "])");
        }
      }
    } catch (err) {
      _iterator64.e(err);
    } finally {
      _iterator64.f();
    }
  }

  return name;
}
|
|
|
|
// True when any selection on the model projects over the internal selection
// identifier field, meaning an identifier transform must be injected.
function requiresSelectionId(model) {
  var needsId = false;
  forEachSelection(model, function (selCmpt) {
    if (needsId) {
      return;
    }

    needsId = selCmpt.project.items.some(function (proj) {
      return proj.field === SELECTION_ID;
    });
  });
  return needsId;
}
|
|
|
|
// AST node-type tags for the expression parser below.
var RawCode = 'RawCode';
var Literal = 'Literal';
var Property = 'Property';
var Identifier = 'Identifier';
var ArrayExpression = 'ArrayExpression';
var BinaryExpression = 'BinaryExpression';
var CallExpression = 'CallExpression';
var ConditionalExpression = 'ConditionalExpression';
var LogicalExpression = 'LogicalExpression';
var MemberExpression = 'MemberExpression';
var ObjectExpression = 'ObjectExpression';
var UnaryExpression = 'UnaryExpression';

// Minimal AST node; parser code attaches type-specific fields
// (left/right, elements, arguments, etc.) after construction.
function ASTNode(type) {
  this.type = type;
}
|
|
|
|
// Pre-order traversal: calls `visitor` on this node, then recursively on its
// children. A truthy visitor result aborts the walk; returns 1 when aborted,
// undefined otherwise.
ASTNode.prototype.visit = function (visitor) {
  var node = this,
      c,
      i,
      n;
  if (visitor(node)) return 1;

  for (c = children(node), i = 0, n = c.length; i < n; ++i) {
    if (c[i].visit(visitor)) return 1;
  }
};
|
|
|
|
// Returns the child AST nodes of `node` in evaluation order; leaf node types
// (Identifier, Literal, RawCode) and unknown types yield an empty array.
function children(node) {
  var kind = node.type;

  if (kind === ArrayExpression) {
    return node.elements;
  }

  if (kind === BinaryExpression || kind === LogicalExpression) {
    return [node.left, node.right];
  }

  if (kind === CallExpression) {
    // Callee first, then arguments.
    var list = node.arguments.slice();
    list.unshift(node.callee);
    return list;
  }

  if (kind === ConditionalExpression) {
    return [node.test, node.consequent, node.alternate];
  }

  if (kind === MemberExpression) {
    return [node.object, node.property];
  }

  if (kind === ObjectExpression) {
    return node.properties;
  }

  if (kind === Property) {
    return [node.key, node.value];
  }

  if (kind === UnaryExpression) {
    return [node.argument];
  }

  // Identifier, Literal, RawCode, and anything unrecognized are leaves.
  return [];
}
|
|
/*
|
|
The following expression parser is based on Esprima (http://esprima.org/).
|
|
Original header comment and license for Esprima is included here:
|
|
Copyright (C) 2013 Ariya Hidayat <ariya.hidayat@gmail.com>
|
|
Copyright (C) 2013 Thaddee Tyl <thaddee.tyl@gmail.com>
|
|
Copyright (C) 2013 Mathias Bynens <mathias@qiwi.be>
|
|
Copyright (C) 2012 Ariya Hidayat <ariya.hidayat@gmail.com>
|
|
Copyright (C) 2012 Mathias Bynens <mathias@qiwi.be>
|
|
Copyright (C) 2012 Joost-Wim Boekesteijn <joost-wim@boekesteijn.nl>
|
|
Copyright (C) 2012 Kris Kowal <kris.kowal@cixar.com>
|
|
Copyright (C) 2012 Yusuke Suzuki <utatane.tea@gmail.com>
|
|
Copyright (C) 2012 Arpad Borsos <arpad.borsos@googlemail.com>
|
|
Copyright (C) 2011 Ariya Hidayat <ariya.hidayat@gmail.com>
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions are met:
|
|
* Redistributions of source code must retain the above copyright
|
|
notice, this list of conditions and the following disclaimer.
|
|
* Redistributions in binary form must reproduce the above copyright
|
|
notice, this list of conditions and the following disclaimer in the
|
|
documentation and/or other materials provided with the distribution.
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
|
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
|
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
|
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
*/
|
|
|
|
|
|
// Mutable scanner state shared by the expression tokenizer/parser below:
// `source` is the input string, `index`/`length` track scan position, and
// `lookahead` holds the next token.
var TokenName, source, index, length, lookahead;
// Numeric token-type tags.
var TokenBooleanLiteral = 1,
    TokenEOF = 2,
    TokenIdentifier = 3,
    TokenKeyword = 4,
    TokenNullLiteral = 5,
    TokenNumericLiteral = 6,
    TokenPunctuator = 7,
    TokenStringLiteral = 8,
    TokenRegularExpression = 9;
// Human-readable names for each token type (presumably used when reporting
// parse errors — the parser body follows further down the file).
TokenName = {};
TokenName[TokenBooleanLiteral] = 'Boolean';
TokenName[TokenEOF] = '<end>';
TokenName[TokenIdentifier] = 'Identifier';
TokenName[TokenKeyword] = 'Keyword';
TokenName[TokenNullLiteral] = 'Null';
TokenName[TokenNumericLiteral] = 'Numeric';
TokenName[TokenPunctuator] = 'Punctuator';
TokenName[TokenStringLiteral] = 'String';
TokenName[TokenRegularExpression] = 'RegularExpression';
|
|
// AST node-type name constants used by the parser when constructing nodes.
var SyntaxArrayExpression = 'ArrayExpression',
    SyntaxBinaryExpression = 'BinaryExpression',
    SyntaxCallExpression = 'CallExpression',
    SyntaxConditionalExpression = 'ConditionalExpression',
    SyntaxIdentifier = 'Identifier',
    SyntaxLiteral = 'Literal',
    SyntaxLogicalExpression = 'LogicalExpression',
    SyntaxMemberExpression = 'MemberExpression',
    SyntaxObjectExpression = 'ObjectExpression',
    SyntaxProperty = 'Property',
    SyntaxUnaryExpression = 'UnaryExpression'; // Error messages should be identical to V8.

// Parse-error message templates (%0 is substituted with the offending token).
var MessageUnexpectedToken = 'Unexpected token %0',
    MessageUnexpectedNumber = 'Unexpected number',
    MessageUnexpectedString = 'Unexpected string',
    MessageUnexpectedIdentifier = 'Unexpected identifier',
    MessageUnexpectedReserved = 'Unexpected reserved word',
    MessageUnexpectedEOS = 'Unexpected end of input',
    MessageInvalidRegExp = 'Invalid regular expression',
    MessageUnterminatedRegExp = 'Invalid regular expression: missing /',
    MessageStrictOctalLiteral = 'Octal literals are not allowed in strict mode.',
    MessageStrictDuplicateProperty = 'Duplicate data property in object literal not allowed in strict mode';
// Sentinel token/message values used by the tokenizer.
var ILLEGAL$1 = 'ILLEGAL',
    DISABLED = 'Disabled.'; // See also tools/generate-unicode-regex.py.
|
|
|
|
// Generated character classes for non-ASCII identifier characters
// (the ASCII ranges are checked inline in isIdentifierStart/Part).
var RegexNonAsciiIdentifierStart = new RegExp("[\\xAA\\xB5\\xBA\\xC0-\\xD6\\xD8-\\xF6\\xF8-\\u02C1\\u02C6-\\u02D1\\u02E0-\\u02E4\\u02EC\\u02EE\\u0370-\\u0374\\u0376\\u0377\\u037A-\\u037D\\u037F\\u0386\\u0388-\\u038A\\u038C\\u038E-\\u03A1\\u03A3-\\u03F5\\u03F7-\\u0481\\u048A-\\u052F\\u0531-\\u0556\\u0559\\u0561-\\u0587\\u05D0-\\u05EA\\u05F0-\\u05F2\\u0620-\\u064A\\u066E\\u066F\\u0671-\\u06D3\\u06D5\\u06E5\\u06E6\\u06EE\\u06EF\\u06FA-\\u06FC\\u06FF\\u0710\\u0712-\\u072F\\u074D-\\u07A5\\u07B1\\u07CA-\\u07EA\\u07F4\\u07F5\\u07FA\\u0800-\\u0815\\u081A\\u0824\\u0828\\u0840-\\u0858\\u08A0-\\u08B2\\u0904-\\u0939\\u093D\\u0950\\u0958-\\u0961\\u0971-\\u0980\\u0985-\\u098C\\u098F\\u0990\\u0993-\\u09A8\\u09AA-\\u09B0\\u09B2\\u09B6-\\u09B9\\u09BD\\u09CE\\u09DC\\u09DD\\u09DF-\\u09E1\\u09F0\\u09F1\\u0A05-\\u0A0A\\u0A0F\\u0A10\\u0A13-\\u0A28\\u0A2A-\\u0A30\\u0A32\\u0A33\\u0A35\\u0A36\\u0A38\\u0A39\\u0A59-\\u0A5C\\u0A5E\\u0A72-\\u0A74\\u0A85-\\u0A8D\\u0A8F-\\u0A91\\u0A93-\\u0AA8\\u0AAA-\\u0AB0\\u0AB2\\u0AB3\\u0AB5-\\u0AB9\\u0ABD\\u0AD0\\u0AE0\\u0AE1\\u0B05-\\u0B0C\\u0B0F\\u0B10\\u0B13-\\u0B28\\u0B2A-\\u0B30\\u0B32\\u0B33\\u0B35-\\u0B39\\u0B3D\\u0B5C\\u0B5D\\u0B5F-\\u0B61\\u0B71\\u0B83\\u0B85-\\u0B8A\\u0B8E-\\u0B90\\u0B92-\\u0B95\\u0B99\\u0B9A\\u0B9C\\u0B9E\\u0B9F\\u0BA3\\u0BA4\\u0BA8-\\u0BAA\\u0BAE-\\u0BB9\\u0BD0\\u0C05-\\u0C0C\\u0C0E-\\u0C10\\u0C12-\\u0C28\\u0C2A-\\u0C39\\u0C3D\\u0C58\\u0C59\\u0C60\\u0C61\\u0C85-\\u0C8C\\u0C8E-\\u0C90\\u0C92-\\u0CA8\\u0CAA-\\u0CB3\\u0CB5-\\u0CB9\\u0CBD\\u0CDE\\u0CE0\\u0CE1\\u0CF1\\u0CF2\\u0D05-\\u0D0C\\u0D0E-\\u0D10\\u0D12-\\u0D3A\\u0D3D\\u0D4E\\u0D60\\u0D61\\u0D7A-\\u0D7F\\u0D85-\\u0D96\\u0D9A-\\u0DB1\\u0DB3-\\u0DBB\\u0DBD\\u0DC0-\\u0DC6\\u0E01-\\u0E30\\u0E32\\u0E33\\u0E40-\\u0E46\\u0E81\\u0E82\\u0E84\\u0E87\\u0E88\\u0E8A\\u0E8D\\u0E94-\\u0E97\\u0E99-\\u0E9F\\u0EA1-\\u0EA3\\u0EA5\\u0EA7\\u0EAA\\u0EAB\\u0EAD-\\u0EB0\\u0EB2\\u0EB3\\u0EBD\\u0EC0-\\u0EC4\\u0EC6\\u0EDC-\\u0EDF\\u0F00\\u0F40-\\u0F47\\u0F49-\\u0F6C\\u0F88-\\u0F8C\\u1000-\\u102A\\u103F\\u1050-\\u1055\\u105A-\\u105D\\u1061\\u1065\\u1066\\u106E-\\u1070\\u1075-\\u1081\\u108E\\u10A0-\\u10C5\\u10C7\\u10CD\\u10D0-\\u10FA\\u10FC-\\u1248\\u124A-\\u124D\\u1250-\\u1256\\u1258\\u125A-\\u125D\\u1260-\\u1288\\u128A-\\u128D\\u1290-\\u12B0\\u12B2-\\u12B5\\u12B8-\\u12BE\\u12C0\\u12C2-\\u12C5\\u12C8-\\u12D6\\u12D8-\\u1310\\u1312-\\u1315\\u1318-\\u135A\\u1380-\\u138F\\u13A0-\\u13F4\\u1401-\\u166C\\u166F-\\u167F\\u1681-\\u169A\\u16A0-\\u16EA\\u16EE-\\u16F8\\u1700-\\u170C\\u170E-\\u1711\\u1720-\\u1731\\u1740-\\u1751\\u1760-\\u176C\\u176E-\\u1770\\u1780-\\u17B3\\u17D7\\u17DC\\u1820-\\u1877\\u1880-\\u18A8\\u18AA\\u18B0-\\u18F5\\u1900-\\u191E\\u1950-\\u196D\\u1970-\\u1974\\u1980-\\u19AB\\u19C1-\\u19C7\\u1A00-\\u1A16\\u1A20-\\u1A54\\u1AA7\\u1B05-\\u1B33\\u1B45-\\u1B4B\\u1B83-\\u1BA0\\u1BAE\\u1BAF\\u1BBA-\\u1BE5\\u1C00-\\u1C23\\u1C4D-\\u1C4F\\u1C5A-\\u1C7D\\u1CE9-\\u1CEC\\u1CEE-\\u1CF1\\u1CF5\\u1CF6\\u1D00-\\u1DBF\\u1E00-\\u1F15\\u1F18-\\u1F1D\\u1F20-\\u1F45\\u1F48-\\u1F4D\\u1F50-\\u1F57\\u1F59\\u1F5B\\u1F5D\\u1F5F-\\u1F7D\\u1F80-\\u1FB4\\u1FB6-\\u1FBC\\u1FBE\\u1FC2-\\u1FC4\\u1FC6-\\u1FCC\\u1FD0-\\u1FD3\\u1FD6-\\u1FDB\\u1FE0-\\u1FEC\\u1FF2-\\u1FF4\\u1FF6-\\u1FFC\\u2071\\u207F\\u2090-\\u209C\\u2102\\u2107\\u210A-\\u2113\\u2115\\u2119-\\u211D\\u2124\\u2126\\u2128\\u212A-\\u212D\\u212F-\\u2139\\u213C-\\u213F\\u2145-\\u2149\\u214E\\u2160-\\u2188\\u2C00-\\u2C2E\\u2C30-\\u2C5E\\u2C60-\\u2CE4\\u2CEB-\\u2CEE\\u2CF2\\u2CF3\\u2D00-\\u2D25\\u2D27\\u2D2D\\u2D30-\\u2D67\\u2D6F\\u2D80-\\u2D96\\u2DA0-\\u2DA6\\u2DA8-\\u2DAE\\u2DB0-\\u2DB6\\u2DB8-\\u2DBE\\u2DC0-\\u2DC6\\u2DC8-\\u2DCE\\u2DD0-\\u2DD6\\u2DD8-\\u2DDE\\u2E2F\\u3005-\\u3007\\u3021-\\u3029\\u3031-\\u3035\\u3038-\\u303C\\u3041-\\u3096\\u309D-\\u309F\\u30A1-\\u30FA\\u30FC-\\u30FF\\u3105-\\u312D\\u3131-\\u318E\\u31A0-\\u31BA\\u31F0-\\u31FF\\u3400-\\u4DB5\\u4E00-\\u9FCC\\uA000-\\uA48C\\uA4D0-\\uA4FD\\uA500-\\uA60C\\uA610-\\uA61F\\uA62A\\uA62B\\uA640-\\uA66E\\uA67F-\\uA69D\\uA6A0-\\uA6EF\\uA717-\\uA71F\\uA722-\\uA788\\uA78B-\\uA78E\\uA790-\\uA7AD\\uA7B0\\uA7B1\\uA7F7-\\uA801\\uA803-\\uA805\\uA807-\\uA80A\\uA80C-\\uA822\\uA840-\\uA873\\uA882-\\uA8B3\\uA8F2-\\uA8F7\\uA8FB\\uA90A-\\uA925\\uA930-\\uA946\\uA960-\\uA97C\\uA984-\\uA9B2\\uA9CF\\uA9E0-\\uA9E4\\uA9E6-\\uA9EF\\uA9FA-\\uA9FE\\uAA00-\\uAA28\\uAA40-\\uAA42\\uAA44-\\uAA4B\\uAA60-\\uAA76\\uAA7A\\uAA7E-\\uAAAF\\uAAB1\\uAAB5\\uAAB6\\uAAB9-\\uAABD\\uAAC0\\uAAC2\\uAADB-\\uAADD\\uAAE0-\\uAAEA\\uAAF2-\\uAAF4\\uAB01-\\uAB06\\uAB09-\\uAB0E\\uAB11-\\uAB16\\uAB20-\\uAB26\\uAB28-\\uAB2E\\uAB30-\\uAB5A\\uAB5C-\\uAB5F\\uAB64\\uAB65\\uABC0-\\uABE2\\uAC00-\\uD7A3\\uD7B0-\\uD7C6\\uD7CB-\\uD7FB\\uF900-\\uFA6D\\uFA70-\\uFAD9\\uFB00-\\uFB06\\uFB13-\\uFB17\\uFB1D\\uFB1F-\\uFB28\\uFB2A-\\uFB36\\uFB38-\\uFB3C\\uFB3E\\uFB40\\uFB41\\uFB43\\uFB44\\uFB46-\\uFBB1\\uFBD3-\\uFD3D\\uFD50-\\uFD8F\\uFD92-\\uFDC7\\uFDF0-\\uFDFB\\uFE70-\\uFE74\\uFE76-\\uFEFC\\uFF21-\\uFF3A\\uFF41-\\uFF5A\\uFF66-\\uFFBE\\uFFC2-\\uFFC7\\uFFCA-\\uFFCF\\uFFD2-\\uFFD7\\uFFDA-\\uFFDC]"),
    // eslint-disable-next-line no-misleading-character-class
    RegexNonAsciiIdentifierPart = new RegExp("[\\xAA\\xB5\\xBA\\xC0-\\xD6\\xD8-\\xF6\\xF8-\\u02C1\\u02C6-\\u02D1\\u02E0-\\u02E4\\u02EC\\u02EE\\u0300-\\u0374\\u0376\\u0377\\u037A-\\u037D\\u037F\\u0386\\u0388-\\u038A\\u038C\\u038E-\\u03A1\\u03A3-\\u03F5\\u03F7-\\u0481\\u0483-\\u0487\\u048A-\\u052F\\u0531-\\u0556\\u0559\\u0561-\\u0587\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u05D0-\\u05EA\\u05F0-\\u05F2\\u0610-\\u061A\\u0620-\\u0669\\u066E-\\u06D3\\u06D5-\\u06DC\\u06DF-\\u06E8\\u06EA-\\u06FC\\u06FF\\u0710-\\u074A\\u074D-\\u07B1\\u07C0-\\u07F5\\u07FA\\u0800-\\u082D\\u0840-\\u085B\\u08A0-\\u08B2\\u08E4-\\u0963\\u0966-\\u096F\\u0971-\\u0983\\u0985-\\u098C\\u098F\\u0990\\u0993-\\u09A8\\u09AA-\\u09B0\\u09B2\\u09B6-\\u09B9\\u09BC-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CE\\u09D7\\u09DC\\u09DD\\u09DF-\\u09E3\\u09E6-\\u09F1\\u0A01-\\u0A03\\u0A05-\\u0A0A\\u0A0F\\u0A10\\u0A13-\\u0A28\\u0A2A-\\u0A30\\u0A32\\u0A33\\u0A35\\u0A36\\u0A38\\u0A39\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A59-\\u0A5C\\u0A5E\\u0A66-\\u0A75\\u0A81-\\u0A83\\u0A85-\\u0A8D\\u0A8F-\\u0A91\\u0A93-\\u0AA8\\u0AAA-\\u0AB0\\u0AB2\\u0AB3\\u0AB5-\\u0AB9\\u0ABC-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AD0\\u0AE0-\\u0AE3\\u0AE6-\\u0AEF\\u0B01-\\u0B03\\u0B05-\\u0B0C\\u0B0F\\u0B10\\u0B13-\\u0B28\\u0B2A-\\u0B30\\u0B32\\u0B33\\u0B35-\\u0B39\\u0B3C-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B5C\\u0B5D\\u0B5F-\\u0B63\\u0B66-\\u0B6F\\u0B71\\u0B82\\u0B83\\u0B85-\\u0B8A\\u0B8E-\\u0B90\\u0B92-\\u0B95\\u0B99\\u0B9A\\u0B9C\\u0B9E\\u0B9F\\u0BA3\\u0BA4\\u0BA8-\\u0BAA\\u0BAE-\\u0BB9\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD0\\u0BD7\\u0BE6-\\u0BEF\\u0C00-\\u0C03\\u0C05-\\u0C0C\\u0C0E-\\u0C10\\u0C12-\\u0C28\\u0C2A-\\u0C39\\u0C3D-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C58\\u0C59\\u0C60-\\u0C63\\u0C66-\\u0C6F\\u0C81-\\u0C83\\u0C85-\\u0C8C\\u0C8E-\\u0C90\\u0C92-\\u0CA8\\u0CAA-\\u0CB3\\u0CB5-\\u0CB9\\u0CBC-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CDE\\u0CE0-\\u0CE3\\u0CE6-\\u0CEF\\u0CF1\\u0CF2\\u0D01-\\u0D03\\u0D05-\\u0D0C\\u0D0E-\\u0D10\\u0D12-\\u0D3A\\u0D3D-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4E\\u0D57\\u0D60-\\u0D63\\u0D66-\\u0D6F\\u0D7A-\\u0D7F\\u0D82\\u0D83\\u0D85-\\u0D96\\u0D9A-\\u0DB1\\u0DB3-\\u0DBB\\u0DBD\\u0DC0-\\u0DC6\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DE6-\\u0DEF\\u0DF2\\u0DF3\\u0E01-\\u0E3A\\u0E40-\\u0E4E\\u0E50-\\u0E59\\u0E81\\u0E82\\u0E84\\u0E87\\u0E88\\u0E8A\\u0E8D\\u0E94-\\u0E97\\u0E99-\\u0E9F\\u0EA1-\\u0EA3\\u0EA5\\u0EA7\\u0EAA\\u0EAB\\u0EAD-\\u0EB9\\u0EBB-\\u0EBD\\u0EC0-\\u0EC4\\u0EC6\\u0EC8-\\u0ECD\\u0ED0-\\u0ED9\\u0EDC-\\u0EDF\\u0F00\\u0F18\\u0F19\\u0F20-\\u0F29\\u0F35\\u0F37\\u0F39\\u0F3E-\\u0F47\\u0F49-\\u0F6C\\u0F71-\\u0F84\\u0F86-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u1000-\\u1049\\u1050-\\u109D\\u10A0-\\u10C5\\u10C7\\u10CD\\u10D0-\\u10FA\\u10FC-\\u1248\\u124A-\\u124D\\u1250-\\u1256\\u1258\\u125A-\\u125D\\u1260-\\u1288\\u128A-\\u128D\\u1290-\\u12B0\\u12B2-\\u12B5\\u12B8-\\u12BE\\u12C0\\u12C2-\\u12C5\\u12C8-\\u12D6\\u12D8-\\u1310\\u1312-\\u1315\\u1318-\\u135A\\u135D-\\u135F\\u1380-\\u138F\\u13A0-\\u13F4\\u1401-\\u166C\\u166F-\\u167F\\u1681-\\u169A\\u16A0-\\u16EA\\u16EE-\\u16F8\\u1700-\\u170C\\u170E-\\u1714\\u1720-\\u1734\\u1740-\\u1753\\u1760-\\u176C\\u176E-\\u1770\\u1772\\u1773\\u1780-\\u17D3\\u17D7\\u17DC\\u17DD\\u17E0-\\u17E9\\u180B-\\u180D\\u1810-\\u1819\\u1820-\\u1877\\u1880-\\u18AA\\u18B0-\\u18F5\\u1900-\\u191E\\u1920-\\u192B\\u1930-\\u193B\\u1946-\\u196D\\u1970-\\u1974\\u1980-\\u19AB\\u19B0-\\u19C9\\u19D0-\\u19D9\\u1A00-\\u1A1B\\u1A20-\\u1A5E\\u1A60-\\u1A7C\\u1A7F-\\u1A89\\u1A90-\\u1A99\\u1AA7\\u1AB0-\\u1ABD\\u1B00-\\u1B4B\\u1B50-\\u1B59\\u1B6B-\\u1B73\\u1B80-\\u1BF3\\u1C00-\\u1C37\\u1C40-\\u1C49\\u1C4D-\\u1C7D\\u1CD0-\\u1CD2\\u1CD4-\\u1CF6\\u1CF8\\u1CF9\\u1D00-\\u1DF5\\u1DFC-\\u1F15\\u1F18-\\u1F1D\\u1F20-\\u1F45\\u1F48-\\u1F4D\\u1F50-\\u1F57\\u1F59\\u1F5B\\u1F5D\\u1F5F-\\u1F7D\\u1F80-\\u1FB4\\u1FB6-\\u1FBC\\u1FBE\\u1FC2-\\u1FC4\\u1FC6-\\u1FCC\\u1FD0-\\u1FD3\\u1FD6-\\u1FDB\\u1FE0-\\u1FEC\\u1FF2-\\u1FF4\\u1FF6-\\u1FFC\\u200C\\u200D\\u203F\\u2040\\u2054\\u2071\\u207F\\u2090-\\u209C\\u20D0-\\u20DC\\u20E1\\u20E5-\\u20F0\\u2102\\u2107\\u210A-\\u2113\\u2115\\u2119-\\u211D\\u2124\\u2126\\u2128\\u212A-\\u212D\\u212F-\\u2139\\u213C-\\u213F\\u2145-\\u2149\\u214E\\u2160-\\u2188\\u2C00-\\u2C2E\\u2C30-\\u2C5E\\u2C60-\\u2CE4\\u2CEB-\\u2CF3\\u2D00-\\u2D25\\u2D27\\u2D2D\\u2D30-\\u2D67\\u2D6F\\u2D7F-\\u2D96\\u2DA0-\\u2DA6\\u2DA8-\\u2DAE\\u2DB0-\\u2DB6\\u2DB8-\\u2DBE\\u2DC0-\\u2DC6\\u2DC8-\\u2DCE\\u2DD0-\\u2DD6\\u2DD8-\\u2DDE\\u2DE0-\\u2DFF\\u2E2F\\u3005-\\u3007\\u3021-\\u302F\\u3031-\\u3035\\u3038-\\u303C\\u3041-\\u3096\\u3099\\u309A\\u309D-\\u309F\\u30A1-\\u30FA\\u30FC-\\u30FF\\u3105-\\u312D\\u3131-\\u318E\\u31A0-\\u31BA\\u31F0-\\u31FF\\u3400-\\u4DB5\\u4E00-\\u9FCC\\uA000-\\uA48C\\uA4D0-\\uA4FD\\uA500-\\uA60C\\uA610-\\uA62B\\uA640-\\uA66F\\uA674-\\uA67D\\uA67F-\\uA69D\\uA69F-\\uA6F1\\uA717-\\uA71F\\uA722-\\uA788\\uA78B-\\uA78E\\uA790-\\uA7AD\\uA7B0\\uA7B1\\uA7F7-\\uA827\\uA840-\\uA873\\uA880-\\uA8C4\\uA8D0-\\uA8D9\\uA8E0-\\uA8F7\\uA8FB\\uA900-\\uA92D\\uA930-\\uA953\\uA960-\\uA97C\\uA980-\\uA9C0\\uA9CF-\\uA9D9\\uA9E0-\\uA9FE\\uAA00-\\uAA36\\uAA40-\\uAA4D\\uAA50-\\uAA59\\uAA60-\\uAA76\\uAA7A-\\uAAC2\\uAADB-\\uAADD\\uAAE0-\\uAAEF\\uAAF2-\\uAAF6\\uAB01-\\uAB06\\uAB09-\\uAB0E\\uAB11-\\uAB16\\uAB20-\\uAB26\\uAB28-\\uAB2E\\uAB30-\\uAB5A\\uAB5C-\\uAB5F\\uAB64\\uAB65\\uABC0-\\uABEA\\uABEC\\uABED\\uABF0-\\uABF9\\uAC00-\\uD7A3\\uD7B0-\\uD7C6\\uD7CB-\\uD7FB\\uF900-\\uFA6D\\uFA70-\\uFAD9\\uFB00-\\uFB06\\uFB13-\\uFB17\\uFB1D-\\uFB28\\uFB2A-\\uFB36\\uFB38-\\uFB3C\\uFB3E\\uFB40\\uFB41\\uFB43\\uFB44\\uFB46-\\uFBB1\\uFBD3-\\uFD3D\\uFD50-\\uFD8F\\uFD92-\\uFDC7\\uFDF0-\\uFDFB\\uFE00-\\uFE0F\\uFE20-\\uFE2D\\uFE33\\uFE34\\uFE4D-\\uFE4F\\uFE70-\\uFE74\\uFE76-\\uFEFC\\uFF10-\\uFF19\\uFF21-\\uFF3A\\uFF3F\\uFF41-\\uFF5A\\uFF66-\\uFFBE\\uFFC2-\\uFFC7\\uFFCA-\\uFFCF\\uFFD2-\\uFFD7\\uFFDA-\\uFFDC]"); // Ensure the condition is true, otherwise throw an error.
|
|
// This is only to have a better contract semantic, i.e. another safety net
// to catch a logic error. The condition shall be fulfilled in normal case.
// Do NOT use this to enforce a certain condition on any user input.
//   condition - any value; a falsy value indicates an internal logic error.
//   message   - text appended to the thrown Error's message.
function assert(condition, message) {
  /* istanbul ignore next */
  if (!condition) {
    throw new Error('ASSERT: ' + message);
  }
}
|
|
|
|
// True when the character code is an ASCII digit '0'..'9'.
function isDecimalDigit(ch) {
  return 0x30 <= ch && ch <= 0x39; // 0..9
}

// True when the single character `ch` is a hexadecimal digit.
function isHexDigit(ch) {
  return '0123456789abcdefABCDEF'.indexOf(ch) !== -1;
}

// True when the single character `ch` is an octal digit.
function isOctalDigit(ch) {
  return '01234567'.indexOf(ch) !== -1;
} // 7.2 White Space

// True when the character code is a white-space character (the common
// ASCII cases are checked first; rarer Unicode spaces via the list).
function isWhiteSpace(ch) {
  if (ch === 0x20 || ch === 0x09 || ch === 0x0B || ch === 0x0C || ch === 0xA0) {
    return true;
  }
  return ch >= 0x1680 && [0x1680, 0x180E, 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x202F, 0x205F, 0x3000, 0xFEFF].indexOf(ch) >= 0;
} // 7.3 Line Terminators

// True when the character code is a line terminator (LF, CR, LS, PS).
function isLineTerminator(ch) {
  switch (ch) {
    case 0x0A:
    case 0x0D:
    case 0x2028:
    case 0x2029:
      return true;
    default:
      return false;
  }
} // 7.6 Identifier Names and Identifiers
|
|
|
|
|
|
// True when the character code may begin an identifier: '$', '_',
// ASCII letters, a backslash (start of a \u escape), or a non-ASCII
// character accepted by RegexNonAsciiIdentifierStart.
function isIdentifierStart(ch) {
  if (ch === 0x24 || ch === 0x5F || ch === 0x5C) { // $ _ and \ (escape marker)
    return true;
  }
  if ((ch >= 0x41 && ch <= 0x5A) || (ch >= 0x61 && ch <= 0x7A)) { // A..Z a..z
    return true;
  }
  return ch >= 0x80 && RegexNonAsciiIdentifierStart.test(String.fromCharCode(ch));
}

// True when the character code may continue an identifier: everything
// isIdentifierStart accepts plus the decimal digits, with non-ASCII
// characters checked against RegexNonAsciiIdentifierPart.
function isIdentifierPart(ch) {
  if (ch === 0x24 || ch === 0x5F || ch === 0x5C) { // $ _ and \ (escape marker)
    return true;
  }
  if ((ch >= 0x41 && ch <= 0x5A) || (ch >= 0x61 && ch <= 0x7A)) { // A..Z a..z
    return true;
  }
  if (ch >= 0x30 && ch <= 0x39) { // 0..9
    return true;
  }
  return ch >= 0x80 && RegexNonAsciiIdentifierPart.test(String.fromCharCode(ch));
} // 7.6.1.1 Keywords
|
|
|
|
|
|
// Reserved words treated as TokenKeyword by scanIdentifier (ES keywords
// plus future reserved words). Used as a plain-object set; the value 1
// is only a truthy placeholder.
var keywords = {
  'if': 1,
  'in': 1,
  'do': 1,
  'var': 1,
  'for': 1,
  'new': 1,
  'try': 1,
  'let': 1,
  'this': 1,
  'else': 1,
  'case': 1,
  'void': 1,
  'with': 1,
  'enum': 1,
  'while': 1,
  'break': 1,
  'catch': 1,
  'throw': 1,
  'const': 1,
  'yield': 1,
  'class': 1,
  'super': 1,
  'return': 1,
  'typeof': 1,
  'delete': 1,
  'switch': 1,
  'export': 1,
  'import': 1,
  'public': 1,
  'static': 1,
  'default': 1,
  'finally': 1,
  'extends': 1,
  'package': 1,
  'private': 1,
  'function': 1,
  'continue': 1,
  'debugger': 1,
  'interface': 1,
  'protected': 1,
  'instanceof': 1,
  'implements': 1
};
|
|
|
|
// Advance the cursor past white space and line terminators. (Despite
// the name, no comment syntax is handled here — the loop stops at the
// first character that is neither white space nor a line terminator.)
function skipComment() {
  for (; index < length; ++index) {
    var ch = source.charCodeAt(index);
    if (!isWhiteSpace(ch) && !isLineTerminator(ch)) {
      break;
    }
  }
}
|
|
|
|
// Decode a fixed-width hex escape at the cursor: four digits for a
// '\u' prefix, two for '\x'. Raises ILLEGAL when the input runs out or
// a non-hex character is found. Returns the decoded character.
function scanHexEscape(prefix) {
  var digits = prefix === 'u' ? 4 : 2;
  var code = 0;

  for (var i = 0; i < digits; ++i) {
    if (index >= length || !isHexDigit(source[index])) {
      throwError({}, MessageUnexpectedToken, ILLEGAL$1);
    }
    var ch = source[index++];
    code = code * 16 + '0123456789abcdef'.indexOf(ch.toLowerCase());
  }

  return String.fromCharCode(code);
}
|
|
|
|
// Decode a '\u{...}' code point escape; the cursor sits just past '{'.
// Returns the decoded character (one or two UTF-16 code units).
// Raises ILLEGAL for an empty body, a missing '}', or a value above
// 0x10FFFF.
function scanUnicodeCodePointEscape() {
  var ch, code, cu1, cu2;
  ch = source[index];
  code = 0; // At least, one hex digit is required.

  if (ch === '}') {
    throwError({}, MessageUnexpectedToken, ILLEGAL$1);
  }

  while (index < length) {
    ch = source[index++];

    if (!isHexDigit(ch)) {
      break;
    }

    code = code * 16 + '0123456789abcdef'.indexOf(ch.toLowerCase());
  }

  // The loop stops on the first non-hex character, which must be '}'.
  if (code > 0x10FFFF || ch !== '}') {
    throwError({}, MessageUnexpectedToken, ILLEGAL$1);
  } // UTF-16 Encoding

  if (code <= 0xFFFF) {
    return String.fromCharCode(code);
  }

  // Encode an astral code point as a surrogate pair.
  cu1 = (code - 0x10000 >> 10) + 0xD800;
  cu2 = (code - 0x10000 & 1023) + 0xDC00;
  return String.fromCharCode(cu1, cu2);
}
|
|
|
|
// Scan an identifier that may contain '\uXXXX' escapes, decoding each
// escape into the returned identifier text. Raises ILLEGAL when an
// escape is malformed or decodes to a character not valid at that
// position in an identifier.
function getEscapedIdentifier() {
  var ch, id;
  ch = source.charCodeAt(index++);
  id = String.fromCharCode(ch); // '\u' (U+005C, U+0075) denotes an escaped character.

  if (ch === 0x5C) {
    if (source.charCodeAt(index) !== 0x75) {
      throwError({}, MessageUnexpectedToken, ILLEGAL$1);
    }

    ++index;
    ch = scanHexEscape('u');

    if (!ch || ch === '\\' || !isIdentifierStart(ch.charCodeAt(0))) {
      throwError({}, MessageUnexpectedToken, ILLEGAL$1);
    }

    id = ch;
  }

  while (index < length) {
    ch = source.charCodeAt(index);

    if (!isIdentifierPart(ch)) {
      break;
    }

    ++index;
    id += String.fromCharCode(ch); // '\u' (U+005C, U+0075) denotes an escaped character.

    if (ch === 0x5C) {
      // Drop the backslash just appended; the decoded escape replaces it.
      id = id.substr(0, id.length - 1);

      if (source.charCodeAt(index) !== 0x75) {
        throwError({}, MessageUnexpectedToken, ILLEGAL$1);
      }

      ++index;
      ch = scanHexEscape('u');

      if (!ch || ch === '\\' || !isIdentifierPart(ch.charCodeAt(0))) {
        throwError({}, MessageUnexpectedToken, ILLEGAL$1);
      }

      id += ch;
    }
  }

  return id;
}
|
|
|
|
// Scan an identifier with the fast path that assumes no escapes.
// If a backslash (U+005C, start of a Unicode escape) is encountered,
// rewind and delegate to the slower escape-aware scanner.
function getIdentifier() {
  var start = index++;

  while (index < length) {
    var ch = source.charCodeAt(index);

    if (ch === 0x5C) {
      index = start;
      return getEscapedIdentifier();
    }

    if (!isIdentifierPart(ch)) {
      break;
    }

    ++index;
  }

  return source.slice(start, index);
}
|
|
|
|
// Scan an identifier-like token and classify it as an identifier,
// keyword, null literal, or boolean literal.
function scanIdentifier() {
  var start = index; // Backslash (U+005C) starts an escaped character.

  var id = source.charCodeAt(index) === 0x5C ? getEscapedIdentifier() : getIdentifier();
  var type;

  if (id.length === 1) {
    // There is no keyword or literal with only one character,
    // so it must be an identifier.
    type = TokenIdentifier;
  } else if (keywords.hasOwnProperty(id)) { // eslint-disable-line no-prototype-builtins
    type = TokenKeyword;
  } else {
    switch (id) {
      case 'null':
        type = TokenNullLiteral;
        break;
      case 'true':
      case 'false':
        type = TokenBooleanLiteral;
        break;
      default:
        type = TokenIdentifier;
    }
  }

  return {
    type: type,
    value: id,
    start: start,
    end: index
  };
} // 7.7 Punctuators
|
|
|
|
|
|
// Scan a punctuator token at the cursor. Candidates are tried longest
// match last-resort first: common single characters, two-character
// '...=' forms (including '===' / '!=='), the 4-character '>>>=',
// 3-character, other 2-character, and finally the remaining single
// characters. Raises ILLEGAL when nothing matches.
function scanPunctuator() {
  var start = index,
      code = source.charCodeAt(index),
      code2,
      ch1 = source[index],
      ch2,
      ch3,
      ch4;

  switch (code) {
    // Check for most common single-character punctuators.
    case 0x2E: // . dot
    case 0x28: // ( open bracket
    case 0x29: // ) close bracket
    case 0x3B: // ; semicolon
    case 0x2C: // , comma
    case 0x7B: // { open curly brace
    case 0x7D: // } close curly brace
    case 0x5B: // [
    case 0x5D: // ]
    case 0x3A: // :
    case 0x3F: // ?
    case 0x7E: // ~
      ++index;
      return {
        type: TokenPunctuator,
        value: String.fromCharCode(code),
        start: start,
        end: index
      };

    default:
      code2 = source.charCodeAt(index + 1); // '=' (U+003D) marks an assignment or comparison operator.

      if (code2 === 0x3D) {
        switch (code) {
          case 0x2B: // +
          case 0x2D: // -
          case 0x2F: // /
          case 0x3C: // <
          case 0x3E: // >
          case 0x5E: // ^
          case 0x7C: // |
          case 0x25: // %
          case 0x26: // &
          case 0x2A: // *
            index += 2;
            return {
              type: TokenPunctuator,
              value: String.fromCharCode(code) + String.fromCharCode(code2),
              start: start,
              end: index
            };

          case 0x21: // !
          case 0x3D: // =
            index += 2; // !== and ===

            if (source.charCodeAt(index) === 0x3D) {
              ++index;
            }

            return {
              type: TokenPunctuator,
              value: source.slice(start, index),
              start: start,
              end: index
            };
        }
      }

  } // 4-character punctuator: >>>=

  ch4 = source.substr(index, 4);

  if (ch4 === '>>>=') {
    index += 4;
    return {
      type: TokenPunctuator,
      value: ch4,
      start: start,
      end: index
    };
  } // 3-character punctuators: === !== >>> <<= >>=

  ch3 = ch4.substr(0, 3);

  if (ch3 === '>>>' || ch3 === '<<=' || ch3 === '>>=') {
    index += 3;
    return {
      type: TokenPunctuator,
      value: ch3,
      start: start,
      end: index
    };
  } // Other 2-character punctuators: ++ -- << >> && ||

  ch2 = ch3.substr(0, 2);

  if (ch1 === ch2[1] && '+-<>&|'.indexOf(ch1) >= 0 || ch2 === '=>') {
    index += 2;
    return {
      type: TokenPunctuator,
      value: ch2,
      start: start,
      end: index
    };
  } // 1-character punctuators: < > = ! + - * % & | ^ /

  if ('<>=!+-*%&|^/'.indexOf(ch1) >= 0) {
    ++index;
    return {
      type: TokenPunctuator,
      value: ch1,
      start: start,
      end: index
    };
  }

  throwError({}, MessageUnexpectedToken, ILLEGAL$1);
} // 7.8.3 Numeric Literals
|
|
|
|
|
|
// Scan the digits of a hex literal; the caller has already consumed
// the '0x' / '0X' prefix. Raises ILLEGAL when no digits follow or the
// literal runs straight into an identifier (e.g. '0x1z').
function scanHexLiteral(start) {
  var digits = '';

  while (index < length && isHexDigit(source[index])) {
    digits += source[index++];
  }

  if (digits.length === 0) {
    throwError({}, MessageUnexpectedToken, ILLEGAL$1);
  }

  if (isIdentifierStart(source.charCodeAt(index))) {
    throwError({}, MessageUnexpectedToken, ILLEGAL$1);
  }

  return {
    type: TokenNumericLiteral,
    value: parseInt('0x' + digits, 16),
    start: start,
    end: index
  };
}
|
|
|
|
// Scan a legacy octal literal; the caller has already consumed the
// leading '0' and left the cursor on the first octal digit. The token
// is flagged `octal: true` for later strict-mode checks. Raises
// ILLEGAL when the literal runs into an identifier or decimal digit.
function scanOctalLiteral(start) {
  var digits = '0' + source[index++];

  while (index < length && isOctalDigit(source[index])) {
    digits += source[index++];
  }

  if (isIdentifierStart(source.charCodeAt(index)) || isDecimalDigit(source.charCodeAt(index))) {
    throwError({}, MessageUnexpectedToken, ILLEGAL$1);
  }

  return {
    type: TokenNumericLiteral,
    value: parseInt(digits, 8),
    octal: true,
    start: start,
    end: index
  };
}
|
|
|
|
// Scan a numeric literal: decimal (integer, fraction, exponent), hex
// via scanHexLiteral, or legacy octal via scanOctalLiteral. The cursor
// must sit on a decimal digit or '.'. Raises ILLEGAL for malformed
// numbers such as '09', '1e', or a number followed directly by an
// identifier character ('3a').
function scanNumericLiteral() {
  var number, start, ch;
  ch = source[index];
  assert(isDecimalDigit(ch.charCodeAt(0)) || ch === '.', 'Numeric literal must start with a decimal digit or a decimal point');
  start = index;
  number = '';

  if (ch !== '.') {
    number = source[index++];
    ch = source[index]; // Hex number starts with '0x'.
    // Octal number starts with '0'.

    if (number === '0') {
      if (ch === 'x' || ch === 'X') {
        ++index;
        return scanHexLiteral(start);
      }

      if (isOctalDigit(ch)) {
        return scanOctalLiteral(start);
      } // decimal number starts with '0' such as '09' is illegal.

      if (ch && isDecimalDigit(ch.charCodeAt(0))) {
        throwError({}, MessageUnexpectedToken, ILLEGAL$1);
      }
    }

    // Remaining digits of the integer part.
    while (isDecimalDigit(source.charCodeAt(index))) {
      number += source[index++];
    }

    ch = source[index];
  }

  // Optional fraction part.
  if (ch === '.') {
    number += source[index++];

    while (isDecimalDigit(source.charCodeAt(index))) {
      number += source[index++];
    }

    ch = source[index];
  }

  // Optional exponent; an optional sign must be followed by >= 1 digit.
  if (ch === 'e' || ch === 'E') {
    number += source[index++];
    ch = source[index];

    if (ch === '+' || ch === '-') {
      number += source[index++];
    }

    if (isDecimalDigit(source.charCodeAt(index))) {
      while (isDecimalDigit(source.charCodeAt(index))) {
        number += source[index++];
      }
    } else {
      throwError({}, MessageUnexpectedToken, ILLEGAL$1);
    }
  }

  if (isIdentifierStart(source.charCodeAt(index))) {
    throwError({}, MessageUnexpectedToken, ILLEGAL$1);
  }

  return {
    type: TokenNumericLiteral,
    value: parseFloat(number),
    start: start,
    end: index
  };
} // 7.8.4 String Literals
|
|
|
|
|
|
// Scan a single- or double-quoted string literal at the cursor.
// Handles the standard escapes (\n \r \t \b \f \v), hex escapes
// (\xHH, \uHHHH), code point escapes (\u{...}), legacy octal escapes
// (setting the `octal` flag for later strict-mode checks), and line
// continuations (backslash followed by a line terminator). Raises
// ILLEGAL for an unterminated string.
function scanStringLiteral() {
  var str = '',
      quote,
      start,
      ch,
      code,
      octal = false;
  quote = source[index];
  assert(quote === '\'' || quote === '"', 'String literal must starts with a quote');
  start = index;
  ++index;

  while (index < length) {
    ch = source[index++];

    if (ch === quote) {
      // Clearing `quote` records that the literal was properly closed.
      quote = '';
      break;
    } else if (ch === '\\') {
      ch = source[index++];

      if (!ch || !isLineTerminator(ch.charCodeAt(0))) {
        switch (ch) {
          case 'u':
          case 'x':
            if (source[index] === '{') {
              ++index;
              str += scanUnicodeCodePointEscape();
            } else {
              str += scanHexEscape(ch);
            }

            break;

          case 'n':
            str += '\n';
            break;

          case 'r':
            str += '\r';
            break;

          case 't':
            str += '\t';
            break;

          case 'b':
            str += '\b';
            break;

          case 'f':
            str += '\f';
            break;

          case 'v':
            str += '\x0B';
            break;

          default:
            if (isOctalDigit(ch)) {
              code = '01234567'.indexOf(ch); // \0 is not octal escape sequence

              if (code !== 0) {
                octal = true;
              }

              if (index < length && isOctalDigit(source[index])) {
                octal = true;
                code = code * 8 + '01234567'.indexOf(source[index++]); // 3 digits are only allowed when string starts
                // with 0, 1, 2, 3

                if ('0123'.indexOf(ch) >= 0 && index < length && isOctalDigit(source[index])) {
                  code = code * 8 + '01234567'.indexOf(source[index++]);
                }
              }

              str += String.fromCharCode(code);
            } else {
              // Unknown escape: keep the character, drop the backslash.
              str += ch;
            }

            break;
        }
      } else {
        // Line continuation: swallow the terminator (CRLF counts as one).
        if (ch === '\r' && source[index] === '\n') {
          ++index;
        }
      }
    } else if (isLineTerminator(ch.charCodeAt(0))) {
      // Bare line terminator inside the literal: unterminated string.
      break;
    } else {
      str += ch;
    }
  }

  // A non-empty `quote` here means the input ended before the close quote.
  if (quote !== '') {
    throwError({}, MessageUnexpectedToken, ILLEGAL$1);
  }

  return {
    type: TokenStringLiteral,
    value: str,
    octal: octal,
    start: start,
    end: index
  };
}
|
|
|
|
// Validate a scanned regular-expression pattern/flags pair. Raises
// MessageInvalidRegExp when the pattern is invalid. Returns a RegExp
// object, or `null` when the current environment does not support the
// requested flags.
function testRegExp(pattern, flags) {
  var tmp = pattern;

  if (flags.indexOf('u') >= 0) {
    // Replace each astral symbol and every Unicode code point
    // escape sequence with a single ASCII symbol to avoid throwing on
    // regular expressions that are only valid in combination with the
    // `/u` flag.
    // Note: replacing with the ASCII symbol `x` might cause false
    // negatives in unlikely scenarios. For example, `[\u{61}-b]` is a
    // perfectly valid pattern that is equivalent to `[a-b]`, but it
    // would be replaced by `[x-b]` which throws an error.
    tmp = tmp.replace(/\\u\{([0-9a-fA-F]+)\}/g, function ($0, $1) {
      if (parseInt($1, 16) <= 0x10FFFF) {
        return 'x';
      }

      throwError({}, MessageInvalidRegExp);
    }).replace(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g, 'x');
  } // First, detect invalid regular expressions.

  try {
    new RegExp(tmp);
  } catch (e) {
    throwError({}, MessageInvalidRegExp);
  } // Return a regular expression object for this pattern-flag pair, or
  // `null` in case the current environment doesn't support the flags it
  // uses.

  try {
    return new RegExp(pattern, flags);
  } catch (exception) {
    return null;
  }
}
|
|
|
|
// Scan the body of a regular-expression literal, from the opening '/'
// through the closing '/'. Tracks character classes so a '/' inside
// '[...]' does not terminate the literal, and rejects line terminators
// anywhere in the body. Returns both the raw text (`literal`, slashes
// included) and the bare pattern (`value`).
function scanRegExpBody() {
  var ch, str, classMarker, terminated, body;
  ch = source[index];
  assert(ch === '/', 'Regular expression literal must start with a slash');
  str = source[index++];
  classMarker = false;
  terminated = false;

  while (index < length) {
    ch = source[index++];
    str += ch;

    if (ch === '\\') {
      ch = source[index++]; // ECMA-262 7.8.5

      // An escaped line terminator is never legal in a regexp literal.
      if (isLineTerminator(ch.charCodeAt(0))) {
        throwError({}, MessageUnterminatedRegExp);
      }

      str += ch;
    } else if (isLineTerminator(ch.charCodeAt(0))) {
      throwError({}, MessageUnterminatedRegExp);
    } else if (classMarker) {
      if (ch === ']') {
        classMarker = false;
      }
    } else {
      if (ch === '/') {
        terminated = true;
        break;
      } else if (ch === '[') {
        classMarker = true;
      }
    }
  }

  if (!terminated) {
    throwError({}, MessageUnterminatedRegExp);
  } // Exclude leading and trailing slash.

  body = str.substr(1, str.length - 2);
  return {
    value: body,
    literal: str
  };
}
|
|
|
|
// Scan the flag characters that follow a regular-expression body.
// Only 'g', 'i', 'm', 'u', 'y' are accepted; any other flag character
// (or an escape) raises an error. Returns the flags and the raw text.
function scanRegExpFlags() {
  var str = '';
  var flags = '';

  while (index < length) {
    var ch = source[index];

    if (!isIdentifierPart(ch.charCodeAt(0))) {
      break;
    }

    ++index;

    if (ch === '\\' && index < length) {
      // Escape sequences are never valid in the flag position.
      throwError({}, MessageUnexpectedToken, ILLEGAL$1);
    }

    flags += ch;
    str += ch;
  }

  if (flags.search(/[^gimuy]/g) >= 0) {
    throwError({}, MessageInvalidRegExp, flags);
  }

  return {
    value: flags,
    literal: str
  };
}
|
|
|
|
// Scan a complete regular-expression literal (body plus flags) and
// validate it via testRegExp. Regexps cannot go through the normal
// lookahead machinery ('/' is ambiguous without parser context), so
// the cached lookahead token is discarded first.
function scanRegExp() {
  lookahead = null;
  skipComment();

  var start = index;
  var body = scanRegExpBody();
  var flags = scanRegExpFlags();
  var value = testRegExp(body.value, flags.value);

  return {
    literal: body.literal + flags.literal,
    value: value,
    regex: {
      pattern: body.value,
      flags: flags.value
    },
    start: start,
    end: index
  };
}
|
|
|
|
// True when the token may serve as a property name after '.':
// identifiers, keywords, and the boolean/null literals all qualify
// (e.g. `foo.if` or `foo.null`).
function isIdentifierName(token) {
  switch (token.type) {
    case TokenIdentifier:
    case TokenKeyword:
    case TokenBooleanLiteral:
    case TokenNullLiteral:
      return true;
    default:
      return false;
  }
}
|
|
|
|
// Scan and return the next token from the cursor, dispatching on the
// first character: identifiers/keywords, punctuators, string literals,
// or numeric literals. Returns an EOF token at the end of input.
// (Regular expressions are handled separately by scanRegExp, since '/'
// is ambiguous without parser context.)
function advance() {
  var ch;
  skipComment();

  if (index >= length) {
    return {
      type: TokenEOF,
      start: index,
      end: index
    };
  }

  ch = source.charCodeAt(index);

  if (isIdentifierStart(ch)) {
    return scanIdentifier();
  } // Very common: ( and ) and ;

  if (ch === 0x28 || ch === 0x29 || ch === 0x3B) {
    return scanPunctuator();
  } // String literal starts with single quote (U+0027) or double quote (U+0022).

  if (ch === 0x27 || ch === 0x22) {
    return scanStringLiteral();
  } // Dot (.) U+002E can also start a floating-point number, hence the need
  // to check the next character.

  if (ch === 0x2E) {
    if (isDecimalDigit(source.charCodeAt(index + 1))) {
      return scanNumericLiteral();
    }

    return scanPunctuator();
  }

  if (isDecimalDigit(ch)) {
    return scanNumericLiteral();
  }

  return scanPunctuator();
}
|
|
|
|
// Consume and return the buffered lookahead token, refilling
// `lookahead` with the following token. The cursor is restored to the
// end of the returned token both before and after scanning, so callers
// observe `index` at the consumed token's end.
function lex() {
  var token;
  token = lookahead;
  index = token.end;
  lookahead = advance();
  index = token.end;
  return token;
}

// Fill `lookahead` with the next token without consuming any input:
// the cursor is saved before scanning and restored afterwards.
function peek() {
  var pos;
  pos = index;
  lookahead = advance();
  index = pos;
}
|
|
|
|
// ---- AST node constructors -------------------------------------------
// Each finish* helper allocates an ASTNode of the appropriate syntax
// type and fills in the fields expected for that node kind.

function finishArrayExpression(elements) {
  var n = new ASTNode(SyntaxArrayExpression);
  n.elements = elements;
  return n;
}

function finishBinaryExpression(operator, left, right) {
  // '&&' and '||' yield LogicalExpression nodes; every other operator
  // yields a BinaryExpression, mirroring the ESTree distinction.
  var kind = operator === '||' || operator === '&&' ? SyntaxLogicalExpression : SyntaxBinaryExpression;
  var n = new ASTNode(kind);
  n.operator = operator;
  n.left = left;
  n.right = right;
  return n;
}

function finishCallExpression(callee, args) {
  var n = new ASTNode(SyntaxCallExpression);
  n.callee = callee;
  n.arguments = args;
  return n;
}

function finishConditionalExpression(test, consequent, alternate) {
  var n = new ASTNode(SyntaxConditionalExpression);
  n.test = test;
  n.consequent = consequent;
  n.alternate = alternate;
  return n;
}

function finishIdentifier(name) {
  var n = new ASTNode(SyntaxIdentifier);
  n.name = name;
  return n;
}

function finishLiteral(token) {
  var n = new ASTNode(SyntaxLiteral);
  n.value = token.value;
  n.raw = source.slice(token.start, token.end);

  if (token.regex) {
    // An empty regex scans as '//'; render it as the empty pattern.
    if (n.raw === '//') {
      n.raw = '/(?:)/';
    }

    n.regex = token.regex;
  }

  return n;
}

function finishMemberExpression(accessor, object, property) {
  var n = new ASTNode(SyntaxMemberExpression);
  n.computed = accessor === '[';
  n.object = object;
  n.property = property;

  // Mark non-computed properties so later passes can tell `a.b`
  // apart from a free-standing identifier.
  if (!n.computed) {
    property.member = true;
  }

  return n;
}

function finishObjectExpression(properties) {
  var n = new ASTNode(SyntaxObjectExpression);
  n.properties = properties;
  return n;
}

function finishProperty(kind, key, value) {
  var n = new ASTNode(SyntaxProperty);
  n.key = key;
  n.value = value;
  n.kind = kind;
  return n;
}

function finishUnaryExpression(operator, argument) {
  var n = new ASTNode(SyntaxUnaryExpression);
  n.operator = operator;
  n.argument = argument;
  n.prefix = true;
  return n;
} // Throw an exception
|
|
|
|
|
|
// Build and throw an Error whose message is `messageFormat` with %0,
// %1, ... replaced by the remaining arguments. The thrown error also
// carries `index` (the current scanner position) and `description`.
// Note: the `token` argument is currently unused.
function throwError(token, messageFormat) {
  var error,
      args = Array.prototype.slice.call(arguments, 2),
      msg = messageFormat.replace(/%(\d)/g, function (whole, index) {
        // `index` here is the capture group (a digit string), shadowing
        // the module-level scanner cursor.
        assert(index < args.length, 'Message reference must be in range');
        return args[index];
      });
  error = new Error(msg);
  error.index = index;
  error.description = msg;
  throw error;
} // Throw an exception because of the token.
|
|
|
|
|
|
/**
 * Throw the parse error appropriate for an unexpected token.
 * Always throws (throwError never returns).
 */
function throwUnexpected(token) {
  switch (token.type) {
    case TokenEOF:
      throwError(token, MessageUnexpectedEOS);

    case TokenNumericLiteral:
      throwError(token, MessageUnexpectedNumber);

    case TokenStringLiteral:
      throwError(token, MessageUnexpectedString);

    case TokenIdentifier:
      throwError(token, MessageUnexpectedIdentifier);

    case TokenKeyword:
      throwError(token, MessageUnexpectedReserved);
  }

  // BooleanLiteral, NullLiteral, or Punctuator.
  throwError(token, MessageUnexpectedToken, token.value);
} // Expect the next token to match the specified punctuator.
|
|
// If not, an exception will be thrown.
|
|
|
|
|
|
// Consume the next token and verify it is the given punctuator;
// otherwise report it as unexpected.
function expect(value) {
  var token = lex();
  var matches = token.type === TokenPunctuator && token.value === value;

  if (!matches) {
    throwUnexpected(token);
  }
} // Return true if the next token matches the specified punctuator.
|
|
|
|
|
|
// Peek: is the lookahead token exactly the given punctuator?
function match(value) {
  if (lookahead.type !== TokenPunctuator) return false;
  return lookahead.value === value;
} // Return true if the next token matches the specified keyword
|
|
|
|
|
|
// Peek: is the lookahead token exactly the given keyword?
function matchKeyword(keyword) {
  if (lookahead.type !== TokenKeyword) return false;
  return lookahead.value === keyword;
} // 11.1.4 Array Initialiser
|
|
|
|
|
|
// Parse `[ ... ]`. Elisions (consecutive commas) produce null elements,
// mirroring JavaScript array-literal hole semantics.
function parseArrayInitialiser() {
  var elements = [];
  index = lookahead.start;
  expect('[');

  while (!match(']')) {
    if (match(',')) {
      lex(); // elision: consume the comma and record a hole
      elements.push(null);
    } else {
      elements.push(parseConditionalExpression());

      if (!match(']')) {
        expect(',');
      }
    }
  }

  lex(); // consume the closing ']'
  return finishArrayExpression(elements);
} // 11.1.5 Object Initialiser
|
|
|
|
|
|
// Parse an object property key: a string/numeric literal or an
// identifier-like token.
function parseObjectPropertyKey() {
  var token;
  index = lookahead.start;
  token = lex(); // Note: This function is called only from parseObjectProperty(), where
  // EOF and Punctuator tokens are already filtered out.

  if (token.type === TokenStringLiteral || token.type === TokenNumericLiteral) {
    if (token.octal) {
      // Legacy octal literals are rejected (strict-mode semantics).
      throwError(token, MessageStrictOctalLiteral);
    }

    return finishLiteral(token);
  }

  return finishIdentifier(token.value);
}
|
|
|
|
// Parse a single `key: value` entry inside an object initialiser.
function parseObjectProperty() {
  var token, key, id, value;
  index = lookahead.start;
  token = lookahead;

  if (token.type === TokenIdentifier) {
    // Identifier key: `name: expr`.
    id = parseObjectPropertyKey();
    expect(':');
    value = parseConditionalExpression();
    return finishProperty('init', id, value);
  }

  if (token.type === TokenEOF || token.type === TokenPunctuator) {
    throwUnexpected(token);
  } else {
    // String/numeric-literal (or keyword-like) key.
    key = parseObjectPropertyKey();
    expect(':');
    value = parseConditionalExpression();
    return finishProperty('init', key, value);
  }
}
|
|
|
|
// Parse `{ ... }`. Duplicate property names are rejected (strict-mode
// semantics); seen keys are tracked in `map` with a '$' prefix to avoid
// collisions with Object.prototype members.
function parseObjectInitialiser() {
  var properties = [],
      property,
      name,
      key,
      map = {},
      toString = String;
  index = lookahead.start;
  expect('{');

  while (!match('}')) {
    property = parseObjectProperty();

    if (property.key.type === SyntaxIdentifier) {
      name = property.key.name;
    } else {
      // Literal key: normalise to its string form for duplicate detection.
      name = toString(property.key.value);
    }

    key = '$' + name; // prefix dodges inherited property names

    if (Object.prototype.hasOwnProperty.call(map, key)) {
      throwError({}, MessageStrictDuplicateProperty);
    } else {
      map[key] = true;
    }

    properties.push(property);

    if (!match('}')) {
      expect(',');
    }
  }

  expect('}');
  return finishObjectExpression(properties);
} // 11.1.6 The Grouping Operator
|
|
|
|
|
|
// Parse a parenthesized expression `( expr )`. Grouping produces no extra
// AST node; the inner expression is returned as-is.
function parseGroupExpression() {
  expect('(');
  var inner = parseExpression();
  expect(')');
  return inner;
} // 11.1 Primary Expressions
|
|
|
|
|
|
// Keywords that may still be used as plain identifiers in expressions.
var legalKeywords = {
  'if': 1
};
|
|
|
|
// Parse a primary expression: a group, array or object initialiser, an
// identifier, or a literal (string/number/boolean/null/regex). Keywords are
// disabled except those whitelisted in `legalKeywords`.
function parsePrimaryExpression() {
  var type, token, expr;

  if (match('(')) {
    return parseGroupExpression();
  }

  if (match('[')) {
    return parseArrayInitialiser();
  }

  if (match('{')) {
    return parseObjectInitialiser();
  }

  type = lookahead.type;
  index = lookahead.start;

  if (type === TokenIdentifier || legalKeywords[lookahead.value]) {
    expr = finishIdentifier(lex().value);
  } else if (type === TokenStringLiteral || type === TokenNumericLiteral) {
    if (lookahead.octal) {
      // Legacy octal literals are rejected (strict-mode semantics).
      throwError(lookahead, MessageStrictOctalLiteral);
    }

    expr = finishLiteral(lex());
  } else if (type === TokenKeyword) {
    // Non-whitelisted keywords are not allowed in expressions.
    throw new Error(DISABLED);
  } else if (type === TokenBooleanLiteral) {
    token = lex();
    token.value = token.value === 'true'; // coerce token text to a boolean
    expr = finishLiteral(token);
  } else if (type === TokenNullLiteral) {
    token = lex();
    token.value = null;
    expr = finishLiteral(token);
  } else if (match('/') || match('/=')) {
    // A slash in primary position must start a regex literal; re-prime the
    // lookahead after the manual scan.
    expr = finishLiteral(scanRegExp());
    peek();
  } else {
    throwUnexpected(lex());
  }

  return expr;
} // 11.2 Left-Hand-Side Expressions
|
|
|
|
|
|
// Parse a parenthesized, comma-separated argument list for a call.
function parseArguments() {
  var args = [];
  expect('(');

  if (!match(')')) {
    // Bounded by source length to guarantee termination on malformed input.
    while (index < length) {
      args.push(parseConditionalExpression());

      if (match(')')) {
        break;
      }

      expect(',');
    }
  }

  expect(')');
  return args;
}
|
|
|
|
// Parse the property name after `.`; any identifier-like token (including
// keywords) is allowed as a member name.
function parseNonComputedProperty() {
  var token;
  index = lookahead.start;
  token = lex();

  if (!isIdentifierName(token)) {
    throwUnexpected(token);
  }

  return finishIdentifier(token.value);
}
|
|
|
|
// Parse a `.name` member access (the object part is parsed by the caller).
function parseNonComputedMember() {
  expect('.');
  var prop = parseNonComputedProperty();
  return prop;
}
|
|
|
|
// Parse an `[expr]` computed member access (the object part is parsed by
// the caller).
function parseComputedMember() {
  expect('[');
  var inner = parseExpression();
  expect(']');
  return inner;
}
|
|
|
|
// Parse a primary expression followed by any chain of member accesses
// (`.name`, `[expr]`) and call argument lists, left-associatively.
function parseLeftHandSideExpressionAllowCall() {
  var expr, args, property;
  expr = parsePrimaryExpression();

  for (;;) {
    if (match('.')) {
      property = parseNonComputedMember();
      expr = finishMemberExpression('.', expr, property);
    } else if (match('(')) {
      args = parseArguments();
      expr = finishCallExpression(expr, args);
    } else if (match('[')) {
      property = parseComputedMember();
      expr = finishMemberExpression('[', expr, property);
    } else {
      break;
    }
  }

  return expr;
} // 11.3 Postfix Expressions
|
|
|
|
|
|
// Parse a postfix expression; the ++/-- postfix operators are disabled.
function parsePostfixExpression() {
  var expr = parseLeftHandSideExpressionAllowCall();
  var isPunctuator = lookahead.type === TokenPunctuator;

  if (isPunctuator && (match('++') || match('--'))) {
    throw new Error(DISABLED);
  }

  return expr;
} // 11.4 Unary Operators
|
|
|
|
|
|
// Parse a unary expression. Only +, -, ~ and ! are supported; the ++/--
// prefix operators and delete/void/typeof are disabled.
function parseUnaryExpression() {
  var token, expr;

  if (lookahead.type !== TokenPunctuator && lookahead.type !== TokenKeyword) {
    expr = parsePostfixExpression();
  } else if (match('++') || match('--')) {
    throw new Error(DISABLED);
  } else if (match('+') || match('-') || match('~') || match('!')) {
    token = lex();
    expr = parseUnaryExpression(); // unary operators are right-associative
    expr = finishUnaryExpression(token.value, expr);
  } else if (matchKeyword('delete') || matchKeyword('void') || matchKeyword('typeof')) {
    throw new Error(DISABLED);
  } else {
    expr = parsePostfixExpression();
  }

  return expr;
}
|
|
|
|
/**
 * Return the binding precedence of a binary/logical operator token.
 * Higher numbers bind tighter; 0 means "not a binary operator here".
 * (Note the deliberate gap at 10: unary precedence sits between additive
 * and multiplicative in the original table.)
 */
function binaryPrecedence(token) {
  // Only punctuators and keywords can act as binary operators.
  if (token.type !== TokenPunctuator && token.type !== TokenKeyword) {
    return 0;
  }

  var table = {
    '||': 1,
    '&&': 2,
    '|': 3,
    '^': 4,
    '&': 5,
    '==': 6, '!=': 6, '===': 6, '!==': 6,
    '<': 7, '>': 7, '<=': 7, '>=': 7, 'instanceof': 7, 'in': 7,
    '<<': 8, '>>': 8, '>>>': 8,
    '+': 9, '-': 9,
    '*': 11, '/': 11, '%': 11
  };

  // Own-property guard so inherited names (e.g. 'toString') never match.
  return Object.prototype.hasOwnProperty.call(table, token.value) ? table[token.value] : 0;
} // 11.5 Multiplicative Operators
|
|
// 11.6 Additive Operators
|
|
// 11.7 Bitwise Shift Operators
|
|
// 11.8 Relational Operators
|
|
// 11.9 Equality Operators
|
|
// 11.10 Binary Bitwise Operators
|
|
// 11.11 Binary Logical Operators
|
|
|
|
|
|
/**
 * Parse a binary/logical expression using operator-precedence
 * (shift/reduce) parsing over an explicit stack.
 *
 * Fix: removed the dead `marker`/`markers` bookkeeping — a stack of token
 * positions that was pushed and popped in lock-step with `stack` but whose
 * values were never read (a leftover of source-location tracking stripped
 * from this build).
 */
function parseBinaryExpression() {
  var expr, token, prec, stack, right, operator, left, i;
  left = parseUnaryExpression();
  token = lookahead;
  prec = binaryPrecedence(token);

  if (prec === 0) {
    // Not followed by a binary operator: the unary expression stands alone.
    return left;
  }

  token.prec = prec;
  lex();
  right = parseUnaryExpression();
  stack = [left, token, right];

  while ((prec = binaryPrecedence(lookahead)) > 0) {
    // Reduce: make a binary expression from the three topmost entries
    // while the incoming operator binds no tighter than the stacked one.
    while (stack.length > 2 && prec <= stack[stack.length - 2].prec) {
      right = stack.pop();
      operator = stack.pop().value;
      left = stack.pop();
      expr = finishBinaryExpression(operator, left, right);
      stack.push(expr);
    }

    // Shift: push the operator and its right operand.
    token = lex();
    token.prec = prec;
    stack.push(token);
    expr = parseUnaryExpression();
    stack.push(expr);
  }

  // Final reduce to clean-up the stack.
  i = stack.length - 1;
  expr = stack[i];

  while (i > 1) {
    expr = finishBinaryExpression(stack[i - 1].value, stack[i - 2], expr);
    i -= 2;
  }

  return expr;
} // 11.12 Conditional Operator
|
|
|
|
|
|
// Parse a (possibly ternary) conditional expression `test ? a : b`.
// The alternate branch recurses, so `?:` is right-associative.
function parseConditionalExpression() {
  var expr, consequent, alternate;
  expr = parseBinaryExpression();

  if (match('?')) {
    lex(); // consume '?'
    consequent = parseConditionalExpression();
    expect(':');
    alternate = parseConditionalExpression();
    expr = finishConditionalExpression(expr, consequent, alternate);
  }

  return expr;
} // 11.14 Comma Operator
|
|
|
|
|
|
// Parse a full expression. The comma (sequence) operator is deliberately
// unsupported.
function parseExpression() {
  var result = parseConditionalExpression();

  if (match(',')) {
    throw new Error(DISABLED); // no sequence expressions
  }

  return result;
}
|
|
|
|
/**
 * Parse an expression string into an AST.
 * Resets the module-level scanner state (`source`, `index`, `length`,
 * `lookahead`), parses a single expression, and requires the entire input
 * to be consumed.
 *
 * Fix: the trailing-input error message previously read "Unexpect token".
 */
function parse(code) {
  source = code;
  index = 0;
  length = source.length;
  lookahead = null;
  peek(); // prime the lookahead token
  var expr = parseExpression();

  if (lookahead.type !== TokenEOF) {
    throw new Error('Unexpected token after expression.');
  }

  return expr;
}
|
|
|
|
// Flatten an AST node into its chain of name parts: an Identifier yields
// its name, a Literal its value, and a MemberExpression the concatenation
// of its object and property chains. Anything else yields no parts.
function getName(node) {
  if (node.type === 'Identifier') {
    return [node.name];
  }

  if (node.type === 'Literal') {
    return [node.value];
  }

  if (node.type === 'MemberExpression') {
    return getName(node.object).concat(getName(node.property));
  }

  return [];
}
|
|
|
|
// True when a MemberExpression chain is ultimately rooted at the `datum`
// identifier (e.g. datum.a.b).
function startsWithDatum(node) {
  var root = node.object;

  while (root.type === 'MemberExpression') {
    root = root.object;
  }

  return root.name === 'datum';
}
|
|
|
|
// Collect the set of datum field paths an expression string reads,
// e.g. "datum.a.b > 0" contributes "a.b".
function getDependentFields(expression) {
  var ast = parse(expression);
  var dependents = new Set();
  ast.visit(function (node) {
    if (node.type === 'MemberExpression' && startsWithDatum(node)) {
      // slice(1) drops the leading 'datum' part of the name chain.
      dependents.add(getName(node).slice(1).join('.'));
    }
  });
  return dependents;
}
|
|
|
|
/**
 * Data-flow node that filters a data source by a Vega-Lite predicate.
 * The predicate is compiled to a Vega expression at construction time and
 * its dependent datum fields are extracted for data-flow optimization.
 * (Transpiled ES5 class; extends DataFlowNode.)
 */
var FilterNode = /*#__PURE__*/function (_DataFlowNode3) {
  _inherits(FilterNode, _DataFlowNode3);

  var _super5 = _createSuper(FilterNode);

  function FilterNode(parent, model, filter) {
    var _this10;

    _classCallCheck(this, FilterNode);

    _this10 = _super5.call(this, parent);
    _this10.model = model;
    _this10.filter = filter; // TODO: refactor this to not take a node and
    // then add a static function makeFromOperand and make the constructor take only an expression

    _this10.expr = expression(_this10.model, _this10.filter, _assertThisInitialized(_this10));
    _this10._dependentFields = getDependentFields(_this10.expr);
    return _this10;
  }

  _createClass(FilterNode, [{
    key: "clone",
    value: function clone() {
      // Clones are detached (no parent) and deep-copy the filter spec.
      return new FilterNode(null, this.model, duplicate(this.filter));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      // Fields read by the compiled filter expression.
      return this._dependentFields;
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(); // filter does not produce any new fields
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Emit the corresponding Vega filter transform.
      return {
        type: 'filter',
        expr: this.expr
      };
    }
  }, {
    key: "hash",
    value: function hash() {
      // Identity for data-flow deduplication.
      return "Filter ".concat(this.expr);
    }
  }]);

  return FilterNode;
}(DataFlowNode);
|
|
|
|
// Parse a unit spec's selection definitions into selection components,
// filling in defaults from `config.selection` and letting each selection
// transform compiler run its own `parse` step.
function parseUnitSelection(model, selDefs) {
  var _a;

  var selCmpts = {};
  var selectionConfig = model.config.selection;

  var _iterator65 = _createForOfIteratorHelper(keys(selDefs !== null && selDefs !== void 0 ? selDefs : {})),
      _step65;

  try {
    var _loop6 = function _loop6() {
      var name = _step65.value;
      var selDef = duplicate(selDefs[name]);

      var _b = selectionConfig[selDef.type],
          cfg = __rest(_b, ["fields", "encodings"]); // Project transform applies its defaults.
      // Set default values from config if a property hasn't been specified,
      // or if it is true. E.g., "translate": true should use the default
      // event handlers for translate. However, true may be a valid value for
      // a property (e.g., "nearest": true).


      for (var key in cfg) {
        // A selection should contain either `encodings` or `fields`, only use
        // default values for these two values if neither of them is specified.
        if (key === 'encodings' && selDef.fields || key === 'fields' && selDef.encodings) {
          continue;
        }

        if (key === 'mark') {
          // Merge mark config under the user-specified mark properties.
          selDef[key] = Object.assign(Object.assign({}, cfg[key]), selDef[key]);
        }

        if (selDef[key] === undefined || selDef[key] === true) {
          selDef[key] = (_a = cfg[key]) !== null && _a !== void 0 ? _a : selDef[key];
        }
      }

      var safeName = varName(name);
      var selCmpt = selCmpts[safeName] = Object.assign(Object.assign({}, selDef), {
        name: safeName,
        // String `on` specs are parsed as scoped Vega event selectors.
        events: isString(selDef.on) ? parseSelector(selDef.on, 'scope') : duplicate(selDef.on)
      });
      forEachTransform(selCmpt, function (txCompiler) {
        if (txCompiler.has(selCmpt) && txCompiler.parse) {
          txCompiler.parse(model, selCmpt, selDef, selDefs[name]);
        }
      });
    };

    for (_iterator65.s(); !(_step65 = _iterator65.n()).done;) {
      _loop6();
    }
  } catch (err) {
    _iterator65.e(err);
  } finally {
    _iterator65.f();
  }

  return selCmpts;
}
|
|
|
|
// Compile a selection predicate into a Vega expression string that tests
// datum membership via vlSelectionTest(<store>, datum[, resolve]). The
// leading `!(length(data(...)))` clause makes empty selections (when
// `empty` !== 'none') match everything.
function parseSelectionPredicate(model, selections, dfnode) {
  var datum = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 'datum';
  var stores = [];

  function expr(name) {
    var vname = varName(name);
    var selCmpt = model.getSelectionComponent(vname, name);
    var store = $(vname + STORE);

    if (selCmpt.project.timeUnit) {
      // Insert the selection's timeUnit transform into the data flow so the
      // fields being tested actually exist on the datum.
      var child = dfnode !== null && dfnode !== void 0 ? dfnode : model.component.data.raw;
      var tunode = selCmpt.project.timeUnit.clone();

      if (child.parent) {
        tunode.insertAsParentOf(child);
      } else {
        child.parent = tunode;
      }
    }

    if (selCmpt.empty !== 'none') {
      stores.push(store);
    }

    return "vlSelectionTest(".concat(store, ", ").concat(datum) + (selCmpt.resolve === 'global' ? ')' : ", ".concat($(selCmpt.resolve), ")"));
  }

  var predicateStr = logicalExpr(selections, expr);
  return (stores.length ? '!(' + stores.map(function (s) {
    return "length(data(".concat(s, "))");
  }).join(' || ') + ') || ' : '') + "(".concat(predicateStr, ")");
}
|
|
|
|
// Resolve which projected field of a selection a scale-domain extent refers
// to (by explicit field, by encoding channel, or defaulting to the first
// projection with a warning) and return the access expression
// `<selection name>[<field>]`.
function parseSelectionBinExtent(selCmpt, extent) {
  var encoding = extent['encoding'];
  var field = extent['field'];

  if (!encoding && !field) {
    // Neither given: fall back to the first projected field.
    field = selCmpt.project.items[0].field;

    if (selCmpt.project.items.length > 1) {
      warn('A "field" or "encoding" must be specified when using a selection as a scale domain. ' + "Using \"field\": ".concat($(field), "."));
    }
  } else if (encoding && !field) {
    // Encoding given: find the projection(s) on that channel.
    var encodings = selCmpt.project.items.filter(function (p) {
      return p.channel === encoding;
    });

    if (!encodings.length || encodings.length > 1) {
      field = selCmpt.project.items[0].field;
      warn((!encodings.length ? 'No ' : 'Multiple ') + "matching ".concat($(encoding), " encoding found for selection ").concat($(extent.selection), ". ") + "Using \"field\": ".concat($(field), "."));
    } else {
      field = encodings[0].field;
    }
  }

  return "".concat(selCmpt.name, "[").concat($(field), "]");
}
|
|
|
|
// Materialize each selection into a named "lookup" output data source (a
// FilterNode wrapped in an OutputNode) so later transforms can join
// against the selected tuples.
function materializeSelections(model, main) {
  forEachSelection(model, function (selCmpt) {
    var selection = selCmpt.name;
    var lookupName = model.getName("lookup_".concat(selection));
    model.component.data.outputNodes[lookupName] = selCmpt.materialized = new OutputNode(new FilterNode(main, model, {
      selection: selection
    }), lookupName, DataSourceType.Lookup, model.component.data.outputNodeRefCounts);
  });
}
|
|
/**
|
|
* Converts a predicate into an expression.
|
|
*/
|
|
// model is only used for selection filters.
|
|
|
|
|
|
// Convert a logical predicate composition into a Vega expression string.
// `model` is only needed for selection predicates; `node` is the data-flow
// node the filter is attached to.
function expression(model, filterOp, node) {
  return logicalExpr(filterOp, function (predicate) {
    // Raw expression strings pass straight through.
    if (isString(predicate)) {
      return predicate;
    }

    // Selection predicates compile to vlSelectionTest(...) calls.
    if (isSelectionPredicate(predicate)) {
      return parseSelectionPredicate(model, predicate.selection, node);
    }

    // Otherwise this is a field predicate (Filter Object).
    return fieldFilterExpression(predicate);
  });
}
|
|
|
|
// Assemble a guide title: an array of field definitions is joined into one
// comma-separated default-title string; plain text (or text arrays) and
// falsy titles pass through unchanged.
function assembleTitle(title, config) {
  if (!title) {
    return undefined;
  }

  // Text values (including text arrays) are returned as-is.
  if (!isArray(title) || isText(title)) {
    return title;
  }

  return title.map(function (fieldDef) {
    return defaultTitle(fieldDef, config);
  }).join(', ');
}
|
|
|
|
// Set axis.encode[part].update[vgProp] = vgRef, creating any missing
// intermediate objects (only when they are null/undefined).
function setAxisEncode(axis, part, vgProp, vgRef) {
  if (axis.encode == null) axis.encode = {};
  if (axis.encode[part] == null) axis.encode[part] = {};
  if (axis.encode[part].update == null) axis.encode[part].update = {}; // TODO: remove as any after https://github.com/prisma/nexus-prisma/issues/291

  axis.encode[part].update[vgProp] = vgRef;
}
|
|
|
|
/**
 * Assemble a Vega axis spec from an axis component.
 * `kind` is 'grid' (grid lines only, drawn behind marks) or 'main' (the
 * axis line/ticks/labels/title). Returns undefined when the axis (or its
 * grid) is disabled or not needed.
 */
function assembleAxis(axisCmpt, kind, config) {
  var opt = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {
    header: false
  };

  var _a, _b;

  var _c = axisCmpt.combine(),
      disable = _c.disable,
      orient = _c.orient,
      scale = _c.scale,
      labelExpr = _c.labelExpr,
      title = _c.title,
      zindex = _c.zindex,
      axis = __rest(_c, ["disable", "orient", "scale", "labelExpr", "title", "zindex"]);

  if (disable) {
    return undefined;
  }

  for (var prop in axis) {
    var propType = AXIS_PROPERTY_TYPE[prop];
    var propValue = axis[prop];

    if (propType && propType !== kind && propType !== 'both') {
      // Remove properties that are not valid for this kind of axis
      delete axis[prop];
    } else if (isConditionalAxisValue(propValue)) {
      // deal with conditional axis value
      var condition = propValue.condition,
          valueOrSignalRef = __rest(propValue, ["condition"]);

      var conditions = array(condition);
      var propIndex = CONDITIONAL_AXIS_PROP_INDEX[prop];

      if (propIndex) {
        var vgProp = propIndex.vgProp,
            part = propIndex.part; // If there is a corresponding Vega property for the channel,
        // use Vega's custom axis encoding and delete the original axis property to avoid conflicts

        var vgRef = [].concat(_toConsumableArray(conditions.map(function (c) {
          var test = c.test,
              valueOrSignalCRef = __rest(c, ["test"]);

          return Object.assign({
            test: expression(null, test)
          }, valueOrSignalCRef);
        })), [valueOrSignalRef]);
        setAxisEncode(axis, part, vgProp, vgRef);
        delete axis[prop];
      } else if (propIndex === null) {
        // If propIndex is null, this means we support conditional axis property by converting the condition to signal instead.
        var signalRef = {
          signal: conditions.map(function (c) {
            var test = c.test,
                valueOrSignalCRef = __rest(c, ["test"]);

            return "".concat(expression(null, test), " ? ").concat(exprFromValueOrSignalRef(valueOrSignalCRef), " : ");
          }).join('') + exprFromValueOrSignalRef(valueOrSignalRef)
        };
        axis[prop] = signalRef;
      }
    } else if (isSignalRef(propValue)) {
      var _propIndex = CONDITIONAL_AXIS_PROP_INDEX[prop];

      if (_propIndex) {
        var _vgProp = _propIndex.vgProp,
            _part = _propIndex.part;
        setAxisEncode(axis, _part, _vgProp, propValue);
        delete axis[prop];
      } // else do nothing since the property already supports signal

    }
  }

  if (kind === 'grid') {
    if (!axis.grid) {
      return undefined;
    } // Remove unnecessary encode block


    if (axis.encode) {
      // Only need to keep encode block for grid
      var grid = axis.encode.grid;
      axis.encode = Object.assign({}, grid ? {
        grid: grid
      } : {});

      if (isEmpty(axis.encode)) {
        delete axis.encode;
      }
    }

    return Object.assign(Object.assign({
      scale: scale,
      orient: orient
    }, axis), {
      // Disable everything except the grid lines themselves.
      domain: false,
      labels: false,
      aria: false,
      // Always set min/maxExtent to 0 to ensure that `config.axis*.minExtent` and `config.axis*.maxExtent`
      // would not affect gridAxis
      maxExtent: 0,
      minExtent: 0,
      ticks: false,
      zindex: getFirstDefined(zindex, 0) // put grid behind marks by default

    });
  } else {
    // kind === 'main'
    if (!opt.header && axisCmpt.mainExtracted) {
      // if mainExtracted has been extracted to a separate facet
      return undefined;
    }

    if (labelExpr !== undefined) {
      var expr = labelExpr;

      // If labels already have a text signal, substitute it for datum.label
      // inside the label expression.
      if (((_b = (_a = axis.encode) === null || _a === void 0 ? void 0 : _a.labels) === null || _b === void 0 ? void 0 : _b.update) && isSignalRef(axis.encode.labels.update.text)) {
        expr = replaceAll(labelExpr, 'datum.label', axis.encode.labels.update.text.signal);
      }

      setAxisEncode(axis, 'labels', 'text', {
        signal: expr
      });
    }

    if (axis.labelAlign === null) {
      // null means "use Vega's default alignment"
      delete axis.labelAlign;
    } // Remove unnecessary encode block


    if (axis.encode) {
      var _iterator66 = _createForOfIteratorHelper(AXIS_PARTS),
          _step66;

      try {
        for (_iterator66.s(); !(_step66 = _iterator66.n()).done;) {
          var _part2 = _step66.value;

          if (!axisCmpt.hasAxisPart(_part2)) {
            delete axis.encode[_part2];
          }
        }
      } catch (err) {
        _iterator66.e(err);
      } finally {
        _iterator66.f();
      }

      if (isEmpty(axis.encode)) {
        delete axis.encode;
      }
    }

    var titleString = assembleTitle(title, config);
    return Object.assign(Object.assign(Object.assign(Object.assign({
      scale: scale,
      orient: orient,
      grid: false
    }, titleString ? {
      title: titleString
    } : {}), axis), config.aria === false ? {
      aria: false
    } : {}), {
      zindex: getFirstDefined(zindex, 0) // put axis line above marks by default

    });
  }
}
|
|
/**
|
|
* Add axis signals so grid line works correctly
|
|
* (Fix https://github.com/vega/vega-lite/issues/4226)
|
|
*/
|
|
|
|
|
|
// Emit width/height signals so that an axis without a gridScale on the
// orthogonal channel can still draw grid lines at the right extent.
function assembleAxisSignals(model) {
  var axes = model.component.axes;
  var signals = [];

  var _iterator67 = _createForOfIteratorHelper(POSITION_SCALE_CHANNELS),
      _step67;

  try {
    for (_iterator67.s(); !(_step67 = _iterator67.n()).done;) {
      var channel = _step67.value;

      if (axes[channel]) {
        var _iterator68 = _createForOfIteratorHelper(axes[channel]),
            _step68;

        try {
          for (_iterator68.s(); !(_step68 = _iterator68.n()).done;) {
            var axis = _step68.value;

            if (!axis.get('disable') && !axis.get('gridScale')) {
              // If there is x-axis but no y-scale for gridScale, need to set height/width so x-axis can draw the grid with the right height. Same for y-axis and width.
              var sizeType = channel === 'x' ? 'height' : 'width';
              var update = model.getSizeSignalRef(sizeType).signal;

              // Skip the degenerate signal `height = height`.
              if (sizeType !== update) {
                signals.push({
                  name: sizeType,
                  update: update
                });
              }
            }
          }
        } catch (err) {
          _iterator68.e(err);
        } finally {
          _iterator68.f();
        }
      }
    }
  } catch (err) {
    _iterator67.e(err);
  } finally {
    _iterator67.f();
  }

  return signals;
}
|
|
|
|
// Assemble all Vega axes for the x and y components: grid axes first (so
// they render behind marks), then main axes, dropping any component that
// assembles to undefined.
function _assembleAxes(axisComponents, config) {
  var x = axisComponents.x === undefined ? [] : axisComponents.x;
  var y = axisComponents.y === undefined ? [] : axisComponents.y;

  var toGrid = function (a) {
    return assembleAxis(a, 'grid', config);
  };

  var toMain = function (a) {
    return assembleAxis(a, 'main', config);
  };

  return x.map(toGrid)
    .concat(y.map(toGrid), x.map(toMain), y.map(toMain))
    .filter(function (a) {
      return a;
    }); // filter undefined
}
|
|
|
|
// Mapping from Vega-Lite header `title*` properties to the corresponding
// Vega title properties.
var HEADER_TITLE_PROPERTIES_MAP = {
  titleAlign: 'align',
  titleAnchor: 'anchor',
  titleAngle: 'angle',
  titleBaseline: 'baseline',
  titleColor: 'color',
  titleFont: 'font',
  titleFontSize: 'fontSize',
  titleFontStyle: 'fontStyle',
  titleFontWeight: 'fontWeight',
  titleLimit: 'limit',
  titleLineHeight: 'lineHeight',
  titleOrient: 'orient',
  titlePadding: 'offset'
};
// Mapping from Vega-Lite header `label*` properties to the corresponding
// Vega label properties.
var HEADER_LABEL_PROPERTIES_MAP = {
  labelAlign: 'align',
  labelAnchor: 'anchor',
  labelAngle: 'angle',
  labelBaseline: 'baseline',
  labelColor: 'color',
  labelFont: 'font',
  labelFontSize: 'fontSize',
  labelFontStyle: 'fontStyle',
  labelFontWeight: 'fontWeight',
  labelLimit: 'limit',
  labelLineHeight: 'lineHeight',
  labelOrient: 'orient',
  labelPadding: 'offset'
};
// Property-name lists derived from the maps above.
var HEADER_TITLE_PROPERTIES = keys(HEADER_TITLE_PROPERTIES_MAP);
var HEADER_LABEL_PROPERTIES = keys(HEADER_LABEL_PROPERTIES_MAP);
|
|
|
|
// Merge the listed axis config objects into one. The pseudo config type
// 'axisOrient' produces, per property, a signal expression that selects
// between the two orient-specific configs at runtime.
function getAxisConfigFromConfigTypes(configTypes, config, channel, orient) {
  // TODO: add special casing to add conditional value based on orient signal
  return Object.assign.apply(null, [{}].concat(_toConsumableArray(configTypes.map(function (configType) {
    if (configType === 'axisOrient') {
      var orient1 = channel === 'x' ? 'bottom' : 'left';
      var orientConfig1 = config[channel === 'x' ? 'axisBottom' : 'axisLeft'] || {};
      var orientConfig2 = config[channel === 'x' ? 'axisTop' : 'axisRight'] || {};
      // Union of property names appearing in either orient config.
      var props = new Set([].concat(_toConsumableArray(keys(orientConfig1)), _toConsumableArray(keys(orientConfig2))));
      var conditionalOrientAxisConfig = {};

      var _iterator69 = _createForOfIteratorHelper(props.values()),
          _step69;

      try {
        for (_iterator69.s(); !(_step69 = _iterator69.n()).done;) {
          var prop = _step69.value;
          conditionalOrientAxisConfig[prop] = {
            // orient is surely signal in this case
            signal: "".concat(orient['signal'], " === \"").concat(orient1, "\" ? ").concat(signalOrStringValue(orientConfig1[prop]), " : ").concat(signalOrStringValue(orientConfig2[prop]))
          };
        }
      } catch (err) {
        _iterator69.e(err);
      } finally {
        _iterator69.f();
      }

      return conditionalOrientAxisConfig;
    }

    return config[configType];
  }))));
}
|
|
|
|
// Determine which axis config objects apply for a channel / scale type /
// orient combination, split into Vega-Lite-only configs, Vega configs, and
// the merged style-based config.
function getAxisConfigs(channel, scaleType, orient, config) {
  var typeBasedConfigTypes = scaleType === 'band' ? ['axisDiscrete', 'axisBand'] : scaleType === 'point' ? ['axisDiscrete', 'axisPoint'] : isQuantitative(scaleType) ? ['axisQuantitative'] : scaleType === 'time' || scaleType === 'utc' ? ['axisTemporal'] : [];
  var axisChannel = channel === 'x' ? 'axisX' : 'axisY';
  var axisOrient = isSignalRef(orient) ? 'axisOrient' : 'axis' + titleCase(orient); // axisTop, axisBottom, ...

  var vlOnlyConfigTypes = [].concat(typeBasedConfigTypes, _toConsumableArray(typeBasedConfigTypes.map(function (c) {
    return axisChannel + c.substr(4); // e.g. 'axisBand' -> 'axisXBand'
  })));
  var vgConfigTypes = ['axis', axisOrient, axisChannel];
  return {
    vlOnlyAxisConfig: getAxisConfigFromConfigTypes(vlOnlyConfigTypes, config, channel, orient),
    vgAxisConfig: getAxisConfigFromConfigTypes(vgConfigTypes, config, channel, orient),
    axisConfigStyle: getAxisConfigStyle([].concat(vgConfigTypes, _toConsumableArray(vlOnlyConfigTypes)), config)
  };
}
|
|
|
|
// Merge all style configs referenced (via a `style` property) by the given
// axis config types, in order, into a single object.
function getAxisConfigStyle(axisConfigTypes, config) {
  var _a;

  var toMerge = [{}];

  var _iterator70 = _createForOfIteratorHelper(axisConfigTypes),
      _step70;

  try {
    for (_iterator70.s(); !(_step70 = _iterator70.n()).done;) {
      var configType = _step70.value;
      // TODO: add special casing to add conditional value based on orient signal
      var style = (_a = config[configType]) === null || _a === void 0 ? void 0 : _a.style;

      if (style) {
        style = array(style); // normalise to an array of style names

        var _iterator71 = _createForOfIteratorHelper(style),
            _step71;

        try {
          for (_iterator71.s(); !(_step71 = _iterator71.n()).done;) {
            var s = _step71.value;
            toMerge.push(config.style[s]);
          }
        } catch (err) {
          _iterator71.e(err);
        } finally {
          _iterator71.f();
        }
      }
    }
  } catch (err) {
    _iterator70.e(err);
  } finally {
    _iterator70.f();
  }

  return Object.assign.apply(null, toMerge);
}
|
|
|
|
// Look up an axis property value and where it came from. Explicit style
// config wins; otherwise the first of vlOnlyAxisConfig, vgAxisConfig and
// axisConfigStyle that defines the property is used. Returns {} when the
// property is not configured anywhere.
function getAxisConfig(property, styleConfigIndex, style) {
  var axisConfigs = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};

  var _a;

  var styleConfig = getStyleConfig(property, style, styleConfigIndex);

  if (styleConfig !== undefined) {
    return {
      configFrom: 'style',
      configValue: styleConfig
    };
  }

  for (var _i9 = 0, _arr6 = ['vlOnlyAxisConfig', 'vgAxisConfig', 'axisConfigStyle']; _i9 < _arr6.length; _i9++) {
    var configFrom = _arr6[_i9];

    if (((_a = axisConfigs[configFrom]) === null || _a === void 0 ? void 0 : _a[property]) !== undefined) {
      return {
        configFrom: configFrom,
        configValue: axisConfigs[configFrom][property]
      };
    }
  }

  return {};
}
|
|
|
|
// Rule functions that compute each Vega axis property from the Vega-Lite
// axis definition, the model, and related context. Every rule receives a
// single bag of named arguments and returns the value for its property
// (or undefined to omit it).
var axisRules = {
  // Name of the scale backing this axis.
  scale: function scale(_ref53) {
    var model = _ref53.model,
        channel = _ref53.channel;
    return model.scaleName(channel);
  },
  // Label format, resolved through the shared guide-format logic.
  format: function format(_ref54) {
    var fieldOrDatumDef = _ref54.fieldOrDatumDef,
        config = _ref54.config,
        axis = _ref54.axis;
    var format = axis.format,
        formatType = axis.formatType;
    return guideFormat(fieldOrDatumDef, fieldOrDatumDef.type, format, formatType, config, true);
  },
  formatType: function formatType(_ref55) {
    var axis = _ref55.axis,
        fieldOrDatumDef = _ref55.fieldOrDatumDef,
        scaleType = _ref55.scaleType;
    var formatType = axis.formatType;
    return guideFormatType(formatType, fieldOrDatumDef, scaleType);
  },
  // Grid lines: never for already-binned fields; otherwise the explicit
  // axis.grid value, falling back to defaultGrid.
  grid: function grid(_ref56) {
    var fieldOrDatumDef = _ref56.fieldOrDatumDef,
        axis = _ref56.axis,
        scaleType = _ref56.scaleType;

    var _a;

    if (isFieldDef(fieldOrDatumDef) && isBinned(fieldOrDatumDef.bin)) {
      return false;
    } else {
      return (_a = axis.grid) !== null && _a !== void 0 ? _a : defaultGrid(scaleType, fieldOrDatumDef);
    }
  },
  gridScale: function gridScale(_ref57) {
    var model = _ref57.model,
        channel = _ref57.channel;
    return _gridScale(model, channel);
  },
  labelAlign: function labelAlign(_ref58) {
    var axis = _ref58.axis,
        labelAngle = _ref58.labelAngle,
        orient = _ref58.orient,
        channel = _ref58.channel;
    return axis.labelAlign || defaultLabelAlign(labelAngle, orient, channel);
  },
  // labelAngle arrives already resolved (see getLabelAngle); pass through.
  labelAngle: function labelAngle(_ref59) {
    var _labelAngle = _ref59.labelAngle;
    return _labelAngle;
  },
  labelBaseline: function labelBaseline(_ref60) {
    var axis = _ref60.axis,
        labelAngle = _ref60.labelAngle,
        orient = _ref60.orient,
        channel = _ref60.channel;
    return axis.labelBaseline || defaultLabelBaseline(labelAngle, orient, channel);
  },
  labelFlush: function labelFlush(_ref61) {
    var axis = _ref61.axis,
        fieldOrDatumDef = _ref61.fieldOrDatumDef,
        channel = _ref61.channel;

    var _a;

    return (_a = axis.labelFlush) !== null && _a !== void 0 ? _a : defaultLabelFlush(fieldOrDatumDef.type, channel);
  },
  labelOverlap: function labelOverlap(_ref62) {
    var axis = _ref62.axis,
        fieldOrDatumDef = _ref62.fieldOrDatumDef,
        scaleType = _ref62.scaleType;

    var _a;

    return (_a = axis.labelOverlap) !== null && _a !== void 0 ? _a : defaultLabelOverlap(fieldOrDatumDef.type, scaleType, isFieldDef(fieldOrDatumDef) && !!fieldOrDatumDef.timeUnit, isFieldDef(fieldOrDatumDef) ? fieldOrDatumDef.sort : undefined);
  },
  // we already calculate orient in parse
  orient: function orient(_ref63) {
    var _orient2 = _ref63.orient;
    return _orient2;
  },
  // Tick count: explicit axis.tickCount, else a default derived from the
  // axis' pixel size signal (width for x, height for y).
  tickCount: function tickCount(_ref64) {
    var channel = _ref64.channel,
        model = _ref64.model,
        axis = _ref64.axis,
        fieldOrDatumDef = _ref64.fieldOrDatumDef,
        scaleType = _ref64.scaleType;

    var _a;

    var sizeType = channel === 'x' ? 'width' : channel === 'y' ? 'height' : undefined;
    var size = sizeType ? model.getSizeSignalRef(sizeType) : undefined;
    return (_a = axis.tickCount) !== null && _a !== void 0 ? _a : defaultTickCount({
      fieldOrDatumDef: fieldOrDatumDef,
      scaleType: scaleType,
      size: size,
      values: axis.values
    });
  },
  // Axis title precedence: explicit axis.title, then field-def titles,
  // then a merge of the base field defs of the channel and its x2/y2 pair.
  title: function title(_ref65) {
    var axis = _ref65.axis,
        model = _ref65.model,
        channel = _ref65.channel;

    if (axis.title !== undefined) {
      return axis.title;
    }

    var fieldDefTitle = getFieldDefTitle(model, channel);

    if (fieldDefTitle !== undefined) {
      return fieldDefTitle;
    }

    var fieldDef = model.typedFieldDef(channel);
    var channel2 = channel === 'x' ? 'x2' : 'y2';
    var fieldDef2 = model.fieldDef(channel2); // If title not specified, store base parts of fieldDef (and fieldDef2 if exists)

    return mergeTitleFieldDefs(fieldDef ? [toFieldDefBase(fieldDef)] : [], isFieldDef(fieldDef2) ? [toFieldDefBase(fieldDef2)] : []);
  },
  values: function values(_ref66) {
    var axis = _ref66.axis,
        fieldOrDatumDef = _ref66.fieldOrDatumDef;
    return _values(axis, fieldOrDatumDef);
  },
  zindex: function zindex(_ref67) {
    var axis = _ref67.axis,
        fieldOrDatumDef = _ref67.fieldOrDatumDef,
        mark = _ref67.mark;

    var _a;

    return (_a = axis.zindex) !== null && _a !== void 0 ? _a : defaultZindex(mark, fieldOrDatumDef);
  }
}; // TODO: we need to refactor this method after we take care of config refactoring
|
|
|
|
/**
 * Default rule for whether a grid should be shown for a channel.
 * When `grid` is unspecified it defaults to true only for continuous
 * (non-discrete) scales over unbinned field definitions.
 */
function defaultGrid(scaleType, fieldDef) {
  if (hasDiscreteDomain(scaleType)) {
    return false;
  }
  return isFieldDef(fieldDef) && !isBinning(fieldDef === null || fieldDef === void 0 ? void 0 : fieldDef.bin);
}
|
|
|
|
// Scale used to position grid lines for `channel`: the scale of the opposite
// positional channel, or undefined when that scale does not exist.
function _gridScale(model, channel) {
  var oppositeChannel = channel === 'x' ? 'y' : 'x';
  var component = model.getScaleComponent(oppositeChannel);
  return component ? model.scaleName(oppositeChannel) : undefined;
}
|
|
|
|
/**
 * Label angle for an axis: the explicit axis value first, then the axis
 * config, then 270 for nominal/ordinal x-axes without a time unit;
 * otherwise undefined.
 */
function getLabelAngle(fieldOrDatumDef, axis, channel, styleConfig, axisConfigs) {
  var specified = axis === null || axis === void 0 ? void 0 : axis.labelAngle;
  if (specified !== undefined) {
    // Signal refs pass through untouched; plain numbers get normalized.
    return isSignalRef(specified) ? specified : normalizeAngle(specified);
  }
  // Fall back to the axis config value.
  var _getAxisConfig = getAxisConfig('labelAngle', styleConfig, axis === null || axis === void 0 ? void 0 : axis.style, axisConfigs),
      configAngle = _getAxisConfig.configValue;
  if (configAngle !== undefined) {
    return normalizeAngle(configAngle);
  }
  // Default: rotate labels for discrete (non-time) fields on the x-axis.
  if (channel === X && contains([NOMINAL, ORDINAL], fieldOrDatumDef.type) && !(isFieldDef(fieldOrDatumDef) && fieldOrDatumDef.timeUnit)) {
    return 270;
  }
  // No default otherwise.
  return undefined;
}
|
|
|
|
// Vega expression that maps a signal-valued angle onto the range [0, 360).
function normalizeAngleExpr(angle) {
  return "(((" + angle.signal + " % 360) + 360) % 360)";
}
|
|
|
|
/**
 * Default vertical baseline for axis labels, given the (normalized) label
 * angle, the axis orient, and the positional channel. Angle and orient may
 * each be a plain value or a signal ref; when either is a signal ref the
 * result is a Vega expression mirroring the static logic. Returns undefined
 * when the angle is unspecified.
 */
function defaultLabelBaseline(angle, orient, channel, alwaysIncludeMiddle) {
  if (angle !== undefined) {
    if (channel === 'x') {
      if (isSignalRef(angle)) {
        // Dynamic angle: emit an expression for the same decisions made below.
        var a = normalizeAngleExpr(angle);
        var orientIsTop = isSignalRef(orient) ? "(".concat(orient.signal, " === \"top\")") : orient === 'top';
        return {
          signal: "(45 < ".concat(a, " && ").concat(a, " < 135) || (225 < ").concat(a, " && ").concat(a, " < 315) ? \"middle\" :") + "(".concat(a, " <= 45 || 315 <= ").concat(a, ") === ").concat(orientIsTop, " ? \"bottom\" : \"top\"")
        };
      }

      // Near-vertical labels center on the tick.
      if (45 < angle && angle < 135 || 225 < angle && angle < 315) {
        return 'middle';
      }

      if (isSignalRef(orient)) {
        // Static angle, dynamic orient: only the top/bottom choice is deferred.
        var op = angle <= 45 || 315 <= angle ? '===' : '!==';
        return {
          signal: "".concat(orient.signal, " ").concat(op, " \"top\" ? \"bottom\" : \"top\"")
        };
      }

      return (angle <= 45 || 315 <= angle) === (orient === 'top') ? 'bottom' : 'top';
    } else {
      // y-axis (or other non-x positional channel).
      if (isSignalRef(angle)) {
        var _a2 = normalizeAngleExpr(angle);

        var orientIsLeft = isSignalRef(orient) ? "(".concat(orient.signal, " === \"left\")") : orient === 'left';
        var middle = alwaysIncludeMiddle ? '"middle"' : 'null';
        return {
          signal: "".concat(_a2, " <= 45 || 315 <= ").concat(_a2, " || (135 <= ").concat(_a2, " && ").concat(_a2, " <= 225) ? ").concat(middle, " : (45 <= ").concat(_a2, " && ").concat(_a2, " <= 135) === ").concat(orientIsLeft, " ? \"top\" : \"bottom\"")
        };
      }

      // Near-horizontal labels keep the default baseline ('middle' only when
      // the caller asked for it, e.g. facet headers).
      if (angle <= 45 || 315 <= angle || 135 <= angle && angle <= 225) {
        return alwaysIncludeMiddle ? 'middle' : null;
      }

      if (isSignalRef(orient)) {
        var _op = 45 <= angle && angle <= 135 ? '===' : '!==';

        return {
          signal: "".concat(orient.signal, " ").concat(_op, " \"left\" ? \"top\" : \"bottom\"")
        };
      }

      return (45 <= angle && angle <= 135) === (orient === 'left') ? 'top' : 'bottom';
    }
  }

  return undefined;
}
|
|
|
|
/**
 * Default horizontal alignment for axis labels, given the (normalized) label
 * angle, the axis orient, and the positional channel. Mirrors the static
 * logic as a Vega expression when angle and/or orient are signal refs.
 * Returns undefined when the angle is unspecified.
 */
function defaultLabelAlign(angle, orient, channel) {
  if (angle === undefined) {
    return undefined;
  }

  var isX = channel === 'x';
  // The reference frame is rotated by 90 degrees for y-axes.
  var startAngle = isX ? 0 : 90;
  var mainOrient = isX ? 'bottom' : 'left';

  if (isSignalRef(angle)) {
    // Dynamic angle: emit an expression for the same decisions made below.
    var a = normalizeAngleExpr(angle);
    var orientIsMain = isSignalRef(orient) ? "(".concat(orient.signal, " === \"").concat(mainOrient, "\")") : orient === mainOrient;
    return {
      signal: "(".concat(startAngle ? '(' + a + ' + 90)' : a, " % 180 === 0) ? ").concat(isX ? null : '"center"', " :") + "(".concat(startAngle, " < ").concat(a, " && ").concat(a, " < ").concat(180 + startAngle, ") === ").concat(orientIsMain, " ? \"left\" : \"right\"")
    };
  }

  if ((angle + startAngle) % 180 === 0) {
    // For bottom, use default label align so label flush still works
    return isX ? null : 'center';
  }

  if (isSignalRef(orient)) {
    // Static angle, dynamic orient: only the left/right choice is deferred.
    var op = startAngle < angle && angle < 180 + startAngle ? '===' : '!==';

    var _orientIsMain = "".concat(orient.signal, " ").concat(op, " \"").concat(mainOrient, "\"");

    return {
      signal: "".concat(_orientIsMain, " ? \"left\" : \"right\"")
    };
  }

  if ((startAngle < angle && angle < 180 + startAngle) === (orient === mainOrient)) {
    return 'left';
  }

  return 'right';
}
|
|
|
|
// Labels are flush with the axis ends by default only for continuous
// (quantitative/temporal) x-axes; otherwise the default is left unset.
function defaultLabelFlush(type, channel) {
  if (channel !== 'x') {
    return undefined;
  }
  return contains(['quantitative', 'temporal'], type) ? true : undefined;
}
|
|
|
|
// Default label-overlap strategy. Overlap removal is never applied to plain
// nominal/ordinal data (missing labels could not be inferred), except when
// the field has a time unit with a non-object sort. Log scales use the
// 'greedy' strategy; other eligible scales use plain true.
function defaultLabelOverlap(type, scaleType, hasTimeUnit, sort) {
  var isDiscreteType = type === 'nominal' || type === 'ordinal';
  if ((hasTimeUnit && !isObject(sort)) || !isDiscreteType) {
    return scaleType === 'log' ? 'greedy' : true;
  }
  return undefined;
}
|
|
|
|
// Default axis orientation: x-axes sit at the bottom, all others on the left.
function defaultOrient(channel) {
  if (channel === 'x') {
    return 'bottom';
  }
  return 'left';
}
|
|
|
|
/**
 * Default tick count for an axis. Undefined when explicit values are given,
 * for discrete domains, log scales, and coarse time units; otherwise a signal
 * derived from the axis' pixel size (denser for binned fields).
 */
function defaultTickCount(_ref68) {
  var fieldOrDatumDef = _ref68.fieldOrDatumDef,
      scaleType = _ref68.scaleType,
      size = _ref68.size,
      vals = _ref68.values;
  if (vals || hasDiscreteDomain(scaleType) || scaleType === 'log') {
    return undefined;
  }
  if (isFieldDef(fieldOrDatumDef)) {
    if (isBinning(fieldOrDatumDef.bin)) {
      // for binned data, we don't want more ticks than maxbins
      return {
        signal: "ceil(" + size.signal + "/10)"
      };
    }
    if (fieldOrDatumDef.timeUnit) {
      var normalized = normalizeTimeUnit(fieldOrDatumDef.timeUnit);
      var unit = normalized === null || normalized === void 0 ? void 0 : normalized.unit;
      // Coarse time units have few natural ticks; leave the count to Vega.
      if (contains(['month', 'hours', 'day', 'quarter'], unit)) {
        return undefined;
      }
    }
  }
  return {
    signal: "ceil(" + size.signal + "/40)"
  };
}
|
|
|
|
/**
 * Title derived from the field defs of a positional channel and its paired
 * x2/y2 channel. Truthy titles win (merged when both exist); a defined but
 * falsy title (e.g. null) is still returned so it can disable the title.
 * Returns undefined when neither field def specifies a title.
 */
function getFieldDefTitle(model, channel) {
  var secondaryChannel = channel === 'x' ? 'x2' : 'y2';
  var primaryDef = model.fieldDef(channel);
  var secondaryDef = model.fieldDef(secondaryChannel);
  var primaryTitle = primaryDef ? primaryDef.title : undefined;
  var secondaryTitle = secondaryDef ? secondaryDef.title : undefined;

  if (primaryTitle && secondaryTitle) {
    return mergeTitle(primaryTitle, secondaryTitle);
  }
  if (primaryTitle) {
    return primaryTitle;
  }
  if (secondaryTitle) {
    return secondaryTitle;
  }
  // falsy value to disable config
  if (primaryTitle !== undefined) {
    return primaryTitle;
  }
  // falsy value to disable config
  if (secondaryTitle !== undefined) {
    return secondaryTitle;
  }
  return undefined;
}
|
|
|
|
// Explicit axis tick/label values: arrays are resolved per the field type,
// signal refs pass through unchanged, anything else yields undefined.
function _values(axis, fieldOrDatumDef) {
  var explicit = axis.values;
  if (isArray(explicit)) {
    return valueArray(fieldOrDatumDef, explicit);
  }
  if (isSignalRef(explicit)) {
    return explicit;
  }
  return undefined;
}
|
|
|
|
// Draw axes above rect marks on discrete fields (zindex 1) so grid lines
// stay visible; otherwise keep them behind the marks (zindex 0).
function defaultZindex(mark, fieldDef) {
  return mark === 'rect' && isDiscrete(fieldDef) ? 1 : 0;
}
|
|
|
|
// Data-flow node for the `calculate` transform; assembles into a Vega
// `formula` transform. (Transpiled ES2015 class extending DataFlowNode.)
var CalculateNode = /*#__PURE__*/function (_DataFlowNode4) {
  _inherits(CalculateNode, _DataFlowNode4);

  var _super6 = _createSuper(CalculateNode);

  function CalculateNode(parent, transform) {
    var _this11;

    _classCallCheck(this, CalculateNode);

    _this11 = _super6.call(this, parent);
    _this11.transform = transform;
    // Fields the calculate expression reads; used by dataflow optimization.
    _this11._dependentFields = getDependentFields(_this11.transform.calculate);
    return _this11;
  }

  _createClass(CalculateNode, [{
    key: "clone",
    value: function clone() {
      // Detached copy (no parent) with a duplicated transform.
      return new CalculateNode(null, duplicate(this.transform));
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      // The single output field named by `as`.
      return new Set([this.transform.as]);
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      return this._dependentFields;
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Emit the equivalent Vega formula transform.
      return {
        type: 'formula',
        expr: this.transform.calculate,
        as: this.transform.as
      };
    }
  }, {
    key: "hash",
    value: function hash() {
      return "Calculate ".concat(_hash(this.transform));
    }
  }], [{
    key: "parseAllForSortIndex",
    // Static: append a CalculateNode per field def with an array sort,
    // producing a numeric sort-index field for each; returns the new chain tail.
    value: function parseAllForSortIndex(parent, model) {
      // get all the encoding with sort fields from model
      model.forEachFieldDef(function (fieldDef, channel) {
        if (!isScaleFieldDef(fieldDef)) {
          return;
        }

        if (isSortArray(fieldDef.sort)) {
          var _field8 = fieldDef.field,
              timeUnit = fieldDef.timeUnit;
          var sort = fieldDef.sort; // generate `datum["a"] === val0 ? 0 : datum["a"] === val1 ? 1 : ... : n` via FieldEqualPredicate

          var calculate = sort.map(function (sortValue, i) {
            return "".concat(fieldFilterExpression({
              field: _field8,
              timeUnit: timeUnit,
              equal: sortValue
            }), " ? ").concat(i, " : ");
          }).join('') + sort.length;
          parent = new CalculateNode(parent, {
            calculate: calculate,
            as: sortArrayIndexField(fieldDef, channel, {
              forAs: true
            })
          });
        }
      });
      return parent;
    }
  }]);

  return CalculateNode;
}(DataFlowNode);
|
|
|
|
// Field name used to store a field def's sort index for a channel,
// built with the channel as prefix and 'sort_index' as suffix.
function sortArrayIndexField(fieldDef, channel, opt) {
  var extra = opt !== null && opt !== void 0 ? opt : {};
  var options = Object.assign({
    prefix: channel,
    suffix: 'sort_index'
  }, extra);
  return _vgField(fieldDef, options);
}
|
|
/**
 * Get the header channel, which can differ from the facet channel when an
 * orient is specified or when the facet channel is `facet`.
 */
function getHeaderChannel(channel, orient) {
  if (contains(['top', 'bottom'], orient)) {
    return 'column';
  }
  if (contains(['left', 'right'], orient)) {
    return 'row';
  }
  // No overriding orient: 'row' stays a row header, everything else a column.
  return channel === 'row' ? 'row' : 'column';
}
|
|
|
|
// Look up a header property with precedence: the facet field def's own
// header, then the channel-specific header config, then the shared config.
function getHeaderProperty(prop, facetFieldDef, config, channel) {
  var channelConfig;
  if (channel === 'row') {
    channelConfig = config.headerRow;
  } else if (channel === 'column') {
    channelConfig = config.headerColumn;
  } else {
    channelConfig = config.headerFacet;
  }
  var ownHeader = (facetFieldDef === null || facetFieldDef === void 0 ? void 0 : facetFieldDef.header) || {};
  return getFirstDefined(ownHeader[prop], channelConfig[prop], config.header[prop]);
}
|
|
|
|
// Collect the requested header properties (each resolved via
// getHeaderProperty) into a plain object, skipping undefined values.
function getHeaderProperties(properties, facetFieldDef, config, channel) {
  var props = {};

  // Transpiled for...of loop over `properties`.
  var _iterator72 = _createForOfIteratorHelper(properties),
      _step72;

  try {
    for (_iterator72.s(); !(_step72 = _iterator72.n()).done;) {
      var prop = _step72.value;
      var value = getHeaderProperty(prop, facetFieldDef, config, channel);

      if (value !== undefined) {
        props[prop] = value;
      }
    }
  } catch (err) {
    _iterator72.e(err);
  } finally {
    _iterator72.f();
  }

  return props;
}
|
|
|
|
// Facet channels that can carry headers.
var HEADER_CHANNELS = ['row', 'column'];
// Positions a header group can be rendered at.
var HEADER_TYPES = ['header', 'footer'];
/**
 * Utility for generating row / column headers
 */
|
|
// TODO: rename to assembleHeaderTitleGroup
// Assemble the group mark carrying a facet channel's title.
function assembleTitleGroup(model, channel) {
  var layoutHeader = model.component.layoutHeaders[channel];
  var title = layoutHeader.title;
  var config = model.config ? model.config : undefined;
  var facetFieldDef = layoutHeader.facetFieldDef ? layoutHeader.facetFieldDef : undefined;

  var _getHeaderProperties = getHeaderProperties(['titleAnchor', 'titleAngle', 'titleOrient'], facetFieldDef, config, channel),
      titleAnchor = _getHeaderProperties.titleAnchor,
      rawTitleAngle = _getHeaderProperties.titleAngle,
      titleOrient = _getHeaderProperties.titleOrient;

  var headerChannel = getHeaderChannel(channel, titleOrient);
  var titleAngle = normalizeAngle(rawTitleAngle);
  // Row titles are rendered with a left orient; columns keep the default.
  var orientProps = channel === 'row' ? {
    orient: 'left'
  } : {};
  return {
    name: channel + "-title",
    type: 'group',
    role: headerChannel + "-title",
    title: Object.assign({
      text: title
    }, orientProps, {
      style: 'guide-title'
    }, defaultHeaderGuideBaseline(titleAngle, headerChannel), defaultHeaderGuideAlign(headerChannel, titleAngle, titleAnchor), assembleHeaderProperties(config, facetFieldDef, channel, HEADER_TITLE_PROPERTIES, HEADER_TITLE_PROPERTIES_MAP))
  };
}
|
|
|
|
// Horizontal alignment of a header guide. Explicit 'start'/'end' anchors
// map directly to left/right; the default ('middle') anchor falls back to
// the default label alignment for the channel.
function defaultHeaderGuideAlign(headerChannel, angle) {
  var anchor = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 'middle';
  if (anchor === 'start') {
    return {
      align: 'left'
    };
  }
  if (anchor === 'end') {
    return {
      align: 'right'
    };
  }
  var isRow = headerChannel === 'row';
  var align = defaultLabelAlign(angle, isRow ? 'left' : 'top', isRow ? 'y' : 'x');
  return align ? {
    align: align
  } : {};
}
|
|
|
|
// Baseline of a header guide, derived from the default label baseline with
// 'middle' always allowed; empty object when no baseline applies.
function defaultHeaderGuideBaseline(angle, channel) {
  var isRow = channel === 'row';
  var baseline = defaultLabelBaseline(angle, isRow ? 'left' : 'top', isRow ? 'y' : 'x', true);
  if (baseline) {
    return {
      baseline: baseline
    };
  }
  return {};
}
|
|
|
|
// Assemble every header and footer group mark for a facet channel.
function assembleHeaderGroups(model, channel) {
  var layoutHeader = model.component.layoutHeaders[channel];
  var groups = [];

  // Transpiled for...of over ['header', 'footer'].
  var _iterator73 = _createForOfIteratorHelper(HEADER_TYPES),
      _step73;

  try {
    for (_iterator73.s(); !(_step73 = _iterator73.n()).done;) {
      var headerType = _step73.value;

      if (layoutHeader[headerType]) {
        // Transpiled for...of over the components of this header type.
        var _iterator74 = _createForOfIteratorHelper(layoutHeader[headerType]),
            _step74;

        try {
          for (_iterator74.s(); !(_step74 = _iterator74.n()).done;) {
            var headerComponent = _step74.value;
            var group = assembleHeaderGroup(model, channel, headerType, layoutHeader, headerComponent);

            // assembleHeaderGroup returns null for components with nothing to render.
            if (group != null) {
              groups.push(group);
            }
          }
        } catch (err) {
          _iterator74.e(err);
        } finally {
          _iterator74.f();
        }
      }
    }
  } catch (err) {
    _iterator73.e(err);
  } finally {
    _iterator73.f();
  }

  return groups;
}
|
|
|
|
/**
 * Vega sort definition for a facet header: an explicit sort field keeps its
 * own order, a sort array uses the precomputed sort-index field, and any
 * other value sorts by the facet field itself (with the value as order when
 * it is a plain order string).
 */
function getSort(facetFieldDef, channel) {
  var sort = facetFieldDef.sort;

  if (isSortField(sort)) {
    var order = sort.order;
    return {
      field: _vgField(sort, {
        expr: 'datum'
      }),
      order: order !== null && order !== void 0 ? order : 'ascending'
    };
  }
  if (isArray(sort)) {
    return {
      field: sortArrayIndexField(facetFieldDef, channel, {
        expr: 'datum'
      }),
      order: 'ascending'
    };
  }
  return {
    field: _vgField(facetFieldDef, {
      expr: 'datum'
    }),
    order: sort !== null && sort !== void 0 ? sort : 'ascending'
  };
}
|
|
|
|
// Build the title spec used as the per-facet header label.
function _assembleLabelTitle(facetFieldDef, channel, config) {
  var _getHeaderProperties2 = getHeaderProperties(['format', 'formatType', 'labelAngle', 'labelAnchor', 'labelOrient', 'labelExpr'], facetFieldDef, config, channel),
      format = _getHeaderProperties2.format,
      formatType = _getHeaderProperties2.formatType,
      labelAngle = _getHeaderProperties2.labelAngle,
      labelAnchor = _getHeaderProperties2.labelAnchor,
      labelOrient = _getHeaderProperties2.labelOrient,
      labelExpr = _getHeaderProperties2.labelExpr;

  var titleTextExpr = formatSignalRef({
    fieldOrDatumDef: facetFieldDef,
    format: format,
    formatType: formatType,
    expr: 'parent',
    config: config
  }).signal;
  var headerChannel = getHeaderChannel(channel, labelOrient);

  // A custom labelExpr may reference datum.label (the formatted value) and
  // datum.value (the raw field value); substitute both.
  var textSignal;
  if (labelExpr) {
    var withLabel = replaceAll(labelExpr, 'datum.label', titleTextExpr);
    textSignal = replaceAll(withLabel, 'datum.value', _vgField(facetFieldDef, {
      expr: 'parent'
    }));
  } else {
    textSignal = titleTextExpr;
  }

  return Object.assign({
    text: {
      signal: textSignal
    }
  }, channel === 'row' ? {
    orient: 'left'
  } : {}, {
    style: 'guide-label',
    frame: 'group'
  }, defaultHeaderGuideBaseline(labelAngle, headerChannel), defaultHeaderGuideAlign(headerChannel, labelAngle, labelAnchor), assembleHeaderProperties(config, facetFieldDef, channel, HEADER_LABEL_PROPERTIES, HEADER_LABEL_PROPERTIES_MAP));
}
|
|
|
|
/**
 * Assemble one header/footer group mark for a facet channel, or null when
 * the component would render nothing (no label title and no axes).
 */
function assembleHeaderGroup(model, channel, headerType, layoutHeader, headerComponent) {
  if (headerComponent) {
    var _title2 = null;
    var facetFieldDef = layoutHeader.facetFieldDef;
    var config = model.config ? model.config : undefined;

    if (facetFieldDef && headerComponent.labels) {
      var _getHeaderProperties3 = getHeaderProperties(['labelOrient'], facetFieldDef, config, channel),
          labelOrient = _getHeaderProperties3.labelOrient; // Include label title in the header if orient aligns with the channel

      if (channel === 'row' && !contains(['top', 'bottom'], labelOrient) || channel === 'column' && !contains(['left', 'right'], labelOrient)) {
        _title2 = _assembleLabelTitle(facetFieldDef, channel, config);
      }
    }

    var isFacetWithoutRowCol = isFacetModel(model) && !isFacetMapping(model.facet);
    var axes = headerComponent.axes;
    var hasAxes = (axes === null || axes === void 0 ? void 0 : axes.length) > 0;

    if (_title2 || hasAxes) {
      var sizeChannel = channel === 'row' ? 'height' : 'width';
      // Layer the group mark pieces: base name/type/role, then the data join
      // and sort (when there is a facet field def), the facet-domain join
      // (for wrapped facet with axes), the label title, the size encode, and
      // finally the axes — each only when present.
      return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({
        name: model.getName("".concat(channel, "_").concat(headerType)),
        type: 'group',
        role: "".concat(channel, "-").concat(headerType)
      }, layoutHeader.facetFieldDef ? {
        from: {
          data: model.getName(channel + '_domain')
        },
        sort: getSort(facetFieldDef, channel)
      } : {}), hasAxes && isFacetWithoutRowCol ? {
        from: {
          data: model.getName("facet_domain_".concat(channel))
        }
      } : {}), _title2 ? {
        title: _title2
      } : {}), headerComponent.sizeSignal ? {
        encode: {
          update: _defineProperty({}, sizeChannel, headerComponent.sizeSignal)
        }
      } : {}), hasAxes ? {
        axes: axes
      } : {});
    }
  }

  return null;
}
|
|
|
|
// Relative band position of a layout title per header channel and anchor:
// 'start'/'end' map to 0/1 for columns and to 1/0 for rows.
var LAYOUT_TITLE_BAND = {
  column: {
    start: 0,
    end: 1
  },
  row: {
    start: 1,
    end: 0
  }
};
|
|
|
|
// Band position for a layout title; undefined for anchors other than
// 'start'/'end' (no entry in the lookup table).
function getLayoutTitleBand(titleAnchor, headerChannel) {
  var channelBands = LAYOUT_TITLE_BAND[headerChannel];
  return channelBands[titleAnchor];
}
|
|
|
|
// Compute the title band positions for every facet channel with a facet
// field def; returns undefined when no channel contributes a band.
function assembleLayoutTitleBand(headerComponentIndex, config) {
  var titleBand = {};

  // Transpiled for...of over the facet channels.
  var _iterator75 = _createForOfIteratorHelper(FACET_CHANNELS),
      _step75;

  try {
    for (_iterator75.s(); !(_step75 = _iterator75.n()).done;) {
      var channel = _step75.value;
      var headerComponent = headerComponentIndex[channel];

      if (headerComponent === null || headerComponent === void 0 ? void 0 : headerComponent.facetFieldDef) {
        var _getHeaderProperties4 = getHeaderProperties(['titleAnchor', 'titleOrient'], headerComponent.facetFieldDef, config, channel),
            titleAnchor = _getHeaderProperties4.titleAnchor,
            titleOrient = _getHeaderProperties4.titleOrient;

        // The title orient may move the title to the opposite header channel.
        var headerChannel = getHeaderChannel(channel, titleOrient);
        var band = getLayoutTitleBand(titleAnchor, headerChannel);

        if (band !== undefined) {
          titleBand[headerChannel] = band;
        }
      }
    }
  } catch (err) {
    _iterator75.e(err);
  } finally {
    _iterator75.f();
  }

  return isEmpty(titleBand) ? undefined : titleBand;
}
|
|
|
|
// Resolve the requested header properties and rename them through
// `propertiesMap` into Vega title properties; undefined values and
// properties without a mapping are skipped.
function assembleHeaderProperties(config, facetFieldDef, channel, properties, propertiesMap) {
  var props = {};

  // Transpiled for...of over `properties`.
  var _iterator76 = _createForOfIteratorHelper(properties),
      _step76;

  try {
    for (_iterator76.s(); !(_step76 = _iterator76.n()).done;) {
      var prop = _step76.value;

      // Only properties with a mapped Vega name are emitted.
      if (!propertiesMap[prop]) {
        continue;
      }

      var value = getHeaderProperty(prop, facetFieldDef, config, channel);

      if (value !== undefined) {
        props[propertiesMap[prop]] = value;
      }
    }
  } catch (err) {
    _iterator76.e(err);
  } finally {
    _iterator76.f();
  }

  return props;
}
|
|
|
|
// All layout size signals for a model: its own width/height plus the child
// width/height signals, concatenated in that order.
function _assembleLayoutSignals(model) {
  var signals = [];
  var sizeTypes = ['width', 'height', 'childWidth', 'childHeight'];
  for (var i = 0; i < sizeTypes.length; i++) {
    signals.push.apply(signals, _toConsumableArray(sizeSignals(model, sizeTypes[i])));
  }
  return signals;
}
|
|
|
|
/**
 * Assemble the Vega signals for one size type (width/height/childWidth/
 * childHeight) of a model.
 *
 * - Missing or 'merged' sizes produce no signals (handled elsewhere).
 * - 'step' sizes produce a `<scale>_step` signal and, unless the parent facet
 *   resolves the scale independently, a size signal computed from the scale's
 *   domain cardinality.
 * - 'container' sizes track the container element and re-evaluate on resize,
 *   falling back to the configured continuous size when the container size is
 *   not finite.
 * - Any other size becomes a plain value signal.
 */
function sizeSignals(model, sizeType) {
  var channel = sizeType === 'width' ? 'x' : 'y';
  var size = model.component.layoutSize.get(sizeType);

  if (!size || size === 'merged') {
    return [];
  } // Read size signal name from name map, just in case it is the top-level size signal that got renamed.


  var name = model.getSizeSignalRef(sizeType).signal;

  if (size === 'step') {
    var scaleComponent = model.getScaleComponent(channel);

    if (scaleComponent) {
      var type = scaleComponent.get('type');
      var range = scaleComponent.get('range');

      if (hasDiscreteDomain(type) && isVgRangeStep(range)) {
        var scaleName = model.scaleName(channel);

        if (isFacetModel(model.parent)) {
          // If parent is facet and this is an independent scale, return only the step signal
          // as the width/height will be calculated using the cardinality from
          // facet's aggregate rather than reading from scale domain
          var parentResolve = model.parent.component.resolve;

          if (parentResolve.scale[channel] === 'independent') {
            return [stepSignal(scaleName, range)];
          }
        }

        return [stepSignal(scaleName, range), {
          name: name,
          update: sizeExpr(scaleName, scaleComponent, "domain('".concat(scaleName, "').length"))
        }];
      }
    }
    /* istanbul ignore next: Condition should not happen -- only for warning in development. */


    throw new Error('layout size is step although width/height is not step.');
  } else if (size === 'container') { // strict comparison (was `==`); equivalent here and idiomatic
    var isWidth = name.endsWith('width');
    var expr = isWidth ? 'containerSize()[0]' : 'containerSize()[1]';
    var defaultValue = getViewConfigContinuousSize(model.config.view, isWidth ? 'width' : 'height');
    // Guard against containerSize() yielding a non-finite value (e.g. when
    // the view element is detached from the DOM).
    var safeExpr = "isFinite(".concat(expr, ") ? ").concat(expr, " : ").concat(defaultValue);
    return [{
      name: name,
      init: safeExpr,
      on: [{
        update: safeExpr,
        events: 'window:resize'
      }]
    }];
  } else {
    return [{
      name: name,
      value: size
    }];
  }
}
|
|
|
|
// Signal holding the step size of a band/point scale's range.
function stepSignal(scaleName, range) {
  return {
    name: "".concat(scaleName, "_step"),
    value: range.step
  };
}
|
|
|
|
// Expression computing a band/point scale's total size from its domain
// cardinality and paddings, in units of the scale's `_step` signal.
function sizeExpr(scaleName, scaleComponent, cardinality) {
  var type = scaleComponent.get('type');
  var padding = scaleComponent.get('padding');
  var paddingOuter = getFirstDefined(scaleComponent.get('paddingOuter'), padding);
  var paddingInner = scaleComponent.get('paddingInner');
  if (type === 'band') {
    // only band has real paddingInner
    if (paddingInner === undefined) {
      paddingInner = padding;
    }
  } else {
    // For point, as calculated in https://github.com/vega/vega-scale/blob/master/src/band.js#L128,
    // it's equivalent to have paddingInner = 1 since there is only n-1 steps between n points.
    paddingInner = 1;
  }
  return "bandspace(".concat(cardinality, ", ").concat(paddingInner, ", ").concat(paddingOuter, ") * ").concat(scaleName, "_step");
}
|
|
|
|
// Map a layout size type to the corresponding plain size type
// ('childWidth' -> 'width', 'childHeight' -> 'height', identity otherwise).
function getSizeTypeFromLayoutSizeType(layoutSizeType) {
  if (layoutSizeType === 'childWidth') {
    return 'width';
  }
  if (layoutSizeType === 'childHeight') {
    return 'height';
  }
  return layoutSizeType;
}
|
|
|
|
// Build a guide encode entry: each channel's value def is wrapped in its
// condition, and plain values are reduced to `{value}` refs (signal refs
// pass through unchanged).
function guideEncodeEntry(encoding, model) {
  return keys(encoding).reduce(function (encode, channel) {
    var valueDef = encoding[channel];
    var wrapped = wrapCondition(model, valueDef, channel, function (x) {
      if (isSignalRef(x)) {
        return x;
      }
      return {
        value: x.value
      };
    });
    return Object.assign(Object.assign({}, encode), wrapped);
  }, {});
}
|
|
|
|
// Default scale resolution: layer and facet models share scales; concat
// models share everything except positional (x/y) channels.
function defaultScaleResolve(channel, model) {
  if (isLayerModel(model) || isFacetModel(model)) {
    return 'shared';
  }
  if (isConcatModel(model)) {
    return isXorY(channel) ? 'independent' : 'shared';
  }
  /* istanbul ignore next: should never reach here. */
  throw new Error('invalid model type for resolve');
}
|
|
|
|
// Resolve the guide (axis for x/y, legend otherwise) for a channel.
// An independently resolved scale forces an independent guide; a user
// request for a shared guide in that case only triggers a warning.
function parseGuideResolve(resolve, channel) {
  var scaleResolve = resolve.scale[channel];
  var guideType = isXorY(channel) ? 'axis' : 'legend';
  if (scaleResolve !== 'independent') {
    return resolve[guideType][channel] || 'shared';
  }
  if (resolve[guideType][channel] === 'shared') {
    warn(independentScaleMeansIndependentGuide(channel));
  }
  return 'independent';
}
|
|
|
|
// Index of every property a legend component may carry: the common legend
// properties plus component-internal flags (disable, labelExpr, selections),
// the channel scale references, and the encode block.
var LEGEND_COMPONENT_PROPERTY_INDEX = Object.assign(Object.assign({}, COMMON_LEGEND_PROPERTY_INDEX), {
  disable: 1,
  labelExpr: 1,
  selections: 1,
  // channel scales
  opacity: 1,
  shape: 1,
  stroke: 1,
  fill: 1,
  size: 1,
  strokeWidth: 1,
  strokeDash: 1,
  // encode
  encode: 1
});
// Property names derived from the index above.
var LEGEND_COMPONENT_PROPERTIES = keys(LEGEND_COMPONENT_PROPERTY_INDEX);
|
|
|
|
// Legend component: a Split (store of explicit vs implicit properties) with
// no additional behavior of its own. (Transpiled ES2015 class.)
var LegendComponent = /*#__PURE__*/function (_Split2) {
  _inherits(LegendComponent, _Split2);

  var _super7 = _createSuper(LegendComponent);

  function LegendComponent() {
    _classCallCheck(this, LegendComponent);

    return _super7.apply(this, arguments);
  }

  return LegendComponent;
}(Split);
|
|
|
|
// Encode-rule functions per legend part; each may return undefined when the
// part needs no custom encoding.
var legendEncodeRules = {
  symbols: symbols,
  gradient: gradient,
  labels: labels,
  entries: entries$1
};
|
|
|
|
/**
 * Encode entry for the symbols of a symbol legend. Derives fill, stroke, and
 * opacity from the mark definition, the encoding, and legend/config settings,
 * then merges the user-provided symbol spec on top. Returns undefined for
 * non-symbol legends or when nothing needs to be encoded.
 */
function symbols(symbolsSpec, _ref69) {
  var fieldOrDatumDef = _ref69.fieldOrDatumDef,
      model = _ref69.model,
      channel = _ref69.channel,
      legendCmpt = _ref69.legendCmpt,
      legendType = _ref69.legendType;

  var _a, _b, _c, _d, _e, _f, _g, _h;

  if (legendType !== 'symbol') {
    return undefined;
  }

  var markDef = model.markDef,
      encoding = model.encoding,
      config = model.config,
      mark = model.mark;
  var filled = markDef.filled && mark !== 'trail';
  // Start from the mark's fill/stroke config plus the resolved color mixins.
  var out = Object.assign(Object.assign({}, applyMarkConfig({}, model, FILL_STROKE_CONFIG)), color(model, {
    filled: filled
  })); // FIXME: remove this when VgEncodeEntry is compatible with SymbolEncodeEntry

  // Legend-component values win over the legend config.
  var symbolOpacity = (_a = legendCmpt.get('symbolOpacity')) !== null && _a !== void 0 ? _a : config.legend.symbolOpacity;
  var symbolFillColor = (_b = legendCmpt.get('symbolFillColor')) !== null && _b !== void 0 ? _b : config.legend.symbolFillColor;
  var symbolStrokeColor = (_c = legendCmpt.get('symbolStrokeColor')) !== null && _c !== void 0 ? _c : config.legend.symbolStrokeColor;
  // Only derive an opacity from the encoding/mark when none is configured.
  var opacity = symbolOpacity === undefined ? (_d = getMaxValue(encoding.opacity)) !== null && _d !== void 0 ? _d : markDef.opacity : undefined;

  if (out.fill) {
    // for fill legend, we don't want any fill in symbol
    if (channel === 'fill' || filled && channel === COLOR) {
      delete out.fill;
    } else {
      if (out.fill['field']) {
        // For others, set fill to some opaque value (or nothing if a color is already set)
        if (symbolFillColor) {
          delete out.fill;
        } else {
          out.fill = signalOrValueRef((_e = config.legend.symbolBaseFillColor) !== null && _e !== void 0 ? _e : 'black');
          out.fillOpacity = signalOrValueRef(opacity !== null && opacity !== void 0 ? opacity : 1);
        }
      } else if (isArray(out.fill)) {
        // Conditional fill: collapse to the first concrete color available.
        var fill = (_h = (_g = getFirstConditionValue((_f = encoding.fill) !== null && _f !== void 0 ? _f : encoding.color)) !== null && _g !== void 0 ? _g : markDef.fill) !== null && _h !== void 0 ? _h : filled && markDef.color;

        if (fill) {
          out.fill = signalOrValueRef(fill);
        }
      }
    }
  }

  if (out.stroke) {
    if (channel === 'stroke' || !filled && channel === COLOR) {
      delete out.stroke;
    } else {
      if (out.stroke['field'] || symbolStrokeColor) {
        // For others, remove stroke field
        delete out.stroke;
      } else if (isArray(out.stroke)) {
        // Conditional stroke: collapse to the first concrete color available.
        var stroke = getFirstDefined(getFirstConditionValue(encoding.stroke || encoding.color), markDef.stroke, filled ? markDef.color : undefined);

        if (stroke) {
          out.stroke = {
            value: stroke
          };
        }
      }
    }
  }

  if (channel !== OPACITY) {
    // Dim unselected symbols when a selection is bound to this legend.
    var condition = isFieldDef(fieldOrDatumDef) && selectedCondition(model, legendCmpt, fieldOrDatumDef);

    if (condition) {
      out.opacity = [Object.assign({
        test: condition
      }, signalOrValueRef(opacity !== null && opacity !== void 0 ? opacity : 1)), signalOrValueRef(config.legend.unselectedOpacity)];
    } else if (opacity) {
      out.opacity = signalOrValueRef(opacity);
    }
  }

  out = Object.assign(Object.assign({}, out), symbolsSpec);
  return isEmpty(out) ? undefined : out;
}
|
|
|
|
/**
 * Encode rule for the "gradient" part of a gradient legend. Applies the
 * resolved gradient opacity (legend property, then config, then the max
 * encoded/mark opacity) and lets a user-specified gradient spec override.
 * Returns undefined for non-gradient legends or when nothing applies.
 */
function gradient(gradientSpec, params) {
  var model = params.model,
      legendType = params.legendType,
      legendCmpt = params.legendCmpt;

  if (legendType !== 'gradient') {
    return undefined;
  }

  var config = model.config,
      markDef = model.markDef,
      encoding = model.encoding;
  var result = {};
  var explicitOpacity = legendCmpt.get('gradientOpacity');
  var gradientOpacity = explicitOpacity !== null && explicitOpacity !== void 0 ? explicitOpacity : config.legend.gradientOpacity;
  var opacity;
  if (gradientOpacity === undefined) {
    opacity = getMaxValue(encoding.opacity) || markDef.opacity;
  }

  if (opacity) {
    // only apply opacity if it is neither zero or undefined
    result.opacity = signalOrValueRef(opacity);
  }

  result = Object.assign(Object.assign({}, result), gradientSpec);
  return isEmpty(result) ? undefined : result;
}
|
|
|
|
/**
 * Encode rule for the legend "labels" mark: applies selection-driven
 * opacity (dim unselected entries) and custom-format text, then lets any
 * user-specified labels encoding override both. Returns undefined when
 * the assembled spec is empty.
 */
function labels(specifiedlabelsSpec, _ref71) {
  var fieldOrDatumDef = _ref71.fieldOrDatumDef,
      model = _ref71.model,
      channel = _ref71.channel,
      legendCmpt = _ref71.legendCmpt;
  var legend = model.legend(channel) || {};
  var config = model.config;
  // Only field defs can drive selections; datum defs get no condition.
  var condition = isFieldDef(fieldOrDatumDef) ? selectedCondition(model, legendCmpt, fieldOrDatumDef) : undefined;
  // Selected labels are fully opaque; unselected ones use the config value.
  var opacity = condition ? [{
    test: condition,
    value: 1
  }, {
    value: config.legend.unselectedOpacity
  }] : undefined;
  var format = legend.format,
      formatType = legend.formatType;
  // Custom (registered) format types become a text expression here; built-in
  // format types are handled by Vega itself.
  var text = isCustomFormatType(formatType) ? formatCustomType({
    fieldOrDatumDef: fieldOrDatumDef,
    field: 'datum.value',
    format: format,
    formatType: formatType,
    config: config
  }) : undefined;
  var labelsSpec = Object.assign(Object.assign(Object.assign({}, opacity ? {
    opacity: opacity
  } : {}), text ? {
    text: text
  } : {}), specifiedlabelsSpec);
  return isEmpty(labelsSpec) ? undefined : labelsSpec;
}
|
|
|
|
// Encode rule for the legend "entries" group: when the legend drives
// interactive selections, give each entry a transparent fill so the whole
// row is clickable; otherwise pass the spec through untouched.
function entries$1(entriesSpec, params) {
  var legendCmpt = params.legendCmpt;
  var selections = legendCmpt.get('selections');
  var interactive = selections === null || selections === void 0 ? void 0 : selections.length;

  if (!interactive) {
    return entriesSpec;
  }

  return Object.assign(Object.assign({}, entriesSpec), {
    fill: {
      value: 'transparent'
    }
  });
}
|
|
|
|
// Largest opacity/size value among a channel definition's base value and
// all of its conditional values (folded with Math.max).
function getMaxValue(channelDef) {
  var takeMax = function takeMax(acc, conditionalDef) {
    return Math.max(acc, conditionalDef.value);
  };
  return getConditionValue(channelDef, takeMax);
}
|
|
|
|
// First defined value of a channel definition: the base value wins,
// otherwise the first condition (in order) that has a defined value.
function getFirstConditionValue(channelDef) {
  var firstDefined = function firstDefined(acc, conditionalDef) {
    return getFirstDefined(acc, conditionalDef.value);
  };
  return getConditionValue(channelDef, firstDefined);
}
|
|
|
|
/**
 * Reduce a value definition's conditions with the given reducer, seeded by
 * the definition's own value; plain value defs return their value directly,
 * anything else yields undefined.
 */
function getConditionValue(channelDef, reducer) {
  if (hasConditionalValueDef(channelDef)) {
    // `array` normalizes a single condition object into a one-element list.
    var conditions = array(channelDef.condition);
    return conditions.reduce(reducer, channelDef.value);
  }

  if (isValueDef(channelDef)) {
    return channelDef.value;
  }

  return undefined;
}
|
|
|
|
/**
 * Build a Vega expression that tests whether a legend entry's datum.value
 * is part of any active selection bound to this legend (clauses OR-ed per
 * selection). Returns undefined when no selections drive the legend.
 */
function selectedCondition(model, legendCmpt, fieldDef) {
  var selections = legendCmpt.get('selections');
  if (!(selections === null || selections === void 0 ? void 0 : selections.length)) return undefined;
  // $-quote the field name so it is a valid string literal in the expression.
  var field = $(fieldDef.field);
  return selections.map(function (name) {
    var store = $(varName(name) + STORE);
    // An entry counts as selected when the selection store is empty (nothing
    // selected yet) or the selection's list for this field contains the value.
    return "(!length(data(".concat(store, ")) || (").concat(name, "[").concat(field, "] && indexof(").concat(name, "[").concat(field, "], datum.value) >= 0))");
  }).join(' || ');
}
|
|
|
|
/**
 * Per-property rules for computing legend component values. Each rule gets
 * the shared rule params (legend spec, config, field def, scale type,
 * orient, resolved legend type/direction, ...) and returns the resolved
 * value, or undefined when the property should be left unset.
 */
var legendRules = {
  // Direction was already resolved by getDirection; pass it through.
  direction: function direction(_ref73) {
    var _direction = _ref73.direction;
    return _direction;
  },
  format: function format(_ref74) {
    var fieldOrDatumDef = _ref74.fieldOrDatumDef,
        legend = _ref74.legend,
        config = _ref74.config;
    var format = legend.format,
        formatType = legend.formatType;
    return guideFormat(fieldOrDatumDef, fieldOrDatumDef.type, format, formatType, config, false);
  },
  formatType: function formatType(_ref75) {
    var legend = _ref75.legend,
        fieldOrDatumDef = _ref75.fieldOrDatumDef,
        scaleType = _ref75.scaleType;
    var formatType = legend.formatType;
    return guideFormatType(formatType, fieldOrDatumDef, scaleType);
  },
  // legend.gradientLength, then config.legend.gradientLength, then a
  // default derived from the plot size signals.
  gradientLength: function gradientLength(params) {
    var _a, _b;

    var legend = params.legend,
        legendConfig = params.legendConfig;
    return (_b = (_a = legend.gradientLength) !== null && _a !== void 0 ? _a : legendConfig.gradientLength) !== null && _b !== void 0 ? _b : defaultGradientLength(params);
  },
  labelOverlap: function labelOverlap(_ref76) {
    var legend = _ref76.legend,
        legendConfig = _ref76.legendConfig,
        scaleType = _ref76.scaleType;

    var _a, _b;

    return (_b = (_a = legend.labelOverlap) !== null && _a !== void 0 ? _a : legendConfig.labelOverlap) !== null && _b !== void 0 ? _b : defaultLabelOverlap$1(scaleType);
  },
  symbolType: function symbolType(_ref77) {
    var legend = _ref77.legend,
        markDef = _ref77.markDef,
        channel = _ref77.channel,
        encoding = _ref77.encoding;

    var _a;

    return (_a = legend.symbolType) !== null && _a !== void 0 ? _a : defaultSymbolType(markDef.type, channel, encoding.shape, markDef.shape);
  },
  title: function title(_ref78) {
    var fieldOrDatumDef = _ref78.fieldOrDatumDef,
        config = _ref78.config;
    return _title3(fieldOrDatumDef, config, {
      allowDisabling: true
    });
  },
  // Drop the type when it matches what Vega would infer anyway (gradient
  // for continuous color scales, symbol otherwise) to keep output minimal.
  type: function type(_ref79) {
    var legendType = _ref79.legendType,
        scaleType = _ref79.scaleType,
        channel = _ref79.channel;

    if (isColorChannel(channel) && isContinuousToContinuous(scaleType)) {
      if (legendType === 'gradient') {
        return undefined;
      }
    } else if (legendType === 'symbol') {
      return undefined;
    }

    return legendType;
  },
  values: function values(_ref80) {
    var fieldOrDatumDef = _ref80.fieldOrDatumDef,
        legend = _ref80.legend;
    return values$1(legend, fieldOrDatumDef);
  }
};
|
|
|
|
/**
 * Resolve the legend's explicit `values` property: arrays are normalized
 * through valueArray (e.g. date-time handling for the field's type) and
 * signal references pass through untouched; anything else yields undefined.
 */
function values$1(legend, fieldOrDatumDef) {
  var vals = legend.values;

  if (isArray(vals)) {
    return valueArray(fieldOrDatumDef, vals);
  }

  if (isSignalRef(vals)) {
    return vals;
  }

  return undefined;
}
|
|
|
|
/**
 * Default legend symbol shape for a mark type. For channels other than
 * "shape", a shape from the shape encoding or the mark definition takes
 * precedence; otherwise the mark type maps to 'square' / 'stroke' /
 * 'circle' (unknown marks yield undefined).
 */
function defaultSymbolType(mark, channel, shapeChannelDef, markShape) {
  if (channel !== 'shape') {
    // use the value from the shape encoding or the mark config if they exist
    var encodedShape = getFirstConditionValue(shapeChannelDef);
    var shape = encodedShape !== null && encodedShape !== void 0 ? encodedShape : markShape;

    if (shape) {
      return shape;
    }
  }

  var SYMBOL_FOR_MARK = {
    bar: 'square',
    rect: 'square',
    image: 'square',
    square: 'square',
    line: 'stroke',
    trail: 'stroke',
    rule: 'stroke',
    arc: 'circle',
    point: 'circle',
    circle: 'circle',
    tick: 'circle',
    geoshape: 'circle',
    area: 'circle',
    text: 'circle'
  };
  // Guard with hasOwnProperty so prototype keys never leak through.
  return Object.prototype.hasOwnProperty.call(SYMBOL_FOR_MARK, mark) ? SYMBOL_FOR_MARK[mark] : undefined;
}
|
|
|
|
// Resolve the legend type: an explicit legend.type wins; otherwise derive
// it from the channel, time unit, and scale type.
function getLegendType(params) {
  var explicitType = params.legend.type;
  return getFirstDefined(explicitType, defaultType$1(params));
}
|
|
|
|
/**
 * Default legend type for a channel.
 * Following the logic in https://github.com/vega/vega-parser/blob/master/src/parsers/legend.js
 * Color channels with a continuous-to-continuous scale get a gradient,
 * except cyclic time units (small discrete domains); everything else is a
 * symbol legend.
 */
function defaultType$1(params) {
  var channel = params.channel,
      timeUnit = params.timeUnit,
      scaleType = params.scaleType;

  if (isColorChannel(channel)) {
    var isCyclicTimeUnit = contains(['quarter', 'month', 'day'], timeUnit);

    if (isCyclicTimeUnit) {
      return 'symbol';
    }

    if (isContinuousToContinuous(scaleType)) {
      return 'gradient';
    }
  }

  return 'symbol';
}
|
|
|
|
/**
 * Resolve the legend direction ("horizontal" / "vertical").
 * Precedence: explicit legend.direction, then the type-specific config
 * default (gradientDirection for gradient legends, symbolDirection for
 * symbol legends), then an orient-based default.
 *
 * Fix: the config lookup previously used `legendType ? ... : ...`. Since
 * legendType is always a non-empty string ('symbol' | 'gradient') by the
 * time this runs (see getLegendType), the 'symbolDirection' config was
 * never consulted. Compare against 'gradient' explicitly, mirroring the
 * comparison defaultDirection already uses.
 */
function getDirection(_ref82) {
  var legendConfig = _ref82.legendConfig,
      legendType = _ref82.legendType,
      orient = _ref82.orient,
      legend = _ref82.legend;

  var _a, _b;

  return (_b = (_a = legend.direction) !== null && _a !== void 0 ? _a : legendConfig[legendType === 'gradient' ? 'gradientDirection' : 'symbolDirection']) !== null && _b !== void 0 ? _b : defaultDirection(orient, legendType);
}
|
|
|
|
/**
 * Orient-based fallback for legend direction. Returning undefined defers
 * to Vega's own default (vertical).
 */
function defaultDirection(orient, legendType) {
  if (orient === 'top' || orient === 'bottom') {
    return 'horizontal';
  }

  if (orient === 'left' || orient === 'right' || orient === 'none' || orient === undefined) {
    // undefined = "right" in Vega; vertical is Vega's default.
    return undefined;
  }

  // Corner orients (top-left, ...) place the legend inside the plot.
  // For inner legends, use a compact layout like Tableau: horizontal
  // gradients, default direction for symbols.
  return legendType === 'gradient' ? 'horizontal' : undefined;
}
|
|
|
|
/**
 * Default gradient length for a gradient legend: when the gradient runs
 * along the plot's width/height, clamp that size signal into the configured
 * [min, max] range; otherwise use the configured minimum. Only continuous
 * color scales get a default; others return undefined.
 */
function defaultGradientLength(params) {
  var legendConfig = params.legendConfig,
      model = params.model,
      direction = params.direction,
      orient = params.orient,
      scaleType = params.scaleType;
  var hMax = legendConfig.gradientHorizontalMaxLength;
  var hMin = legendConfig.gradientHorizontalMinLength;
  var vMax = legendConfig.gradientVerticalMaxLength;
  var vMin = legendConfig.gradientVerticalMinLength;

  if (!isContinuousToContinuous(scaleType)) {
    return undefined;
  }

  if (direction === 'horizontal') {
    // Horizontal gradients on top/bottom track the plot width.
    if (orient === 'top' || orient === 'bottom') {
      return gradientLengthSignal(model, 'width', hMin, hMax);
    }

    return hMin;
  }

  // vertical / undefined (Vega uses vertical by default)
  return gradientLengthSignal(model, 'height', vMin, vMax);
}
|
|
|
|
// Signal expression that clamps the plot's width/height signal into
// [min, max] so the gradient neither outgrows the plot nor collapses.
function gradientLengthSignal(model, sizeType, min, max) {
  var sizeSignal = model.getSizeSignalRef(sizeType).signal;
  var expr = "clamp(" + sizeSignal + ", " + min + ", " + max + ")";
  return {
    signal: expr
  };
}
|
|
|
|
// Scale types with potentially uneven tick spacing get greedy label-overlap
// removal; everything else defers to Vega's default (undefined).
function defaultLabelOverlap$1(scaleType) {
  var needsGreedy = contains(['quantile', 'threshold', 'log'], scaleType);
  return needsGreedy ? 'greedy' : undefined;
}
|
|
|
|
// Parse legend components for a model (unit models directly, composite
// models by recursing into children), cache them on the component tree,
// and return the resulting index.
function parseLegend(model) {
  var legends = isUnitModel(model) ? parseUnitLegend(model) : parseNonUnitLegend(model);
  model.component.legends = legends;
  return legends;
}
|
|
|
|
/**
 * Build legend components for every legend-bearing channel of a unit model
 * that has both an encoding definition and a scale. Geojson-typed shape
 * channels never get a legend.
 */
function parseUnitLegend(model) {
  var encoding = model.encoding;
  var legendComponent = {};

  for (var _i10 = 0, _arr7 = [COLOR].concat(LEGEND_SCALE_CHANNELS); _i10 < _arr7.length; _i10++) {
    var channel = _arr7[_i10];
    var def = getFieldOrDatumDef(encoding[channel]);

    // No encoding definition or no scale for this channel -> nothing to legend.
    if (!def || !model.getScaleComponent(channel)) {
      continue;
    }

    // Geojson-typed shape encodings describe geometry, not a legend domain.
    if (channel === SHAPE && isFieldDef(def) && def.type === GEOJSON) {
      continue;
    }

    legendComponent[channel] = parseLegendForChannel(model, channel);
  }

  return legendComponent;
}
|
|
|
|
/**
 * Map a legend channel to the Vega scale property it should reference.
 * Trail marks are filled but their legend symbols use stroke, so color
 * maps to `stroke` and size to `strokeWidth` for them; for other marks
 * color follows the mark's filled flag, and any other channel maps to a
 * property of the same name.
 */
function getLegendDefWithScale(model, channel) {
  var scale = model.scaleName(channel);

  if (model.mark === 'trail') {
    if (channel === 'color') {
      // trail is a filled mark, but its default symbolType ("stroke") should use "stroke"
      return {
        stroke: scale
      };
    }

    if (channel === 'size') {
      return {
        strokeWidth: scale
      };
    }
  }

  if (channel === 'color') {
    if (model.markDef.filled) {
      return {
        fill: scale
      };
    }

    return {
      stroke: scale
    };
  }

  var def = {};
  def[channel] = scale;
  return def;
} // eslint-disable-next-line @typescript-eslint/ban-types
|
|
|
|
|
|
/**
 * Decide whether a resolved legend property value counts as user-explicit.
 * 'disable' is explicit whenever a legend spec (including null/false) was
 * given at all; 'values' whenever legend.values is set (it may have been
 * transformed); 'title' when the value matches fieldDef.title. Any other
 * property is explicit when the value equals the one in the legend spec.
 */
function isExplicit(value, property, legend, fieldDef) {
  if (property === 'disable') {
    // if axis is specified or null/false, then its enable/disable state is explicit
    return legend !== undefined;
  }

  if (property === 'values') {
    // specified legend.values is already respected, but may get transformed.
    return !!(legend === null || legend === void 0 ? void 0 : legend.values);
  }

  if (property === 'title' && value === (fieldDef === null || fieldDef === void 0 ? void 0 : fieldDef.title)) {
    // title can be explicit if fieldDef.title is set
    return true;
  }

  // Otherwise, things are explicit if the returned value matches the specified property
  return value === (legend || {})[property];
}
|
|
|
|
/**
 * Build the LegendComponent for one channel of a unit model: resolve the
 * disable flag, run every property rule from legendRules, and assemble the
 * per-part encode blocks (labels/symbols/gradient/...), tracking which
 * values are explicit throughout.
 */
function parseLegendForChannel(model, channel) {
  var _a, _b, _c;

  var legend = model.legend(channel);
  var markDef = model.markDef,
      encoding = model.encoding,
      config = model.config;
  var legendConfig = config.legend;
  var legendCmpt = new LegendComponent({}, getLegendDefWithScale(model, channel));
  parseInteractiveLegend(model, channel, legendCmpt);
  // legend === null/false disables explicitly; an omitted legend falls back
  // to the config-level disable flag.
  var disable = legend !== undefined ? !legend : legendConfig.disable;
  legendCmpt.set('disable', disable, legend !== undefined);

  if (disable) {
    return legendCmpt;
  }

  legend = legend || {};
  var scaleType = model.getScaleComponent(channel).get('type');
  var fieldOrDatumDef = getFieldOrDatumDef(encoding[channel]);
  var timeUnit = isFieldDef(fieldOrDatumDef) ? (_a = normalizeTimeUnit(fieldOrDatumDef.timeUnit)) === null || _a === void 0 ? void 0 : _a.unit : undefined;
  var orient = legend.orient || config.legend.orient || 'right';
  var legendType = getLegendType({
    legend: legend,
    channel: channel,
    timeUnit: timeUnit,
    scaleType: scaleType
  });
  var direction = getDirection({
    legend: legend,
    legendType: legendType,
    orient: orient,
    legendConfig: legendConfig
  });
  // Shared context handed to every rule in legendRules.
  var ruleParams = {
    legend: legend,
    channel: channel,
    model: model,
    markDef: markDef,
    encoding: encoding,
    fieldOrDatumDef: fieldOrDatumDef,
    legendConfig: legendConfig,
    config: config,
    scaleType: scaleType,
    orient: orient,
    legendType: legendType,
    direction: direction
  };

  var _iterator77 = _createForOfIteratorHelper(LEGEND_COMPONENT_PROPERTIES),
      _step77;

  try {
    for (_iterator77.s(); !(_step77 = _iterator77.n()).done;) {
      var property = _step77.value;

      // Skip properties that do not apply to this legend type.
      if (legendType === 'gradient' && property.startsWith('symbol') || legendType === 'symbol' && property.startsWith('gradient')) {
        continue;
      }

      var _value = property in legendRules ? legendRules[property](ruleParams) : legend[property];

      if (_value !== undefined) {
        var explicit = isExplicit(_value, property, legend, model.fieldDef(channel));

        // Implicit values are only stored when there is no config default
        // to defer to.
        if (explicit || config.legend[property] === undefined) {
          legendCmpt.set(property, _value, explicit);
        }
      }
    }
  } catch (err) {
    _iterator77.e(err);
  } finally {
    _iterator77.f();
  }

  var legendEncoding = (_b = legend === null || legend === void 0 ? void 0 : legend.encoding) !== null && _b !== void 0 ? _b : {};
  var selections = legendCmpt.get('selections');
  var legendEncode = {};
  var legendEncodeParams = {
    fieldOrDatumDef: fieldOrDatumDef,
    model: model,
    channel: channel,
    legendCmpt: legendCmpt,
    legendType: legendType
  };

  // Assemble an encode block per legend part, merging user-provided
  // encoding with the part's rule (if any).
  for (var _i11 = 0, _arr8 = ['labels', 'legend', 'title', 'symbols', 'gradient', 'entries']; _i11 < _arr8.length; _i11++) {
    var part = _arr8[_i11];
    var legendEncodingPart = guideEncodeEntry((_c = legendEncoding[part]) !== null && _c !== void 0 ? _c : {}, model);
    var value = part in legendEncodeRules ? legendEncodeRules[part](legendEncodingPart, legendEncodeParams) // apply rule
    : legendEncodingPart; // no rule -- just default values

    if (value !== undefined && !isEmpty(value)) {
      // Selection-driven legends get a stable name per part and are marked
      // interactive so Vega routes events to them.
      legendEncode[part] = Object.assign(Object.assign(Object.assign({}, (selections === null || selections === void 0 ? void 0 : selections.length) && isFieldDef(fieldOrDatumDef) ? {
        name: "".concat(varName(fieldOrDatumDef.field), "_legend_").concat(part)
      } : {}), (selections === null || selections === void 0 ? void 0 : selections.length) ? {
        interactive: !!selections
      } : {}), {
        update: value
      });
    }
  }

  if (!isEmpty(legendEncode)) {
    legendCmpt.set('encode', legendEncode, !!(legend === null || legend === void 0 ? void 0 : legend.encoding));
  }

  return legendCmpt;
}
|
|
|
|
/**
 * Parse legends for a composite model: parse each child first, then for
 * every channel whose resolve is "shared" try to merge the child legends
 * into one; on a merge conflict fall back to "independent". Finally strip
 * successfully shared legends from the children so they are not emitted
 * twice.
 */
function parseNonUnitLegend(model) {
  var _model$component = model.component,
      legends = _model$component.legends,
      resolve = _model$component.resolve;

  var _iterator78 = _createForOfIteratorHelper(model.children),
      _step78;

  try {
    for (_iterator78.s(); !(_step78 = _iterator78.n()).done;) {
      var child = _step78.value;
      parseLegend(child);

      var _iterator80 = _createForOfIteratorHelper(keys(child.component.legends)),
          _step80;

      try {
        for (_iterator80.s(); !(_step80 = _iterator80.n()).done;) {
          var channel = _step80.value;
          resolve.legend[channel] = parseGuideResolve(model.component.resolve, channel);

          if (resolve.legend[channel] === 'shared') {
            // If the resolve says shared (and has not been overridden)
            // We will try to merge and see if there is a conflict
            legends[channel] = mergeLegendComponent(legends[channel], child.component.legends[channel]);

            if (!legends[channel]) {
              // If merge returns nothing, there is a conflict so we cannot make the legend shared.
              // Thus, mark legend as independent and remove the legend component.
              resolve.legend[channel] = 'independent';
              delete legends[channel];
            }
          }
        }
      } catch (err) {
        _iterator80.e(err);
      } finally {
        _iterator80.f();
      }
    }
  } catch (err) {
    _iterator78.e(err);
  } finally {
    _iterator78.f();
  }

  // Second pass: remove child legends that were merged into a shared one.
  var _iterator79 = _createForOfIteratorHelper(keys(legends)),
      _step79;

  try {
    for (_iterator79.s(); !(_step79 = _iterator79.n()).done;) {
      var _channel = _step79.value;

      var _iterator81 = _createForOfIteratorHelper(model.children),
          _step81;

      try {
        for (_iterator81.s(); !(_step81 = _iterator81.n()).done;) {
          var _child = _step81.value;

          if (!_child.component.legends[_channel]) {
            // skip if the child does not have a particular legend
            continue;
          }

          if (resolve.legend[_channel] === 'shared') {
            // After merging shared legend, make sure to remove legend from child
            delete _child.component.legends[_channel];
          }
        }
      } catch (err) {
        _iterator81.e(err);
      } finally {
        _iterator81.f();
      }
    }
  } catch (err) {
    _iterator79.e(err);
  } finally {
    _iterator79.f();
  }

  return legends;
}
|
|
|
|
/**
 * Merge a child legend component into the accumulated shared legend.
 * Returns the merged component, or undefined when the two cannot be merged
 * (explicitly conflicting orients). When symbol and gradient legend types
 * collide, the merged legend becomes a symbol legend and gradient-specific
 * encode blocks are dropped.
 */
function mergeLegendComponent(mergedLegend, childLegend) {
  var _a, _b, _c, _d;

  if (!mergedLegend) {
    // First legend seen for this channel -- start from a copy of it.
    return childLegend.clone();
  }

  var mergedOrient = mergedLegend.getWithExplicit('orient');
  var childOrient = childLegend.getWithExplicit('orient');

  if (mergedOrient.explicit && childOrient.explicit && mergedOrient.value !== childOrient.value) {
    // TODO: throw warning if resolve is explicit (We don't have info about explicit/implicit resolve yet.)
    // Cannot merge due to inconsistent orient
    return undefined;
  }

  var typeMerged = false; // Otherwise, let's merge

  var _iterator82 = _createForOfIteratorHelper(LEGEND_COMPONENT_PROPERTIES),
      _step82;

  try {
    var _loop7 = function _loop7() {
      var prop = _step82.value;
      // Merge each property, with property-specific tie breakers for
      // symbolType, title, and type.
      var mergedValueWithExplicit = mergeValuesWithExplicit(mergedLegend.getWithExplicit(prop), childLegend.getWithExplicit(prop), prop, 'legend', // Tie breaker function
      function (v1, v2) {
        switch (prop) {
          case 'symbolType':
            return mergeSymbolType(v1, v2);

          case 'title':
            return mergeTitleComponent(v1, v2);

          case 'type':
            // There are only two types. If we have different types, then prefer symbol over gradient.
            typeMerged = true;
            return makeImplicit('symbol');
        }

        return defaultTieBreaker(v1, v2, prop, 'legend');
      });
      mergedLegend.setWithExplicit(prop, mergedValueWithExplicit);
    };

    for (_iterator82.s(); !(_step82 = _iterator82.n()).done;) {
      _loop7();
    }
  } catch (err) {
    _iterator82.e(err);
  } finally {
    _iterator82.f();
  }

  if (typeMerged) {
    // The merged legend is now a symbol legend; gradient-specific encode
    // blocks no longer apply and must be removed from both sides.
    if ((_b = (_a = mergedLegend.implicit) === null || _a === void 0 ? void 0 : _a.encode) === null || _b === void 0 ? void 0 : _b.gradient) {
      deleteNestedProperty(mergedLegend.implicit, ['encode', 'gradient']);
    }

    if ((_d = (_c = mergedLegend.explicit) === null || _c === void 0 ? void 0 : _c.encode) === null || _d === void 0 ? void 0 : _d.gradient) {
      deleteNestedProperty(mergedLegend.explicit, ['encode', 'gradient']);
    }
  }

  return mergedLegend;
}
|
|
|
|
// Tie-breaker for merged legends' symbolType: prefer "circle" over any
// other value (e.g. "stroke"); otherwise keep the first component's value.
function mergeSymbolType(st1, st2) {
  return st2.value === 'circle' ? st2 : st1;
}
|
|
|
|
// Ensure legend.encode[part].update exists (creating each intermediate
// object only when nullish), then set update[vgProp] = vgRef.
function setLegendEncode(legend, part, vgProp, vgRef) {
  if (legend.encode == null) {
    legend.encode = {};
  }

  if (legend.encode[part] == null) {
    legend.encode[part] = {};
  }

  if (legend.encode[part].update == null) {
    legend.encode[part].update = {};
  }

  legend.encode[part].update[vgProp] = vgRef;
}
|
|
|
|
/**
 * Assemble all Vega legend specs for a model. Legend components whose
 * scales share the same domain are bucketed together and merged where
 * possible so that, e.g., color and size legends over identical domains
 * collapse into a single legend.
 */
function _assembleLegends(model) {
  var legendComponentIndex = model.component.legends;
  var legendByDomain = {};

  var _iterator83 = _createForOfIteratorHelper(keys(legendComponentIndex)),
      _step83;

  try {
    for (_iterator83.s(); !(_step83 = _iterator83.n()).done;) {
      var channel = _step83.value;
      var scaleComponent = model.getScaleComponent(channel);
      // Hash the scale's domains so legends over equal domains share a bucket.
      var domainHash = stringify(scaleComponent.get('domains'));

      if (legendByDomain[domainHash]) {
        var _iterator84 = _createForOfIteratorHelper(legendByDomain[domainHash]),
            _step84;

        try {
          for (_iterator84.s(); !(_step84 = _iterator84.n()).done;) {
            var mergedLegendComponent = _step84.value;
            // Merge mutates mergedLegendComponent in place on success.
            var merged = mergeLegendComponent(mergedLegendComponent, legendComponentIndex[channel]);

            if (!merged) {
              // If cannot merge, need to add this legend separately
              legendByDomain[domainHash].push(legendComponentIndex[channel]);
            }
          }
        } catch (err) {
          _iterator84.e(err);
        } finally {
          _iterator84.f();
        }
      } else {
        legendByDomain[domainHash] = [legendComponentIndex[channel].clone()];
      }
    }
  } catch (err) {
    _iterator83.e(err);
  } finally {
    _iterator83.f();
  }

  // Flatten the buckets and drop legends that assemble to nothing.
  var legends = vals(legendByDomain).flat().map(function (l) {
    return assembleLegend(l, model.config);
  }).filter(function (l) {
    return l !== undefined;
  });
  return legends;
}
|
|
|
|
/**
 * Turn a LegendComponent into the final Vega legend spec, or undefined when
 * the legend is disabled. Post-processes the encode blocks (transparent
 * symbol stroke, labelExpr wiring) and moves signal-valued properties into
 * the appropriate encode entry, since Vega legends do not accept signals
 * for those properties directly.
 */
function assembleLegend(legendCmpt, config) {
  var _a, _b, _c;

  // Split off the internal-only properties; everything else becomes the spec.
  var _d = legendCmpt.combine(),
      disable = _d.disable,
      labelExpr = _d.labelExpr,
      selections = _d.selections,
      legend = __rest(_d, ["disable", "labelExpr", "selections"]);

  if (disable) {
    return undefined;
  }

  if (config.aria === false && legend.aria == undefined) {
    legend.aria = false;
  }

  if ((_a = legend.encode) === null || _a === void 0 ? void 0 : _a.symbols) {
    var out = legend.encode.symbols.update;

    if (out.fill && out.fill['value'] !== 'transparent' && !out.stroke && !legend.stroke) {
      // For non color channel's legend, we need to override symbol stroke config from Vega config if stroke channel is not used.
      out.stroke = {
        value: 'transparent'
      };
    } // Remove properties that the legend is encoding.

    var _iterator85 = _createForOfIteratorHelper(LEGEND_SCALE_CHANNELS),
        _step85;

    try {
      for (_iterator85.s(); !(_step85 = _iterator85.n()).done;) {
        var property = _step85.value;

        if (legend[property]) {
          delete out[property];
        }
      }
    } catch (err) {
      _iterator85.e(err);
    } finally {
      _iterator85.f();
    }
  }

  if (!legend.title) {
    // title schema doesn't include null, ''
    delete legend.title;
  }

  if (labelExpr !== undefined) {
    var expr = labelExpr;

    // If the labels text is already a signal, substitute it for datum.label
    // so the user's labelExpr composes with the computed text.
    if (((_c = (_b = legend.encode) === null || _b === void 0 ? void 0 : _b.labels) === null || _c === void 0 ? void 0 : _c.update) && isSignalRef(legend.encode.labels.update.text)) {
      expr = replaceAll(labelExpr, 'datum.label', legend.encode.labels.update.text.signal);
    }

    setLegendEncode(legend, 'labels', 'text', {
      signal: expr
    });
  }

  // Signal-valued legend properties must be expressed through encode blocks.
  for (var prop in legend) {
    var propValue = legend[prop];

    if (isSignalRef(propValue)) {
      var propIndex = SIGNAL_LEGEND_PROP_INDEX[prop];

      if (propIndex) {
        var vgProp = propIndex.vgProp,
            part = propIndex.part;
        setLegendEncode(legend, part, vgProp, propValue);
        delete legend[prop];
      }
    }
  }

  return legend;
}
|
|
|
|
// Assemble Vega projection specs for a model: layer/concat models collect
// from their children as well; other models assemble only their own.
function _assembleProjections(model) {
  var isComposite = isLayerModel(model) || isConcatModel(model);
  return isComposite ? assembleProjectionsForModelAndChildren(model) : assembleProjectionForModel(model);
}
|
|
|
|
// Concatenate this model's own projections with those assembled by each
// of its children, in child order.
function assembleProjectionsForModelAndChildren(model) {
  var own = assembleProjectionForModel(model);
  return model.children.reduce(function (acc, child) {
    return acc.concat(child.assembleProjections());
  }, own);
}
|
|
|
|
/**
 * Assemble the Vega projection spec for one model. Returns [] when the
 * model has no projection of its own (none, or merged into a parent).
 * Custom projections (no fit data) are centered on the plot; otherwise the
 * projection is sized to the plot and fit to the collected geo data sources.
 */
function assembleProjectionForModel(model) {
  var component = model.component.projection;

  if (!component || component.merged) {
    return [];
  }

  var projection = component.combine();
  var name = projection.name; // we need to extract name so that it is always present in the output and pass TS type validation

  if (!component.data) {
    // generate custom projection, no automatic fitting
    return [Object.assign(Object.assign({
      name: name
    }, {
      translate: {
        signal: '[width / 2, height / 2]'
      }
    }), projection)];
  } else {
    // generate projection that uses extent fitting
    var size = {
      signal: "[".concat(component.size.map(function (ref) {
        return ref.signal;
      }).join(', '), "]")
    };
    // Deduplicate the data expressions the projection should fit to.
    var fits = component.data.reduce(function (sources, data) {
      var source = isSignalRef(data) ? data.signal : "data('".concat(model.lookupDataSource(data), "')");

      if (!contains(sources, source)) {
        // build a unique list of sources
        sources.push(source);
      }

      return sources;
    }, []);

    if (fits.length <= 0) {
      throw new Error("Projection's fit didn't find any data sources");
    }

    return [Object.assign({
      name: name,
      size: size,
      fit: {
        signal: fits.length > 1 ? "[".concat(fits.join(', '), "]") : fits[0]
      }
    }, projection)];
  }
}
|
|
|
|
// Projection parameters compared when deciding whether two unit-level
// projections can be merged into one (see mergeIfNoConflict below).
var PROJECTION_PROPERTIES = ['type', 'clipAngle', 'clipExtent', 'center', 'rotate', 'precision', 'reflectX', 'reflectY', 'coefficient', 'distance', 'fraction', 'lobes', 'parallel', 'radius', 'ratio', 'spacing', 'tilt'];
|
|
|
|
/**
 * Split-based component holding a unit model's projection: the explicit
 * user-specified projection properties, plus the implicit generated name,
 * the size signals, and the data references used for extent fitting.
 * (Transpiled ES5 class; extends Split.)
 */
var ProjectionComponent = /*#__PURE__*/function (_Split3) {
  _inherits(ProjectionComponent, _Split3);

  var _super8 = _createSuper(ProjectionComponent);

  function ProjectionComponent(name, specifiedProjection, size, data) {
    var _this12;

    _classCallCheck(this, ProjectionComponent);

    _this12 = _super8.call(this, Object.assign({}, specifiedProjection), // all explicit properties of projection
    {
      name: name
    } // name as initial implicit property
    );
    _this12.specifiedProjection = specifiedProjection;
    _this12.size = size;
    _this12.data = data;
    // Set to true when this projection is merged into a parent's projection,
    // so assembly skips it (see assembleProjectionForModel).
    _this12.merged = false;
    return _this12;
  }
  /**
   * Whether the projection parameters should fit provided data.
   */

  _createClass(ProjectionComponent, [{
    key: "isFit",
    get: function get() {
      return !!this.data;
    }
  }]);

  return ProjectionComponent;
}(Split);
|
|
|
|
// Parse the projection component for a model: unit models get their own;
// composite models attempt to merge their children's projections.
function _parseProjection(model) {
  var component = isUnitModel(model) ? parseUnitProjection(model) : parseNonUnitProjections(model);
  model.component.projection = component;
}
|
|
|
|
/**
 * Build the ProjectionComponent for a unit model, or undefined when the
 * model has no projection. When neither scale nor translate is given
 * explicitly, the projection is sized to the plot and fit to the model's
 * geo data; explicit scale/translate disables automatic fitting.
 */
function parseUnitProjection(model) {
  var _a;

  if (!model.hasProjection) {
    return undefined;
  }

  var proj = model.specifiedProjection;
  var fit = !(proj && (proj.scale != null || proj.translate != null));
  var size = fit ? [model.getSizeSignalRef('width'), model.getSizeSignalRef('height')] : undefined;
  var data = fit ? gatherFitData(model) : undefined;
  // Config-level projection properties, overridden by the explicit spec.
  var projProps = Object.assign(Object.assign({}, (_a = model.config.projection) !== null && _a !== void 0 ? _a : {}), proj !== null && proj !== void 0 ? proj : {});
  return new ProjectionComponent(model.projectionName(true), projProps, size, data);
}
|
|
|
|
/**
 * Collect the data references a projection should fit to: one geojson
 * signal per used longitude/latitude channel pair, one for a geojson-typed
 * shape channel, or -- when nothing geo-specific is encoded -- the model's
 * main data source.
 */
function gatherFitData(model) {
  var data = [];
  var encoding = model.encoding;
  var channelPairs = [[LONGITUDE, LATITUDE], [LONGITUDE2, LATITUDE2]];

  channelPairs.forEach(function (pair) {
    if (getFieldOrDatumDef(encoding[pair[0]]) || getFieldOrDatumDef(encoding[pair[1]])) {
      data.push({
        signal: model.getName("geojson_".concat(data.length))
      });
    }
  });

  if (model.channelHasField(SHAPE) && model.typedFieldDef(SHAPE).type === GEOJSON) {
    data.push({
      signal: model.getName("geojson_".concat(data.length))
    });
  }

  if (data.length === 0) {
    // main source is geojson, so we can just use that
    data.push(model.requestDataName(DataSourceType.Main));
  }

  return data;
}
|
|
|
|
/**
 * Try to merge two sibling projection components. Merging succeeds when
 * their sizes match and either all projection properties agree or one of
 * the two has no explicit properties at all; otherwise returns null so
 * each unit keeps its own projection.
 */
function mergeIfNoConflict(first, second) {
  var allPropertiesShared = every(PROJECTION_PROPERTIES, function (prop) {
    // neither has the property
    if (!hasOwnProperty(first.explicit, prop) && !hasOwnProperty(second.explicit, prop)) {
      return true;
    } // both have property and an equal value for property

    if (hasOwnProperty(first.explicit, prop) && hasOwnProperty(second.explicit, prop) && // some properties might be signals or objects and require hashing for comparison
    stringify(first.get(prop)) === stringify(second.get(prop))) {
      return true;
    }

    return false;
  });
  var size = stringify(first.size) === stringify(second.size);

  if (size) {
    if (allPropertiesShared) {
      return first;
    } else if (stringify(first.explicit) === stringify({})) {
      // first has no explicit properties; second wins.
      return second;
    } else if (stringify(second.explicit) === stringify({})) {
      // second has no explicit properties; first wins.
      return first;
    }
  } // if all properties don't match, let each unit spec have its own projection

  return null;
}
|
|
|
|
/**
 * Parse projections for a composite model: parse every child first, then
 * -- when all children's projections are mutually mergeable -- hoist a
 * single projection to this level, union its fit data, rename the
 * children's projections to it and mark them merged. Returns undefined
 * when there are no children or when merging is not possible.
 */
function parseNonUnitProjections(model) {
  if (model.children.length === 0) {
    return undefined;
  }

  var nonUnitProjection; // parse all children first

  var _iterator86 = _createForOfIteratorHelper(model.children),
      _step86;

  try {
    for (_iterator86.s(); !(_step86 = _iterator86.n()).done;) {
      var _child2 = _step86.value;

      _parseProjection(_child2);
    } // analyze parsed projections, attempt to merge

  } catch (err) {
    _iterator86.e(err);
  } finally {
    _iterator86.f();
  }

  var mergable = every(model.children, function (child) {
    var projection = child.component.projection;

    if (!projection) {
      // child layer does not use a projection
      return true;
    } else if (!nonUnitProjection) {
      // cached 'projection' is null, cache this one
      nonUnitProjection = projection;
      return true;
    } else {
      var merge = mergeIfNoConflict(nonUnitProjection, projection);

      if (merge) {
        nonUnitProjection = merge;
      }

      return !!merge;
    }
  }); // if cached one and all other children share the same projection,

  if (nonUnitProjection && mergable) {
    // so we can elevate it to the layer level
    var name = model.projectionName(true);
    var modelProjection = new ProjectionComponent(name, nonUnitProjection.specifiedProjection, nonUnitProjection.size, duplicate(nonUnitProjection.data)); // rename and assign all others as merged

    var _iterator87 = _createForOfIteratorHelper(model.children),
        _step87;

    try {
      for (_iterator87.s(); !(_step87 = _iterator87.n()).done;) {
        var child = _step87.value;
        var projection = child.component.projection;

        if (projection) {
          if (projection.isFit) {
            var _modelProjection$data;

            // Union this child's fit data into the hoisted projection.
            (_modelProjection$data = modelProjection.data).push.apply(_modelProjection$data, _toConsumableArray(child.component.projection.data));
          }

          child.renameProjection(projection.get('name'), name);
          projection.merged = true;
        }
      }
    } catch (err) {
      _iterator87.e(err);
    } finally {
      _iterator87.f();
    }

    return modelProjection;
  }

  return undefined;
}
|
|
|
|
/**
 * Build the formula transform parts ({formulaAs, formula}) that produce the
 * human-readable "start - end" range label for a binned field, or an empty
 * object when the channel does not require a bin range.
 */
function rangeFormula(model, fieldDef, channel, config) {
  if (!binRequiresRange(fieldDef, channel)) {
    return {};
  }

  // read format from axis or legend, if there is no format then use config.numberFormat
  var guide = {};

  if (isUnitModel(model)) {
    var axisGuide = model.axis(channel);

    if (axisGuide !== null && axisGuide !== void 0) {
      guide = axisGuide;
    } else {
      var legendGuide = model.legend(channel);

      if (legendGuide !== null && legendGuide !== void 0) {
        guide = legendGuide;
      }
    }
  }

  var startField = _vgField(fieldDef, {
    expr: 'datum'
  });

  var endField = _vgField(fieldDef, {
    expr: 'datum',
    binSuffix: 'end'
  });

  return {
    formulaAs: _vgField(fieldDef, {
      binSuffix: 'range',
      forAs: true
    }),
    formula: binFormatExpression(startField, endField, guide.format, guide.formatType, config)
  };
}
|
|
|
|
/**
 * Key used to dedupe bin components: "<binString>_<field>".
 */
function binKey(bin, field) {
  return binToString(bin) + "_" + field;
}
|
|
|
|
/**
 * Derive the pair of model-scoped signal names for a bin key:
 * "<key>_bins" and "<key>_extent".
 */
function getSignalsFromModel(model, key) {
  var binsSignal = model.getName(key + "_bins");
  var extentSignalName = model.getName(key + "_extent");

  return {
    signal: binsSignal,
    extentSignal: extentSignalName
  };
}
|
|
|
|
/**
 * Resolve the "<key>_bins" signal name for a field's bin definition,
 * normalizing the bin parameters first so equivalent bins map to one key.
 */
function getBinSignalName(model, field, bin) {
  var normalized = normalizeBin(bin, undefined);

  if (normalized === null || normalized === void 0) {
    normalized = {};
  }

  return model.getName(binKey(normalized, field) + "_bins");
}
|
|
|
|
/**
 * A BinTransform carries an explicit output name via `as`; field definitions
 * passed to createBinComponent never do.
 */
function isBinTransform(t) {
  return Reflect.has(t, 'as');
}
|
|
|
|
/**
 * Build a bin component (and its dedupe key) from either a BinTransform or a
 * binned field definition `t`, with bin parameters `bin` and the owning model.
 * Returns {key, binComponent}.
 */
function createBinComponent(t, bin, model) {
  var as;
  var span;

  if (isBinTransform(t)) {
    // explicit output names from the transform: "x" => ["x", "x_end"]
    as = isString(t.as) ? [t.as, "".concat(t.as, "_end")] : [t.as[0], t.as[1]];
  } else {
    // derive start/end output names from the field definition
    as = [_vgField(t, {
      forAs: true
    }), _vgField(t, {
      binSuffix: 'end',
      forAs: true
    })];
  }

  var normalizedBin = Object.assign({}, normalizeBin(bin, undefined));
  var key = binKey(normalizedBin, t.field);

  var _getSignalsFromModel = getSignalsFromModel(model, key),
      signal = _getSignalsFromModel.signal,
      extentSignal = _getSignalsFromModel.extentSignal;

  if (isSelectionExtent(normalizedBin.extent)) {
    // bin extent driven by a selection: resolve to a span reference and drop
    // the raw extent from the normalized bin
    var ext = normalizedBin.extent;
    var selName = ext.selection;
    span = parseSelectionBinExtent(model.getSelectionComponent(varName(selName), selName), ext);
    delete normalizedBin.extent; // Vega-Lite selection extent map to Vega's span property.
  }

  // `as` is stored as an array of [start, end] pairs; merging nodes may
  // append further pairs later (see BinNode#merge)
  var binComponent = Object.assign(Object.assign(Object.assign({
    bin: normalizedBin,
    field: t.field,
    as: [as]
  }, signal ? {
    signal: signal
  } : {}), extentSignal ? {
    extentSignal: extentSignal
  } : {}), span ? {
    span: span
  } : {});
  return {
    key: key,
    binComponent: binComponent
  };
}
|
|
|
|
/**
 * Dataflow node that assembles Vega `bin` transforms (plus supporting
 * `extent` and `formula` transforms) from an index of bin components keyed
 * by binKey.
 */
var BinNode = /*#__PURE__*/function (_DataFlowNode5) {
  _inherits(BinNode, _DataFlowNode5);

  var _super9 = _createSuper(BinNode);

  // `bins` is a dictionary: binKey -> bin component.
  function BinNode(parent, bins) {
    var _this13;

    _classCallCheck(this, BinNode);

    _this13 = _super9.call(this, parent);
    _this13.bins = bins;
    return _this13;
  }

  _createClass(BinNode, [{
    key: "clone",
    value: function clone() {
      // deep-copy the bin index so the clone can be mutated independently
      return new BinNode(null, duplicate(this.bins));
    }
  }, {
    key: "merge",

    /**
     * Merge bin nodes. This method either integrates the bin config from the other node
     * or if this node already has a bin config, renames the corresponding signal in the model.
     */
    value: function merge(other, renameSignal) {
      var _iterator88 = _createForOfIteratorHelper(keys(other.bins)),
          _step88;

      try {
        for (_iterator88.s(); !(_step88 = _iterator88.n()).done;) {
          var key = _step88.value;

          if (key in this.bins) {
            renameSignal(other.bins[key].signal, this.bins[key].signal); // Ensure that we don't have duplicate names for signal pairs

            this.bins[key].as = unique([].concat(_toConsumableArray(this.bins[key].as), _toConsumableArray(other.bins[key].as)), _hash);
          } else {
            // this node has no component for the key yet: adopt the other's
            this.bins[key] = other.bins[key];
          }
        }
      } catch (err) {
        _iterator88.e(err);
      } finally {
        _iterator88.f();
      }

      // re-parent all of the other node's children onto this node
      var _iterator89 = _createForOfIteratorHelper(other.children),
          _step89;

      try {
        for (_iterator89.s(); !(_step89 = _iterator89.n()).done;) {
          var child = _step89.value;
          other.removeChild(child);
          child.parent = this;
        }
      } catch (err) {
        _iterator89.e(err);
      } finally {
        _iterator89.f();
      }

      // detach the now-empty node from the dataflow
      other.remove();
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      // flatten the per-component arrays of [start, end] name pairs
      return new Set(vals(this.bins).map(function (c) {
        return c.as;
      }).flat(2));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      return new Set(vals(this.bins).map(function (c) {
        return c.field;
      }));
    }
  }, {
    key: "hash",
    value: function hash() {
      return "Bin ".concat(_hash(this.bins));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Emit, per bin component: an optional `extent` transform, the `bin`
      // transform itself, `formula` copies for any extra alias pairs, and an
      // optional range-label `formula`.
      return vals(this.bins).flatMap(function (bin) {
        var transform = [];

        // first alias pair drives the bin transform; extra pairs get formulas
        var _bin$as = _toArray(bin.as),
            binAs = _bin$as[0],
            remainingAs = _bin$as.slice(1);

        var _a = bin.bin,
            extent = _a.extent,
            params = __rest(_a, ["extent"]);

        var binTrans = Object.assign(Object.assign(Object.assign({
          type: 'bin',
          field: replacePathInField(bin.field),
          as: binAs,
          signal: bin.signal
        }, !isSelectionExtent(extent) ? {
          extent: extent
        } : {
          extent: null
        }), bin.span ? {
          span: {
            signal: "span(".concat(bin.span, ")")
          }
        } : {}), params);

        if (!extent && bin.extentSignal) {
          // no explicit extent: derive it from the data via an extent transform
          transform.push({
            type: 'extent',
            field: replacePathInField(bin.field),
            signal: bin.extentSignal
          });
          binTrans.extent = {
            signal: bin.extentSignal
          };
        }

        transform.push(binTrans);

        var _iterator90 = _createForOfIteratorHelper(remainingAs),
            _step90;

        try {
          for (_iterator90.s(); !(_step90 = _iterator90.n()).done;) {
            var as = _step90.value;

            // copy bin start (i=0) and end (i=1) into each additional alias
            for (var i = 0; i < 2; i++) {
              transform.push({
                type: 'formula',
                expr: _vgField({
                  field: binAs[i]
                }, {
                  expr: 'datum'
                }),
                as: as[i]
              });
            }
          }
        } catch (err) {
          _iterator90.e(err);
        } finally {
          _iterator90.f();
        }

        if (bin.formula) {
          // range-label formula produced by rangeFormula()
          transform.push({
            type: 'formula',
            expr: bin.formula,
            as: bin.formulaAs
          });
        }

        return transform;
      });
    }
  }], [{
    key: "makeFromEncoding",
    // Collect bin components from all binned field defs of the model's
    // encoding; returns null when there is nothing to bin.
    value: function makeFromEncoding(parent, model) {
      var bins = model.reduceFieldDef(function (binComponentIndex, fieldDef, channel) {
        if (isTypedFieldDef(fieldDef) && isBinning(fieldDef.bin)) {
          var _createBinComponent = createBinComponent(fieldDef, fieldDef.bin, model),
              key = _createBinComponent.key,
              binComponent = _createBinComponent.binComponent;

          // existing entries win over the fresh component; range formula
          // parts are layered on top
          binComponentIndex[key] = Object.assign(Object.assign(Object.assign({}, binComponent), binComponentIndex[key]), rangeFormula(model, fieldDef, channel, model.config));
        }

        return binComponentIndex;
      }, {});

      if (isEmpty(bins)) {
        return null;
      }

      return new BinNode(parent, bins);
    }
    /**
     * Creates a bin node from BinTransform.
     * The optional parameter should provide
     */

  }, {
    key: "makeFromTransform",
    value: function makeFromTransform(parent, t, model) {
      var _createBinComponent2 = createBinComponent(t, t.bin, model),
          key = _createBinComponent2.key,
          binComponent = _createBinComponent2.binComponent;

      return new BinNode(parent, _defineProperty({}, key, binComponent));
    }
  }]);

  return BinNode;
}(DataFlowNode);
|
|
|
|
/**
 * Root dataflow node representing a Vega data source (inline values, URL,
 * named dataset, or generator). Source nodes never have a parent and cannot
 * be removed.
 */
var SourceNode = /*#__PURE__*/function (_DataFlowNode6) {
  _inherits(SourceNode, _DataFlowNode6);

  var _super10 = _createSuper(SourceNode);

  function SourceNode(data) {
    var _this14;

    _classCallCheck(this, SourceNode);

    _this14 = _super10.call(this, null); // source cannot have parent

    // default to a named source called 'source' when no data is given
    data = data !== null && data !== void 0 ? data : {
      name: 'source'
    };
    var format;

    if (!isGenerator(data)) {
      // strip 'parse' here; parsing is handled by separate dataflow nodes
      format = data.format ? Object.assign({}, omit(data.format, ['parse'])) : {};
    }

    if (isInlineData(data)) {
      _this14._data = {
        values: data.values
      };
    } else if (isUrlData(data)) {
      _this14._data = {
        url: data.url
      };

      if (!format.type) {
        // Extract extension from URL using snippet from
        // http://stackoverflow.com/questions/680929/how-to-extract-extension-from-filename-string-in-javascript
        var defaultExtension = /(?:\.([^.]+))?$/.exec(data.url)[1];

        if (!contains(['json', 'csv', 'tsv', 'dsv', 'topojson'], defaultExtension)) {
          defaultExtension = 'json';
        } // defaultExtension has type string but we ensure that it is DataFormatType above


        format.type = defaultExtension;
      }
    } else if (isSphereGenerator(data)) {
      // hardwire GeoJSON sphere data into output specification
      _this14._data = {
        values: [{
          type: 'Sphere'
        }]
      };
    } else if (isNamedData(data) || isGenerator(data)) {
      _this14._data = {};
    } // set flag to check if generator


    _this14._generator = isGenerator(data); // any dataset can be named

    if (data.name) {
      _this14._name = data.name;
    }

    if (format && !isEmpty(format)) {
      _this14._data.format = format;
    }

    return _this14;
  }

  _createClass(SourceNode, [{
    key: "dependentFields",
    value: function dependentFields() {
      // a source depends on nothing upstream
      return new Set();
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return undefined; // we don't know what this source produces
    }
  }, {
    key: "hasName",
    value: function hasName() {
      return !!this._name;
    }
  }, {
    key: "remove",
    value: function remove() {
      throw new Error('Source nodes are roots and cannot be removed.');
    }
  }, {
    key: "hash",
    value: function hash() {
      // sources are deduplicated by other means; hashing is unsupported
      throw new Error('Cannot hash sources');
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // emit the Vega data definition; transforms are filled in later
      return Object.assign(Object.assign({
        name: this._name
      }, this._data), {
        transform: []
      });
    }
  }, {
    key: "data",
    get: function get() {
      return this._data;
    }
  }, {
    key: "isGenerator",
    get: function get() {
      return this._generator;
    }
  }, {
    key: "dataName",
    get: function get() {
      return this._name;
    },
    set: function set(name) {
      this._name = name;
    }
  }, {
    key: "parent",
    // disallow re-parenting: a source must stay a dataflow root
    set: function set(parent) {
      throw new Error('Source nodes have to be roots.');
    }
  }]);

  return SourceNode;
}(DataFlowNode);
|
|
|
|
/**
 * Dataflow node that assembles a Vega `graticule` generator transform.
 * `params` is either `true` (use defaults) or a graticule parameter object.
 */
var GraticuleNode = /*#__PURE__*/function (_DataFlowNode7) {
  _inherits(GraticuleNode, _DataFlowNode7);

  var _super11 = _createSuper(GraticuleNode);

  function GraticuleNode(parent, params) {
    var _this15;

    _classCallCheck(this, GraticuleNode);

    _this15 = _super11.call(this, parent);
    _this15.params = params;
    return _this15;
  }

  _createClass(GraticuleNode, [{
    key: "clone",
    value: function clone() {
      // params are shared by reference; the node never mutates them
      return new GraticuleNode(null, this.params);
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      return new Set();
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return undefined; // there should never be a node before graticule
    }
  }, {
    key: "hash",
    value: function hash() {
      return "Graticule ".concat(_hash(this.params));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // `true` means "all defaults": emit a bare graticule transform
      return Object.assign({
        type: 'graticule'
      }, this.params === true ? {} : this.params);
    }
  }]);

  return GraticuleNode;
}(DataFlowNode);
|
|
|
|
/**
 * Dataflow node that assembles a Vega `sequence` generator transform.
 */
var SequenceNode = /*#__PURE__*/function (_DataFlowNode8) {
  _inherits(SequenceNode, _DataFlowNode8);

  var _super12 = _createSuper(SequenceNode);

  function SequenceNode(parent, params) {
    var _this16;

    _classCallCheck(this, SequenceNode);

    _this16 = _super12.call(this, parent);
    _this16.params = params;
    return _this16;
  }

  _createClass(SequenceNode, [{
    key: "clone",
    value: function clone() {
      // params are shared by reference; the node never mutates them
      return new SequenceNode(null, this.params);
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      return new Set();
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      var _a;

      // sequence outputs a single field, named by `as` (default: 'data')
      return new Set([(_a = this.params.as) !== null && _a !== void 0 ? _a : 'data']);
    }
  }, {
    key: "hash",
    value: function hash() {
      // NOTE(review): prefix is "Hash" rather than "Sequence" — looks like a
      // copy-paste oddity, but it is unique among node hashes so dedupe still
      // works; confirm against upstream before changing.
      return "Hash ".concat(_hash(this.params));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      return Object.assign({
        type: 'sequence'
      }, this.params);
    }
  }]);

  return SequenceNode;
}(DataFlowNode);
|
|
/**
 * Whether this dataflow node is the source of the dataflow that produces data i.e. a source or a generator.
 */
function isDataSourceNode(node) {
  if (node instanceof SourceNode) {
    return true;
  }

  // generator nodes are also dataflow roots that produce data
  return node instanceof GraticuleNode || node instanceof SequenceNode;
}
|
|
/**
 * Abstract base class for BottomUpOptimizer and TopDownOptimizer.
 * Contains only mutation handling logic. Subclasses need to implement iteration logic.
 */
var OptimizerBase = /*#__PURE__*/function () {
  function OptimizerBase() {
    _classCallCheck(this, OptimizerBase);

    // whether this optimizer pass has changed the dataflow graph
    this._mutated = false;
  } // Once true, _mutated is never set to false


  _createClass(OptimizerBase, [{
    key: "setMutated",
    value: function setMutated() {
      this._mutated = true;
    }
  }, {
    key: "mutatedFlag",
    get: function get() {
      return this._mutated;
    }
  }]);

  return OptimizerBase;
}();
|
|
/**
 * Starts from a node and runs the optimization function(the "run" method) upwards to the root,
 * depending on the continueFlag and mutatedFlag values returned by the optimization function.
 */
var BottomUpOptimizer = /*#__PURE__*/function (_OptimizerBase) {
  _inherits(BottomUpOptimizer, _OptimizerBase);

  var _super13 = _createSuper(BottomUpOptimizer);

  function BottomUpOptimizer() {
    var _this17;

    _classCallCheck(this, BottomUpOptimizer);

    _this17 = _super13.call(this);
    // whether the upward traversal should continue past the current node
    _this17._continue = false;
    return _this17;
  }

  _createClass(BottomUpOptimizer, [{
    key: "setContinue",
    value: function setContinue() {
      this._continue = true;
    }
  }, {
    key: "reset",

    /**
     * Reset the state of the optimizer after it has completed a run from the bottom of the tree to the top.
     */
    value: function reset() {// do nothing
    }
  }, {
    key: "optimizeNextFromLeaves",
    // Run the optimizer on `node`, then recurse toward the root while the
    // subclass's `run` keeps requesting continuation. Returns whether any
    // mutation happened (mutatedFlag is sticky, so the final read suffices).
    value: function optimizeNextFromLeaves(node) {
      if (isDataSourceNode(node)) {
        // reached a dataflow root; nothing above to optimize
        return false;
      }

      // capture the parent before `run`, which may detach/move `node`
      var next = node.parent;

      var _this$run = this.run(node),
          continueFlag = _this$run.continueFlag;

      if (continueFlag) {
        this.optimizeNextFromLeaves(next);
      }

      return this.mutatedFlag;
    }
  }, {
    key: "continueFlag",
    get: function get() {
      return this._continue;
    }
  }, {
    key: "flags",
    get: function get() {
      return {
        continueFlag: this.continueFlag,
        mutatedFlag: this.mutatedFlag
      };
    },
    // Setting flags only ever raises them; both are one-way latches.
    set: function set(_ref85) {
      var continueFlag = _ref85.continueFlag,
          mutatedFlag = _ref85.mutatedFlag;

      if (continueFlag) {
        this.setContinue();
      }

      if (mutatedFlag) {
        this.setMutated();
      }
    }
  }]);

  return BottomUpOptimizer;
}(OptimizerBase);
|
|
/**
 * The optimizer function( the "run" method), is invoked on the given node and then continues recursively.
 * Marker subclass: adds no behavior beyond OptimizerBase; subclasses supply "run".
 */
var TopDownOptimizer = /*#__PURE__*/function (_OptimizerBase2) {
  _inherits(TopDownOptimizer, _OptimizerBase2);

  var _super14 = _createSuper(TopDownOptimizer);

  function TopDownOptimizer() {
    _classCallCheck(this, TopDownOptimizer);

    return _super14.apply(this, arguments);
  }

  return TopDownOptimizer;
}(OptimizerBase);
|
|
|
|
/**
 * Add the groupby dimension field(s) implied by a non-aggregated field
 * definition on `channel` to the `dims` set; returns the (mutated) set.
 */
function addDimension(dims, channel, fieldDef, model) {
  var channelDef2 = isUnitModel(model) ? model.encoding[getSecondaryRangeChannel(channel)] : undefined;

  if (isTypedFieldDef(fieldDef) && isUnitModel(model) && hasBand(channel, fieldDef, channelDef2, model.stack, model.markDef, model.config)) {
    // banded channel: group by both the field and its "_end" companion
    dims.add(_vgField(fieldDef, {}));
    dims.add(_vgField(fieldDef, {
      suffix: 'end'
    }));

    if (fieldDef.bin && binRequiresRange(fieldDef, channel)) {
      // binned field with a range label: keep the "_range" field too
      dims.add(_vgField(fieldDef, {
        binSuffix: 'range'
      }));
    }
  } else if (isGeoPositionChannel(channel)) {
    // lat/long channels group by the model-scoped position name instead of
    // the raw field
    var posChannel = getPositionChannelFromLatLong(channel);
    dims.add(model.getName(posChannel));
  } else {
    dims.add(_vgField(fieldDef));
  }

  return dims;
}
|
|
|
|
/**
 * Fold every field/op pair of `childMeasures` into `parentMeasures`
 * (mutated in place). For a field+op the parent already has, the alias sets
 * are unioned; otherwise the child's entry is adopted.
 */
function mergeMeasures(parentMeasures, childMeasures) {
  var childFields = keys(childMeasures);

  for (var i = 0; i < childFields.length; i++) {
    var mergedField = childFields[i];
    // when we merge a measure, we either have to add an aggregation operator or even a new field
    var ops = childMeasures[mergedField];
    var opNames = keys(ops);

    for (var j = 0; j < opNames.length; j++) {
      var op = opNames[j];

      if (mergedField in parentMeasures) {
        // add operator to existing measure field: union alias sets,
        // parent aliases first
        var existing = parentMeasures[mergedField][op];
        var union = new Set();

        if (existing !== null && existing !== void 0) {
          existing.forEach(function (alias) {
            union.add(alias);
          });
        }

        ops[op].forEach(function (alias) {
          union.add(alias);
        });
        parentMeasures[mergedField][op] = union;
      } else {
        // new field: adopt the child's alias set for this op
        var entry = {};
        entry[op] = ops[op];
        parentMeasures[mergedField] = entry;
      }
    }
  }
}
|
|
|
|
/**
 * Dataflow node that assembles a Vega `aggregate` transform from a set of
 * groupby dimensions and a measure index (field -> op -> alias set).
 */
var AggregateNode = /*#__PURE__*/function (_DataFlowNode9) {
  _inherits(AggregateNode, _DataFlowNode9);

  var _super15 = _createSuper(AggregateNode);

  /**
   * @param dimensions string set for dimensions
   * @param measures dictionary mapping field name => dict of aggregation functions and names to use
   */
  function AggregateNode(parent, dimensions, measures) {
    var _this18;

    _classCallCheck(this, AggregateNode);

    _this18 = _super15.call(this, parent);
    _this18.dimensions = dimensions;
    _this18.measures = measures;
    return _this18;
  }

  _createClass(AggregateNode, [{
    key: "clone",
    value: function clone() {
      // copy both containers so the clone can be mutated independently
      return new AggregateNode(null, new Set(this.dimensions), duplicate(this.measures));
    }
  }, {
    key: "merge",
    // Merge another aggregate node into this one. Only possible when the
    // groupby dimensions are identical; returns whether the merge happened.
    value: function merge(other) {
      if (setEqual(this.dimensions, other.dimensions)) {
        mergeMeasures(this.measures, other.measures);
        return true;
      } else {
        debug('different dimensions, cannot merge');
        return false;
      }
    }
  }, {
    key: "addDimensions",
    value: function addDimensions(fields) {
      // `add` is invoked with the Set as `this` for each field
      fields.forEach(this.dimensions.add, this.dimensions);
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      // depends on every groupby dimension and every measured field
      return new Set([].concat(_toConsumableArray(this.dimensions), _toConsumableArray(keys(this.measures))));
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      var out = new Set();

      var _iterator93 = _createForOfIteratorHelper(keys(this.measures)),
          _step93;

      try {
        for (_iterator93.s(); !(_step93 = _iterator93.n()).done;) {
          var _field10 = _step93.value;

          var _iterator94 = _createForOfIteratorHelper(keys(this.measures[_field10])),
              _step94;

          try {
            for (_iterator94.s(); !(_step94 = _iterator94.n()).done;) {
              var op = _step94.value;
              var m = this.measures[_field10][op];

              if (m.size === 0) {
                // no explicit alias recorded: fall back to "<op>_<field>"
                out.add("".concat(op, "_").concat(_field10));
              } else {
                m.forEach(out.add, out);
              }
            }
          } catch (err) {
            _iterator94.e(err);
          } finally {
            _iterator94.f();
          }
        }
      } catch (err) {
        _iterator93.e(err);
      } finally {
        _iterator93.f();
      }

      return out;
    }
  }, {
    key: "hash",
    value: function hash() {
      return "Aggregate ".concat(_hash({
        dimensions: this.dimensions,
        measures: this.measures
      }));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // build the parallel ops/fields/as arrays of the Vega aggregate transform
      var ops = [];
      var fields = [];
      var as = [];

      var _iterator95 = _createForOfIteratorHelper(keys(this.measures)),
          _step95;

      try {
        for (_iterator95.s(); !(_step95 = _iterator95.n()).done;) {
          var _field11 = _step95.value;

          var _iterator96 = _createForOfIteratorHelper(keys(this.measures[_field11])),
              _step96;

          try {
            for (_iterator96.s(); !(_step96 = _iterator96.n()).done;) {
              var op = _step96.value;

              var _iterator97 = _createForOfIteratorHelper(this.measures[_field11][op]),
                  _step97;

              try {
                for (_iterator97.s(); !(_step97 = _iterator97.n()).done;) {
                  var alias = _step97.value;
                  as.push(alias);
                  ops.push(op);
                  // '*' is the count pseudo-field; Vega expects null there
                  fields.push(_field11 === '*' ? null : replacePathInField(_field11));
                }
              } catch (err) {
                _iterator97.e(err);
              } finally {
                _iterator97.f();
              }
            }
          } catch (err) {
            _iterator96.e(err);
          } finally {
            _iterator96.f();
          }
        }
      } catch (err) {
        _iterator95.e(err);
      } finally {
        _iterator95.f();
      }

      var result = {
        type: 'aggregate',
        groupby: _toConsumableArray(this.dimensions).map(replacePathInField),
        ops: ops,
        fields: fields,
        as: as
      };
      return result;
    }
  }, {
    key: "groupBy",
    get: function get() {
      return this.dimensions;
    }
  }], [{
    key: "makeFromEncoding",
    // Build an AggregateNode from the model's encoding; returns null when no
    // field def aggregates (or nothing contributes dimensions/measures).
    value: function makeFromEncoding(parent, model) {
      var isAggregate = false;
      model.forEachFieldDef(function (fd) {
        if (fd.aggregate) {
          isAggregate = true;
        }
      });
      var meas = {};
      var dims = new Set();

      if (!isAggregate) {
        // no need to create this node if the model has no aggregation
        return null;
      }

      model.forEachFieldDef(function (fieldDef, channel) {
        var _a, _b, _c, _d;

        var aggregate = fieldDef.aggregate,
            field = fieldDef.field;

        if (aggregate) {
          if (aggregate === 'count') {
            // count is field-agnostic: record it under the '*' pseudo-field
            meas['*'] = (_a = meas['*']) !== null && _a !== void 0 ? _a : {};
            meas['*']['count'] = new Set([_vgField(fieldDef, {
              forAs: true
            })]);
          } else {
            if (isArgminDef(aggregate) || isArgmaxDef(aggregate)) {
              // argmin/argmax aggregate over the referenced arg field
              var op = isArgminDef(aggregate) ? 'argmin' : 'argmax';
              var argField = aggregate[op];
              meas[argField] = (_b = meas[argField]) !== null && _b !== void 0 ? _b : {};
              meas[argField][op] = new Set([_vgField({
                op: op,
                field: argField
              }, {
                forAs: true
              })]);
            } else {
              meas[field] = (_c = meas[field]) !== null && _c !== void 0 ? _c : {};
              meas[field][aggregate] = new Set([_vgField(fieldDef, {
                forAs: true
              })]);
            } // For scale channel with domain === 'unaggregated', add min/max so we can use their union as unaggregated domain


            if (isScaleChannel(channel) && model.scaleDomain(channel) === 'unaggregated') {
              meas[field] = (_d = meas[field]) !== null && _d !== void 0 ? _d : {};
              meas[field]['min'] = new Set([_vgField({
                field: field,
                aggregate: 'min'
              }, {
                forAs: true
              })]);
              meas[field]['max'] = new Set([_vgField({
                field: field,
                aggregate: 'max'
              }, {
                forAs: true
              })]);
            }
          }
        } else {
          // non-aggregated field defs become groupby dimensions
          addDimension(dims, channel, fieldDef, model);
        }
      });

      if (dims.size + keys(meas).length === 0) {
        return null;
      }

      return new AggregateNode(parent, dims, meas);
    }
  }, {
    key: "makeFromTransform",
    // Build an AggregateNode from an explicit aggregate transform `t`;
    // returns null when the transform contributes nothing.
    value: function makeFromTransform(parent, t) {
      var _a, _b, _c;

      var dims = new Set();
      var meas = {};

      var _iterator98 = _createForOfIteratorHelper(t.aggregate),
          _step98;

      try {
        for (_iterator98.s(); !(_step98 = _iterator98.n()).done;) {
          var s = _step98.value;
          var op = s.op,
              _field12 = s.field,
              as = s.as;

          if (op) {
            if (op === 'count') {
              meas['*'] = (_a = meas['*']) !== null && _a !== void 0 ? _a : {};
              // prefer the explicit alias; otherwise derive one
              meas['*']['count'] = new Set([as ? as : _vgField(s, {
                forAs: true
              })]);
            } else {
              meas[_field12] = (_b = meas[_field12]) !== null && _b !== void 0 ? _b : {};
              meas[_field12][op] = new Set([as ? as : _vgField(s, {
                forAs: true
              })]);
            }
          }
        }
      } catch (err) {
        _iterator98.e(err);
      } finally {
        _iterator98.f();
      }

      var _iterator99 = _createForOfIteratorHelper((_c = t.groupby) !== null && _c !== void 0 ? _c : []),
          _step99;

      try {
        for (_iterator99.s(); !(_step99 = _iterator99.n()).done;) {
          var _s2 = _step99.value;
          dims.add(_s2);
        }
      } catch (err) {
        _iterator99.e(err);
      } finally {
        _iterator99.f();
      }

      if (dims.size + keys(meas).length === 0) {
        return null;
      }

      return new AggregateNode(parent, dims, meas);
    }
  }]);

  return AggregateNode;
}(DataFlowNode);
|
|
/**
|
|
* A node that helps us track what fields we are faceting by.
|
|
*/
|
|
|
|
|
|
var FacetNode = /*#__PURE__*/function (_DataFlowNode10) {
|
|
_inherits(FacetNode, _DataFlowNode10);
|
|
|
|
var _super16 = _createSuper(FacetNode);
|
|
|
|
/**
|
|
* @param model The facet model.
|
|
* @param name The name that this facet source will have.
|
|
* @param data The source data for this facet data.
|
|
*/
|
|
function FacetNode(parent, model, name, data) {
|
|
var _this19;
|
|
|
|
_classCallCheck(this, FacetNode);
|
|
|
|
_this19 = _super16.call(this, parent);
|
|
_this19.model = model;
|
|
_this19.name = name;
|
|
_this19.data = data;
|
|
|
|
var _iterator100 = _createForOfIteratorHelper(FACET_CHANNELS),
|
|
_step100;
|
|
|
|
try {
|
|
for (_iterator100.s(); !(_step100 = _iterator100.n()).done;) {
|
|
var channel = _step100.value;
|
|
var fieldDef = model.facet[channel];
|
|
|
|
if (fieldDef) {
|
|
var bin = fieldDef.bin,
|
|
sort = fieldDef.sort;
|
|
_this19[channel] = Object.assign({
|
|
name: model.getName("".concat(channel, "_domain")),
|
|
fields: [_vgField(fieldDef)].concat(_toConsumableArray(isBinning(bin) ? [_vgField(fieldDef, {
|
|
binSuffix: 'end'
|
|
})] : []))
|
|
}, isSortField(sort) ? {
|
|
sortField: sort
|
|
} : isArray(sort) ? {
|
|
sortIndexField: sortArrayIndexField(fieldDef, channel)
|
|
} : {});
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator100.e(err);
|
|
} finally {
|
|
_iterator100.f();
|
|
}
|
|
|
|
_this19.childModel = model.child;
|
|
return _this19;
|
|
}
|
|
|
|
_createClass(FacetNode, [{
|
|
key: "hash",
|
|
value: function hash() {
|
|
var out = "Facet";
|
|
|
|
var _iterator101 = _createForOfIteratorHelper(FACET_CHANNELS),
|
|
_step101;
|
|
|
|
try {
|
|
for (_iterator101.s(); !(_step101 = _iterator101.n()).done;) {
|
|
var channel = _step101.value;
|
|
|
|
if (this[channel]) {
|
|
out += " ".concat(channel.charAt(0), ":").concat(_hash(this[channel]));
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator101.e(err);
|
|
} finally {
|
|
_iterator101.f();
|
|
}
|
|
|
|
return out;
|
|
}
|
|
}, {
|
|
key: "dependentFields",
|
|
value: function dependentFields() {
|
|
var depFields = new Set(this.fields);
|
|
|
|
var _iterator102 = _createForOfIteratorHelper(FACET_CHANNELS),
|
|
_step102;
|
|
|
|
try {
|
|
for (_iterator102.s(); !(_step102 = _iterator102.n()).done;) {
|
|
var channel = _step102.value;
|
|
|
|
if (this[channel]) {
|
|
if (this[channel].sortField) {
|
|
depFields.add(this[channel].sortField.field);
|
|
}
|
|
|
|
if (this[channel].sortIndexField) {
|
|
depFields.add(this[channel].sortIndexField);
|
|
}
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator102.e(err);
|
|
} finally {
|
|
_iterator102.f();
|
|
}
|
|
|
|
return depFields;
|
|
}
|
|
}, {
|
|
key: "producedFields",
|
|
value: function producedFields() {
|
|
return new Set(); // facet does not produce any new fields
|
|
}
|
|
/**
|
|
* The name to reference this source is its name.
|
|
*/
|
|
|
|
}, {
|
|
key: "getSource",
|
|
value: function getSource() {
|
|
return this.name;
|
|
}
|
|
}, {
|
|
key: "getChildIndependentFieldsWithStep",
|
|
value: function getChildIndependentFieldsWithStep() {
|
|
var childIndependentFieldsWithStep = {};
|
|
|
|
var _iterator103 = _createForOfIteratorHelper(POSITION_SCALE_CHANNELS),
|
|
_step103;
|
|
|
|
try {
|
|
for (_iterator103.s(); !(_step103 = _iterator103.n()).done;) {
|
|
var channel = _step103.value;
|
|
var childScaleComponent = this.childModel.component.scales[channel];
|
|
|
|
if (childScaleComponent && !childScaleComponent.merged) {
|
|
// independent scale
|
|
var type = childScaleComponent.get('type');
|
|
var range = childScaleComponent.get('range');
|
|
|
|
if (hasDiscreteDomain(type) && isVgRangeStep(range)) {
|
|
var _domain = assembleDomain(this.childModel, channel);
|
|
|
|
var _field13 = getFieldFromDomain(_domain);
|
|
|
|
if (_field13) {
|
|
childIndependentFieldsWithStep[channel] = _field13;
|
|
} else {
|
|
warn(unknownField(channel));
|
|
}
|
|
}
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator103.e(err);
|
|
} finally {
|
|
_iterator103.f();
|
|
}
|
|
|
|
return childIndependentFieldsWithStep;
|
|
}
|
|
}, {
|
|
key: "assembleRowColumnHeaderData",

/**
 * Build the Vega data source backing a row/column facet header. The data is
 * an aggregate over the facet fields; when the child has an independent
 * range-step scale, a distinct count of the step-driving field (or, over a
 * crossed dataset, the max of precomputed distinct counts) is added, along
 * with any sort-field aggregates from the facet definition.
 */
value: function assembleRowColumnHeaderData(channel, crossedDataName, childIndependentFieldsWithStep) {
  // Positional channel in the child model that corresponds to this facet channel.
  var childChannel = {
    row: 'y',
    column: 'x'
  }[channel];
  var fields = [];
  var ops = [];
  var as = [];

  if (childIndependentFieldsWithStep && childIndependentFieldsWithStep[childChannel]) {
    if (crossedDataName) {
      // If there is a crossed data, calculate max
      fields.push("distinct_".concat(childIndependentFieldsWithStep[childChannel]));
      ops.push('max');
    } else {
      // If there is no crossed data, just calculate distinct
      fields.push(childIndependentFieldsWithStep[childChannel]);
      ops.push('distinct');
    } // Although it is technically a max, just name it distinct so it's easier to refer to it


    as.push("distinct_".concat(childIndependentFieldsWithStep[childChannel]));
  }

  var _this$channel = this[channel],
      sortField = _this$channel.sortField,
      sortIndexField = _this$channel.sortIndexField;

  if (sortField) {
    // Aggregate the sort field with its op (falling back to DEFAULT_SORT_OP).
    var _sortField$op = sortField.op,
        op = _sortField$op === void 0 ? DEFAULT_SORT_OP : _sortField$op,
        _field14 = sortField.field;
    fields.push(_field14);
    ops.push(op);
    as.push(_vgField(sortField, {
      forAs: true
    }));
  } else if (sortIndexField) {
    // Precomputed sort index: carry it through with a max aggregate.
    fields.push(sortIndexField);
    ops.push('max');
    as.push(sortIndexField);
  }

  return {
    name: this[channel].name,
    // Use data from the crossed one if it exist
    source: crossedDataName !== null && crossedDataName !== void 0 ? crossedDataName : this.data,
    transform: [Object.assign({
      type: 'aggregate',
      groupby: this[channel].fields
    }, fields.length ? {
      fields: fields,
      ops: ops,
      as: as
    } : {})]
  };
}
|
|
}, {
|
|
key: "assembleFacetHeaderData",

/**
 * Build the data sources that drive wrapped-facet headers. For each header
 * channel (row/column) that has at least one header carrying axes (a shared
 * axis), emit a `sequence` dataset enumerating header positions, sized from
 * the facet cardinality and the configured column count. When any shared
 * axis exists, the aggregated facet header data is prepended as well.
 */
value: function assembleFacetHeaderData(childIndependentFieldsWithStep) {
  var _a, _b;

  var columns = this.model.layout.columns;
  var layoutHeaders = this.model.component.layoutHeaders;
  var data = [];
  // Per header channel: whether some header of that channel carries axes.
  var hasSharedAxis = {};

  var _iterator104 = _createForOfIteratorHelper(HEADER_CHANNELS),
      _step104;

  try {
    for (_iterator104.s(); !(_step104 = _iterator104.n()).done;) {
      var headerChannel = _step104.value;

      var _iterator105 = _createForOfIteratorHelper(HEADER_TYPES),
          _step105;

      try {
        for (_iterator105.s(); !(_step105 = _iterator105.n()).done;) {
          var headerType = _step105.value;
          var headers = (_a = layoutHeaders[headerChannel] && layoutHeaders[headerChannel][headerType]) !== null && _a !== void 0 ? _a : [];

          var _iterator106 = _createForOfIteratorHelper(headers),
              _step106;

          try {
            for (_iterator106.s(); !(_step106 = _iterator106.n()).done;) {
              var header = _step106.value;

              if (((_b = header.axes) === null || _b === void 0 ? void 0 : _b.length) > 0) {
                // One header with axes is enough; stop scanning this channel/type.
                hasSharedAxis[headerChannel] = true;
                break;
              }
            }
          } catch (err) {
            _iterator106.e(err);
          } finally {
            _iterator106.f();
          }
        }
      } catch (err) {
        _iterator105.e(err);
      } finally {
        _iterator105.f();
      }

      if (hasSharedAxis[headerChannel]) {
        // Facet cardinality as a Vega signal expression.
        var cardinality = "length(data(\"".concat(this.facet.name, "\"))");
        // Rows: ceil(cardinality / columns); columns: min(cardinality, columns).
        // Without a column limit, rows collapse to 1 and columns span the full cardinality.
        var stop = headerChannel === 'row' ? columns ? {
          signal: "ceil(".concat(cardinality, " / ").concat(columns, ")")
        } : 1 : columns ? {
          signal: "min(".concat(cardinality, ", ").concat(columns, ")")
        } : {
          signal: cardinality
        };
        data.push({
          name: "".concat(this.facet.name, "_").concat(headerChannel),
          transform: [{
            type: 'sequence',
            start: 0,
            stop: stop
          }]
        });
      }
    }
  } catch (err) {
    _iterator104.e(err);
  } finally {
    _iterator104.f();
  }

  var row = hasSharedAxis.row,
      column = hasSharedAxis.column;

  if (row || column) {
    // Shared axes need the aggregated facet header data first in the list.
    data.unshift(this.assembleRowColumnHeaderData('facet', null, childIndependentFieldsWithStep));
  }

  return data;
}
|
|
}, {
|
|
key: "assemble",

/**
 * Assemble every Vega data source for this facet node: an optional crossed
 * dataset (required when both row and column facets exist and the child has
 * an independent range-step scale on x or y), one header dataset per
 * column/row facet channel, and the wrapped-facet header datasets.
 */
value: function assemble() {
  var _a, _b;

  var data = [];
  var crossedDataName = null;
  var childIndependentFieldsWithStep = this.getChildIndependentFieldsWithStep();
  var column = this.column,
      row = this.row,
      facet = this.facet;

  if (column && row && (childIndependentFieldsWithStep.x || childIndependentFieldsWithStep.y)) {
    // Need to create a cross dataset to correctly calculate cardinality
    crossedDataName = "cross_".concat(this.column.name, "_").concat(this.row.name);
    // Step-driving fields from either axis (each may be absent).
    var fields = [].concat((_a = childIndependentFieldsWithStep.x) !== null && _a !== void 0 ? _a : [], (_b = childIndependentFieldsWithStep.y) !== null && _b !== void 0 ? _b : []);
    var ops = fields.map(function () {
      return 'distinct';
    });
    data.push({
      name: crossedDataName,
      source: this.data,
      transform: [{
        type: 'aggregate',
        groupby: this.fields,
        fields: fields,
        ops: ops
      }]
    });
  }

  for (var _i13 = 0, _arr10 = [COLUMN, ROW]; _i13 < _arr10.length; _i13++) {
    var channel = _arr10[_i13];

    if (this[channel]) {
      data.push(this.assembleRowColumnHeaderData(channel, crossedDataName, childIndependentFieldsWithStep));
    }
  }

  if (facet) {
    var facetData = this.assembleFacetHeaderData(childIndependentFieldsWithStep);

    if (facetData) {
      data.push.apply(data, _toConsumableArray(facetData));
    }
  }

  return data;
}
|
|
}, {
|
|
key: "fields",

/**
 * All facet fields of this node, concatenated across FACET_CHANNELS in
 * channel order; channels without fields are skipped.
 */
get: function get() {
  var _a;

  var f = [];

  var _iterator107 = _createForOfIteratorHelper(FACET_CHANNELS),
      _step107;

  try {
    for (_iterator107.s(); !(_step107 = _iterator107.n()).done;) {
      var channel = _step107.value;

      if ((_a = this[channel]) === null || _a === void 0 ? void 0 : _a.fields) {
        f.push.apply(f, _toConsumableArray(this[channel].fields));
      }
    }
  } catch (err) {
    _iterator107.e(err);
  } finally {
    _iterator107.f();
  }

  return f;
}
|
|
}]);
|
|
|
|
return FacetNode;
|
|
}(DataFlowNode);
|
|
/**
 * Remove one pair of matching outer quotes (single or double) from a string.
 * Strings not fully wrapped in matching quotes are returned unchanged.
 */
function unquote(pattern) {
  var first = pattern[0];
  var last = pattern[pattern.length - 1];
  var isQuoted = (first === "'" && last === "'") || (first === '"' && last === '"');

  return isQuoted ? pattern.slice(1, -1) : pattern;
}
|
|
/**
 * Build a Vega expression string that parses `field` per the given `parse`
 * directive.
 *
 * @param field The field.
 * @param parse What to parse the field as: 'number' | 'boolean' | 'string' |
 *   'date' | 'flatten' | 'date:<specifier>' | 'utc:<specifier>'.
 * @returns The expression string, or null (after warning) for an
 *   unrecognized directive.
 */
function parseExpression$1(field, parse) {
  var f = accessPathWithDatum(field);

  switch (parse) {
    case 'number':
      return "toNumber(".concat(f, ")");
    case 'boolean':
      return "toBoolean(".concat(f, ")");
    case 'string':
      return "toString(".concat(f, ")");
    case 'date':
      return "toDate(".concat(f, ")");
    case 'flatten':
      // No conversion needed; flattening happens via the output alias.
      return f;
  }

  if (parse.indexOf('date:') === 0) {
    var specifier = unquote(parse.slice(5, parse.length));
    return "timeParse(".concat(f, ",'").concat(specifier, "')");
  }

  if (parse.indexOf('utc:') === 0) {
    var _specifier = unquote(parse.slice(4, parse.length));

    return "utcParse(".concat(f, ",'").concat(_specifier, "')");
  }

  warn(unrecognizedParse(parse));
  return null;
}
|
|
|
|
/**
 * Infer implicit parse types from a filter transform: for each field
 * predicate in the filter tree, derive 'date' | 'number' | 'string' from the
 * type of the compared value, and force 'date' whenever the predicate has a
 * timeUnit. Returns a {field: parseType} map.
 */
function getImplicitFromFilterTransform(transform) {
  var implicit = {};
  forEachLeaf(transform.filter, function (filter) {
    var _a;

    if (isFieldPredicate(filter)) {
      // Automatically add a parse node for filters with filter objects
      var val = null; // For EqualFilter, just use the equal property.
      // For RangeFilter and OneOfFilter, all array members should have
      // the same type, so we only use the first one.

      if (isFieldEqualPredicate(filter)) {
        val = filter.equal;
      } else if (isFieldRangePredicate(filter)) {
        val = filter.range[0];
      } else if (isFieldOneOfPredicate(filter)) {
        val = ((_a = filter.oneOf) !== null && _a !== void 0 ? _a : filter['in'])[0];
      } // else -- for filter expression, we can't infer anything


      // NOTE(review): falsy comparison values (0, '', false) skip type
      // inference here — confirm upstream whether that is intentional.
      if (val) {
        if (isDateTime(val)) {
          implicit[filter.field] = 'date';
        } else if (isNumber(val)) {
          implicit[filter.field] = 'number';
        } else if (isString(val)) {
          implicit[filter.field] = 'string';
        }
      }

      // A timeUnit always implies a temporal field, overriding the above.
      if (filter.timeUnit) {
        implicit[filter.field] = 'date';
      }
    }
  });
  return implicit;
}
|
|
/**
 * Infer implicit parses from a model's encoding: temporal field defs parse
 * as 'date', quantitative fields under min/max aggregates as 'number', and
 * nested field paths (including sort fields) as 'flatten'. For path marks
 * without an order channel, the quantitative dimension field is additionally
 * parsed as 'number' so sorting is numeric. Returns a {field: parseType} map.
 */
function getImplicitFromEncoding(model) {
  var implicit = {};

  // Record the parse implied by a single typed field definition.
  function add(fieldDef) {
    if (isFieldOrDatumDefForTimeFormat(fieldDef)) {
      implicit[fieldDef.field] = 'date';
    } else if (fieldDef.type === 'quantitative' && isMinMaxOp(fieldDef.aggregate) // we need to parse numbers to support correct min and max
    ) {
      implicit[fieldDef.field] = 'number';
    } else if (accessPathDepth(fieldDef.field) > 1) {
      // For non-date/non-number (strings and booleans), derive a flattened field for a referenced nested field.
      // (Parsing numbers / dates already flattens numeric and temporal fields.)
      if (!(fieldDef.field in implicit)) {
        implicit[fieldDef.field] = 'flatten';
      }
    } else if (isScaleFieldDef(fieldDef) && isSortField(fieldDef.sort) && accessPathDepth(fieldDef.sort.field) > 1) {
      // Flatten fields that we sort by but that are not otherwise flattened.
      if (!(fieldDef.sort.field in implicit)) {
        implicit[fieldDef.sort.field] = 'flatten';
      }
    }
  }

  if (isUnitModel(model) || isFacetModel(model)) {
    // Parse encoded fields
    model.forEachFieldDef(function (fieldDef, channel) {
      if (isTypedFieldDef(fieldDef)) {
        add(fieldDef);
      } else {
        // Untyped (secondary) channel defs borrow the type of their main channel.
        var mainChannel = getMainRangeChannel(channel);
        var mainFieldDef = model.fieldDef(mainChannel);
        add(Object.assign(Object.assign({}, fieldDef), {
          type: mainFieldDef.type
        }));
      }
    });
  } // Parse quantitative dimension fields of path marks as numbers so that we sort them correctly.


  if (isUnitModel(model)) {
    var mark = model.mark,
        markDef = model.markDef,
        encoding = model.encoding;

    if (isPathMark(mark) && // No need to sort by dimension if we have a connected scatterplot (order channel is present)
    !model.encoding.order) {
      var dimensionChannel = markDef.orient === 'horizontal' ? 'y' : 'x';
      var dimensionChannelDef = encoding[dimensionChannel];

      if (isFieldDef(dimensionChannelDef) && dimensionChannelDef.type === 'quantitative' && !(dimensionChannelDef.field in implicit)) {
        implicit[dimensionChannelDef.field] = 'number';
      }
    }
  }

  return implicit;
}
|
|
/**
 * Infer implicit 'flatten' parses from a unit model's selections: any
 * selection projection that references a nested field path (access depth
 * greater than one) without a channel needs the field flattened so the
 * selection can reference it directly. Returns a {field: 'flatten'} map.
 */
function getImplicitFromSelection(model) {
  var implicit = {};

  if (isUnitModel(model) && model.component.selection) {
    var _iterator108 = _createForOfIteratorHelper(keys(model.component.selection)),
        _step108;

    try {
      for (_iterator108.s(); !(_step108 = _iterator108.n()).done;) {
        var name = _step108.value;
        var selCmpt = model.component.selection[name];

        var _iterator109 = _createForOfIteratorHelper(selCmpt.project.items),
            _step109;

        try {
          for (_iterator109.s(); !(_step109 = _iterator109.n()).done;) {
            var proj = _step109.value;

            // Channel-bound projections resolve through the encoding instead.
            if (!proj.channel && accessPathDepth(proj.field) > 1) {
              implicit[proj.field] = 'flatten';
            }
          }
        } catch (err) {
          _iterator109.e(err);
        } finally {
          _iterator109.f();
        }
      }
    } catch (err) {
      _iterator108.e(err);
    } finally {
      _iterator108.f();
    }
  }

  return implicit;
}
|
|
|
|
/**
 * A data-flow node that parses fields to given types ('number', 'date', …)
 * and flattens nested field paths. Assembles into Vega's format.parse
 * property (top-level fields) plus formula transforms (nested fields).
 */
var ParseNode = /*#__PURE__*/function (_DataFlowNode11) {
  _inherits(ParseNode, _DataFlowNode11);

  var _super17 = _createSuper(ParseNode);

  function ParseNode(parent, parse) {
    var _this20;

    _classCallCheck(this, ParseNode);

    _this20 = _super17.call(this, parent);
    // {field: parseType} map this node is responsible for.
    _this20._parse = parse;
    return _this20;
  }

  _createClass(ParseNode, [{
    key: "clone",
    value: function clone() {
      // Deep-copy the parse map so the clone can be mutated independently.
      return new ParseNode(null, duplicate(this._parse));
    }
  }, {
    key: "hash",
    value: function hash() {
      return "Parse ".concat(_hash(this._parse));
    }
    /**
     * Creates a parse node from a data.format.parse and updates ancestorParse.
     */

  }, {
    key: "merge",
    value: function merge(other) {
      // Other's entries win on key conflicts; other is then removed from the dataflow.
      this._parse = Object.assign(Object.assign({}, this._parse), other.parse);
      other.remove();
    }
    /**
     * Assemble an object for Vega's format.parse property.
     */

  }, {
    key: "assembleFormatParse",
    value: function assembleFormatParse() {
      var formatParse = {};

      var _iterator110 = _createForOfIteratorHelper(keys(this._parse)),
          _step110;

      try {
        for (_iterator110.s(); !(_step110 = _iterator110.n()).done;) {
          var _field15 = _step110.value;
          var p = this._parse[_field15];

          // Only top-level fields go through the loader's format.parse;
          // nested fields are handled by assembleTransforms().
          if (accessPathDepth(_field15) === 1) {
            formatParse[_field15] = p;
          }
        }
      } catch (err) {
        _iterator110.e(err);
      } finally {
        _iterator110.f();
      }

      return formatParse;
    } // format parse depends and produces all fields in its parse

  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(keys(this._parse));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      return new Set(keys(this._parse));
    }
  }, {
    key: "assembleTransforms",
    value: function assembleTransforms() {
      var _this21 = this;

      // When onlyNested is true, emit formulas only for nested field paths
      // (top-level fields were already covered by format.parse).
      var onlyNested = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
      return keys(this._parse).filter(function (field) {
        return onlyNested ? accessPathDepth(field) > 1 : true;
      }).map(function (field) {
        var expr = parseExpression$1(field, _this21._parse[field]);

        if (!expr) {
          return null;
        }

        var formula = {
          type: 'formula',
          expr: expr,
          as: removePathFromField(field) // Vega output is always flattened

        };
        return formula;
      }).filter(function (t) {
        return t !== null;
      });
    }
  }, {
    key: "parse",
    get: function get() {
      return this._parse;
    }
  }], [{
    key: "makeExplicit",
    value: function makeExplicit(parent, model, ancestorParse) {
      // Custom parse
      var explicit = {};
      var data = model.data;

      if (!isGenerator(data) && data && data.format && data.format.parse) {
        explicit = data.format.parse;
      }

      return this.makeWithAncestors(parent, explicit, {}, ancestorParse);
    }
    /**
     * Creates a parse node from "explicit" parse and "implicit" parse and updates ancestorParse.
     */

  }, {
    key: "makeWithAncestors",
    value: function makeWithAncestors(parent, explicit, implicit, ancestorParse) {
      // We should not parse what has already been parsed in a parent (explicitly or implicitly) or what has been derived (marked as "derived"). We also don't need to flatten a field that has already been parsed.
      var _iterator111 = _createForOfIteratorHelper(keys(implicit)),
          _step111;

      try {
        for (_iterator111.s(); !(_step111 = _iterator111.n()).done;) {
          var _field16 = _step111.value;
          var parsedAs = ancestorParse.getWithExplicit(_field16);

          if (parsedAs.value !== undefined) {
            // We always ignore derived fields even if they are implicitly defined because we expect users to create the right types.
            if (parsedAs.explicit || parsedAs.value === implicit[_field16] || parsedAs.value === 'derived' || implicit[_field16] === 'flatten') {
              delete implicit[_field16];
            } else {
              warn(differentParse(_field16, implicit[_field16], parsedAs.value));
            }
          }
        }
      } catch (err) {
        _iterator111.e(err);
      } finally {
        _iterator111.f();
      }

      var _iterator112 = _createForOfIteratorHelper(keys(explicit)),
          _step112;

      try {
        for (_iterator112.s(); !(_step112 = _iterator112.n()).done;) {
          var _field17 = _step112.value;

          var _parsedAs = ancestorParse.get(_field17);

          if (_parsedAs !== undefined) {
            // Don't parse a field again if it has been parsed with the same type already.
            if (_parsedAs === explicit[_field17]) {
              delete explicit[_field17];
            } else {
              warn(differentParse(_field17, explicit[_field17], _parsedAs));
            }
          }
        }
      } catch (err) {
        _iterator112.e(err);
      } finally {
        _iterator112.f();
      }

      var parse = new Split(explicit, implicit); // add the format parse from this model so that children don't parse the same field again

      ancestorParse.copyAll(parse); // copy only non-null parses

      var p = {};

      var _iterator113 = _createForOfIteratorHelper(keys(parse.combine())),
          _step113;

      try {
        for (_iterator113.s(); !(_step113 = _iterator113.n()).done;) {
          var key = _step113.value;
          var val = parse.get(key);

          if (val !== null) {
            p[key] = val;
          }
        }
      } catch (err) {
        _iterator113.e(err);
      } finally {
        _iterator113.f();
      }

      // Nothing left to parse, or parsing is disabled for this branch.
      if (keys(p).length === 0 || ancestorParse.parseNothing) {
        return null;
      }

      return new ParseNode(parent, p);
    }
  }]);

  return ParseNode;
}(DataFlowNode);
|
|
|
|
/**
 * A data-flow node emitting Vega's `identifier` transform, which tags each
 * datum with a unique id under SELECTION_ID. Needed by selections that track
 * individual data points.
 */
var IdentifierNode = /*#__PURE__*/function (_DataFlowNode12) {
  _inherits(IdentifierNode, _DataFlowNode12);

  var _super18 = _createSuper(IdentifierNode);

  function IdentifierNode(parent) {
    _classCallCheck(this, IdentifierNode);

    return _super18.call(this, parent);
  }

  _createClass(IdentifierNode, [{
    key: "clone",
    value: function clone() {
      // Clones are re-attached by the caller, hence the null parent.
      return new IdentifierNode(null);
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      // Reads nothing from the datum.
      return new Set();
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set([SELECTION_ID]);
    }
  }, {
    key: "hash",
    value: function hash() {
      // All identifier nodes are interchangeable, so a constant hash suffices.
      return 'Identifier';
    }
  }, {
    key: "assemble",
    value: function assemble() {
      return {
        type: 'identifier',
        as: SELECTION_ID
      };
    }
  }]);

  return IdentifierNode;
}(DataFlowNode);
|
|
/**
 * A class for the join aggregate transform nodes.
 * Wraps a Vega `joinaggregate` transform: aggregates over groups and joins
 * the results back onto every input datum.
 */
var JoinAggregateTransformNode = /*#__PURE__*/function (_DataFlowNode13) {
  _inherits(JoinAggregateTransformNode, _DataFlowNode13);

  var _super19 = _createSuper(JoinAggregateTransformNode);

  function JoinAggregateTransformNode(parent, transform) {
    var _this22;

    _classCallCheck(this, JoinAggregateTransformNode);

    _this22 = _super19.call(this, parent);
    _this22.transform = transform;
    return _this22;
  }

  _createClass(JoinAggregateTransformNode, [{
    key: "clone",
    value: function clone() {
      return new JoinAggregateTransformNode(null, duplicate(this.transform));
    }
  }, {
    key: "addDimensions",
    value: function addDimensions(fields) {
      // Extend groupby with extra facet dimensions, dropping duplicates.
      this.transform.groupby = unique(this.transform.groupby.concat(fields), function (d) {
        return d;
      });
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      var out = new Set();

      if (this.transform.groupby) {
        this.transform.groupby.forEach(out.add, out);
      }

      // Fields referenced by the aggregate defs; field-less ops (e.g. count)
      // are filtered out.
      this.transform.joinaggregate.map(function (w) {
        return w.field;
      }).filter(function (f) {
        return f !== undefined;
      }).forEach(out.add, out);
      return out;
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(this.transform.joinaggregate.map(this.getDefaultName));
    }
  }, {
    key: "getDefaultName",
    value: function getDefaultName(joinAggregateFieldDef) {
      var _a;

      // Explicit `as` wins; otherwise derive the standard Vega field name.
      return (_a = joinAggregateFieldDef.as) !== null && _a !== void 0 ? _a : _vgField(joinAggregateFieldDef);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "JoinAggregateTransform ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Transpose the joinaggregate defs into parallel fields/ops/as arrays.
      var fields = [];
      var ops = [];
      var as = [];

      var _iterator114 = _createForOfIteratorHelper(this.transform.joinaggregate),
          _step114;

      try {
        for (_iterator114.s(); !(_step114 = _iterator114.n()).done;) {
          var joinaggregate = _step114.value;
          ops.push(joinaggregate.op);
          as.push(this.getDefaultName(joinaggregate));
          fields.push(joinaggregate.field === undefined ? null : joinaggregate.field);
        }
      } catch (err) {
        _iterator114.e(err);
      } finally {
        _iterator114.f();
      }

      var groupby = this.transform.groupby;
      return Object.assign({
        type: 'joinaggregate',
        as: as,
        ops: ops,
        fields: fields
      }, groupby !== undefined ? {
        groupby: groupby
      } : {});
    }
  }]);

  return JoinAggregateTransformNode;
}(DataFlowNode);
|
|
|
|
/**
 * Collect the Vega field name of every stack-by dimension of the model,
 * skipping entries whose field definition yields no field name.
 */
function getStackByFields(model) {
  var fields = [];

  model.stack.stackBy.forEach(function (by) {
    var _field = _vgField(by.fieldDef);

    if (_field) {
      fields.push(_field);
    }
  });

  return fields;
}
|
|
|
|
/**
 * True when `as` is an array of at least two strings — the shape required
 * for an explicit [start, end] output field pair of a stack transform.
 */
function isValidAsArray(as) {
  if (!isArray(as) || as.length <= 1) {
    return false;
  }

  return as.every(function (s) {
    return isString(s);
  });
}
|
|
|
|
/**
 * A data-flow node for Vega's `stack` transform, optionally preceded by an
 * `impute` transform (plus a bin-midpoint formula) when stacking imputed
 * binned data.
 */
var StackNode = /*#__PURE__*/function (_DataFlowNode14) {
  _inherits(StackNode, _DataFlowNode14);

  var _super20 = _createSuper(StackNode);

  function StackNode(parent, stack) {
    var _this23;

    _classCallCheck(this, StackNode);

    _this23 = _super20.call(this, parent);
    // Component describing the stack: stackField, stackby, sort, offset,
    // facetby, impute, as, and either dimensionFieldDef or groupby.
    _this23._stack = stack;
    return _this23;
  }

  _createClass(StackNode, [{
    key: "clone",
    value: function clone() {
      return new StackNode(null, duplicate(this._stack));
    }
  }, {
    key: "addDimensions",
    value: function addDimensions(fields) {
      var _this$_stack$facetby;

      // Facet fields become additional stack groupby dimensions.
      (_this$_stack$facetby = this._stack.facetby).push.apply(_this$_stack$facetby, _toConsumableArray(fields));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      var out = new Set();
      out.add(this._stack.stackField);
      this.getGroupbyFields().forEach(out.add, out);

      this._stack.facetby.forEach(out.add, out);

      this._stack.sort.field.forEach(out.add, out);

      return out;
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      // The [start, end] output field pair.
      return new Set(this._stack.as);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "Stack ".concat(_hash(this._stack));
    }
  }, {
    key: "getGroupbyFields",
    value: function getGroupbyFields() {
      var _this$_stack = this._stack,
          dimensionFieldDef = _this$_stack.dimensionFieldDef,
          impute = _this$_stack.impute,
          groupby = _this$_stack.groupby;

      if (dimensionFieldDef) {
        if (dimensionFieldDef.bin) {
          if (impute) {
            // For binned group by field with impute, we calculate bin_mid
            // as we cannot impute two fields simultaneously
            return [_vgField(dimensionFieldDef, {
              binSuffix: 'mid'
            })];
          }

          return [// For binned group by field without impute, we need both bin (start) and bin_end
          _vgField(dimensionFieldDef, {}), _vgField(dimensionFieldDef, {
            binSuffix: 'end'
          })];
        }

        return [_vgField(dimensionFieldDef)];
      }

      return groupby !== null && groupby !== void 0 ? groupby : [];
    }
  }, {
    key: "assemble",
    value: function assemble() {
      var transform = [];
      var _this$_stack2 = this._stack,
          facetby = _this$_stack2.facetby,
          dimensionFieldDef = _this$_stack2.dimensionFieldDef,
          field = _this$_stack2.stackField,
          stackby = _this$_stack2.stackby,
          sort = _this$_stack2.sort,
          offset = _this$_stack2.offset,
          impute = _this$_stack2.impute,
          as = _this$_stack2.as; // Impute

      if (impute && dimensionFieldDef) {
        var _dimensionFieldDef$ba = dimensionFieldDef.band,
            band = _dimensionFieldDef$ba === void 0 ? 0.5 : _dimensionFieldDef$ba,
            bin = dimensionFieldDef.bin;

        if (bin) {
          // As we can only impute one field at a time, we need to calculate
          // mid point for a binned field
          transform.push({
            type: 'formula',
            // Weighted interpolation between bin start and bin end by `band`.
            expr: "".concat(band, "*") + _vgField(dimensionFieldDef, {
              expr: 'datum'
            }) + "+".concat(1 - band, "*") + _vgField(dimensionFieldDef, {
              expr: 'datum',
              binSuffix: 'end'
            }),
            as: _vgField(dimensionFieldDef, {
              binSuffix: 'mid',
              forAs: true
            })
          });
        }

        // Fill missing combinations with zero so stacks line up.
        transform.push({
          type: 'impute',
          field: field,
          groupby: [].concat(_toConsumableArray(stackby), _toConsumableArray(facetby)),
          key: _vgField(dimensionFieldDef, {
            binSuffix: 'mid'
          }),
          method: 'value',
          value: 0
        });
      } // Stack


      transform.push({
        type: 'stack',
        groupby: [].concat(_toConsumableArray(this.getGroupbyFields()), _toConsumableArray(facetby)),
        field: field,
        sort: sort,
        as: as,
        offset: offset
      });
      return transform;
    }
  }, {
    key: "stack",
    get: function get() {
      return this._stack;
    }
  }], [{
    key: "makeFromTransform",
    value: function makeFromTransform(parent, stackTransform) {
      var stack = stackTransform.stack,
          groupby = stackTransform.groupby,
          as = stackTransform.as,
          _stackTransform$offse = stackTransform.offset,
          offset = _stackTransform$offse === void 0 ? 'zero' : _stackTransform$offse;
      var sortFields = [];
      var sortOrder = [];

      if (stackTransform.sort !== undefined) {
        for (var _iterator115 = _createForOfIteratorHelper(stackTransform.sort), _step115;;) {
          break;
        }
      }

      if (stackTransform.sort !== undefined) {
        var _iterator115 = _createForOfIteratorHelper(stackTransform.sort),
            _step115;

        try {
          for (_iterator115.s(); !(_step115 = _iterator115.n()).done;) {
            var sortField = _step115.value;
            sortFields.push(sortField.field);
            sortOrder.push(getFirstDefined(sortField.order, 'ascending'));
          }
        } catch (err) {
          _iterator115.e(err);
        } finally {
          _iterator115.f();
        }
      }

      var sort = {
        field: sortFields,
        order: sortOrder
      };
      // Normalize `as` into the [start, end] pair Vega expects.
      var normalizedAs;

      if (isValidAsArray(as)) {
        normalizedAs = as;
      } else if (isString(as)) {
        normalizedAs = [as, as + '_end'];
      } else {
        normalizedAs = [stackTransform.stack + '_start', stackTransform.stack + '_end'];
      }

      return new StackNode(parent, {
        stackField: stack,
        groupby: groupby,
        offset: offset,
        sort: sort,
        facetby: [],
        as: normalizedAs
      });
    }
  }, {
    key: "makeFromEncoding",
    value: function makeFromEncoding(parent, model) {
      var stackProperties = model.stack;
      var encoding = model.encoding;

      if (!stackProperties) {
        return null;
      }

      var groupbyChannel = stackProperties.groupbyChannel,
          fieldChannel = stackProperties.fieldChannel,
          offset = stackProperties.offset,
          impute = stackProperties.impute;
      var dimensionFieldDef;

      if (groupbyChannel) {
        var cDef = encoding[groupbyChannel];
        dimensionFieldDef = getFieldDef(cDef); // Fair to cast as groupByChannel is always either x or y
      }

      var stackby = getStackByFields(model);
      var orderDef = model.encoding.order;
      var sort;

      if (isArray(orderDef) || isFieldDef(orderDef)) {
        sort = sortParams(orderDef);
      } else {
        // default = descending by stackFields
        // FIXME is the default here correct for binned fields?
        sort = stackby.reduce(function (s, field) {
          s.field.push(field);
          s.order.push(fieldChannel === 'y' ? 'descending' : 'ascending');
          return s;
        }, {
          field: [],
          order: []
        });
      }

      return new StackNode(parent, {
        dimensionFieldDef: dimensionFieldDef,
        stackField: model.vgField(fieldChannel),
        facetby: [],
        stackby: stackby,
        sort: sort,
        offset: offset,
        impute: impute,
        as: [model.vgField(fieldChannel, {
          suffix: 'start',
          forAs: true
        }), model.vgField(fieldChannel, {
          suffix: 'end',
          forAs: true
        })]
      });
    }
  }]);

  return StackNode;
}(DataFlowNode);
|
|
/**
 * A class for the window transform nodes
 * (Vega `window` transform; assemble() degrades to a cheaper `joinaggregate`
 * when only aggregate ops over an unbounded frame are used).
 */
var WindowTransformNode = /*#__PURE__*/function (_DataFlowNode15) {
  _inherits(WindowTransformNode, _DataFlowNode15);

  var _super21 = _createSuper(WindowTransformNode);

  function WindowTransformNode(parent, transform) {
    var _this24;

    _classCallCheck(this, WindowTransformNode);

    _this24 = _super21.call(this, parent);
    _this24.transform = transform;
    return _this24;
  }

  _createClass(WindowTransformNode, [{
    key: "clone",
    value: function clone() {
      return new WindowTransformNode(null, duplicate(this.transform));
    }
  }, {
    key: "addDimensions",
    value: function addDimensions(fields) {
      // Extend groupby with extra facet dimensions, dropping duplicates.
      this.transform.groupby = unique(this.transform.groupby.concat(fields), function (d) {
        return d;
      });
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      var _a, _b;

      var out = new Set();
      ((_a = this.transform.groupby) !== null && _a !== void 0 ? _a : []).forEach(out.add, out);
      ((_b = this.transform.sort) !== null && _b !== void 0 ? _b : []).forEach(function (m) {
        return out.add(m.field);
      });
      // Fields referenced by the window op defs; field-less ops (e.g.
      // row_number) are filtered out.
      this.transform.window.map(function (w) {
        return w.field;
      }).filter(function (f) {
        return f !== undefined;
      }).forEach(out.add, out);
      return out;
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(this.transform.window.map(this.getDefaultName));
    }
  }, {
    key: "getDefaultName",
    value: function getDefaultName(windowFieldDef) {
      var _a;

      // Explicit `as` wins; otherwise derive the standard Vega field name.
      return (_a = windowFieldDef.as) !== null && _a !== void 0 ? _a : _vgField(windowFieldDef);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "WindowTransform ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      var _a;

      // Transpose the window defs into parallel fields/ops/as/params arrays.
      var fields = [];
      var ops = [];
      var as = [];
      var params = [];

      var _iterator116 = _createForOfIteratorHelper(this.transform.window),
          _step116;

      try {
        for (_iterator116.s(); !(_step116 = _iterator116.n()).done;) {
          var window = _step116.value;
          ops.push(window.op);
          as.push(this.getDefaultName(window));
          params.push(window.param === undefined ? null : window.param);
          fields.push(window.field === undefined ? null : window.field);
        }
      } catch (err) {
        _iterator116.e(err);
      } finally {
        _iterator116.f();
      }

      var frame = this.transform.frame;
      var groupby = this.transform.groupby;

      if (frame && frame[0] === null && frame[1] === null && ops.every(function (o) {
        return isAggregateOp(o);
      })) {
        // when the window does not rely on any particular window ops or frame, switch to a simpler and more efficient joinaggregate
        return Object.assign({
          type: 'joinaggregate',
          as: as,
          ops: ops,
          fields: fields
        }, groupby !== undefined ? {
          groupby: groupby
        } : {});
      }

      var sortFields = [];
      var sortOrder = [];

      if (this.transform.sort !== undefined) {
        var _iterator117 = _createForOfIteratorHelper(this.transform.sort),
            _step117;

        try {
          for (_iterator117.s(); !(_step117 = _iterator117.n()).done;) {
            var sortField = _step117.value;
            sortFields.push(sortField.field);
            sortOrder.push((_a = sortField.order) !== null && _a !== void 0 ? _a : 'ascending');
          }
        } catch (err) {
          _iterator117.e(err);
        } finally {
          _iterator117.f();
        }
      }

      var sort = {
        field: sortFields,
        order: sortOrder
      };
      var ignorePeers = this.transform.ignorePeers;
      // Optional properties are only emitted when defined.
      return Object.assign(Object.assign(Object.assign({
        type: 'window',
        params: params,
        as: as,
        ops: ops,
        fields: fields,
        sort: sort
      }, ignorePeers !== undefined ? {
        ignorePeers: ignorePeers
      } : {}), groupby !== undefined ? {
        groupby: groupby
      } : {}), frame !== undefined ? {
        frame: frame
      } : {});
    }
  }]);

  return WindowTransformNode;
}(DataFlowNode);
|
|
/**
 * Move parse nodes up to forks.
 * Bottom-up optimization: a ParseNode is merged into a parent ParseNode, or
 * swapped above other parents, until it reaches a data source, a fork
 * (parent with several children), or a parent that produces a field the
 * parse depends on.
 */
var MoveParseUp = /*#__PURE__*/function (_BottomUpOptimizer) {
  _inherits(MoveParseUp, _BottomUpOptimizer);

  var _super22 = _createSuper(MoveParseUp);

  function MoveParseUp() {
    _classCallCheck(this, MoveParseUp);

    return _super22.apply(this, arguments);
  }

  _createClass(MoveParseUp, [{
    key: "run",
    value: function run(node) {
      var parent = node.parent; // Move parse up by merging or swapping.

      if (node instanceof ParseNode) {
        if (isDataSourceNode(parent)) {
          // Cannot move above the data source itself.
          return this.flags;
        }

        if (parent.numChildren() > 1) {
          // Don't move parse further up but continue with parent.
          this.setContinue();
          return this.flags;
        }

        if (parent instanceof ParseNode) {
          this.setMutated();
          parent.merge(node);
        } else {
          // Don't swap with nodes that produce something that the parse node depends on (e.g. lookup).
          if (fieldIntersection(parent.producedFields(), node.dependentFields())) {
            this.setContinue();
            return this.flags;
          }

          this.setMutated();
          node.swapWithParent();
        }
      }

      this.setContinue();
      return this.flags;
    }
  }]);

  return MoveParseUp;
}(BottomUpOptimizer);
|
|
/**
|
|
* Merge identical nodes at forks by comparing hashes.
|
|
*
|
|
* Does not need to iterate from leaves so we implement this with recursion as it's a bit simpler.
|
|
*/
|
|
|
|
|
|
// Top-down optimizer: at each fork, buckets the children by hash() and collapses
// each bucket of identical nodes into a single node, then recurses into children.
var MergeIdenticalNodes = /*#__PURE__*/function (_TopDownOptimizer) {
  _inherits(MergeIdenticalNodes, _TopDownOptimizer);

  var _super23 = _createSuper(MergeIdenticalNodes);

  // Babel class boilerplate: constructor delegates to TopDownOptimizer.
  function MergeIdenticalNodes() {
    _classCallCheck(this, MergeIdenticalNodes);

    return _super23.apply(this, arguments);
  }

  _createClass(MergeIdenticalNodes, [{
    key: "mergeNodes",
    // Keeps the first node of `nodes` and splices every other node out of the
    // graph: each duplicate is detached from `parent`, re-parented under the
    // kept node, and then removed (node.remove() reattaches its children).
    value: function mergeNodes(parent, nodes) {
      var mergedNode = nodes.shift();

      var _iterator118 = _createForOfIteratorHelper(nodes),
          _step118;

      try {
        for (_iterator118.s(); !(_step118 = _iterator118.n()).done;) {
          var node = _step118.value;
          parent.removeChild(node);
          node.parent = mergedNode;
          node.remove();
        }
      } catch (err) {
        _iterator118.e(err);
      } finally {
        _iterator118.f();
      }
    }
  }, {
    key: "run",
    // Returns true iff any merge happened in this subtree.
    value: function run(node) {
      var hashes = node.children.map(function (x) {
        return x.hash();
      });
      // Map from hash -> list of children sharing that hash.
      var buckets = {};

      for (var i = 0; i < hashes.length; i++) {
        if (buckets[hashes[i]] === undefined) {
          buckets[hashes[i]] = [node.children[i]];
        } else {
          buckets[hashes[i]].push(node.children[i]);
        }
      }

      var _iterator119 = _createForOfIteratorHelper(keys(buckets)),
          _step119;

      try {
        for (_iterator119.s(); !(_step119 = _iterator119.n()).done;) {
          var k = _step119.value;

          // Only buckets with more than one member contain duplicates to merge.
          if (buckets[k].length > 1) {
            this.setMutated();
            this.mergeNodes(node, buckets[k]);
          }
        }
      } catch (err) {
        _iterator119.e(err);
      } finally {
        _iterator119.f();
      }

      // Recurse into (possibly reduced) children.
      var _iterator120 = _createForOfIteratorHelper(node.children),
          _step120;

      try {
        for (_iterator120.s(); !(_step120 = _iterator120.n()).done;) {
          var child = _step120.value;
          this.run(child);
        }
      } catch (err) {
        _iterator120.e(err);
      } finally {
        _iterator120.f();
      }

      return this.mutatedFlag;
    }
  }]);

  return MergeIdenticalNodes;
}(TopDownOptimizer);
|
|
/**
|
|
* Repeatedly remove leaf nodes that are not output or facet nodes.
|
|
* The reason is that we don't need subtrees that don't have any output nodes.
|
|
* Facet nodes are needed for the row or column domains.
|
|
*/
|
|
|
|
|
|
// Bottom-up optimizer: prunes leaf nodes that are neither output nor facet nodes.
// Run repeatedly from the leaves, this removes whole subtrees with no consumers.
var RemoveUnusedSubtrees = /*#__PURE__*/function (_BottomUpOptimizer2) {
  _inherits(RemoveUnusedSubtrees, _BottomUpOptimizer2);

  var _super24 = _createSuper(RemoveUnusedSubtrees);

  // Babel class boilerplate: constructor delegates to BottomUpOptimizer.
  function RemoveUnusedSubtrees() {
    _classCallCheck(this, RemoveUnusedSubtrees);

    return _super24.apply(this, arguments);
  }

  _createClass(RemoveUnusedSubtrees, [{
    key: "run",
    value: function run(node) {
      if (node instanceof OutputNode || node.numChildren() > 0 || node instanceof FacetNode) {
        // no need to continue with parent because it is output node or will have children (there was a fork)
        return this.flags;
      } else {
        // Dead leaf: remove it; the bottom-up walk will then consider its parent.
        this.setMutated();
        node.remove();
      }

      return this.flags;
    }
  }]);

  return RemoveUnusedSubtrees;
}(BottomUpOptimizer);
|
|
/**
|
|
* Removes duplicate time unit nodes (as determined by the name of the
|
|
* output field) that may be generated due to selections projected over
|
|
* time units.
|
|
*
|
|
 * TODO: Try to make this a top down optimizer that keeps only the first
 * instance of a time unit node.
|
|
* TODO: Try to make a generic version of this that only keeps one node per hash.
|
|
*/
|
|
|
|
|
|
// Bottom-up optimizer: drops TimeUnitNodes whose produced fields were already
// produced by a previously visited TimeUnitNode on the same path.
var RemoveDuplicateTimeUnits = /*#__PURE__*/function (_BottomUpOptimizer3) {
  _inherits(RemoveDuplicateTimeUnits, _BottomUpOptimizer3);

  var _super25 = _createSuper(RemoveDuplicateTimeUnits);

  function RemoveDuplicateTimeUnits() {
    var _this25;

    _classCallCheck(this, RemoveDuplicateTimeUnits);

    _this25 = _super25.apply(this, arguments);
    // fields: union of field names produced by time unit nodes seen so far.
    _this25.fields = new Set();
    // prev: most recently visited TimeUnitNode (the removal candidate).
    _this25.prev = null;
    return _this25;
  }

  _createClass(RemoveDuplicateTimeUnits, [{
    key: "run",
    value: function run(node) {
      this.setContinue();

      if (node instanceof TimeUnitNode) {
        var pfields = node.producedFields();

        if (hasIntersection(pfields, this.fields)) {
          // Duplicate output fields: remove the previously seen time unit node.
          this.setMutated();
          this.prev.remove();
        } else {
          // New fields: record them for later duplicate detection.
          this.fields = new Set([].concat(_toConsumableArray(this.fields), _toConsumableArray(pfields)));
        }

        this.prev = node;
      }

      return this.flags;
    }
  }, {
    key: "reset",
    // Called between traversals from different leaves; clears the seen-field set.
    // NOTE(review): `prev` is intentionally not reset here — it is only
    // dereferenced after an intersection hit, which requires a fresh TimeUnitNode
    // to have been visited first in the new traversal.
    value: function reset() {
      this.fields.clear();
    }
  }]);

  return RemoveDuplicateTimeUnits;
}(BottomUpOptimizer);
|
|
/**
|
|
* Merge adjacent time unit nodes.
|
|
*/
|
|
|
|
|
|
// Bottom-up optimizer: merges all sibling TimeUnitNodes under a parent into one.
var MergeTimeUnits = /*#__PURE__*/function (_BottomUpOptimizer4) {
  _inherits(MergeTimeUnits, _BottomUpOptimizer4);

  var _super26 = _createSuper(MergeTimeUnits);

  // Babel class boilerplate: constructor delegates to BottomUpOptimizer.
  function MergeTimeUnits() {
    _classCallCheck(this, MergeTimeUnits);

    return _super26.apply(this, arguments);
  }

  _createClass(MergeTimeUnits, [{
    key: "run",
    value: function run(node) {
      this.setContinue();
      var parent = node.parent;
      // All TimeUnitNode siblings of `node` (including node itself if applicable).
      var timeUnitChildren = parent.children.filter(function (x) {
        return x instanceof TimeUnitNode;
      });
      // The last one survives; every other sibling is merged into it.
      var combination = timeUnitChildren.pop();

      var _iterator121 = _createForOfIteratorHelper(timeUnitChildren),
          _step121;

      try {
        for (_iterator121.s(); !(_step121 = _iterator121.n()).done;) {
          var timeUnit = _step121.value;
          this.setMutated();
          combination.merge(timeUnit);
        }
      } catch (err) {
        _iterator121.e(err);
      } finally {
        _iterator121.f();
      }

      return this.flags;
    }
  }]);

  return MergeTimeUnits;
}(BottomUpOptimizer);
|
|
/**
|
|
* Clones the subtree and ignores output nodes except for the leaves, which are renamed.
|
|
*/
|
|
|
|
|
|
// Returns a `clone` function that deep-copies a dataflow subtree below `facet`.
// Output-node copies are renamed with FACET_SCALE_PREFIX and registered in the
// facet model's outputNodes; dimension-producing transforms (aggregate, stack,
// window, joinaggregate) get the facet fields added as extra dimensions.
// FacetNodes themselves are skipped (their children are cloned in their place).
function cloneSubtree(facet) {
  // clone(node) -> array of cloned root nodes for this position in the tree.
  function clone(node) {
    if (!(node instanceof FacetNode)) {
      var copy = node.clone();

      if (copy instanceof OutputNode) {
        // Rename so the cloned output does not collide with the original,
        // and register it so scale domains can reference it.
        var newName = FACET_SCALE_PREFIX + copy.getSource();
        copy.setSource(newName);
        facet.model.component.data.outputNodes[newName] = copy;
      } else if (copy instanceof AggregateNode || copy instanceof StackNode || copy instanceof WindowTransformNode || copy instanceof JoinAggregateTransformNode) {
        // Group also by the facet fields so per-facet results stay separate.
        copy.addDimensions(facet.fields);
      }

      // Re-parent the cloned children under the copy.
      var _iterator122 = _createForOfIteratorHelper(node.children.flatMap(clone)),
          _step122;

      try {
        for (_iterator122.s(); !(_step122 = _iterator122.n()).done;) {
          var n = _step122.value;
          n.parent = copy;
        }
      } catch (err) {
        _iterator122.e(err);
      } finally {
        _iterator122.f();
      }

      return [copy];
    }

    // FacetNode: splice it out of the clone by cloning its children directly.
    return node.children.flatMap(clone);
  }

  return clone;
}
|
|
/**
|
|
* Move facet nodes down to the next fork or output node. Also pull the main output with the facet node.
|
|
* After moving down the facet node, make a copy of the subtree and make it a child of the main output.
|
|
*/
|
|
|
|
|
|
/**
 * Move facet nodes down to the next fork or output node. Also pull the main output with the facet node.
 * After moving down the facet node, make a copy of the subtree and make it a child of the main output,
 * so that scales can be computed at the top level from the cloned (prefixed) data.
 *
 * @param node A dataflow node; recursion walks the whole subtree.
 */
function moveFacetDown(node) {
  if (node instanceof FacetNode) {
    if (node.numChildren() === 1 && !(node.children[0] instanceof OutputNode)) {
      // move down until we hit a fork or output node
      var child = node.children[0];

      if (child instanceof AggregateNode || child instanceof StackNode || child instanceof WindowTransformNode || child instanceof JoinAggregateTransformNode) {
        // Transforms pushed above the facet must also group by the facet fields.
        child.addDimensions(node.fields);
      }

      child.swapWithParent();
      moveFacetDown(node);
    } else {
      // move main to facet
      var facetMain = node.model.component.data.main;
      moveMainDownToFacet(facetMain); // replicate the subtree and place it before the facet's main node

      var cloner = cloneSubtree(node);
      var copy = node.children.map(cloner).flat();

      var _iterator123 = _createForOfIteratorHelper(copy),
          _step123;

      try {
        for (_iterator123.s(); !(_step123 = _iterator123.n()).done;) {
          var c = _step123.value;
          c.parent = facetMain;
        }
      } catch (err) {
        _iterator123.e(err);
      } finally {
        _iterator123.f();
      }
    }
  } else {
    // forEach (not map): we iterate purely for the recursive side effects and
    // discard any result. The original used map and threw the array away.
    node.children.forEach(moveFacetDown);
  }
}
|
|
|
|
/**
 * Push a "main" output node down through its single-child chain until the child
 * is a FacetNode (or the chain forks). Only acts on OutputNodes of type Main.
 */
function moveMainDownToFacet(node) {
  // Guard clauses: only a Main output with exactly one child can be moved.
  if (!(node instanceof OutputNode) || node.type !== DataSourceType.Main) {
    return;
  }

  if (node.numChildren() !== 1) {
    return;
  }

  var onlyChild = node.children[0];

  if (!(onlyChild instanceof FacetNode)) {
    // Swap the output below its child and keep pushing it down.
    onlyChild.swapWithParent();
    moveMainDownToFacet(node);
  }
}
|
|
/**
|
|
* Remove output nodes that are not required. Starting from a root.
|
|
*/
|
|
|
|
|
|
// Top-down optimizer: removes OutputNodes that report isRequired() === false,
// recursing from the given root through the whole subtree.
var RemoveUnnecessaryOutputNodes = /*#__PURE__*/function (_TopDownOptimizer2) {
  _inherits(RemoveUnnecessaryOutputNodes, _TopDownOptimizer2);

  var _super27 = _createSuper(RemoveUnnecessaryOutputNodes);

  // Babel class boilerplate: constructor delegates to TopDownOptimizer.
  function RemoveUnnecessaryOutputNodes() {
    _classCallCheck(this, RemoveUnnecessaryOutputNodes);

    return _super27.call(this);
  }

  _createClass(RemoveUnnecessaryOutputNodes, [{
    key: "run",
    // Returns true iff any node was removed in this subtree.
    value: function run(node) {
      if (node instanceof OutputNode && !node.isRequired()) {
        this.setMutated();
        node.remove();
      }

      var _iterator124 = _createForOfIteratorHelper(node.children),
          _step124;

      try {
        for (_iterator124.s(); !(_step124 = _iterator124.n()).done;) {
          var child = _step124.value;
          this.run(child);
        }
      } catch (err) {
        _iterator124.e(err);
      } finally {
        _iterator124.f();
      }

      return this.mutatedFlag;
    }
  }]);

  return RemoveUnnecessaryOutputNodes;
}(TopDownOptimizer);
|
|
|
|
// Top-down optimizer: removes IdentifierNodes unless the model needs selection
// ids AND the node sits directly after a tuple-producing node (data source,
// aggregate, or parse).
var RemoveUnnecessaryIdentifierNodes = /*#__PURE__*/function (_TopDownOptimizer3) {
  _inherits(RemoveUnnecessaryIdentifierNodes, _TopDownOptimizer3);

  var _super28 = _createSuper(RemoveUnnecessaryIdentifierNodes);

  // `model` may be undefined; requiresSelectionId is then falsy and all
  // identifier nodes are removed.
  function RemoveUnnecessaryIdentifierNodes(model) {
    var _this26;

    _classCallCheck(this, RemoveUnnecessaryIdentifierNodes);

    _this26 = _super28.call(this);
    _this26.requiresSelectionId = model && requiresSelectionId(model);
    return _this26;
  }

  _createClass(RemoveUnnecessaryIdentifierNodes, [{
    key: "run",
    // Returns true iff any identifier node was removed in this subtree.
    value: function run(node) {
      if (node instanceof IdentifierNode) {
        // Only preserve IdentifierNodes if we have default discrete selections
        // in our model tree, and if the nodes come after tuple producing nodes.
        if (!(this.requiresSelectionId && (isDataSourceNode(node.parent) || node.parent instanceof AggregateNode || node.parent instanceof ParseNode))) {
          this.setMutated();
          node.remove();
        }
      }

      var _iterator125 = _createForOfIteratorHelper(node.children),
          _step125;

      try {
        for (_iterator125.s(); !(_step125 = _iterator125.n()).done;) {
          var child = _step125.value;
          this.run(child);
        }
      } catch (err) {
        _iterator125.e(err);
      } finally {
        _iterator125.f();
      }

      return this.mutatedFlag;
    }
  }]);

  return RemoveUnnecessaryIdentifierNodes;
}(TopDownOptimizer);
|
|
/**
|
|
* Inserts an intermediate ParseNode containing all non-conflicting parse fields and removes the empty ParseNodes.
|
|
*
|
|
* We assume that dependent paths that do not have a parse node can be just merged.
|
|
*/
|
|
|
|
|
|
// Bottom-up optimizer: when a fork has ParseNode children, hoists the parse
// fields that all agree on into one ParseNode inserted between the parent and
// all of its former children, then deletes ParseNodes left empty.
var MergeParse = /*#__PURE__*/function (_BottomUpOptimizer5) {
  _inherits(MergeParse, _BottomUpOptimizer5);

  var _super29 = _createSuper(MergeParse);

  // Babel class boilerplate: constructor delegates to BottomUpOptimizer.
  function MergeParse() {
    _classCallCheck(this, MergeParse);

    return _super29.apply(this, arguments);
  }

  _createClass(MergeParse, [{
    key: "run",
    value: function run(node) {
      var parent = node.parent;

      // Snapshot the child list before we start re-parenting nodes.
      var originalChildren = _toConsumableArray(parent.children);

      var parseChildren = parent.children.filter(function (child) {
        return child instanceof ParseNode;
      });

      if (parent.numChildren() > 1 && parseChildren.length >= 1) {
        // commonParse: field -> parse spec shared (without conflict) by siblings.
        var commonParse = {};
        // conflictingParse: fields parsed differently by different siblings.
        var conflictingParse = new Set();

        var _iterator126 = _createForOfIteratorHelper(parseChildren),
            _step126;

        try {
          for (_iterator126.s(); !(_step126 = _iterator126.n()).done;) {
            var parseNode = _step126.value;
            var _parse = parseNode.parse;

            var _iterator130 = _createForOfIteratorHelper(keys(_parse)),
                _step130;

            try {
              for (_iterator130.s(); !(_step130 = _iterator130.n()).done;) {
                var k = _step130.value;

                if (!(k in commonParse)) {
                  commonParse[k] = _parse[k];
                } else if (commonParse[k] !== _parse[k]) {
                  conflictingParse.add(k);
                }
              }
            } catch (err) {
              _iterator130.e(err);
            } finally {
              _iterator130.f();
            }
          }
        } catch (err) {
          _iterator126.e(err);
        } finally {
          _iterator126.f();
        }

        // Conflicting fields cannot be hoisted; drop them from the common set.
        var _iterator127 = _createForOfIteratorHelper(conflictingParse),
            _step127;

        try {
          for (_iterator127.s(); !(_step127 = _iterator127.n()).done;) {
            var _field18 = _step127.value;
            delete commonParse[_field18];
          }
        } catch (err) {
          _iterator127.e(err);
        } finally {
          _iterator127.f();
        }

        if (!isEmpty(commonParse)) {
          this.setMutated();
          // New shared parse node becomes the parent's only child; every
          // original child moves under it.
          var mergedParseNode = new ParseNode(parent, commonParse);

          var _iterator128 = _createForOfIteratorHelper(originalChildren),
              _step128;

          try {
            for (_iterator128.s(); !(_step128 = _iterator128.n()).done;) {
              var childNode = _step128.value;

              if (childNode instanceof ParseNode) {
                // Strip the hoisted fields from the child's own parse spec.
                var _iterator129 = _createForOfIteratorHelper(keys(commonParse)),
                    _step129;

                try {
                  for (_iterator129.s(); !(_step129 = _iterator129.n()).done;) {
                    var key = _step129.value;
                    delete childNode.parse[key];
                  }
                } catch (err) {
                  _iterator129.e(err);
                } finally {
                  _iterator129.f();
                }
              }

              parent.removeChild(childNode);
              childNode.parent = mergedParseNode; // remove empty parse nodes

              if (childNode instanceof ParseNode && keys(childNode.parse).length === 0) {
                childNode.remove();
              }
            }
          } catch (err) {
            _iterator128.e(err);
          } finally {
            _iterator128.f();
          }
        }
      }

      this.setContinue();
      return this.flags;
    }
  }]);

  return MergeParse;
}(BottomUpOptimizer);
|
|
|
|
// Bottom-up optimizer: merges sibling AggregateNodes that share the same
// group-by fields (merging is only valid for identical groupings).
var MergeAggregates = /*#__PURE__*/function (_BottomUpOptimizer6) {
  _inherits(MergeAggregates, _BottomUpOptimizer6);

  var _super30 = _createSuper(MergeAggregates);

  // Babel class boilerplate: constructor delegates to BottomUpOptimizer.
  function MergeAggregates() {
    _classCallCheck(this, MergeAggregates);

    return _super30.apply(this, arguments);
  }

  _createClass(MergeAggregates, [{
    key: "run",
    value: function run(node) {
      var parent = node.parent;
      var aggChildren = parent.children.filter(function (child) {
        return child instanceof AggregateNode;
      }); // Object which we'll use to map the fields which an aggregate is grouped by to
      // the set of aggregates with that grouping. This is useful as only aggregates
      // with the same group by can be merged

      var groupedAggregates = {}; // Build groupedAggregates

      var _iterator131 = _createForOfIteratorHelper(aggChildren),
          _step131;

      try {
        for (_iterator131.s(); !(_step131 = _iterator131.n()).done;) {
          var agg = _step131.value;

          // Hash the group-by so it can serve as an object key.
          var groupBys = _hash(agg.groupBy);

          if (!(groupBys in groupedAggregates)) {
            groupedAggregates[groupBys] = [];
          }

          groupedAggregates[groupBys].push(agg);
        } // Merge aggregateNodes with same key in groupedAggregates

      } catch (err) {
        _iterator131.e(err);
      } finally {
        _iterator131.f();
      }

      var _iterator132 = _createForOfIteratorHelper(keys(groupedAggregates)),
          _step132;

      try {
        for (_iterator132.s(); !(_step132 = _iterator132.n()).done;) {
          var group = _step132.value;
          var mergeableAggs = groupedAggregates[group];

          if (mergeableAggs.length > 1) {
            // The last aggregate in the bucket absorbs all the others.
            var mergedAggs = mergeableAggs.pop();

            var _iterator133 = _createForOfIteratorHelper(mergeableAggs),
                _step133;

            try {
              for (_iterator133.s(); !(_step133 = _iterator133.n()).done;) {
                var _agg = _step133.value;

                // merge() may refuse (returns falsy); only splice out on success.
                if (mergedAggs.merge(_agg)) {
                  parent.removeChild(_agg);
                  _agg.parent = mergedAggs;

                  _agg.remove();

                  this.setMutated();
                }
              }
            } catch (err) {
              _iterator133.e(err);
            } finally {
              _iterator133.f();
            }
          }
        }
      } catch (err) {
        _iterator132.e(err);
      } finally {
        _iterator132.f();
      }

      this.setContinue();
      return this.flags;
    }
  }]);

  return MergeAggregates;
}(BottomUpOptimizer);
|
|
/**
|
|
* Merge bin nodes and move them up through forks. Stop at filters, parse, identifier as we want them to stay before the bin node.
|
|
*/
|
|
|
|
|
|
// Bottom-up optimizer: merges sibling BinNodes and promotes mergeable bins above
// the parent; bins stay below data sources, filters, parses, and identifiers.
var MergeBins = /*#__PURE__*/function (_BottomUpOptimizer7) {
  _inherits(MergeBins, _BottomUpOptimizer7);

  var _super31 = _createSuper(MergeBins);

  // `model` supplies renameSignal, used when merged bins must rename signals.
  function MergeBins(model) {
    var _this27;

    _classCallCheck(this, MergeBins);

    _this27 = _super31.call(this);
    _this27.model = model;
    return _this27;
  }

  _createClass(MergeBins, [{
    key: "run",
    value: function run(node) {
      var parent = node.parent;
      // Bins may only be hoisted above parents that must not precede them.
      var moveBinsUp = !(isDataSourceNode(parent) || parent instanceof FilterNode || parent instanceof ParseNode || parent instanceof IdentifierNode);
      // promotableBins: bins safe to move above the parent.
      var promotableBins = [];
      // remainingBins: bins that must stay below (dependency or barrier parent).
      var remainingBins = [];

      var _iterator134 = _createForOfIteratorHelper(parent.children),
          _step134;

      try {
        for (_iterator134.s(); !(_step134 = _iterator134.n()).done;) {
          var child = _step134.value;

          if (child instanceof BinNode) {
            // A bin that reads fields the parent produces cannot be promoted.
            if (moveBinsUp && !fieldIntersection(parent.producedFields(), child.dependentFields())) {
              promotableBins.push(child);
            } else {
              remainingBins.push(child);
            }
          }
        }
      } catch (err) {
        _iterator134.e(err);
      } finally {
        _iterator134.f();
      }

      if (promotableBins.length > 0) {
        // Merge all promotable bins into one, then lift it above the parent.
        var promotedBin = promotableBins.pop();

        var _iterator135 = _createForOfIteratorHelper(promotableBins),
            _step135;

        try {
          for (_iterator135.s(); !(_step135 = _iterator135.n()).done;) {
            var bin = _step135.value;
            promotedBin.merge(bin, this.model.renameSignal.bind(this.model));
          }
        } catch (err) {
          _iterator135.e(err);
        } finally {
          _iterator135.f();
        }

        this.setMutated();

        if (parent instanceof BinNode) {
          parent.merge(promotedBin, this.model.renameSignal.bind(this.model));
        } else {
          promotedBin.swapWithParent();
        }
      }

      if (remainingBins.length > 1) {
        // Collapse the bins that stay in place into a single node.
        var remainingBin = remainingBins.pop();

        var _iterator136 = _createForOfIteratorHelper(remainingBins),
            _step136;

        try {
          for (_iterator136.s(); !(_step136 = _iterator136.n()).done;) {
            var _bin = _step136.value;
            remainingBin.merge(_bin, this.model.renameSignal.bind(this.model));
          }
        } catch (err) {
          _iterator136.e(err);
        } finally {
          _iterator136.f();
        }

        this.setMutated();
      }

      this.setContinue();
      return this.flags;
    }
  }]);

  return MergeBins;
}(BottomUpOptimizer);
|
|
/**
|
|
* This optimizer takes output nodes that are at a fork and moves them before the fork.
|
|
*
|
|
* The algorithm iterates over the children and tries to find the last output node in a chain of output nodes.
|
|
* It then moves all output nodes before that main output node. All other children (and the children of the output nodes)
|
|
* are inserted after the main output node.
|
|
*/
|
|
|
|
|
|
// Bottom-up optimizer: at a fork with output-node children, chains all output
// nodes before a single "main" output and re-parents every other child after it.
var MergeOutputs = /*#__PURE__*/function (_BottomUpOptimizer8) {
  _inherits(MergeOutputs, _BottomUpOptimizer8);

  var _super32 = _createSuper(MergeOutputs);

  // Babel class boilerplate: constructor delegates to BottomUpOptimizer.
  function MergeOutputs() {
    _classCallCheck(this, MergeOutputs);

    return _super32.apply(this, arguments);
  }

  _createClass(MergeOutputs, [{
    key: "run",
    value: function run(node) {
      var parent = node.parent;

      // Snapshot the children before re-parenting mutates the live list.
      var children = _toConsumableArray(parent.children);

      var hasOutputChild = some(children, function (child) {
        return child instanceof OutputNode;
      });

      // Nothing to do unless the parent forks and at least one branch is an output.
      if (!hasOutputChild || parent.numChildren() <= 1) {
        this.setContinue();
        return this.flags;
      }

      var otherChildren = []; // The output node we will connect all other nodes to.
      // Output nodes will be added before the new node, other nodes after.

      var mainOutput;

      var _iterator137 = _createForOfIteratorHelper(children),
          _step137;

      try {
        for (_iterator137.s(); !(_step137 = _iterator137.n()).done;) {
          var _child3 = _step137.value;

          if (_child3 instanceof OutputNode) {
            // Walk to the last output node in this single-child output chain.
            var lastOutput = _child3;

            while (lastOutput.numChildren() === 1) {
              var _lastOutput$children = _slicedToArray(lastOutput.children, 1),
                  theChild = _lastOutput$children[0];

              if (theChild instanceof OutputNode) {
                lastOutput = theChild;
              } else {
                break;
              }
            }

            // Whatever hangs below the chain gets re-attached after mainOutput.
            otherChildren.push.apply(otherChildren, _toConsumableArray(lastOutput.children));

            if (mainOutput) {
              // Move the output nodes before the mainOutput. We do this by setting
              // the parent of the first not to the parent of the main output and
              // the main output's parent to the last output.
              // note: the child is the first output
              parent.removeChild(_child3);
              _child3.parent = mainOutput.parent;
              mainOutput.parent.removeChild(mainOutput);
              mainOutput.parent = lastOutput;
              this.setMutated();
            } else {
              // First output chain found: its tail becomes the main output.
              mainOutput = lastOutput;
            }
          } else {
            otherChildren.push(_child3);
          }
        }
      } catch (err) {
        _iterator137.e(err);
      } finally {
        _iterator137.f();
      }

      if (otherChildren.length) {
        this.setMutated();

        // Hang every non-output branch (and chain tail) off the main output.
        var _iterator138 = _createForOfIteratorHelper(otherChildren),
            _step138;

        try {
          for (_iterator138.s(); !(_step138 = _iterator138.n()).done;) {
            var child = _step138.value;
            child.parent.removeChild(child);
            child.parent = mainOutput;
          }
        } catch (err) {
          _iterator138.e(err);
        } finally {
          _iterator138.f();
        }
      }

      this.setContinue();
      return this.flags;
    }
  }]);

  return MergeOutputs;
}(BottomUpOptimizer);
|
|
|
|
// Prefix applied to output-node names in the cloned pre-facet subtree used for scale domains.
var FACET_SCALE_PREFIX = 'scale_';
// Upper bound on optimizer passes per phase; hitting it triggers a warning.
var MAX_OPTIMIZATION_RUNS = 5;
|
|
/**
|
|
* Iterates over a dataflow graph and checks whether all links are consistent.
|
|
*/
|
|
|
|
/**
 * Recursively verifies that every child's `parent` pointer refers back to the
 * node that lists it as a child. Returns false on the first inconsistency.
 */
function checkLinks(nodes) {
  for (var i = 0; i < nodes.length; i++) {
    var current = nodes[i];

    for (var j = 0; j < current.children.length; j++) {
      if (current.children[j].parent !== current) {
        // log.error('Dataflow graph is inconsistent.', node, child);
        return false;
      }
    }

    if (!checkLinks(current.children)) {
      return false;
    }
  }

  return true;
}
|
|
/**
|
|
* Return all leaf nodes.
|
|
*/
|
|
|
|
|
|
/**
 * Collect every leaf node (numChildren() === 0) reachable from `roots`,
 * in depth-first order.
 */
function getLeaves(roots) {
  var leaves = [];

  // Depth-first walk; pushes a node when it has no children.
  function append(node) {
    if (node.numChildren() === 0) {
      leaves.push(node);
      return;
    }

    node.children.forEach(append);
  }

  roots.forEach(append);
  return leaves;
}
|
|
|
|
// Identity predicate used with Array#some to test whether any flag is truthy.
function isTrue(flag) {
  return flag;
}
|
|
/**
 * Run the specified optimizer on the provided nodes.
 *
 * @param optimizer The optimizer instance to run.
 * @param nodes A set of nodes to optimize.
 * @returns Whether any optimization call reported a mutation.
 */
|
|
|
|
|
|
/**
 * Run the given optimizer over every node and report whether anything mutated.
 * Bottom-up optimizers start at leaves via optimizeNextFromLeaves (and are
 * reset between nodes); top-down optimizers are run on the node directly.
 */
function runOptimizer(optimizer, nodes) {
  var results = nodes.map(function (node) {
    if (!(optimizer instanceof BottomUpOptimizer)) {
      return optimizer.run(node);
    }

    var runFlags = optimizer.optimizeNextFromLeaves(node);
    optimizer.reset();
    return runFlags;
  });
  return results.some(isTrue);
}
|
|
|
|
// One optimization pass over the dataflow: always prunes unnecessary output /
// identifier nodes and unused subtrees; after the facet has been moved down
// (firstPass === false) it additionally runs the merge/move optimizers.
// Returns true if any optimizer mutated the graph.
function optimizationDataflowHelper(dataComponent, model, firstPass) {
  var roots = dataComponent.sources;
  // Set of booleans returned by runOptimizer; has(true) means "something changed".
  var mutatedFlags = new Set();
  mutatedFlags.add(runOptimizer(new RemoveUnnecessaryOutputNodes(), roots));
  mutatedFlags.add(runOptimizer(new RemoveUnnecessaryIdentifierNodes(model), roots)); // remove source nodes that don't have any children because they also don't have output nodes

  roots = roots.filter(function (r) {
    return r.numChildren() > 0;
  });
  mutatedFlags.add(runOptimizer(new RemoveUnusedSubtrees(), getLeaves(roots)));
  // Pruning may have emptied more sources; drop them again.
  roots = roots.filter(function (r) {
    return r.numChildren() > 0;
  });

  if (!firstPass) {
    // Only run these optimizations after the optimizer has moved down the facet node.
    // With this change, we can be more aggressive in the optimizations.
    mutatedFlags.add(runOptimizer(new MoveParseUp(), getLeaves(roots)));
    mutatedFlags.add(runOptimizer(new MergeBins(model), getLeaves(roots)));
    mutatedFlags.add(runOptimizer(new RemoveDuplicateTimeUnits(), getLeaves(roots)));
    mutatedFlags.add(runOptimizer(new MergeParse(), getLeaves(roots)));
    mutatedFlags.add(runOptimizer(new MergeAggregates(), getLeaves(roots)));
    mutatedFlags.add(runOptimizer(new MergeTimeUnits(), getLeaves(roots)));
    mutatedFlags.add(runOptimizer(new MergeIdenticalNodes(), roots));
    mutatedFlags.add(runOptimizer(new MergeOutputs(), getLeaves(roots)));
  }

  dataComponent.sources = roots;
  return mutatedFlags.has(true);
}
|
|
/**
|
|
* Optimizes the dataflow of the passed in data component.
|
|
*/
|
|
|
|
|
|
/**
 * Optimizes the dataflow of the passed in data component.
 *
 * Runs up to MAX_OPTIMIZATION_RUNS pruning passes, moves facet nodes down
 * (cloning subtrees so scales can live at the top level), then runs up to
 * MAX_OPTIMIZATION_RUNS aggressive passes. Link consistency is checked before
 * and after. Warns if either phase hits the pass limit without converging.
 *
 * @param data The data component whose `sources` dataflow is optimized in place.
 * @param model The model, passed through to model-aware optimizers.
 */
function optimizeDataflow(data, model) {
  // check before optimizations
  checkLinks(data.sources);
  var firstPassCounter = 0;
  var secondPassCounter = 0;

  for (var i = 0; i < MAX_OPTIMIZATION_RUNS; i++) {
    if (!optimizationDataflowHelper(data, model, true)) {
      break;
    }

    firstPassCounter++;
  } // move facets down and make a copy of the subtree so that we can have scales at the top level


  // forEach (not map): iteration is purely for moveFacetDown's side effects;
  // the original discarded the mapped array.
  data.sources.forEach(moveFacetDown);

  for (var _i14 = 0; _i14 < MAX_OPTIMIZATION_RUNS; _i14++) {
    if (!optimizationDataflowHelper(data, model, false)) {
      break;
    }

    secondPassCounter++;
  } // check after optimizations


  checkLinks(data.sources);

  if (Math.max(firstPassCounter, secondPassCounter) === MAX_OPTIMIZATION_RUNS) {
    warn("Maximum optimization runs(".concat(MAX_OPTIMIZATION_RUNS, ") reached."));
  }
}
|
|
/**
|
|
* A class that behaves like a SignalRef but lazily generates the signal.
|
|
* The provided generator function should use `Model.getSignalName` to use the correct signal name.
|
|
*/
|
|
|
|
|
|
// Behaves like a Vega SignalRef but computes the signal name lazily: `signal`
// is an enumerable getter backed by the supplied generator function.
var SignalRefWrapper = /*#__PURE__*/function () {
  // exprGenerator: zero-arg function returning the signal expression/name;
  // evaluated every time `.signal` is read.
  function SignalRefWrapper(exprGenerator) {
    _classCallCheck(this, SignalRefWrapper);

    Object.defineProperty(this, 'signal', {
      enumerable: true,
      get: exprGenerator
    });
  }

  _createClass(SignalRefWrapper, null, [{
    key: "fromName",
    // Static factory: wraps a rename function so the wrapper always reflects
    // the current (possibly renamed) name of `signalName`.
    value: function fromName(rename, signalName) {
      return new SignalRefWrapper(function () {
        return rename(signalName);
      });
    }
  }]);

  return SignalRefWrapper;
}();
|
|
|
|
/**
 * Parse scale domains for a model: unit models are handled directly,
 * composite models delegate to their children.
 */
function parseScaleDomain(model) {
  var parse = isUnitModel(model) ? parseUnitScaleDomain : parseNonUnitScaleDomain;
  parse(model);
}
|
|
|
|
// Computes and stores the scale domains for every channel of a unit model,
// then wires up selection domains. For faceted data with shared resolve, data
// references are redirected to the cloned pre-facet subtree (FACET_SCALE_PREFIX).
function parseUnitScaleDomain(model) {
  var localScaleComponents = model.component.scales;

  var _iterator143 = _createForOfIteratorHelper(keys(localScaleComponents)),
      _step143;

  try {
    for (_iterator143.s(); !(_step143 = _iterator143.n()).done;) {
      var channel = _step143.value;
      var domains = parseDomainForChannel(model, channel);
      var localScaleCmpt = localScaleComponents[channel];
      localScaleCmpt.setWithExplicit('domains', domains);
      parseSelectionDomain(model, channel);

      if (model.component.data.isFaceted) {
        // get resolve from closest facet parent as this decides whether we need to refer to cloned subtree or not
        var facetParent = model;

        while (!isFacetModel(facetParent) && facetParent.parent) {
          facetParent = facetParent.parent;
        }

        var resolve = facetParent.component.resolve.scale[channel];

        if (resolve === 'shared') {
          var _iterator144 = _createForOfIteratorHelper(domains.value),
              _step144;

          try {
            for (_iterator144.s(); !(_step144 = _iterator144.n()).done;) {
              var _domain2 = _step144.value;

              // Replace the scale domain with data output from a cloned subtree after the facet.
              if (isDataRefDomain(_domain2)) {
                // use data from cloned subtree (which is the same as data but with a prefix added once)
                _domain2.data = FACET_SCALE_PREFIX + _domain2.data.replace(FACET_SCALE_PREFIX, '');
              }
            }
          } catch (err) {
            _iterator144.e(err);
          } finally {
            _iterator144.f();
          }
        }
      }
    }
  } catch (err) {
    _iterator143.e(err);
  } finally {
    _iterator143.f();
  }
}
|
|
|
|
// Parses scale domains for a composite model: first recurses into the children,
// then merges each channel's child domains (and selection extents) into the
// composite model's own scale components.
function parseNonUnitScaleDomain(model) {
  var _iterator145 = _createForOfIteratorHelper(model.children),
      _step145;

  try {
    for (_iterator145.s(); !(_step145 = _iterator145.n()).done;) {
      var child = _step145.value;
      parseScaleDomain(child);
    }
  } catch (err) {
    _iterator145.e(err);
  } finally {
    _iterator145.f();
  }

  var localScaleComponents = model.component.scales;

  var _iterator146 = _createForOfIteratorHelper(keys(localScaleComponents)),
      _step146;

  try {
    for (_iterator146.s(); !(_step146 = _iterator146.n()).done;) {
      var channel = _step146.value;
      // Accumulated merge of all children's domains for this channel.
      var domains = void 0;
      var selectionExtent = null;

      var _iterator147 = _createForOfIteratorHelper(model.children),
          _step147;

      try {
        for (_iterator147.s(); !(_step147 = _iterator147.n()).done;) {
          var _child4 = _step147.value;
          var childComponent = _child4.component.scales[channel];

          if (childComponent) {
            if (domains === undefined) {
              domains = childComponent.getWithExplicit('domains');
            } else {
              domains = mergeValuesWithExplicit(domains, childComponent.getWithExplicit('domains'), 'domains', 'scale', domainsTieBreaker);
            }

            var se = childComponent.get('selectionExtent');

            // Children that bind the scale to different selections cannot share it.
            if (selectionExtent && se && selectionExtent.selection !== se.selection) {
              warn(NEEDS_SAME_SELECTION);
            }

            selectionExtent = se;
          }
        }
      } catch (err) {
        _iterator147.e(err);
      } finally {
        _iterator147.f();
      }

      localScaleComponents[channel].setWithExplicit('domains', domains);

      if (selectionExtent) {
        localScaleComponents[channel].set('selectionExtent', selectionExtent, true);
      }
    }
  } catch (err) {
    _iterator146.e(err);
  } finally {
    _iterator146.f();
  }
}
|
|
/**
 * Remove unaggregated domain if it is not applicable
 * Add unaggregated domain if domain is not specified and config.scale.useUnaggregatedDomain is true.
 */
function normalizeUnaggregatedDomain(domain, fieldDef, scaleType, scaleConfig) {
  // An explicit 'unaggregated' domain is dropped (with a warning) when it
  // cannot apply to this field/scale combination.
  if (domain === 'unaggregated') {
    var check = canUseUnaggregatedDomain(fieldDef, scaleType);

    if (!check.valid) {
      warn(check.reason);
      return undefined;
    }

    return domain;
  }

  // With no explicit domain, the config flag may opt the scale in.
  if (domain === undefined && scaleConfig.useUnaggregatedDomain) {
    if (canUseUnaggregatedDomain(fieldDef, scaleType).valid) {
      return 'unaggregated';
    }
  }

  return domain;
}
|
|
|
|
// Parse the domain for one channel of a unit model. For x/y, the domain is
// unioned with the secondary range channel (x2/y2) when one is encoded.
function parseDomainForChannel(model, channel) {
  var scaleType = model.getScaleComponent(channel).get('type');
  var encoding = model.encoding;
  var domain = normalizeUnaggregatedDomain(model.scaleDomain(channel), model.typedFieldDef(channel), scaleType, model.config.scale);

  if (domain !== model.scaleDomain(channel)) {
    // Persist the normalized domain back onto the specified scale.
    model.specifiedScales[channel] = Object.assign(Object.assign({}, model.specifiedScales[channel]), {
      domain: domain
    });
  }

  // If channel is either X or Y then union them with X2 & Y2 if they exist.
  if (channel === 'x' || channel === 'y') {
    var secondary = channel === 'x' ? 'x2' : 'y2';

    if (getFieldOrDatumDef(encoding[secondary])) {
      if (getFieldOrDatumDef(encoding[channel])) {
        return mergeValuesWithExplicit(parseSingleChannelDomain(scaleType, domain, model, channel), parseSingleChannelDomain(scaleType, domain, model, secondary), 'domain', 'scale', domainsTieBreaker);
      }

      // Only the secondary channel is encoded.
      return parseSingleChannelDomain(scaleType, domain, model, secondary);
    }
  }

  return parseSingleChannelDomain(scaleType, domain, model, channel);
}
|
|
|
|
// Wrap each raw domain value in a Vega signal that constructs the
// corresponding date-time value at runtime.
function mapDomainToDataSignal(domain, type, timeUnit) {
  return domain.map(function (value) {
    var expr = valueExpr(value, {
      timeUnit: timeUnit,
      type: type
    });
    return {
      signal: "{data: ".concat(expr, "}")
    };
  });
}
|
|
|
|
// Convert an explicit domain to date-time signals when the channel is
// temporal or has a time unit; otherwise return the domain wrapped in an
// array (a single Vega domain entry).
function convertDomainIfItIsDateTime(domain, type, timeUnit) {
  var _a;

  var unit = (_a = normalizeTimeUnit(timeUnit)) === null || _a === void 0 ? void 0 : _a.unit;

  if (type === 'temporal' || unit) {
    return mapDomainToDataSignal(domain, type, unit);
  }

  return [domain]; // Date time won't make sense
}
|
|
|
|
// Compute the list of Vega domains for a single channel, wrapped as an
// Explicit (user-specified) or Implicit (inferred) value. The branch order
// matters: explicit domains first, then stack, datum, 'unaggregated', bin,
// time-unit band, sort, and finally the plain main-data reference.
function parseSingleChannelDomain(scaleType, domain, model, channel) {
  var encoding = model.encoding;
  var fieldOrDatumDef = getFieldOrDatumDef(encoding[channel]);
  var type = fieldOrDatumDef.type;
  var timeUnit = fieldOrDatumDef['timeUnit'];

  if (isDomainUnionWith(domain)) {
    // {unionWith: [...]} — union the user's values with the default domain.
    var defaultDomain = parseSingleChannelDomain(scaleType, undefined, model, channel);
    var unionWith = convertDomainIfItIsDateTime(domain.unionWith, type, timeUnit);
    return makeExplicit([].concat(_toConsumableArray(defaultDomain.value), _toConsumableArray(unionWith)));
  } else if (isSignalRef(domain)) {
    return makeExplicit([domain]);
  } else if (domain && domain !== 'unaggregated' && !isSelectionDomain(domain)) {
    // Explicit literal domain (selection domains are handled during assembly).
    return makeExplicit(convertDomainIfItIsDateTime(domain, type, timeUnit));
  }

  var stack = model.stack;

  if (stack && channel === stack.fieldChannel) {
    if (stack.offset === 'normalize') {
      // Normalized stacks always span [0, 1].
      return makeImplicit([[0, 1]]);
    }

    // Stacked field: domain spans the computed stack start/end fields.
    var data = model.requestDataName(DataSourceType.Main);
    return makeImplicit([{
      data: data,
      field: model.vgField(channel, {
        suffix: 'start'
      })
    }, {
      data: data,
      field: model.vgField(channel, {
        suffix: 'end'
      })
    }]);
  }

  var sort = isScaleChannel(channel) && isFieldDef(fieldOrDatumDef) ? domainSort(model, channel, scaleType) : undefined;

  if (isDatumDef(fieldOrDatumDef)) {
    // A constant datum yields a single-value domain.
    var d = convertDomainIfItIsDateTime([fieldOrDatumDef.datum], type, timeUnit);
    return makeImplicit(d);
  }

  var fieldDef = fieldOrDatumDef; // now we can be sure it's a fieldDef

  if (domain === 'unaggregated') {
    // Domain spans [min, max] of the raw (pre-aggregation) field values.
    var _data = model.requestDataName(DataSourceType.Main);

    var _field19 = fieldOrDatumDef.field;
    return makeImplicit([{
      data: _data,
      field: _vgField({
        field: _field19,
        aggregate: 'min'
      })
    }, {
      data: _data,
      field: _vgField({
        field: _field19,
        aggregate: 'max'
      })
    }]);
  } else if (isBinning(fieldDef.bin)) {
    if (hasDiscreteDomain(scaleType)) {
      if (scaleType === 'bin-ordinal') {
        // we can omit the domain as it is inferred from the `bins` property
        return makeImplicit([]);
      } // ordinal bin scale takes domain from bin_range, ordered by bin start
      // This is useful for both axis-based scale (x/y) and legend-based scale (other channels).


      return makeImplicit([{
        // If sort by aggregation of a specified sort field, we need to use RAW table,
        // so we can aggregate values for the scale independently from the main aggregation.
        data: isBoolean$1(sort) ? model.requestDataName(DataSourceType.Main) : model.requestDataName(DataSourceType.Raw),
        // Use range if we added it and the scale does not support computing a range as a signal.
        field: model.vgField(channel, binRequiresRange(fieldDef, channel) ? {
          binSuffix: 'range'
        } : {}),
        // we have to use a sort object if sort = true to make the sort correct by bin start
        sort: sort === true || !isObject(sort) ? {
          field: model.vgField(channel, {}),
          op: 'min' // min or max doesn't matter since we sort by the start of the bin range

        } : sort
      }]);
    } else {
      // continuous scales
      var bin = fieldDef.bin;

      if (isBinning(bin)) {
        // Domain is [bin.start, bin.stop], read lazily from the bin signal so
        // later signal renames are honored.
        var binSignal = getBinSignalName(model, fieldDef.field, bin);
        return makeImplicit([new SignalRefWrapper(function () {
          var signal = model.getSignalName(binSignal);
          return "[".concat(signal, ".start, ").concat(signal, ".stop]");
        })]);
      } else {
        return makeImplicit([{
          data: model.requestDataName(DataSourceType.Main),
          field: model.vgField(channel, {})
        }]);
      }
    }
  } else if (fieldDef.timeUnit && contains(['time', 'utc'], scaleType) && hasBand(channel, fieldDef, isUnitModel(model) ? model.encoding[getSecondaryRangeChannel(channel)] : undefined, model.stack, model.markDef, model.config)) {
    // Banded time unit on a time scale: include both the unit start and end
    // fields so the last band is fully covered.
    var _data2 = model.requestDataName(DataSourceType.Main);

    return makeImplicit([{
      data: _data2,
      field: model.vgField(channel)
    }, {
      data: _data2,
      field: model.vgField(channel, {
        suffix: 'end'
      })
    }]);
  } else if (sort) {
    return makeImplicit([{
      // If sort by aggregation of a specified sort field, we need to use RAW table,
      // so we can aggregate values for the scale independently from the main aggregation.
      data: isBoolean$1(sort) ? model.requestDataName(DataSourceType.Main) : model.requestDataName(DataSourceType.Raw),
      field: model.vgField(channel),
      sort: sort
    }]);
  } else {
    // Default: the field's values from the main data source.
    return makeImplicit([{
      data: model.requestDataName(DataSourceType.Main),
      field: model.vgField(channel)
    }]);
  }
}
|
|
|
|
// Normalize a sort-field definition: apply the default op ('sum' for stacked
// measures, otherwise the global default), escape the field path, and keep
// the order only when given.
function normalizeSortField(sort, isStackedMeasure) {
  var op = sort.op,
      field = sort.field,
      order = sort.order;

  var normalized = {
    op: op !== null && op !== void 0 ? op : isStackedMeasure ? 'sum' : DEFAULT_SORT_OP
  };

  if (field) {
    // Escape dots/brackets so Vega treats the path as a literal field name.
    normalized.field = replacePathInField(field);
  }

  if (order) {
    normalized.order = order;
  }

  return normalized;
}
|
|
|
|
// Detect a selection-driven domain (scale.domain or bin.extent bound to a
// selection) and stash it on the scale component for later assembly.
function parseSelectionDomain(model, channel) {
  var _a;

  var scaleCmpt = model.component.scales[channel];
  var specifiedDomain = model.specifiedScales[channel].domain;
  var bin = (_a = model.fieldDef(channel)) === null || _a === void 0 ? void 0 : _a.bin;
  var selectionDomain = isSelectionDomain(specifiedDomain) && specifiedDomain;
  var selectionBinExtent = isBinParams(bin) && isSelectionExtent(bin.extent) && bin.extent;

  if (selectionDomain || selectionBinExtent) {
    // As scale parsing occurs before selection parsing, we cannot set
    // domainRaw directly. So instead, we store the selectionExtent on
    // the scale component, and then add domainRaw during scale assembly.
    scaleCmpt.set('selectionExtent', selectionDomain !== null && selectionDomain !== void 0 ? selectionDomain : selectionBinExtent, true);
  }
}
|
|
|
|
// Compute the sort definition for a discrete scale domain. Returns a Vega
// sort object, `true` for default ascending order, or undefined when no
// sorting applies (continuous scales, or sort === null).
function domainSort(model, channel, scaleType) {
  if (!hasDiscreteDomain(scaleType)) {
    return undefined;
  } // save to cast as the only exception is the geojson type for shape, which would not generate a scale


  var fieldDef = model.fieldDef(channel);
  var sort = fieldDef.sort; // if the sort is specified with array, use the derived sort index field

  if (isSortArray(sort)) {
    return {
      op: 'min',
      field: sortArrayIndexField(fieldDef, channel),
      order: 'ascending'
    };
  }

  var stack = model.stack;
  // Fields that group the stack; a sort field outside them is a stacked measure.
  var stackDimensions = stack ? [].concat(_toConsumableArray(stack.groupbyField ? [stack.groupbyField] : []), _toConsumableArray(stack.stackBy.map(function (s) {
    return s.fieldDef.field;
  }))) : undefined; // Sorted based on an aggregate calculation over a specified sort field (only for ordinal scale)


  if (isSortField(sort)) {
    var isStackedMeasure = stack && !contains(stackDimensions, sort.field);
    return normalizeSortField(sort, isStackedMeasure);
  } else if (isSortByEncoding(sort)) {
    // Sort by another encoding channel's (possibly aggregated) field.
    var encoding = sort.encoding,
        order = sort.order;
    var fieldDefToSortBy = model.fieldDef(encoding);
    var aggregate = fieldDefToSortBy.aggregate,
        _field20 = fieldDefToSortBy.field;

    var _isStackedMeasure = stack && !contains(stackDimensions, _field20);

    if (isArgminDef(aggregate) || isArgmaxDef(aggregate)) {
      // argmin/argmax must be referenced via the full vgField expression.
      return normalizeSortField({
        field: _vgField(fieldDefToSortBy),
        order: order
      }, _isStackedMeasure);
    } else if (isAggregateOp(aggregate) || !aggregate) {
      return normalizeSortField({
        op: aggregate,
        field: _field20,
        order: order
      }, _isStackedMeasure);
    }
  } else if (sort === 'descending') {
    return {
      op: 'min',
      field: model.vgField(channel),
      order: 'descending'
    };
  } else if (contains(['ascending', undefined
  /* default =ascending*/
  ], sort)) {
    return true;
  } // sort == null


  return undefined;
}
|
|
/**
 * Determine if a scale can use unaggregated domain.
 * @return {Boolean} Returns true if all of the following conditions apply:
 * 1. `scale.domain` is `unaggregated`
 * 2. Aggregation function is not `count` or `sum`
 * 3. The scale is quantitative or time scale.
 */
function canUseUnaggregatedDomain(fieldDef, scaleType) {
  var aggregate = fieldDef.aggregate,
      type = fieldDef.type;

  // A raw (unaggregated) field gains nothing from an unaggregated domain.
  if (!aggregate) {
    return {
      valid: false,
      reason: unaggregateDomainHasNoEffectForRawField(fieldDef)
    };
  }

  // Ops outside the shared-domain set (e.g. count, sum) are not compatible.
  if (isString(aggregate) && !SHARED_DOMAIN_OP_INDEX[aggregate]) {
    return {
      valid: false,
      reason: unaggregateDomainWithNonSharedDomainOp(aggregate)
    };
  }

  // Raw quantitative data may contain zero, which a log scale cannot show.
  if (type === 'quantitative' && scaleType === 'log') {
    return {
      valid: false,
      reason: unaggregatedDomainWithLogScale(fieldDef)
    };
  }

  return {
    valid: true
  };
}
|
|
/**
 * Tie breaker for mergeValuesWithExplicit for domains. We concat the specified values.
 */
function domainsTieBreaker(v1, v2, property, propertyOf) {
  // Two explicit domains conflict — warn, then union them anyway so neither
  // side's values are lost.
  if (v1.explicit && v2.explicit) {
    warn(mergeConflictingDomainProperty(property, propertyOf, v1.value, v2.value));
  }

  var combined = [].concat(_toConsumableArray(v1.value), _toConsumableArray(v2.value));

  return {
    explicit: v1.explicit,
    value: combined
  };
}
|
|
/**
 * Converts an array of domains to a single Vega scale domain.
 */
function mergeDomains(domains) {
  // Deduplicate domains, ignoring `sort` so differently-sorted copies of the
  // same data reference collapse into one.
  var uniqueDomains = unique(domains.map(function (domain) {
    // ignore sort property when computing the unique domains
    if (isDataRefDomain(domain)) {
      var domainWithoutSort = __rest(domain, ["sort"]);

      return domainWithoutSort;
    }

    return domain;
  }), _hash);
  // Collect the distinct non-undefined sort definitions, normalizing each
  // in place (drop field for op:'count', drop default 'ascending' order).
  var sorts = unique(domains.map(function (d) {
    if (isDataRefDomain(d)) {
      var s = d.sort;

      if (s !== undefined && !isBoolean$1(s)) {
        if ('op' in s && s.op === 'count') {
          // let's make sure that if op is count, we don't use a field
          delete s.field;
        }

        if (s.order === 'ascending') {
          // drop order: ascending as it is the default
          delete s.order;
        }
      }

      return s;
    }

    return undefined;
  }).filter(function (s) {
    return s !== undefined;
  }), _hash);

  if (uniqueDomains.length === 0) {
    return undefined;
  } else if (uniqueDomains.length === 1) {
    // Single distinct domain: reattach a (possibly simplified) sort.
    var _domain3 = domains[0];

    if (isDataRefDomain(_domain3) && sorts.length > 0) {
      var _sort = sorts[0];

      if (sorts.length > 1) {
        warn(MORE_THAN_ONE_SORT);
        _sort = true;
      } else {
        // Simplify domain sort by removing field and op when the field is the same as the domain field.
        if (isObject(_sort) && 'field' in _sort) {
          var sortField = _sort.field;

          if (_domain3.field === sortField) {
            _sort = _sort.order ? {
              order: _sort.order
            } : true;
          }
        }
      }

      return Object.assign(Object.assign({}, _domain3), {
        sort: _sort
      });
    }

    return _domain3;
  } // only keep sort properties that work with unioned domains


  var unionDomainSorts = unique(sorts.map(function (s) {
    if (isBoolean$1(s) || !('op' in s) || s.op in MULTIDOMAIN_SORT_OP_INDEX) {
      return s;
    }

    warn(domainSortDropped(s));
    return true;
  }), _hash);
  var sort;

  if (unionDomainSorts.length === 1) {
    sort = unionDomainSorts[0];
  } else if (unionDomainSorts.length > 1) {
    warn(MORE_THAN_ONE_SORT);
    sort = true;
  }

  // All data sources referenced by the domains (null entries for non-data domains).
  var allData = unique(domains.map(function (d) {
    if (isDataRefDomain(d)) {
      return d.data;
    }

    return null;
  }), function (x) {
    return x;
  });

  if (allData.length === 1 && allData[0] !== null) {
    // create a union domain of different fields with a single data source
    var _domain4 = Object.assign({
      data: allData[0],
      fields: uniqueDomains.map(function (d) {
        return d.field;
      })
    }, sort ? {
      sort: sort
    } : {});

    return _domain4;
  }

  // Multiple data sources: union the full data-reference domains.
  return Object.assign({
    fields: uniqueDomains
  }, sort ? {
    sort: sort
  } : {});
}
|
|
/**
 * Return a field if a scale uses a single field.
 * Return `undefined` otherwise.
 */
function getFieldFromDomain(domain) {
  if (isDataRefDomain(domain) && isString(domain.field)) {
    return domain.field;
  }

  if (isDataRefUnionedDomain(domain)) {
    // A union of data references is usable only if all refs share one field.
    var sharedField;

    for (var i = 0; i < domain.fields.length; i++) {
      var ref = domain.fields[i];

      if (isDataRefDomain(ref) && isString(ref.field)) {
        if (!sharedField) {
          sharedField = ref.field;
        } else if (sharedField !== ref.field) {
          // Different fields from different sources — bail out with the first.
          warn(FACETED_INDEPENDENT_DIFFERENT_SOURCES);
          return sharedField;
        }
      }
    }

    warn(FACETED_INDEPENDENT_SAME_FIELDS_DIFFERENT_SOURCES);
    return sharedField;
  }

  if (isFieldRefUnionDomain(domain)) {
    warn(FACETED_INDEPENDENT_SAME_SOURCE);
    var firstField = domain.fields[0];
    return isString(firstField) ? firstField : undefined;
  }

  return undefined;
}
|
|
|
|
// Assemble the final Vega domain for one channel of a model.
function assembleDomain(model, channel) {
  var componentDomains = model.component.scales[channel].get('domains');

  var resolved = componentDomains.map(function (d) {
    // The data reference was chosen during parseScale, which runs before
    // parseData, so it may be stale; re-resolve it against the final lookup.
    if (isDataRefDomain(d)) {
      d.data = model.lookupDataSource(d.data);
    }

    return d;
  });

  // Collapse the per-channel list into a single Vega scale domain.
  return mergeDomains(resolved);
}
|
|
|
|
// Assemble Vega scales for a model and, for layer/concat models, for its
// children too (their scales live in the parent's scope). Facet children
// keep their own scope; unit models have no children.
function assembleScales(model) {
  var scales = assembleScalesForModel(model);

  if (isLayerModel(model) || isConcatModel(model)) {
    for (var i = 0; i < model.children.length; i++) {
      scales = scales.concat(assembleScales(model.children[i]));
    }
  }

  return scales;
}
|
|
|
|
// Assemble the Vega scale definitions for a single model's scale components,
// skipping components that were merged into another model's scales.
function assembleScalesForModel(model) {
  return keys(model.component.scales).reduce(function (scales, channel) {
    var scaleComponent = model.component.scales[channel];

    if (scaleComponent.merged) {
      // Skipped merged scales
      return scales;
    }

    // Collapse the explicit/implicit split into a plain scale object.
    var scale = scaleComponent.combine();

    var name = scale.name,
        type = scale.type,
        selectionExtent = scale.selectionExtent,
        _d = scale.domains,
        _r = scale.range,
        reverse = scale.reverse,
        otherScaleProps = __rest(scale, ["name", "type", "selectionExtent", "domains", "range", "reverse"]);

    // Range may need step signals or re-resolved data references.
    var range = assembleScaleRange(scale.range, name, channel, model);
    var domainRaw;

    if (selectionExtent) {
      // Selection-driven extents become `domainRaw` at assembly time.
      domainRaw = assembleSelectionScaleDomain(model, selectionExtent);
    }

    var domain = assembleDomain(model, channel);
    // Optional properties (domain, domainRaw, reverse) are only emitted when
    // present; remaining combined properties are spread in last.
    scales.push(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({
      name: name,
      type: type
    }, domain ? {
      domain: domain
    } : {}), domainRaw ? {
      domainRaw: domainRaw
    } : {}), {
      range: range
    }), reverse !== undefined ? {
      reverse: reverse
    } : {}), otherScaleProps));
    return scales;
  }, []);
}
|
|
|
|
// Finalize a scale range for assembly: x/y step ranges become references to
// the layout step signal, and data-reference ranges get their data source
// name re-resolved.
function assembleScaleRange(scaleRange, scaleName, channel, model) {
  if (isXorY(channel)) {
    // For width/height step, use a signal created in layout assemble instead
    // of a constant step.
    if (isVgRangeStep(scaleRange)) {
      return {
        step: {
          signal: scaleName + '_step'
        }
      };
    }

    return scaleRange;
  }

  if (isObject(scaleRange) && isDataRefDomain(scaleRange)) {
    return Object.assign(Object.assign({}, scaleRange), {
      data: model.lookupDataSource(scaleRange.data)
    });
  }

  return scaleRange;
}
|
|
|
|
// ScaleComponent: a Split-based container holding a channel's scale
// properties, tracking which were explicitly specified vs. inferred.
var ScaleComponent = /*#__PURE__*/function (_Split4) {
  _inherits(ScaleComponent, _Split4);

  var _super33 = _createSuper(ScaleComponent);

  // `name` is the Vega scale name (stored as an implicit property);
  // `typeWithExplicit` carries the scale type with its explicit flag.
  function ScaleComponent(name, typeWithExplicit) {
    var _this28;

    _classCallCheck(this, ScaleComponent);

    _this28 = _super33.call(this, {}, // no initial explicit property
    {
      name: name
    } // name as initial implicit property
    );
    // Set to true when this component is merged into another scale; merged
    // components are skipped during assembly.
    _this28.merged = false;

    _this28.setWithExplicit('type', typeWithExplicit);

    return _this28;
  }
  /**
   * Whether the scale definitely includes zero in the domain
   */


  _createClass(ScaleComponent, [{
    key: "domainDefinitelyIncludesZero",
    value: function domainDefinitelyIncludesZero() {
      // zero !== false covers both explicit zero:true and the default.
      if (this.get('zero') !== false) {
        return true;
      }

      // Otherwise check whether any two-element numeric domain spans zero.
      return some(this.get('domains'), function (d) {
        return isArray(d) && d.length === 2 && d[0] <= 0 && d[1] >= 0;
      });
    }
  }]);

  return ScaleComponent;
}(Split);
|
|
|
|
var RANGE_PROPERTIES = ['range', 'scheme'];
|
|
|
|
// Map a positional channel to the view size property its range depends on.
function getSizeChannel$1(channel) {
  switch (channel) {
    case 'x':
      return 'width';

    case 'y':
      return 'height';

    default:
      return undefined;
  }
}
|
|
|
|
// Parse the range for every scale channel of a unit model.
function parseUnitScaleRange(model) {
  var localScaleComponents = model.component.scales;

  // Iterate SCALE_CHANNELS (not keys(scales)) to ensure x and y come first.
  for (var i = 0; i < SCALE_CHANNELS.length; i++) {
    var channel = SCALE_CHANNELS[i];
    var localScaleCmpt = localScaleComponents[channel];

    if (!localScaleCmpt) {
      continue;
    }

    localScaleCmpt.setWithExplicit('range', parseRangeForChannel(channel, model));
  }
}
|
|
|
|
// For a binned x/y field, return a signal computing the per-bin step
// (view size divided by the number of bins); otherwise undefined.
function getBinStepSignal(model, channel) {
  var fieldDef = model.fieldDef(channel);

  if (!(fieldDef && fieldDef.bin && isBinning(fieldDef.bin))) {
    return undefined;
  }

  var binSignal = getBinSignalName(model, fieldDef.field, fieldDef.bin); // TODO: extract this to be range step signal

  var sizeSignal = model.getName(getSizeChannel$1(channel));

  // Evaluate lazily so later signal merges/renames are honored.
  return new SignalRefWrapper(function () {
    var updatedName = model.getSignalName(binSignal);
    var binCount = "(".concat(updatedName, ".stop - ").concat(updatedName, ".start) / ").concat(updatedName, ".step");
    return "".concat(model.getSignalName(sizeSignal), " / (").concat(binCount, ")");
  });
}
|
|
/**
 * Return mixins that includes one of the Vega range types (explicit range, range.step, range.scheme).
 */
function parseRangeForChannel(channel, model) {
  var specifiedScale = model.specifiedScales[channel];
  var size = model.size;
  var mergedScaleCmpt = model.getScaleComponent(channel);
  var scaleType = mergedScaleCmpt.get('type'); // Check if any of the range properties is specified.
  // If so, check if it is compatible and make sure that we only output one of the properties

  var _iterator150 = _createForOfIteratorHelper(RANGE_PROPERTIES),
      _step150;

  try {
    for (_iterator150.s(); !(_step150 = _iterator150.n()).done;) {
      var property = _step150.value;

      if (specifiedScale[property] !== undefined) {
        var supportedByScaleType = scaleTypeSupportProperty(scaleType, property);
        var channelIncompatability = channelScalePropertyIncompatability(channel, property);

        if (!supportedByScaleType) {
          warn(scalePropertyNotWorkWithScaleType(scaleType, property, channel));
        } else if (channelIncompatability) {
          // channel
          warn(channelIncompatability);
        } else {
          switch (property) {
            case 'range':
              {
                var range = specifiedScale.range;

                if (isArray(range)) {
                  if (isXorY(channel)) {
                    // 'width'/'height' entries become size signal references.
                    return makeExplicit(range.map(function (v) {
                      if (v === 'width' || v === 'height') {
                        // get signal for width/height
                        // Just like default range logic below, we use SignalRefWrapper to account for potential merges and renames.
                        var sizeSignal = model.getName(v);
                        var getSignalName = model.getSignalName.bind(model);
                        return SignalRefWrapper.fromName(getSignalName, sizeSignal);
                      }

                      return v;
                    }));
                  }
                } else if (isObject(range)) {
                  // A {field} range: read range values from the main data source.
                  return makeExplicit({
                    data: model.requestDataName(DataSourceType.Main),
                    field: range.field,
                    sort: {
                      op: 'min',
                      field: model.vgField(channel)
                    }
                  });
                }

                return makeExplicit(range);
              }

            case 'scheme':
              return makeExplicit(parseScheme(specifiedScale[property]));
          }
        }
      }
    }
  } catch (err) {
    _iterator150.e(err);
  } finally {
    _iterator150.f();
  }

  if (channel === X || channel === Y) {
    var sizeChannel = channel === X ? 'width' : 'height';
    var sizeValue = size[sizeChannel];

    if (isStep(sizeValue)) {
      if (hasDiscreteDomain(scaleType)) {
        return makeExplicit({
          step: sizeValue.step
        });
      } else {
        // Step sizing only makes sense for discrete domains.
        warn(stepDropped(sizeChannel));
      }
    }
  }

  var rangeMin = specifiedScale.rangeMin,
      rangeMax = specifiedScale.rangeMax;
  var d = defaultRange(channel, model);

  if ((rangeMin !== undefined || rangeMax !== undefined) && // it's ok to check just rangeMin's compatibility since rangeMin/rangeMax are the same
  scaleTypeSupportProperty(scaleType, 'rangeMin') && isArray(d) && d.length === 2) {
    // Override either end of the default two-element range.
    return makeExplicit([rangeMin !== null && rangeMin !== void 0 ? rangeMin : d[0], rangeMax !== null && rangeMax !== void 0 ? rangeMax : d[1]]);
  }

  return makeImplicit(d);
}
|
|
|
|
// Normalize a scheme specification into Vega form: an extended scheme object
// ({name, count, extent, ...}) becomes {scheme: name, ...rest}; a plain
// scheme name is wrapped directly.
function parseScheme(scheme) {
  if (isExtendedScheme(scheme)) {
    var rest = omit(scheme, ['name']);
    return Object.assign({
      scheme: scheme.name
    }, rest);
  }

  return {
    scheme: scheme
  };
}
|
|
|
|
// Compute the default Vega range for a channel when the user specified no
// range, scheme, or step on the scale.
function defaultRange(channel, model) {
  var size = model.size,
      config = model.config,
      mark = model.mark,
      encoding = model.encoding;
  var getSignalName = model.getSignalName.bind(model);

  var _getFieldOrDatumDef = getFieldOrDatumDef(encoding[channel]),
      type = _getFieldOrDatumDef.type;

  var mergedScaleCmpt = model.getScaleComponent(channel);
  var scaleType = mergedScaleCmpt.get('type');
  var _model$specifiedScale = model.specifiedScales[channel],
      domain = _model$specifiedScale.domain,
      domainMid = _model$specifiedScale.domainMid;

  switch (channel) {
    case X:
    case Y:
      {
        // If there is no explicit width/height for discrete x/y scales
        if (contains(['point', 'band'], scaleType)) {
          if (channel === X && !size.width) {
            var w = getViewConfigDiscreteSize(config.view, 'width');

            if (isStep(w)) {
              return w;
            }
          } else if (channel === Y && !size.height) {
            var h = getViewConfigDiscreteSize(config.view, 'height');

            if (isStep(h)) {
              return h;
            }
          }
        } // If step is null, use zero to width or height.
        // Note that we use SignalRefWrapper to account for potential merges and renames.


        var sizeType = getSizeChannel$1(channel);
        var sizeSignal = model.getName(sizeType);

        if (channel === Y && hasContinuousDomain(scaleType)) {
          // For y continuous scale, we have to start from the height as the bottom part has the max value.
          return [SignalRefWrapper.fromName(getSignalName, sizeSignal), 0];
        } else {
          return [0, SignalRefWrapper.fromName(getSignalName, sizeSignal)];
        }
      }

    case SIZE:
      {
        // TODO: support custom rangeMin, rangeMax
        var _zero = model.component.scales[channel].get('zero');

        var rangeMin = sizeRangeMin(mark, _zero, config);
        var rangeMax = sizeRangeMax(mark, size, model, config);

        if (isContinuousToDiscrete(scaleType)) {
          // Discretizing scales need one range value per output bucket.
          return interpolateRange(rangeMin, rangeMax, defaultContinuousToDiscreteCount(scaleType, config, domain, channel));
        } else {
          return [rangeMin, rangeMax];
        }
      }

    case THETA:
      return [0, Math.PI * 2];

    case ANGLE:
      // TODO: add config.scale.min/maxAngleDegree (for point and text) and config.scale.min/maxAngleRadian (for arc) once we add arc marks.
      // (It's weird to add just config.scale.min/maxAngleDegree for now)
      return [0, 360];

    case RADIUS:
      {
        // max radius = half od min(width,height)
        return [0, new SignalRefWrapper(function () {
          var w = model.getSignalName('width');
          var h = model.getSignalName('height');
          return "min(".concat(w, ",").concat(h, ")/2");
        })];
      }

    case STROKEWIDTH:
      // TODO: support custom rangeMin, rangeMax
      return [config.scale.minStrokeWidth, config.scale.maxStrokeWidth];

    case STROKEDASH:
      return [// TODO: add this to Vega's config.range?
      [1, 0], [4, 2], [2, 1], [1, 1], [1, 2, 4, 2]];

    case SHAPE:
      return 'symbol';

    case COLOR:
    case FILL:
    case STROKE:
      if (scaleType === 'ordinal') {
        // Only nominal data uses ordinal scale by default
        return type === 'nominal' ? 'category' : 'ordinal';
      } else {
        if (domainMid !== undefined) {
          // A domainMid implies a diverging color ramp centered on it.
          return 'diverging';
        } else {
          return mark === 'rect' || mark === 'geoshape' ? 'heatmap' : 'ramp';
        }
      }

    case OPACITY:
    case FILLOPACITY:
    case STROKEOPACITY:
      // TODO: support custom rangeMin, rangeMax
      return [config.scale.minOpacity, config.scale.maxOpacity];
  }
  /* istanbul ignore next: should never reach here */


  throw new Error("Scale range undefined for channel ".concat(channel));
}
|
|
|
|
// Default cardinality of the output range for continuous-to-discrete scales
// (quantile/quantize from config; threshold from the domain length).
function defaultContinuousToDiscreteCount(scaleType, config, domain, channel) {
  switch (scaleType) {
    case 'quantile':
      return config.scale.quantileCount;

    case 'quantize':
      return config.scale.quantizeCount;

    case 'threshold':
      // A threshold scale with n boundaries produces n + 1 range values.
      if (domain !== undefined && isArray(domain)) {
        return domain.length + 1;
      }

      // No explicit domain: fall back to the cardinality-2 default (3 range values).
      warn(domainRequiredForThresholdScale(channel));
      return 3;
  }
}
|
|
/**
 * Returns the linear interpolation of the range according to the cardinality
 *
 * @param rangeMin start of the range
 * @param rangeMax end of the range
 * @param cardinality number of values in the output range
 */
function interpolateRange(rangeMin, rangeMax, cardinality) {
  // Always emit a sequence() expression so Vega computes the values later.
  var buildExpr = function buildExpr() {
    var maxExpr = signalOrStringValue(rangeMax);
    var minExpr = signalOrStringValue(rangeMin);
    var stepExpr = "(".concat(maxExpr, " - ").concat(minExpr, ") / (").concat(cardinality, " - 1)");
    return "sequence(".concat(minExpr, ", ").concat(maxExpr, " + ").concat(stepExpr, ", ").concat(stepExpr, ")");
  };

  // A signal-valued rangeMax defers expression building to assembly time.
  if (isSignalRef(rangeMax)) {
    return new SignalRefWrapper(buildExpr);
  }

  return {
    signal: buildExpr()
  };
}
|
|
|
|
// Minimum of the default size range for a mark. With zero=true the minimum
// is 0; a signal-valued zero yields a conditional runtime expression.
function sizeRangeMin(mark, zero, config) {
  if (zero) {
    if (isSignalRef(zero)) {
      return {
        signal: "".concat(zero.signal, " ? 0 : ").concat(sizeRangeMin(mark, false, config))
      };
    }

    return 0;
  }

  if (mark === 'bar' || mark === 'tick') {
    return config.scale.minBandSize;
  }

  if (mark === 'line' || mark === 'trail' || mark === 'rule') {
    return config.scale.minStrokeWidth;
  }

  if (mark === 'text') {
    return config.scale.minFontSize;
  }

  if (mark === 'point' || mark === 'square' || mark === 'circle') {
    return config.scale.minSize;
  }

  /* istanbul ignore next: should never reach here */
  // sizeRangeMin not implemented for the mark
  throw new Error(incompatibleChannel('size', mark));
}
|
|
|
|
// Maximum fraction of an x/y band step that point-like marks may occupy when
// deriving the default upper bound of the `size` scale range (see sizeRangeMax).
var MAX_SIZE_RANGE_STEP_RATIO = 0.95;
|
|
|
|
/**
 * Default maximum of the `size` scale range for a mark.
 * Uses the config maximum when set; otherwise derives it from the discrete
 * x/y step so marks fit inside their band. May return a number or a
 * SignalRefWrapper when the step is only known at runtime (binned scales).
 */
function sizeRangeMax(mark, size, model, config) {
  // Bin step signals for x/y, present only when the corresponding scale is binned.
  var xyStepSignals = {
    x: getBinStepSignal(model, 'x'),
    y: getBinStepSignal(model, 'y')
  };

  switch (mark) {
    case 'bar':
    case 'tick':
      {
        if (config.scale.maxBandSize !== undefined) {
          return config.scale.maxBandSize;
        }

        // No explicit max: leave a 1px gap within the smaller x/y step.
        var min = minXYStep(size, xyStepSignals, config.view);

        if (isNumber(min)) {
          return min - 1;
        } else {
          // Step is a signal ref: compute the gap at runtime instead.
          return new SignalRefWrapper(function () {
            return "".concat(min.signal, " - 1");
          });
        }
      }

    case 'line':
    case 'trail':
    case 'rule':
      return config.scale.maxStrokeWidth;

    case 'text':
      return config.scale.maxFontSize;

    case 'point':
    case 'square':
    case 'circle':
      {
        if (config.scale.maxSize) {
          return config.scale.maxSize;
        }

        // `size` is an area, so square the usable fraction of the step.
        var pointStep = minXYStep(size, xyStepSignals, config.view);

        if (isNumber(pointStep)) {
          return Math.pow(MAX_SIZE_RANGE_STEP_RATIO * pointStep, 2);
        } else {
          return new SignalRefWrapper(function () {
            return "pow(".concat(MAX_SIZE_RANGE_STEP_RATIO, " * ").concat(pointStep.signal, ", 2)");
          });
        }
      }
  }
  /* istanbul ignore next: should never reach here */
  // sizeRangeMax not implemented for the mark


  throw new Error(incompatibleChannel('size', mark));
}
|
|
/**
 * Returns the discrete step of x or y, or the minimum of the two if both are
 * discrete.
 *
 * @returns {number|SignalRefWrapper} a number when both steps are static, or
 * a signal wrapper computing `min(...)` when either step is a bin signal.
 */
function minXYStep(size, xyStepSignals, viewConfig) {
  var stepX = isStep(size.width) ? size.width.step : getViewConfigDiscreteStep(viewConfig, 'width');
  var stepY = isStep(size.height) ? size.height.step : getViewConfigDiscreteStep(viewConfig, 'height');

  if (!xyStepSignals.x && !xyStepSignals.y) {
    return Math.min(stepX, stepY);
  }

  // At least one step is only known at runtime, so emit a min() expression.
  return new SignalRefWrapper(function () {
    var parts = [xyStepSignals.x ? xyStepSignals.x.signal : stepX, xyStepSignals.y ? xyStepSignals.y.signal : stepY];
    return "min(".concat(parts.join(', '), ")");
  });
}
|
|
|
|
/**
 * Parses one scale property for a model, dispatching on whether it is a unit
 * model or a composite (layer/facet/concat) model.
 */
function parseScaleProperty(model, property) {
  if (!isUnitModel(model)) {
    parseNonUnitScaleProperty(model, property);
    return;
  }

  parseUnitScaleProperty(model, property);
}
|
|
|
|
/**
 * Determines the value of one scale property for every scale channel of a
 * unit model. An explicitly specified value wins (after compatibility checks
 * against the merged scale type and the channel); otherwise a default is
 * taken from `scaleRules` or, failing that, from the scale config.
 */
function parseUnitScaleProperty(model, property) {
  var localScaleComponents = model.component.scales;
  var config = model.config,
      encoding = model.encoding,
      markDef = model.markDef,
      specifiedScales = model.specifiedScales;

  var _iterator151 = _createForOfIteratorHelper(keys(localScaleComponents)),
      _step151;

  try {
    for (_iterator151.s(); !(_step151 = _iterator151.n()).done;) {
      var channel = _step151.value;
      var specifiedScale = specifiedScales[channel];
      var localScaleCmpt = localScaleComponents[channel];
      // The merged component carries the resolved scale type/padding across views.
      var mergedScaleCmpt = model.getScaleComponent(channel);
      var fieldOrDatumDef = getFieldOrDatumDef(encoding[channel]);
      var specifiedValue = specifiedScale[property];

      var _scaleType8 = mergedScaleCmpt.get('type');

      var scalePadding = mergedScaleCmpt.get('padding');
      var scalePaddingInner = mergedScaleCmpt.get('paddingInner');
      var supportedByScaleType = scaleTypeSupportProperty(_scaleType8, property);
      var channelIncompatability = channelScalePropertyIncompatability(channel, property);

      if (specifiedValue !== undefined) {
        // If there is a specified value, check if it is compatible with scale type and channel
        if (!supportedByScaleType) {
          warn(scalePropertyNotWorkWithScaleType(_scaleType8, property, channel));
        } else if (channelIncompatability) {
          // channel-level incompatibility (warning only; value is dropped below)
          warn(channelIncompatability);
        }
      }

      if (supportedByScaleType && channelIncompatability === undefined) {
        if (specifiedValue !== undefined) {
          var timeUnit = fieldOrDatumDef['timeUnit'];
          var type = fieldOrDatumDef.type;

          switch (property) {
            // Convert domainMax/domainMin to a signal if the value is a datetime object
            // or the field is temporal.
            case 'domainMax':
            case 'domainMin':
              if (isDateTime(specifiedScale[property]) || type === 'temporal' || timeUnit) {
                localScaleCmpt.set(property, {
                  signal: valueExpr(specifiedScale[property], {
                    type: type,
                    timeUnit: timeUnit
                  })
                }, true);
              } else {
                localScaleCmpt.set(property, specifiedScale[property], true);
              }

              break;

            default:
              // Copy the user's explicit value as-is (marked explicit).
              localScaleCmpt.copyKeyFromObject(property, specifiedScale);
          }
        } else {
          // No explicit value: derive a default via the property's rule, or
          // fall back to the scale config.
          var value = property in scaleRules ? scaleRules[property]({
            model: model,
            channel: channel,
            fieldOrDatumDef: fieldOrDatumDef,
            scaleType: _scaleType8,
            scalePadding: scalePadding,
            scalePaddingInner: scalePaddingInner,
            domain: specifiedScale.domain,
            markDef: markDef,
            config: config
          }) : config.scale[property];

          if (value !== undefined) {
            localScaleCmpt.set(property, value, false);
          }
        }
      }
    }
  } catch (err) {
    _iterator151.e(err);
  } finally {
    _iterator151.f();
  }
}
|
|
|
|
// Default-value rules for individual scale properties. Each rule receives the
// parse context assembled in parseUnitScaleProperty and returns a default
// value, or undefined to fall back to the scale config.
var scaleRules = {
  bins: function bins(args) {
    return isFieldDef(args.fieldOrDatumDef) ? _bins(args.model, args.fieldOrDatumDef) : undefined;
  },
  interpolate: function interpolate(args) {
    return _interpolate(args.channel, args.fieldOrDatumDef.type);
  },
  nice: function nice(args) {
    return _nice(args.scaleType, args.channel, args.fieldOrDatumDef);
  },
  padding: function padding(args) {
    return _padding(args.channel, args.scaleType, args.config.scale, args.fieldOrDatumDef, args.markDef, args.config.bar);
  },
  paddingInner: function paddingInner(args) {
    return _paddingInner(args.scalePadding, args.channel, args.markDef.type, args.config.scale);
  },
  paddingOuter: function paddingOuter(args) {
    return _paddingOuter(args.scalePadding, args.channel, args.scaleType, args.markDef.type, args.scalePaddingInner, args.config.scale);
  },
  reverse: function reverse(args) {
    // Only field defs carry a sort; datum defs do not.
    var sort = isFieldDef(args.fieldOrDatumDef) ? args.fieldOrDatumDef.sort : undefined;
    return _reverse(args.scaleType, sort, args.channel, args.config.scale);
  },
  zero: function zero(args) {
    return zero$1(args.channel, args.fieldOrDatumDef, args.domain, args.markDef, args.scaleType);
  }
}; // This method is here rather than in range.ts to avoid circular dependency.
|
|
|
|
/**
 * Parses the scale range: unit models get the full unit-range logic, while
 * composite models recurse through parseNonUnitScaleProperty.
 */
function parseScaleRange(model) {
  if (isUnitModel(model)) {
    parseUnitScaleRange(model);
    return;
  }

  parseNonUnitScaleProperty(model, 'range');
}
|
|
|
|
/**
 * Determines one scale property for a composite model: first parses the
 * property in every child, then merges the children's values (explicit
 * values take precedence) into this model's own scale components.
 */
function parseNonUnitScaleProperty(model, property) {
  var localScaleComponents = model.component.scales;

  var _iterator152 = _createForOfIteratorHelper(model.children),
      _step152;

  try {
    // Recurse into every child first so their components hold parsed values.
    for (_iterator152.s(); !(_step152 = _iterator152.n()).done;) {
      var child = _step152.value;

      if (property === 'range') {
        parseScaleRange(child);
      } else {
        parseScaleProperty(child, property);
      }
    }
  } catch (err) {
    _iterator152.e(err);
  } finally {
    _iterator152.f();
  }

  var _iterator153 = _createForOfIteratorHelper(keys(localScaleComponents)),
      _step153;

  try {
    // Merge the children's values per channel into this model's component.
    for (_iterator153.s(); !(_step153 = _iterator153.n()).done;) {
      var channel = _step153.value;
      var valueWithExplicit = void 0;

      var _iterator154 = _createForOfIteratorHelper(model.children),
          _step154;

      try {
        for (_iterator154.s(); !(_step154 = _iterator154.n()).done;) {
          var _child5 = _step154.value;
          var childComponent = _child5.component.scales[channel];

          if (childComponent) {
            var childValueWithExplicit = childComponent.getWithExplicit(property);
            valueWithExplicit = mergeValuesWithExplicit(valueWithExplicit, childValueWithExplicit, property, 'scale', tieBreakByComparing(function (v1, v2) {
              switch (property) {
                case 'range':
                  // For step, prefer larger step
                  if (v1.step && v2.step) {
                    return v1.step - v2.step;
                  }

                  return 0;
                // TODO: precedence rule for other properties
              }

              return 0;
            }));
          }
        }
      } catch (err) {
        _iterator154.e(err);
      } finally {
        _iterator154.f();
      }

      localScaleComponents[channel].setWithExplicit(property, valueWithExplicit);
    }
  } catch (err) {
    _iterator153.e(err);
  } finally {
    _iterator153.f();
  }
}
|
|
|
|
/**
 * Default `bins` scale property for a binned field: a signal wrapper
 * referencing the bin signal for binning transforms, a `{step}` object for
 * pre-binned data with an explicit step, otherwise undefined.
 */
function _bins(model, fieldDef) {
  var bin = fieldDef.bin;

  if (isBinning(bin)) {
    var signalName = getBinSignalName(model, fieldDef.field, bin);
    return new SignalRefWrapper(function () {
      return model.getSignalName(signalName);
    });
  }

  if (isBinned(bin) && isBinParams(bin) && bin.step !== undefined) {
    // Already-binned data: start and stop come from the scale domain.
    return {
      step: bin.step
    };
  }

  return undefined;
}
|
|
|
|
/**
 * Default scale `interpolate`: 'hcl' for non-nominal color-family channels.
 */
function _interpolate(channel, type) {
  var isColorFamily = contains([COLOR, FILL, STROKE], channel);
  return isColorFamily && type !== 'nominal' ? 'hcl' : undefined;
}
|
|
|
|
/**
 * Default `nice`: true for position channels, unless the field is binned or
 * the scale is a time/utc scale.
 */
function _nice(scaleType, channel, fieldOrDatumDef) {
  var fieldDef = getFieldDef(fieldOrDatumDef);

  if (fieldDef && fieldDef.bin || contains([ScaleType.TIME, ScaleType.UTC], scaleType)) {
    return undefined;
  }

  if (channel in POSITION_SCALE_CHANNEL_INDEX) {
    return true;
  }

  return undefined;
}
|
|
|
|
/**
 * Default `padding` for position channels: the configured continuous padding
 * for continuous scales (or the bar band size for non-binned length-wise
 * bars), and the configured point padding for point scales.
 */
function _padding(channel, scaleType, scaleConfig, fieldOrDatumDef, markDef, barConfig) {
  if (!(channel in POSITION_SCALE_CHANNEL_INDEX)) {
    return undefined;
  }

  if (isContinuousToContinuous(scaleType)) {
    if (scaleConfig.continuousPadding !== undefined) {
      return scaleConfig.continuousPadding;
    }

    var markType = markDef.type;
    var markOrient = markDef.orient;
    var binnedOrTimeUnit = isFieldDef(fieldOrDatumDef) && (fieldOrDatumDef.bin || fieldOrDatumDef.timeUnit);

    if (markType === 'bar' && !binnedOrTimeUnit) {
      // Length-wise axis of a continuous bar: pad by the bar's band size.
      var isLengthwise = markOrient === 'vertical' && channel === 'x' || markOrient === 'horizontal' && channel === 'y';

      if (isLengthwise) {
        return barConfig.continuousBandSize;
      }
    }
  }

  if (scaleType === ScaleType.POINT) {
    return scaleConfig.pointPadding;
  }

  return undefined;
}
|
|
|
|
/**
 * Default `paddingInner` for band scales on position channels, unless the
 * user already specified `padding` directly.
 */
function _paddingInner(paddingValue, channel, mark, scaleConfig) {
  if (paddingValue !== undefined) {
    // An explicit `padding` overrides any default paddingInner.
    return undefined;
  }

  if (!(channel in POSITION_SCALE_CHANNEL_INDEX)) {
    // Padding defaults only apply to x/y; it makes no sense for color/size.
    return undefined;
  }

  // Only consulted for band scales, so return the band-scale default
  // (bar marks get their own default).
  var markDefault = mark === 'bar' ? scaleConfig.barBandPaddingInner : scaleConfig.rectBandPaddingInner;
  return getFirstDefined(scaleConfig.bandPaddingInner, markDefault);
}
|
|
|
|
/**
 * Default `paddingOuter` for band scales on position channels, unless the
 * user already specified `padding` directly.
 */
function _paddingOuter(paddingValue, channel, scaleType, mark, paddingInnerValue, scaleConfig) {
  if (paddingValue !== undefined) {
    // An explicit `padding` overrides any default paddingOuter.
    return undefined;
  }

  // Padding defaults only apply to x/y band scales; it makes no sense for
  // color/size, and paddingOuter is a band-scale concept.
  if (!(channel in POSITION_SCALE_CHANNEL_INDEX) || scaleType !== ScaleType.BAND) {
    return undefined;
  }

  /* By default, paddingOuter is paddingInner / 2. The reason is that
     size (width/height) = step * (cardinality - paddingInner + 2 * paddingOuter)
     and we want the width/height to be integer by default.
     Note that step (by default) and cardinality are integers. */
  var halfInner = isSignalRef(paddingInnerValue) ? {
    signal: "".concat(paddingInnerValue.signal, "/2")
  } : paddingInnerValue / 2;
  return getFirstDefined(scaleConfig.bandPaddingOuter, halfInner);
}
|
|
|
|
/**
 * Default `reverse`: a continuous-domain scale sorted descending reverses its
 * range (Vega cannot sort continuous domains). An `xReverse` config on the x
 * channel is honored, and flipped for descending continuous sorts.
 */
function _reverse(scaleType, sort, channel, scaleConfig) {
  var descendingContinuous = hasContinuousDomain(scaleType) && sort === 'descending';

  if (channel === 'x' && scaleConfig.xReverse !== undefined) {
    if (!descendingContinuous) {
      return scaleConfig.xReverse;
    }

    if (isSignalRef(scaleConfig.xReverse)) {
      // Negate the configured signal at runtime.
      return {
        signal: "!".concat(scaleConfig.xReverse.signal)
      };
    }

    return !scaleConfig.xReverse;
  }

  if (descendingContinuous) {
    // For continuous domain scales, Vega does not support domain sort;
    // reverse the range instead when sort is descending.
    return true;
  }

  return undefined;
}
|
|
|
|
/**
 * Default `zero` for a scale: false when the user supplied an explicit
 * continuous domain (unless that domain already spans zero), true for
 * quantitative size encodings and for non-binned quantitative position
 * channels (except the length-wise axis of bar/area/line/trail marks).
 */
function zero$1(channel, fieldDef, specifiedDomain, markDef, scaleType) {
  // An explicitly provided domain (other than 'unaggregated') should not be
  // augmented with zero, as that would be unexpected.
  var hasCustomDomain = !!specifiedDomain && specifiedDomain !== 'unaggregated';

  if (hasCustomDomain && hasContinuousDomain(scaleType)) {
    if (isArray(specifiedDomain)) {
      var lo = specifiedDomain[0];
      var hi = specifiedDomain[specifiedDomain.length - 1];

      // Keep zero=true when the custom domain already spans zero.
      if (lo <= 0 && hi >= 0) {
        return true;
      }
    }

    return false;
  }

  // 1) Quantitative `size` encodings include zero. While such fields may be
  // ratio or interval, ratio is assumed to be more common; discretizing
  // scales are excluded so their range does not start at zero.
  if (channel === 'size' && fieldDef.type === 'quantitative' && !isContinuousToDiscrete(scaleType)) {
    return true;
  }

  // 2) Non-binned quantitative position scales include zero. (Bins are
  // computed without zero, so binned fields do not.)
  if (!(isFieldDef(fieldDef) && fieldDef.bin) && contains([].concat(_toConsumableArray(POSITION_SCALE_CHANNELS), _toConsumableArray(POLAR_POSITION_SCALE_CHANNELS)), channel)) {
    var markOrient = markDef.orient;
    var markType = markDef.type;

    if (contains(['bar', 'area', 'line', 'trail'], markType)) {
      // The length-wise axis of these marks does not default to zero.
      if (markOrient === 'horizontal' && channel === 'y' || markOrient === 'vertical' && channel === 'x') {
        return false;
      }
    }

    return true;
  }

  return false;
}
|
|
/**
 * Determine if there is a specified scale type and if it is appropriate,
 * or determine default type if type is unspecified or inappropriate.
 */
// NOTE: CompassQL uses this method.
function scaleType(specifiedScale, channel, fieldDef, mark) {
  var fallbackType = defaultType$2(channel, fieldDef, mark);
  var explicitType = specifiedScale.type;

  if (!isScaleChannel(channel)) {
    // These channels carry no scale at all.
    return null;
  }

  if (explicitType === undefined) {
    return fallbackType;
  }

  // An explicitly specified type must be supported by the channel...
  if (!channelSupportScaleType(channel, explicitType)) {
    warn(scaleTypeNotWorkWithChannel(channel, explicitType, fallbackType));
    return fallbackType;
  }

  // ...and by the field's data type.
  if (isFieldDef(fieldDef) && !scaleTypeSupportDataType(explicitType, fieldDef.type)) {
    warn(scaleTypeNotWorkWithFieldDef(explicitType, fallbackType));
    return fallbackType;
  }

  return explicitType;
}
|
|
/**
 * Determine appropriate default scale type for a channel given the field
 * def's data type and the mark. Returns undefined for geojson fields and
 * throws for an invalid field type.
 */
// NOTE: Voyager uses this method.
function defaultType$2(channel, fieldDef, mark) {
  var _a;

  switch (fieldDef.type) {
    case 'nominal':
    case 'ordinal':
      if (isColorChannel(channel) || rangeType(channel) === 'discrete') {
        if (channel === 'shape' && fieldDef.type === 'ordinal') {
          warn(discreteChannelCannotEncode(channel, 'ordinal'));
        }

        return 'ordinal';
      }

      if (channel in POSITION_SCALE_CHANNEL_INDEX) {
        if (contains(['rect', 'bar', 'image', 'rule'], mark)) {
          // The rect/bar mark should fit into a band.
          // For rule, using band scale to make rule align with axis ticks better https://github.com/vega/vega-lite/issues/3429
          return 'band';
        }
      } else if (mark === 'arc' && channel in POLAR_POSITION_SCALE_CHANNEL_INDEX) {
        // Arc marks also occupy a full band on polar position channels.
        return 'band';
      }

      // An explicit band or a tickBand axis setting also forces a band scale.
      if (fieldDef.band !== undefined || isPositionFieldOrDatumDef(fieldDef) && ((_a = fieldDef.axis) === null || _a === void 0 ? void 0 : _a.tickBand)) {
        return 'band';
      } // Otherwise, use ordinal point scale so we can easily get center positions of the marks.


      return 'point';

    case 'temporal':
      if (isColorChannel(channel)) {
        return 'time';
      } else if (rangeType(channel) === 'discrete') {
        warn(discreteChannelCannotEncode(channel, 'temporal')); // TODO: consider using quantize (equivalent to binning) once we have it

        return 'ordinal';
      } else if (isFieldDef(fieldDef) && fieldDef.timeUnit && normalizeTimeUnit(fieldDef.timeUnit).utc) {
        // UTC time units get a UTC scale so ticks/labels are not shifted.
        return 'utc';
      }

      return 'time';

    case 'quantitative':
      if (isColorChannel(channel)) {
        if (isFieldDef(fieldDef) && isBinning(fieldDef.bin)) {
          // Binned quantitative color uses one ordinal color per bin.
          return 'bin-ordinal';
        }

        return 'linear';
      } else if (rangeType(channel) === 'discrete') {
        warn(discreteChannelCannotEncode(channel, 'quantitative')); // TODO: consider using quantize (equivalent to binning) once we have it

        return 'ordinal';
      }

      return 'linear';

    case 'geojson':
      // Geojson fields are projected, not scaled.
      return undefined;
  }
  /* istanbul ignore next: should never reach this */


  throw new Error(invalidFieldType(fieldDef.type));
}
|
|
|
|
/**
 * Parses all scale components of a model: core (scale types), domains, the
 * remaining non-type/domain/range Vega scale properties, and finally ranges
 * (skipped when `ignoreRange` is set; range depends on `zero`, which must be
 * parsed first).
 */
function parseScales(model) {
  var _ref94 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
      ignoreRange = _ref94.ignoreRange;

  parseScaleCore(model);
  parseScaleDomain(model);

  var _iterator155 = _createForOfIteratorHelper(NON_TYPE_DOMAIN_RANGE_VEGA_SCALE_PROPERTIES),
      _step155;

  try {
    for (_iterator155.s(); !(_step155 = _iterator155.n()).done;) {
      var prop = _step155.value;
      parseScaleProperty(model, prop);
    }
  } catch (err) {
    _iterator155.e(err);
  } finally {
    _iterator155.f();
  }

  if (!ignoreRange) {
    // range depends on zero
    parseScaleRange(model);
  }
}
|
|
|
|
/**
 * Parses scale types into `model.component.scales`, dispatching on whether
 * the model is a unit model or a composite model.
 */
function parseScaleCore(model) {
  model.component.scales = isUnitModel(model) ? parseUnitScaleCore(model) : parseNonUnitScaleCore(model);
}
|
|
/**
 * Parse scales for all channels of a unit model. Builds a map of channel ->
 * ScaleComponent, skipping channels without a typed field/datum def, the
 * shape channel of geoshape marks with geojson data, and scales the user
 * disabled with `scale: null`/`false`.
 */
function parseUnitScaleCore(model) {
  var encoding = model.encoding;
  var mark = model.mark;
  var scaleComponents = {};

  for (var i = 0; i < SCALE_CHANNELS.length; i++) {
    var channel = SCALE_CHANNELS[i];
    var fieldOrDatumDef = getFieldOrDatumDef(encoding[channel]); // must be a typed def to have a scale

    // Don't generate a scale for the shape of a geoshape mark.
    if (fieldOrDatumDef && mark === GEOSHAPE && channel === SHAPE && fieldOrDatumDef.type === GEOJSON) {
      continue;
    }

    var specifiedScale = fieldOrDatumDef && fieldOrDatumDef['scale'];

    if (fieldOrDatumDef && specifiedScale !== null && specifiedScale !== false) {
      specifiedScale = specifiedScale !== null && specifiedScale !== void 0 ? specifiedScale : {};
      var sType = scaleType(specifiedScale, channel, fieldOrDatumDef, mark);
      scaleComponents[channel] = new ScaleComponent(model.scaleName(channel + '', true), {
        value: sType,
        explicit: specifiedScale.type === sType
      });
    }
  }

  return scaleComponents;
}
|
|
|
|
// Tie-breaker for merging explicit scale types across views: prefer the type
// with the higher precedence.
var scaleTypeTieBreaker = tieBreakByComparing(function (typeA, typeB) {
  return scaleTypePrecedence(typeA) - scaleTypePrecedence(typeB);
});
|
|
|
|
/**
 * Parses scale types for a composite model: parses each child's scales, and
 * for channels resolved as 'shared' merges compatible child scale types into
 * a single merged scale component, renaming the child scales to the merged
 * name. Incompatible types force the channel back to 'independent'.
 */
function parseNonUnitScaleCore(model) {
  var _a;

  var scaleComponents = model.component.scales = {};
  // channel -> {value, explicit} scale type candidates eligible for merging.
  var scaleTypeWithExplicitIndex = {};
  var resolve = model.component.resolve; // Parse each child scale and determine if a particular channel can be merged.

  var _iterator156 = _createForOfIteratorHelper(model.children),
      _step156;

  try {
    for (_iterator156.s(); !(_step156 = _iterator156.n()).done;) {
      var child = _step156.value;
      parseScaleCore(child); // Instead of always merging right away -- check if it is compatible to merge first!

      var _iterator158 = _createForOfIteratorHelper(keys(child.component.scales)),
          _step158;

      try {
        for (_iterator158.s(); !(_step158 = _iterator158.n()).done;) {
          var channel = _step158.value;
          // if resolve is undefined, set default first
          resolve.scale[channel] = (_a = resolve.scale[channel]) !== null && _a !== void 0 ? _a : defaultScaleResolve(channel, model);

          if (resolve.scale[channel] === 'shared') {
            var explicitScaleType = scaleTypeWithExplicitIndex[channel];
            var childScaleType = child.component.scales[channel].getWithExplicit('type');

            if (explicitScaleType) {
              if (scaleCompatible(explicitScaleType.value, childScaleType.value)) {
                // merge scale component if types are compatible
                scaleTypeWithExplicitIndex[channel] = mergeValuesWithExplicit(explicitScaleType, childScaleType, 'type', 'scale', scaleTypeTieBreaker);
              } else {
                // Otherwise, update conflicting channel to be independent
                resolve.scale[channel] = 'independent'; // Remove from the index so they don't get merged

                delete scaleTypeWithExplicitIndex[channel];
              }
            } else {
              // First child seen for this channel: seed the index.
              scaleTypeWithExplicitIndex[channel] = childScaleType;
            }
          }
        }
      } catch (err) {
        _iterator158.e(err);
      } finally {
        _iterator158.f();
      }
    } // Merge each channel listed in the index

  } catch (err) {
    _iterator156.e(err);
  } finally {
    _iterator156.f();
  }

  var _iterator157 = _createForOfIteratorHelper(keys(scaleTypeWithExplicitIndex)),
      _step157;

  try {
    for (_iterator157.s(); !(_step157 = _iterator157.n()).done;) {
      var _channel2 = _step157.value;
      // Create new merged scale component
      var name = model.scaleName(_channel2, true);
      var typeWithExplicit = scaleTypeWithExplicitIndex[_channel2];
      scaleComponents[_channel2] = new ScaleComponent(name, typeWithExplicit); // rename each child and mark them as merged

      var _iterator159 = _createForOfIteratorHelper(model.children),
          _step159;

      try {
        for (_iterator159.s(); !(_step159 = _iterator159.n()).done;) {
          var _child6 = _step159.value;
          var childScale = _child6.component.scales[_channel2];

          if (childScale) {
            _child6.renameScale(childScale.get('name'), name);

            childScale.merged = true;
          }
        }
      } catch (err) {
        _iterator159.e(err);
      } finally {
        _iterator159.f();
      }
    }
  } catch (err) {
    _iterator157.e(err);
  } finally {
    _iterator157.f();
  }

  return scaleComponents;
}
|
|
|
|
/**
 * A rename map for signal/scale/projection names, shared across a model tree
 * so renames performed in one part of the tree are visible everywhere.
 */
var NameMap = /*#__PURE__*/function () {
  function NameMap() {
    _classCallCheck(this, NameMap);

    // oldName -> newName; chains of renames are followed in get().
    this.nameMap = {};
  }

  _createClass(NameMap, [{
    key: "rename",
    // Records that `oldName` should be read as `newName` from now on.
    value: function rename(oldName, newName) {
      this.nameMap[oldName] = newName;
    }
  }, {
    key: "has",
    // Whether `name` has been renamed.
    value: function has(name) {
      return this.nameMap[name] !== undefined;
    }
  }, {
    key: "get",
    // Resolves `name` through the rename chain to its final name.
    value: function get(name) {
      // If the name appears in the _nameMap, we need to read its new name.
      // We have to loop over the dict just in case the new name also gets renamed.
      while (this.nameMap[name] && name !== this.nameMap[name]) {
        name = this.nameMap[name];
      }

      return name;
    }
  }]);

  return NameMap;
}();
|
|
/*
|
|
We use type guards instead of `instanceof` as `instanceof` makes
|
|
different parts of the compiler depend on the actual implementation of
|
|
the model classes, which in turn depend on different parts of the compiler.
|
|
Thus, `instanceof` leads to circular dependency problems.
|
|
On the other hand, type guards only make different parts of the compiler
|
|
depend on the type of the model classes, but not the actual implementation.
|
|
*/
|
|
|
|
|
|
// Type guard: true when the model is a unit (single-view) model.
function isUnitModel(model) {
  if (model == null) {
    return false;
  }

  return model.type === 'unit';
}
|
|
|
|
// Type guard: true when the model is a facet model.
function isFacetModel(model) {
  if (model == null) {
    return false;
  }

  return model.type === 'facet';
}
|
|
|
|
// Type guard: true when the model is a concat model.
function isConcatModel(model) {
  if (model == null) {
    return false;
  }

  return model.type === 'concat';
}
|
|
|
|
// Type guard: true when the model is a layer model.
function isLayerModel(model) {
  if (model == null) {
    return false;
  }

  return model.type === 'layer';
}
|
|
|
|
var Model = /*#__PURE__*/function () {
|
|
function Model(spec, type, parent, parentGivenName, config, resolve, view) {
|
|
var _this29 = this;
|
|
|
|
_classCallCheck(this, Model);
|
|
|
|
var _a, _b;
|
|
|
|
this.type = type;
|
|
this.parent = parent;
|
|
this.config = config;
|
|
this.view = view;
|
|
this.children = [];
|
|
/**
|
|
* Corrects the data references in marks after assemble.
|
|
*/
|
|
|
|
this.correctDataNames = function (mark) {
|
|
// TODO: make this correct
|
|
// for normal data references
|
|
if (mark.from && mark.from.data) {
|
|
mark.from.data = _this29.lookupDataSource(mark.from.data);
|
|
} // for access to facet data
|
|
|
|
|
|
if (mark.from && mark.from.facet && mark.from.facet.data) {
|
|
mark.from.facet.data = _this29.lookupDataSource(mark.from.facet.data);
|
|
}
|
|
|
|
return mark;
|
|
};
|
|
|
|
this.parent = parent;
|
|
this.config = config; // If name is not provided, always use parent's givenName to avoid name conflicts.
|
|
|
|
this.name = (_a = spec.name) !== null && _a !== void 0 ? _a : parentGivenName;
|
|
this.title = isText(spec.title) ? {
|
|
text: spec.title
|
|
} : spec.title; // Shared name maps
|
|
|
|
this.scaleNameMap = parent ? parent.scaleNameMap : new NameMap();
|
|
this.projectionNameMap = parent ? parent.projectionNameMap : new NameMap();
|
|
this.signalNameMap = parent ? parent.signalNameMap : new NameMap();
|
|
this.data = spec.data;
|
|
this.description = spec.description;
|
|
this.transforms = normalizeTransform((_b = spec.transform) !== null && _b !== void 0 ? _b : []);
|
|
this.layout = type === 'layer' || type === 'unit' ? {} : extractCompositionLayout(spec, type, config);
|
|
this.component = {
|
|
data: {
|
|
sources: parent ? parent.component.data.sources : [],
|
|
outputNodes: parent ? parent.component.data.outputNodes : {},
|
|
outputNodeRefCounts: parent ? parent.component.data.outputNodeRefCounts : {},
|
|
// data is faceted if the spec is a facet spec or the parent has faceted data and data is undefined
|
|
isFaceted: isFacetSpec(spec) || parent && parent.component.data.isFaceted && spec.data === undefined
|
|
},
|
|
layoutSize: new Split(),
|
|
layoutHeaders: {
|
|
row: {},
|
|
column: {},
|
|
facet: {}
|
|
},
|
|
mark: null,
|
|
resolve: Object.assign({
|
|
scale: {},
|
|
axis: {},
|
|
legend: {}
|
|
}, resolve ? duplicate(resolve) : {}),
|
|
selection: null,
|
|
scales: null,
|
|
projection: null,
|
|
axes: {},
|
|
legends: {}
|
|
};
|
|
}
|
|
|
|
_createClass(Model, [{
|
|
key: "parse",
|
|
value: function parse() {
|
|
this.parseScale();
|
|
this.parseLayoutSize(); // depends on scale
|
|
|
|
this.renameTopLevelLayoutSizeSignal();
|
|
this.parseSelections();
|
|
this.parseProjection();
|
|
this.parseData(); // (pathorder) depends on markDef; selection filters depend on parsed selections; depends on projection because some transforms require the finalized projection name.
|
|
|
|
this.parseAxesAndHeaders(); // depends on scale and layout size
|
|
|
|
this.parseLegends(); // depends on scale, markDef
|
|
|
|
this.parseMarkGroup(); // depends on data name, scale, layout size, axisGroup, and children's scale, axis, legend and mark.
|
|
}
|
|
}, {
|
|
key: "parseScale",
|
|
value: function parseScale() {
|
|
parseScales(this);
|
|
}
|
|
}, {
|
|
key: "parseProjection",
|
|
value: function parseProjection() {
|
|
_parseProjection(this);
|
|
}
|
|
/**
|
|
* Rename top-level spec's size to be just width / height, ignoring model name.
|
|
* This essentially merges the top-level spec's width/height signals with the width/height signals
|
|
* to help us reduce redundant signals declaration.
|
|
*/
|
|
|
|
}, {
|
|
key: "renameTopLevelLayoutSizeSignal",
|
|
value: function renameTopLevelLayoutSizeSignal() {
|
|
if (this.getName('width') !== 'width') {
|
|
this.renameSignal(this.getName('width'), 'width');
|
|
}
|
|
|
|
if (this.getName('height') !== 'height') {
|
|
this.renameSignal(this.getName('height'), 'height');
|
|
}
|
|
}
|
|
}, {
|
|
key: "parseLegends",
|
|
value: function parseLegends() {
|
|
parseLegend(this);
|
|
}
|
|
}, {
|
|
key: "assembleGroupStyle",
|
|
value: function assembleGroupStyle() {
|
|
var _a, _b;
|
|
|
|
if (this.type === 'unit' || this.type === 'layer') {
|
|
return (_b = (_a = this.view) === null || _a === void 0 ? void 0 : _a.style) !== null && _b !== void 0 ? _b : 'cell';
|
|
}
|
|
|
|
return undefined;
|
|
}
|
|
}, {
|
|
key: "assembleEncodeFromView",
|
|
value: function assembleEncodeFromView(view) {
|
|
// Exclude "style"
|
|
var baseView = __rest(view, ["style"]);
|
|
|
|
var e = {};
|
|
|
|
var _iterator160 = _createForOfIteratorHelper(keys(baseView)),
|
|
_step160;
|
|
|
|
try {
|
|
for (_iterator160.s(); !(_step160 = _iterator160.n()).done;) {
|
|
var property = _step160.value;
|
|
var value = baseView[property];
|
|
|
|
if (value !== undefined) {
|
|
e[property] = signalOrValueRef(value);
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator160.e(err);
|
|
} finally {
|
|
_iterator160.f();
|
|
}
|
|
|
|
return e;
|
|
}
|
|
}, {
|
|
key: "assembleGroupEncodeEntry",
|
|
value: function assembleGroupEncodeEntry(isTopLevel) {
|
|
var encodeEntry = {};
|
|
|
|
if (this.view) {
|
|
encodeEntry = this.assembleEncodeFromView(this.view);
|
|
}
|
|
|
|
if (!isTopLevel) {
|
|
// Descriptions are already added to the top-level description so we only need to add them to the inner views.
|
|
if (this.description) {
|
|
encodeEntry['description'] = signalOrValueRef(this.description);
|
|
} // For top-level spec, we can set the global width and height signal to adjust the group size.
|
|
// For other child specs, we have to manually set width and height in the encode entry.
|
|
|
|
|
|
if (this.type === 'unit' || this.type === 'layer') {
|
|
return Object.assign({
|
|
width: this.getSizeSignalRef('width'),
|
|
height: this.getSizeSignalRef('height')
|
|
}, encodeEntry !== null && encodeEntry !== void 0 ? encodeEntry : {});
|
|
}
|
|
}
|
|
|
|
return isEmpty(encodeEntry) ? undefined : encodeEntry;
|
|
}
|
|
}, {
|
|
key: "assembleLayout",
|
|
value: function assembleLayout() {
  // Assemble the Vega layout block for this model. `spacing` is renamed to
  // Vega's `padding`; subclass defaults come from assembleDefaultLayout();
  // a facet title band is merged in when layout headers define one.
  if (!this.layout) {
    return undefined;
  }

  var _a = this.layout,
      spacing = _a.spacing,
      layout = __rest(_a, ["spacing"]);

  var component = this.component,
      config = this.config;
  var titleBand = assembleLayoutTitleBand(component.layoutHeaders, config);
  return Object.assign(Object.assign(Object.assign({
    padding: spacing
  }, this.assembleDefaultLayout()), layout), titleBand ? {
    titleBand: titleBand
  } : {});
}
|
|
}, {
|
|
key: "assembleDefaultLayout",
|
|
value: function assembleDefaultLayout() {
  // Base models contribute no default layout; subclasses override this.
  return {};
}
|
|
}, {
|
|
key: "assembleHeaderMarks",
|
|
value: function assembleHeaderMarks() {
  // Assemble facet header marks: first title groups for facet channels that
  // declare a title, then header (label) groups for all header channels.
  var layoutHeaders = this.component.layoutHeaders;
  var headerMarks = [];

  var _iterator161 = _createForOfIteratorHelper(FACET_CHANNELS),
      _step161;

  try {
    for (_iterator161.s(); !(_step161 = _iterator161.n()).done;) {
      var channel = _step161.value;

      if (layoutHeaders[channel].title) {
        headerMarks.push(assembleTitleGroup(this, channel));
      }
    }
  } catch (err) {
    _iterator161.e(err);
  } finally {
    _iterator161.f();
  }

  var _iterator162 = _createForOfIteratorHelper(HEADER_CHANNELS),
      _step162;

  try {
    for (_iterator162.s(); !(_step162 = _iterator162.n()).done;) {
      var _channel3 = _step162.value;
      headerMarks = headerMarks.concat(assembleHeaderGroups(this, _channel3));
    }
  } catch (err) {
    _iterator162.e(err);
  } finally {
    _iterator162.f();
  }

  return headerMarks;
}
|
|
}, {
|
|
key: "assembleAxes",
|
|
value: function assembleAxes() {
  // Delegate to the shared axis assembler with this model's parsed axis components.
  return _assembleAxes(this.component.axes, this.config);
}
|
|
}, {
|
|
key: "assembleLegends",
|
|
value: function assembleLegends() {
  // Delegate to the shared legend assembler for this model.
  return _assembleLegends(this);
}
|
|
}, {
|
|
key: "assembleProjections",
|
|
value: function assembleProjections() {
  // Delegate to the shared projection assembler for this model.
  return _assembleProjections(this);
}
|
|
}, {
|
|
key: "assembleTitle",
|
|
value: function assembleTitle() {
  // Assemble the Vega title spec: merge the non-mark title config with the
  // spec's title (its `encoding` becomes an encode.update block), then apply
  // frame/anchor defaults depending on the model type. Returns undefined when
  // there is no title text.
  var _a, _b, _c;

  var _d = (_a = this.title) !== null && _a !== void 0 ? _a : {},
      encoding = _d.encoding,
      titleNoEncoding = __rest(_d, ["encoding"]);

  var title = Object.assign(Object.assign(Object.assign({}, extractTitleConfig(this.config.title).nonMark), titleNoEncoding), encoding ? {
    encode: {
      update: encoding
    }
  } : {});

  if (title.text) {
    if (contains(['unit', 'layer'], this.type)) {
      // Unit/Layer
      if (contains(['middle', undefined], title.anchor)) {
        title.frame = (_b = title.frame) !== null && _b !== void 0 ? _b : 'group';
      }
    } else {
      // composition with Vega layout
      // Set title = "start" by default for composition as "middle" does not look nice
      // https://github.com/vega/vega/issues/960#issuecomment-471360328
      title.anchor = (_c = title.anchor) !== null && _c !== void 0 ? _c : 'start';
    }

    return isEmpty(title) ? undefined : title;
  }

  return undefined;
}
|
|
/**
|
|
* Assemble the mark group for this model. We accept optional `signals` so that we can include concat top-level signals with the top-level model's local signals.
|
|
*/
|
|
|
|
}, {
|
|
key: "assembleGroup",
|
|
value: function assembleGroup() {
  // Assemble the Vega group for this model: signals (caller-supplied plus this
  // model's own), optional layout, marks (header marks first), and -- when
  // this level owns them -- scales, axes and legends.
  var signals = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : [];
  var group = {};
  signals = signals.concat(this.assembleSignals());

  if (signals.length > 0) {
    group.signals = signals;
  }

  var layout = this.assembleLayout();

  if (layout) {
    group.layout = layout;
  }

  group.marks = [].concat(this.assembleHeaderMarks(), this.assembleMarks()); // Only include scales if this spec is top-level or if parent is facet.
  // (Otherwise, it will be merged with upper-level's scope.)

  var scales = !this.parent || isFacetModel(this.parent) ? assembleScales(this) : [];

  if (scales.length > 0) {
    group.scales = scales;
  }

  var axes = this.assembleAxes();

  if (axes.length > 0) {
    group.axes = axes;
  }

  var legends = this.assembleLegends();

  if (legends.length > 0) {
    group.legends = legends;
  }

  return group;
}
|
|
}, {
|
|
key: "getName",
|
|
value: function getName(text) {
|
|
return varName((this.name ? this.name + '_' : '') + text);
|
|
}
|
|
}, {
|
|
key: "getDataName",
|
|
value: function getDataName(type) {
|
|
return this.getName(DataSourceType[type].toLowerCase());
|
|
}
|
|
/**
|
|
* Request a data source name for the given data source type and mark that data source as required.
|
|
* This method should be called in parse, so that all used data source can be correctly instantiated in assembleData().
|
|
* You can lookup the correct dataset name in assemble with `lookupDataSource`.
|
|
*/
|
|
|
|
}, {
|
|
key: "requestDataName",
|
|
value: function requestDataName(name) {
|
|
var fullName = this.getDataName(name); // Increase ref count. This is critical because otherwise we won't create a data source.
|
|
// We also increase the ref counts on OutputNode.getSource() calls.
|
|
|
|
var refCounts = this.component.data.outputNodeRefCounts;
|
|
refCounts[fullName] = (refCounts[fullName] || 0) + 1;
|
|
return fullName;
|
|
}
|
|
}, {
|
|
key: "getSizeSignalRef",
|
|
value: function getSizeSignalRef(layoutSizeType) {
  // Return a signal reference for this model's width/height. Inside a facet,
  // an independent discrete scale with a range step yields an expression over
  // the distinct-domain count instead of a plain size signal.
  if (isFacetModel(this.parent)) {
    var sizeType = getSizeTypeFromLayoutSizeType(layoutSizeType);
    var channel = getPositionScaleChannel(sizeType);
    var scaleComponent = this.component.scales[channel];

    if (scaleComponent && !scaleComponent.merged) {
      // independent scale
      var type = scaleComponent.get('type');
      var range = scaleComponent.get('range');

      if (hasDiscreteDomain(type) && isVgRangeStep(range)) {
        var scaleName = scaleComponent.get('name');

        var _domain5 = assembleDomain(this, channel);

        var _field23 = getFieldFromDomain(_domain5);

        if (_field23) {
          // Size is computed from the number of distinct domain values.
          var fieldRef = _vgField({
            aggregate: 'distinct',
            field: _field23
          }, {
            expr: 'datum'
          });

          return {
            signal: sizeExpr(scaleName, scaleComponent, fieldRef)
          };
        } else {
          // Cannot determine the field backing the domain; warn and bail out.
          warn(unknownField(channel));
          return null;
        }
      }
    }
  }

  // Default: look up this model's renamed width/height signal.
  return {
    signal: this.signalNameMap.get(this.getName(layoutSizeType))
  };
}
|
|
/**
|
|
* Lookup the name of the datasource for an output node. You probably want to call this in assemble.
|
|
*/
|
|
|
|
}, {
|
|
key: "lookupDataSource",
|
|
value: function lookupDataSource(name) {
|
|
var node = this.component.data.outputNodes[name];
|
|
|
|
if (!node) {
|
|
// Name not found in map so let's just return what we got.
|
|
// This can happen if we already have the correct name.
|
|
return name;
|
|
}
|
|
|
|
return node.getSource();
|
|
}
|
|
}, {
|
|
key: "getSignalName",
|
|
value: function getSignalName(oldSignalName) {
  // Resolve a signal name through the rename map.
  return this.signalNameMap.get(oldSignalName);
}
|
|
}, {
|
|
key: "renameSignal",
|
|
value: function renameSignal(oldName, newName) {
  // Record a signal rename so later lookups resolve to the new name.
  this.signalNameMap.rename(oldName, newName);
}
|
|
}, {
|
|
key: "renameScale",
|
|
value: function renameScale(oldName, newName) {
  // Record a scale rename so later lookups resolve to the new name.
  this.scaleNameMap.rename(oldName, newName);
}
|
|
}, {
|
|
key: "renameProjection",
|
|
value: function renameProjection(oldName, newName) {
  // Record a projection rename so later lookups resolve to the new name.
  this.projectionNameMap.rename(oldName, newName);
}
|
|
/**
|
|
* @return scale name for a given channel after the scale has been parsed and named.
|
|
*/
|
|
|
|
}, {
|
|
key: "scaleName",
|
|
value: function scaleName(originalScaleName, parse) {
  // Resolve the (possibly renamed) scale name for a channel or raw scale name.
  if (parse) {
    // During the parse phase always return a value
    // No need to refer to rename map because a scale can't be renamed
    // before it has the original name.
    return this.getName(originalScaleName);
  } // If there is a scale for the channel, it should either
  // be in the scale component or exist in the name map


  if ( // If there is a scale for the channel, there should be a local scale component for it
  isChannel(originalScaleName) && isScaleChannel(originalScaleName) && this.component.scales[originalScaleName] || // in the scale name map (the scale get merged by its parent)
  this.scaleNameMap.has(this.getName(originalScaleName))) {
    return this.scaleNameMap.get(this.getName(originalScaleName));
  }

  // No scale exists for this channel/name.
  return undefined;
}
|
|
/**
|
|
* @return projection name after the projection has been parsed and named.
|
|
*/
|
|
|
|
}, {
|
|
key: "projectionName",
|
|
value: function projectionName(parse) {
  // Resolve the (possibly renamed) projection name for this model.
  if (parse) {
    // During the parse phase always return a value
    // No need to refer to rename map because a projection can't be renamed
    // before it has the original name.
    return this.getName('projection');
  }

  // A projection name exists if this model owns an unmerged projection
  // component, or the name map knows about it (merged by a parent).
  if (this.component.projection && !this.component.projection.merged || this.projectionNameMap.has(this.getName('projection'))) {
    return this.projectionNameMap.get(this.getName('projection'));
  }

  return undefined;
}
|
|
/**
|
|
* Traverse a model's hierarchy to get the scale component for a particular channel.
|
|
*/
|
|
|
|
}, {
|
|
key: "getScaleComponent",
|
|
value: function getScaleComponent(channel) {
|
|
/* istanbul ignore next: This is warning for debugging test */
|
|
if (!this.component.scales) {
|
|
throw new Error('getScaleComponent cannot be called before parseScale(). Make sure you have called parseScale or use parseUnitModelWithScale().');
|
|
}
|
|
|
|
var localScaleComponent = this.component.scales[channel];
|
|
|
|
if (localScaleComponent && !localScaleComponent.merged) {
|
|
return localScaleComponent;
|
|
}
|
|
|
|
return this.parent ? this.parent.getScaleComponent(channel) : undefined;
|
|
}
|
|
/**
|
|
* Traverse a model's hierarchy to get a particular selection component.
|
|
*/
|
|
|
|
}, {
|
|
key: "getSelectionComponent",
|
|
value: function getSelectionComponent(variableName, origName) {
|
|
var sel = this.component.selection[variableName];
|
|
|
|
if (!sel && this.parent) {
|
|
sel = this.parent.getSelectionComponent(variableName, origName);
|
|
}
|
|
|
|
if (!sel) {
|
|
throw new Error(selectionNotFound(origName));
|
|
}
|
|
|
|
return sel;
|
|
}
|
|
/**
|
|
* Returns true if the model has a signalRef for an axis orient.
|
|
*/
|
|
|
|
}, {
|
|
key: "hasAxisOrientSignalRef",
|
|
value: function hasAxisOrientSignalRef() {
  // True if any x- or y-axis component of this model uses a signal reference
  // for its orient property.
  var _a, _b;

  return ((_a = this.component.axes.x) === null || _a === void 0 ? void 0 : _a.some(function (a) {
    return a.hasOrientSignalRef();
  })) || ((_b = this.component.axes.y) === null || _b === void 0 ? void 0 : _b.some(function (a) {
    return a.hasOrientSignalRef();
  }));
}
|
|
}, {
|
|
key: "width",
|
|
get: function get() {
  // Signal reference for this model's width.
  return this.getSizeSignalRef('width');
}
|
|
}, {
|
|
key: "height",
|
|
get: function get() {
  // Signal reference for this model's height.
  return this.getSizeSignalRef('height');
}
|
|
}]);
|
|
|
|
return Model;
|
|
}();
|
|
/** Abstract class for UnitModel and FacetModel. Both of which can contain fieldDefs as a part of its own specification. */
|
|
|
|
|
|
var ModelWithField = /*#__PURE__*/function (_Model) {
  _inherits(ModelWithField, _Model);

  var _super34 = _createSuper(ModelWithField);

  // Pass-through constructor: all arguments go straight to Model.
  function ModelWithField() {
    _classCallCheck(this, ModelWithField);

    return _super34.apply(this, arguments);
  }

  _createClass(ModelWithField, [{
    key: "vgField",

    /** Get "field" reference for Vega */
    value: function vgField(channel) {
      var opt = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
      var fieldDef = this.fieldDef(channel);

      // No field definition on this channel -> no field reference.
      if (!fieldDef) {
        return undefined;
      }

      return _vgField(fieldDef, opt);
    }
  }, {
    key: "reduceFieldDef",
    // Fold `f(acc, fieldDef, channel)` over every channel in this model's
    // mapping that has a field definition, starting from `init`.
    value: function reduceFieldDef(f, init) {
      return reduce(this.getMapping(), function (acc, cd, c) {
        var fieldDef = getFieldDef(cd);

        if (fieldDef) {
          return f(acc, fieldDef, c);
        }

        return acc;
      }, init);
    }
  }, {
    key: "forEachFieldDef",
    // Invoke `f(fieldDef, channel)` (with optional this-arg `t`) for every
    // channel in this model's mapping that has a field definition.
    value: function forEachFieldDef(f, t) {
      forEach(this.getMapping(), function (cd, c) {
        var fieldDef = getFieldDef(cd);

        if (fieldDef) {
          f(fieldDef, c);
        }
      }, t);
    }
  }]);

  return ModelWithField;
}(Model);
|
|
/**
|
|
* A class for density transform nodes
|
|
*/
|
|
|
|
|
|
var DensityTransformNode = /*#__PURE__*/function (_DataFlowNode16) {
  _inherits(DensityTransformNode, _DataFlowNode16);

  var _super35 = _createSuper(DensityTransformNode);

  function DensityTransformNode(parent, transform) {
    var _this30;

    _classCallCheck(this, DensityTransformNode);

    var _a, _b, _c;

    _this30 = _super35.call(this, parent);
    _this30.transform = transform;
    _this30.transform = duplicate(transform); // duplicate to prevent side effects

    // Default the output field names when `as` is not (fully) specified.
    var specifiedAs = (_a = _this30.transform.as) !== null && _a !== void 0 ? _a : [undefined, undefined];
    _this30.transform.as = [(_b = specifiedAs[0]) !== null && _b !== void 0 ? _b : 'value', (_c = specifiedAs[1]) !== null && _c !== void 0 ? _c : 'density'];
    return _this30;
  }

  _createClass(DensityTransformNode, [{
    key: "clone",
    value: function clone() {
      // Clones are created detached (parent = null); the optimizer re-attaches them.
      return new DensityTransformNode(null, duplicate(this.transform));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      var _a;

      // The density field plus any groupby fields.
      return new Set([this.transform.density].concat(_toConsumableArray((_a = this.transform.groupby) !== null && _a !== void 0 ? _a : [])));
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      // The two output fields named by `as`.
      return new Set(this.transform.as);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "DensityTransform ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Emit a Vega "kde" transform; `density` maps to the kde `field` parameter
      // and the remaining transform options are passed through unchanged.
      var _a = this.transform,
          density = _a.density,
          rest = __rest(_a, ["density"]);

      var result = Object.assign({
        type: 'kde',
        field: density
      }, rest);
      return result;
    }
  }]);

  return DensityTransformNode;
}(DataFlowNode);
|
|
|
|
var FilterInvalidNode = /*#__PURE__*/function (_DataFlowNode17) {
  _inherits(FilterInvalidNode, _DataFlowNode17);

  var _super36 = _createSuper(FilterInvalidNode);

  // `filter` maps field name -> field def whose invalid values must be removed.
  function FilterInvalidNode(parent, filter) {
    var _this31;

    _classCallCheck(this, FilterInvalidNode);

    _this31 = _super36.call(this, parent);
    _this31.filter = filter;
    return _this31;
  }

  _createClass(FilterInvalidNode, [{
    key: "clone",
    value: function clone() {
      // Shallow copy of the filter map is sufficient: field defs are not mutated here.
      return new FilterInvalidNode(null, Object.assign({}, this.filter));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      return new Set(keys(this.filter));
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(); // filter does not produce any new fields
    }
  }, {
    key: "hash",
    value: function hash() {
      return "FilterInvalid ".concat(_hash(this.filter));
    }
    /**
     * Create the VgTransforms for each of the filtered fields.
     */

  }, {
    key: "assemble",
    value: function assemble() {
      var _this32 = this;

      // Build one validity predicate per filtered field, then combine them
      // into a single Vega filter transform (null when nothing to filter).
      var filters = keys(this.filter).reduce(function (vegaFilters, field) {
        var fieldDef = _this32.filter[field];

        var ref = _vgField(fieldDef, {
          expr: 'datum'
        });

        if (fieldDef !== null) {
          if (fieldDef.type === 'temporal') {
            // Temporal: accept real Date objects, or valid finite numbers (timestamps).
            vegaFilters.push("(isDate(".concat(ref, ") || (isValid(").concat(ref, ") && isFinite(+").concat(ref, ")))"));
          } else if (fieldDef.type === 'quantitative') {
            vegaFilters.push("isValid(".concat(ref, ")"));
            vegaFilters.push("isFinite(+".concat(ref, ")"));
          }
        }

        return vegaFilters;
      }, []);
      return filters.length > 0 ? {
        type: 'filter',
        expr: filters.join(' && ')
      } : null;
    }
  }], [{
    key: "make",
    // Build a FilterInvalidNode for a unit model, or null when the mark/config
    // `invalid` property is not 'filter' or no field needs filtering.
    value: function make(parent, model) {
      var config = model.config,
          mark = model.mark,
          markDef = model.markDef;
      var invalid = getMarkPropOrConfig('invalid', markDef, config);

      if (invalid !== 'filter') {
        return null;
      }

      var filter = model.reduceFieldDef(function (aggregator, fieldDef, channel) {
        var scaleComponent = isScaleChannel(channel) && model.getScaleComponent(channel);

        if (scaleComponent) {
          var _scaleType9 = scaleComponent.get('type'); // While discrete domain scales can handle invalid values, continuous scales can't.
          // Thus, for non-path marks, we have to filter null for scales with continuous domains.
          // (For path marks, we will use "defined" property and skip these values instead.)


          if (hasContinuousDomain(_scaleType9) && fieldDef.aggregate !== 'count' && !isPathMark(mark)) {
            aggregator[fieldDef.field] = fieldDef; // we know that the fieldDef is a typed field def
          }
        }

        return aggregator;
      }, {});

      if (!keys(filter).length) {
        return null;
      }

      return new FilterInvalidNode(parent, filter);
    }
  }]);

  return FilterInvalidNode;
}(DataFlowNode);
|
|
/**
|
|
* A class for flatten transform nodes
|
|
*/
|
|
|
|
|
|
var FlattenTransformNode = /*#__PURE__*/function (_DataFlowNode18) {
  _inherits(FlattenTransformNode, _DataFlowNode18);

  var _super37 = _createSuper(FlattenTransformNode);

  function FlattenTransformNode(parent, transform) {
    var _this33;

    _classCallCheck(this, FlattenTransformNode);

    _this33 = _super37.call(this, parent);
    _this33.transform = transform;
    _this33.transform = duplicate(transform); // duplicate to prevent side effects

    // Each flattened field defaults its output name to the input field name
    // when no corresponding `as` entry is given.
    var _this33$transform = _this33.transform,
        flatten = _this33$transform.flatten,
        _this33$transform$as = _this33$transform.as,
        as = _this33$transform$as === void 0 ? [] : _this33$transform$as;
    _this33.transform.as = flatten.map(function (f, i) {
      var _a;

      return (_a = as[i]) !== null && _a !== void 0 ? _a : f;
    });
    return _this33;
  }

  _createClass(FlattenTransformNode, [{
    key: "clone",
    value: function clone() {
      // NOTE(review): unlike sibling transform nodes (which clone detached with
      // a null parent), this clone is attached to the same parent — confirm
      // this asymmetry is intended upstream.
      return new FlattenTransformNode(this.parent, duplicate(this.transform));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      return new Set(this.transform.flatten);
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(this.transform.as);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "FlattenTransform ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Emit a Vega "flatten" transform with the input fields and output names.
      var _this$transform = this.transform,
          fields = _this$transform.flatten,
          as = _this$transform.as;
      var result = {
        type: 'flatten',
        fields: fields,
        as: as
      };
      return result;
    }
  }]);

  return FlattenTransformNode;
}(DataFlowNode);
|
|
/**
|
|
 * A class for fold transform nodes
|
|
*/
|
|
|
|
|
|
var FoldTransformNode = /*#__PURE__*/function (_DataFlowNode19) {
  _inherits(FoldTransformNode, _DataFlowNode19);

  var _super38 = _createSuper(FoldTransformNode);

  function FoldTransformNode(parent, transform) {
    var _this34;

    _classCallCheck(this, FoldTransformNode);

    var _a, _b, _c;

    _this34 = _super38.call(this, parent);
    _this34.transform = transform;
    _this34.transform = duplicate(transform); // duplicate to prevent side effects

    // Default the output field names to ['key', 'value'] when `as` is not
    // (fully) specified.
    var specifiedAs = (_a = _this34.transform.as) !== null && _a !== void 0 ? _a : [undefined, undefined];
    _this34.transform.as = [(_b = specifiedAs[0]) !== null && _b !== void 0 ? _b : 'key', (_c = specifiedAs[1]) !== null && _c !== void 0 ? _c : 'value'];
    return _this34;
  }

  _createClass(FoldTransformNode, [{
    key: "clone",
    value: function clone() {
      // Clones are created detached (parent = null); the optimizer re-attaches them.
      return new FoldTransformNode(null, duplicate(this.transform));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      return new Set(this.transform.fold);
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(this.transform.as);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "FoldTransform ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Emit a Vega "fold" transform; `fold` lists the input fields and `as`
      // names the key/value output fields.
      var _this$transform2 = this.transform,
          fold = _this$transform2.fold,
          as = _this$transform2.as;
      var result = {
        type: 'fold',
        fields: fold,
        as: as
      };
      return result;
    }
  }]);

  return FoldTransformNode;
}(DataFlowNode);
|
|
|
|
var GeoJSONNode = /*#__PURE__*/function (_DataFlowNode20) {
  _inherits(GeoJSONNode, _DataFlowNode20);

  var _super39 = _createSuper(GeoJSONNode);

  // `fields`: lon/lat field pair (or expr refs); `geojson`: a GeoJSON field
  // name; `signal`: name of the output signal holding the geojson data.
  function GeoJSONNode(parent, fields, geojson, signal) {
    var _this35;

    _classCallCheck(this, GeoJSONNode);

    _this35 = _super39.call(this, parent);
    _this35.fields = fields;
    _this35.geojson = geojson;
    _this35.signal = signal;
    return _this35;
  }

  _createClass(GeoJSONNode, [{
    key: "clone",
    value: function clone() {
      return new GeoJSONNode(null, duplicate(this.fields), this.geojson, this.signal);
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      var _a;

      // Only plain string fields count; expr-ref entries are not data fields.
      var fields = ((_a = this.fields) !== null && _a !== void 0 ? _a : []).filter(isString);
      return new Set([].concat(_toConsumableArray(this.geojson ? [this.geojson] : []), _toConsumableArray(fields)));
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      // Output goes to a signal, not to new data fields.
      return new Set();
    }
  }, {
    key: "hash",
    value: function hash() {
      return "GeoJSON ".concat(this.geojson, " ").concat(this.signal, " ").concat(_hash(this.fields));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Emit a Vega "geojson" transform with whichever of fields/geojson are set.
      return Object.assign(Object.assign(Object.assign({
        type: 'geojson'
      }, this.fields ? {
        fields: this.fields
      } : {}), this.geojson ? {
        geojson: this.geojson
      } : {}), {
        signal: this.signal
      });
    }
  }], [{
    key: "parseAll",
    // Append a GeoJSONNode per lon/lat pair (and per GeoJSON-typed shape field)
    // so a fitted projection can compute its extent. Skips models whose
    // projection is not fit-based.
    value: function parseAll(parent, model) {
      if (model.component.projection && !model.component.projection.isFit) {
        return parent;
      }

      var geoJsonCounter = 0;

      for (var _i15 = 0, _arr11 = [[LONGITUDE, LATITUDE], [LONGITUDE2, LATITUDE2]]; _i15 < _arr11.length; _i15++) {
        var coordinates = _arr11[_i15];
        var pair = coordinates.map(function (channel) {
          // Field defs contribute the field name; datum/value defs become exprs.
          var def = getFieldOrDatumDef(model.encoding[channel]);
          return isFieldDef(def) ? def.field : isDatumDef(def) ? {
            expr: "".concat(def.datum)
          } : isValueDef(def) ? {
            expr: "".concat(def['value'])
          } : undefined;
        });

        if (pair[0] || pair[1]) {
          parent = new GeoJSONNode(parent, pair, null, model.getName("geojson_".concat(geoJsonCounter++)));
        }
      }

      if (model.channelHasField(SHAPE)) {
        var fieldDef = model.typedFieldDef(SHAPE);

        if (fieldDef.type === GEOJSON) {
          parent = new GeoJSONNode(parent, null, fieldDef.field, model.getName("geojson_".concat(geoJsonCounter++)));
        }
      }

      return parent;
    }
  }]);

  return GeoJSONNode;
}(DataFlowNode);
|
|
|
|
var GeoPointNode = /*#__PURE__*/function (_DataFlowNode21) {
  _inherits(GeoPointNode, _DataFlowNode21);

  var _super40 = _createSuper(GeoPointNode);

  // Projects a lon/lat field pair (`fields`) through `projection` into the
  // pixel-coordinate output fields named by `as`.
  function GeoPointNode(parent, projection, fields, as) {
    var _this36;

    _classCallCheck(this, GeoPointNode);

    _this36 = _super40.call(this, parent);
    _this36.projection = projection;
    _this36.fields = fields;
    _this36.as = as;
    return _this36;
  }

  _createClass(GeoPointNode, [{
    key: "clone",
    value: function clone() {
      return new GeoPointNode(null, this.projection, duplicate(this.fields), duplicate(this.as));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      // Only plain string fields count; expr-ref entries are not data fields.
      return new Set(this.fields.filter(isString));
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(this.as);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "Geopoint ".concat(this.projection, " ").concat(_hash(this.fields), " ").concat(_hash(this.as));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Emit a Vega "geopoint" transform.
      return {
        type: 'geopoint',
        projection: this.projection,
        fields: this.fields,
        as: this.as
      };
    }
  }], [{
    key: "parseAll",
    // Append a GeoPointNode for each encoded lon/lat pair (primary and
    // secondary), writing projected x/y (or x2/y2) fields. No-op when the
    // model has no projection.
    value: function parseAll(parent, model) {
      if (!model.projectionName()) {
        return parent;
      }

      for (var _i16 = 0, _arr12 = [[LONGITUDE, LATITUDE], [LONGITUDE2, LATITUDE2]]; _i16 < _arr12.length; _i16++) {
        var coordinates = _arr12[_i16];
        var pair = coordinates.map(function (channel) {
          // Field defs contribute the field name; datum/value defs become exprs.
          var def = getFieldOrDatumDef(model.encoding[channel]);
          return isFieldDef(def) ? def.field : isDatumDef(def) ? {
            expr: "".concat(def.datum)
          } : isValueDef(def) ? {
            expr: "".concat(def['value'])
          } : undefined;
        });
        // Secondary (x2/y2) outputs get a '2' suffix.
        var suffix = coordinates[0] === LONGITUDE2 ? '2' : '';

        if (pair[0] || pair[1]) {
          parent = new GeoPointNode(parent, model.projectionName(), pair, [model.getName('x' + suffix), model.getName('y' + suffix)]);
        }
      }

      return parent;
    }
  }]);

  return GeoPointNode;
}(DataFlowNode);
|
|
|
|
var ImputeNode = /*#__PURE__*/function (_DataFlowNode22) {
  _inherits(ImputeNode, _DataFlowNode22);

  var _super41 = _createSuper(ImputeNode);

  function ImputeNode(parent, transform) {
    var _this37;

    _classCallCheck(this, ImputeNode);

    _this37 = _super41.call(this, parent);
    _this37.transform = transform;
    return _this37;
  }

  _createClass(ImputeNode, [{
    key: "clone",
    value: function clone() {
      return new ImputeNode(null, duplicate(this.transform));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      var _a;

      // The imputed field, the key field, and any groupby fields.
      return new Set([this.transform.impute, this.transform.key].concat(_toConsumableArray((_a = this.transform.groupby) !== null && _a !== void 0 ? _a : [])));
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set([this.transform.impute]);
    }
  }, {
    key: "processSequence",
    // Turn an impute key sequence {start, stop, step?} into a Vega
    // sequence(...) signal expression (start defaults to 0).
    value: function processSequence(keyvals) {
      var _keyvals$start = keyvals.start,
          start = _keyvals$start === void 0 ? 0 : _keyvals$start,
          stop = keyvals.stop,
          step = keyvals.step;
      var result = [start, stop].concat(_toConsumableArray(step ? [step] : [])).join(',');
      return {
        signal: "sequence(".concat(result, ")")
      };
    }
  }, {
    key: "hash",
    value: function hash() {
      return "Impute ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      var _this$transform3 = this.transform,
          impute = _this$transform3.impute,
          key = _this$transform3.key,
          keyvals = _this$transform3.keyvals,
          method = _this$transform3.method,
          groupby = _this$transform3.groupby,
          value = _this$transform3.value,
          _this$transform3$fram = _this$transform3.frame,
          frame = _this$transform3$fram === void 0 ? [null, null] : _this$transform3$fram;
      // The Vega impute transform always runs with method 'value': for derived
      // methods (mean/median/...), missing rows are first imputed with null and
      // then filled in by the window + formula transforms below.
      var imputeTransform = Object.assign(Object.assign(Object.assign(Object.assign({
        type: 'impute',
        field: impute,
        key: key
      }, keyvals ? {
        keyvals: isImputeSequence(keyvals) ? this.processSequence(keyvals) : keyvals
      } : {}), {
        method: 'value'
      }), groupby ? {
        groupby: groupby
      } : {}), {
        value: !method || method === 'value' ? value : null
      });

      if (method && method !== 'value') {
        // Compute the replacement value (e.g. mean) over the frame, then
        // substitute it for the null rows introduced by the impute transform.
        var deriveNewField = Object.assign({
          type: 'window',
          as: ["imputed_".concat(impute, "_value")],
          ops: [method],
          fields: [impute],
          frame: frame,
          ignorePeers: false
        }, groupby ? {
          groupby: groupby
        } : {});
        var replaceOriginal = {
          type: 'formula',
          expr: "datum.".concat(impute, " === null ? datum.imputed_").concat(impute, "_value : datum.").concat(impute),
          as: impute
        };
        return [imputeTransform, deriveNewField, replaceOriginal];
      } else {
        return [imputeTransform];
      }
    }
  }], [{
    key: "makeFromTransform",
    // Build an ImputeNode from an explicit `impute` transform spec.
    value: function makeFromTransform(parent, imputeTransform) {
      return new ImputeNode(parent, imputeTransform);
    }
  }, {
    key: "makeFromEncoding",
    // Build an ImputeNode from an `impute` property on the x or y field def;
    // the opposite positional field serves as the key. Returns undefined/null
    // when no encoding-level impute is present.
    value: function makeFromEncoding(parent, model) {
      var encoding = model.encoding;
      var xDef = encoding.x;
      var yDef = encoding.y;

      if (isFieldDef(xDef) && isFieldDef(yDef)) {
        var imputedChannel = xDef.impute ? xDef : yDef.impute ? yDef : undefined;

        if (imputedChannel === undefined) {
          return undefined;
        }

        var keyChannel = xDef.impute ? yDef : yDef.impute ? xDef : undefined;
        var _imputedChannel$imput = imputedChannel.impute,
            method = _imputedChannel$imput.method,
            value = _imputedChannel$imput.value,
            frame = _imputedChannel$imput.frame,
            keyvals = _imputedChannel$imput.keyvals;
        // Group by the path-grouping fields so series are imputed independently.
        var groupbyFields = pathGroupingFields(model.mark, encoding);
        return new ImputeNode(parent, Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({
          impute: imputedChannel.field,
          key: keyChannel.field
        }, method ? {
          method: method
        } : {}), value !== undefined ? {
          value: value
        } : {}), frame ? {
          frame: frame
        } : {}), keyvals !== undefined ? {
          keyvals: keyvals
        } : {}), groupbyFields.length ? {
          groupby: groupbyFields
        } : {}));
      }

      return null;
    }
  }]);

  return ImputeNode;
}(DataFlowNode);
|
|
/**
|
|
* A class for loess transform nodes
|
|
*/
|
|
|
|
|
|
var LoessTransformNode = /*#__PURE__*/function (_DataFlowNode23) {
  _inherits(LoessTransformNode, _DataFlowNode23);

  var _super42 = _createSuper(LoessTransformNode);

  function LoessTransformNode(parent, transform) {
    var _this38;

    _classCallCheck(this, LoessTransformNode);

    var _a, _b, _c;

    _this38 = _super42.call(this, parent);
    _this38.transform = transform;
    _this38.transform = duplicate(transform); // duplicate to prevent side effects

    // Output field names default to the input field names (`on`, `loess`)
    // when `as` is not (fully) specified.
    var specifiedAs = (_a = _this38.transform.as) !== null && _a !== void 0 ? _a : [undefined, undefined];
    _this38.transform.as = [(_b = specifiedAs[0]) !== null && _b !== void 0 ? _b : transform.on, (_c = specifiedAs[1]) !== null && _c !== void 0 ? _c : transform.loess];
    return _this38;
  }

  _createClass(LoessTransformNode, [{
    key: "clone",
    value: function clone() {
      // Clones are created detached (parent = null); the optimizer re-attaches them.
      return new LoessTransformNode(null, duplicate(this.transform));
    }
  }, {
    key: "dependentFields",
    value: function dependentFields() {
      var _a;

      // The smoothed field, the independent field, and any groupby fields.
      return new Set([this.transform.loess, this.transform.on].concat(_toConsumableArray((_a = this.transform.groupby) !== null && _a !== void 0 ? _a : [])));
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(this.transform.as);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "LoessTransform ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    value: function assemble() {
      // Emit a Vega "loess" transform: `on` is the x (independent) field and
      // `loess` the y (smoothed) field; remaining options pass through.
      var _a = this.transform,
          loess = _a.loess,
          on = _a.on,
          rest = __rest(_a, ["loess", "on"]);

      var result = Object.assign({
        type: 'loess',
        x: on,
        y: loess
      }, rest);
      return result;
    }
  }]);

  return LoessTransformNode;
}(DataFlowNode);
|
|
|
|
/**
 * A data-flow node for the lookup transform: joins fields from a secondary
 * data source (or a materialized selection) onto the primary stream.
 */
var LookupNode = /*#__PURE__*/function (_DataFlowNode24) {
  _inherits(LookupNode, _DataFlowNode24);

  var _super43 = _createSuper(LookupNode);

  /**
   * @param parent    parent data-flow node
   * @param transform the lookup transform spec
   * @param secondary name of the secondary (lookup) data source
   */
  function LookupNode(parent, transform, secondary) {
    var _this39;

    _classCallCheck(this, LookupNode);

    _this39 = _super43.call(this, parent);
    _this39.transform = transform;
    _this39.secondary = secondary;
    return _this39;
  }

  _createClass(LookupNode, [{
    key: "clone",
    value: function clone() {
      return new LookupNode(null, duplicate(this.transform), this.secondary);
    }
  }, {
    key: "dependentFields",
    // Only the lookup key on the primary stream is read by this node.
    value: function dependentFields() {
      return new Set([this.transform.lookup]);
    }
  }, {
    key: "producedFields",
    // Either the explicit `as` names or the fields pulled from the secondary source.
    value: function producedFields() {
      return new Set(this.transform.as ? array(this.transform.as) : this.transform.from.fields);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "Lookup ".concat(_hash({
        transform: this.transform,
        secondary: this.secondary
      }));
    }
  }, {
    key: "assemble",
    // Produces the Vega `lookup` transform definition.
    value: function assemble() {
      var foreign;

      if (this.transform.from.fields) {
        // lookup a few fields and add create a flat output
        foreign = Object.assign({
          values: this.transform.from.fields
        }, this.transform.as ? {
          as: array(this.transform.as)
        } : {});
      } else {
        // lookup full record and nest it
        var asName = this.transform.as;

        if (!isString(asName)) {
          // Without from.fields a single string `as` is required; warn and
          // fall back to '_lookup'.
          warn(NO_FIELDS_NEEDS_AS);
          asName = '_lookup';
        }

        foreign = {
          as: [asName]
        };
      }

      return Object.assign(Object.assign({
        type: 'lookup',
        from: this.secondary,
        key: this.transform.from.key,
        fields: [this.transform.lookup]
      }, foreign), this.transform.default ? {
        default: this.transform.default
      } : {});
    }
  }], [{
    key: "make",
    // Static factory: resolves the transform's `from` into either a shared
    // source node (lookup data) or a materialized selection output, then
    // builds the LookupNode pointing at that source.
    value: function make(parent, model, transform, counter) {
      var sources = model.component.data.sources;
      var _transform = transform,
          from = _transform.from;
      var fromOutputNode = null;

      if (isLookupData(from)) {
        // Reuse an existing source node for the same data when possible.
        var fromSource = findSource(from.data, sources);

        if (!fromSource) {
          fromSource = new SourceNode(from.data);
          sources.push(fromSource);
        }

        var fromOutputName = model.getName("lookup_".concat(counter));
        fromOutputNode = new OutputNode(fromSource, fromOutputName, DataSourceType.Lookup, model.component.data.outputNodeRefCounts);
        model.component.data.outputNodes[fromOutputName] = fromOutputNode;
      } else if (isLookupSelection(from)) {
        var selName = from.selection;
        // Default `as` to the selection name.
        transform = Object.assign({
          as: selName
        }, transform);
        fromOutputNode = model.getSelectionComponent(varName(selName), selName).materialized;

        if (!fromOutputNode) {
          // The selection must be materialized in the same unit to be looked up.
          throw new Error(noSameUnitLookup(selName));
        }
      }

      return new LookupNode(parent, transform, fromOutputNode.getSource());
    }
  }]);

  return LookupNode;
}(DataFlowNode);
|
|
/**
|
|
* A class for quantile transform nodes
|
|
*/
|
|
|
|
|
|
var QuantileTransformNode = /*#__PURE__*/function (_DataFlowNode25) {
  _inherits(QuantileTransformNode, _DataFlowNode25);

  var _super44 = _createSuper(QuantileTransformNode);

  /**
   * @param parent    parent data-flow node (may be null)
   * @param transform the quantile transform spec; duplicated to avoid
   *                  mutating the caller's object
   */
  function QuantileTransformNode(parent, transform) {
    var _this40;

    _classCallCheck(this, QuantileTransformNode);

    var _a, _b, _c;

    _this40 = _super44.call(this, parent);
    _this40.transform = transform;

    _this40.transform = duplicate(transform); // duplicate to prevent side effects

    // Default output field names to ['prob', 'value'] when `as` is missing.
    var specifiedAs = (_a = _this40.transform.as) !== null && _a !== void 0 ? _a : [undefined, undefined];

    _this40.transform.as = [(_b = specifiedAs[0]) !== null && _b !== void 0 ? _b : 'prob', (_c = specifiedAs[1]) !== null && _c !== void 0 ? _c : 'value'];
    return _this40;
  }

  _createClass(QuantileTransformNode, [{
    key: "clone",
    value: function clone() {
      return new QuantileTransformNode(null, duplicate(this.transform));
    }
  }, {
    key: "dependentFields",
    // Fields read: the quantile field plus any groupby fields.
    value: function dependentFields() {
      var _a;

      return new Set([this.transform.quantile].concat(_toConsumableArray((_a = this.transform.groupby) !== null && _a !== void 0 ? _a : [])));
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(this.transform.as);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "QuantileTransform ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    // Produces the Vega `quantile` transform; `quantile` maps to `field`.
    value: function assemble() {
      var _a = this.transform,
          quantile = _a.quantile,
          rest = __rest(_a, ["quantile"]);

      var result = Object.assign({
        type: 'quantile',
        field: quantile
      }, rest);
      return result;
    }
  }]);

  return QuantileTransformNode;
}(DataFlowNode);
|
|
/**
|
|
* A class for regression transform nodes
|
|
*/
|
|
|
|
|
|
var RegressionTransformNode = /*#__PURE__*/function (_DataFlowNode26) {
  _inherits(RegressionTransformNode, _DataFlowNode26);

  var _super45 = _createSuper(RegressionTransformNode);

  /**
   * @param parent    parent data-flow node (may be null)
   * @param transform the regression transform spec; duplicated to avoid
   *                  mutating the caller's object
   */
  function RegressionTransformNode(parent, transform) {
    var _this41;

    _classCallCheck(this, RegressionTransformNode);

    var _a, _b, _c;

    _this41 = _super45.call(this, parent);
    _this41.transform = transform;

    _this41.transform = duplicate(transform); // duplicate to prevent side effects

    // Default output field names to [on, regression] when `as` is missing.
    var specifiedAs = (_a = _this41.transform.as) !== null && _a !== void 0 ? _a : [undefined, undefined];

    _this41.transform.as = [(_b = specifiedAs[0]) !== null && _b !== void 0 ? _b : transform.on, (_c = specifiedAs[1]) !== null && _c !== void 0 ? _c : transform.regression];
    return _this41;
  }

  _createClass(RegressionTransformNode, [{
    key: "clone",
    value: function clone() {
      return new RegressionTransformNode(null, duplicate(this.transform));
    }
  }, {
    key: "dependentFields",
    // Fields read: regression/on fields plus any groupby fields.
    value: function dependentFields() {
      var _a;

      return new Set([this.transform.regression, this.transform.on].concat(_toConsumableArray((_a = this.transform.groupby) !== null && _a !== void 0 ? _a : [])));
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return new Set(this.transform.as);
    }
  }, {
    key: "hash",
    value: function hash() {
      return "RegressionTransform ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    // Produces the Vega `regression` transform: `on` → x, `regression` → y.
    value: function assemble() {
      var _a = this.transform,
          regression = _a.regression,
          on = _a.on,
          rest = __rest(_a, ["regression", "on"]);

      var result = Object.assign({
        type: 'regression',
        x: on,
        y: regression
      }, rest);
      return result;
    }
  }]);

  return RegressionTransformNode;
}(DataFlowNode);
|
|
/**
|
|
* A class for pivot transform nodes.
|
|
*/
|
|
|
|
|
|
var PivotTransformNode = /*#__PURE__*/function (_DataFlowNode27) {
  _inherits(PivotTransformNode, _DataFlowNode27);

  var _super46 = _createSuper(PivotTransformNode);

  /**
   * @param parent    parent data-flow node
   * @param transform the pivot transform spec
   */
  function PivotTransformNode(parent, transform) {
    var _this42;

    _classCallCheck(this, PivotTransformNode);

    _this42 = _super46.call(this, parent);
    _this42.transform = transform;
    return _this42;
  }

  _createClass(PivotTransformNode, [{
    key: "clone",
    value: function clone() {
      return new PivotTransformNode(null, duplicate(this.transform));
    }
  }, {
    key: "addDimensions",
    // Appends extra groupby fields, de-duplicating against existing ones.
    value: function addDimensions(fields) {
      var _a;

      this.transform.groupby = unique(((_a = this.transform.groupby) !== null && _a !== void 0 ? _a : []).concat(fields), function (d) {
        return d;
      });
    }
  }, {
    key: "producedFields",
    value: function producedFields() {
      return undefined; // return undefined so that potentially everything can depend on the pivot
    }
  }, {
    key: "dependentFields",
    // Fields read: pivot and value fields plus any groupby fields.
    value: function dependentFields() {
      var _a;

      return new Set([this.transform.pivot, this.transform.value].concat(_toConsumableArray((_a = this.transform.groupby) !== null && _a !== void 0 ? _a : [])));
    }
  }, {
    key: "hash",
    value: function hash() {
      return "PivotTransform ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    // Produces the Vega `pivot` transform; limit/op/groupby are included
    // only when specified.
    value: function assemble() {
      var _this$transform4 = this.transform,
          pivot = _this$transform4.pivot,
          value = _this$transform4.value,
          groupby = _this$transform4.groupby,
          limit = _this$transform4.limit,
          op = _this$transform4.op;
      return Object.assign(Object.assign(Object.assign({
        type: 'pivot',
        field: pivot,
        value: value
      }, limit !== undefined ? {
        limit: limit
      } : {}), op !== undefined ? {
        op: op
      } : {}), groupby !== undefined ? {
        groupby: groupby
      } : {});
    }
  }]);

  return PivotTransformNode;
}(DataFlowNode);
|
|
/**
|
|
* A class for the sample transform nodes
|
|
*/
|
|
|
|
|
|
var SampleTransformNode = /*#__PURE__*/function (_DataFlowNode28) {
  _inherits(SampleTransformNode, _DataFlowNode28);

  var _super47 = _createSuper(SampleTransformNode);

  /**
   * @param parent    parent data-flow node
   * @param transform the sample transform spec (`transform.sample` is the
   *                  maximum number of rows to keep)
   */
  function SampleTransformNode(parent, transform) {
    var _this43;

    _classCallCheck(this, SampleTransformNode);

    _this43 = _super47.call(this, parent);
    _this43.transform = transform;
    return _this43;
  }

  _createClass(SampleTransformNode, [{
    key: "clone",
    value: function clone() {
      return new SampleTransformNode(null, duplicate(this.transform));
    }
  }, {
    key: "dependentFields",
    // Sampling reads no specific fields.
    value: function dependentFields() {
      return new Set();
    }
  }, {
    key: "producedFields",
    // Sampling produces no new fields.
    value: function producedFields() {
      return new Set();
    }
  }, {
    key: "hash",
    value: function hash() {
      return "SampleTransform ".concat(_hash(this.transform));
    }
  }, {
    key: "assemble",
    // Produces the Vega `sample` transform.
    value: function assemble() {
      return {
        type: 'sample',
        size: this.transform.sample
      };
    }
  }]);

  return SampleTransformNode;
}(DataFlowNode);
|
|
|
|
/**
 * Create a tree walker that assembles Vega data sources from a data-flow
 * tree, pushing completed data source definitions into `data`.
 *
 * @param data output array of Vega data definitions (mutated in place)
 * @returns the recursive `walkTree(node, dataSource)` function
 */
function makeWalkTree(data) {
  // to name datasources
  var datasetIndex = 0;
  /**
   * Recursively walk down the tree.
   */

  function walkTree(node, dataSource) {
    var _a;

    if (node instanceof SourceNode) {
      // If the source is a named data source or a data source with values, we need
      // to put it in a different data source. Otherwise, Vega may override the data.
      if (!node.isGenerator && !isUrlData(node.data)) {
        data.push(dataSource);
        var newData = {
          name: null,
          source: dataSource.name,
          transform: []
        };
        dataSource = newData;
      }
    }

    if (node instanceof ParseNode) {
      if (node.parent instanceof SourceNode && !dataSource.source) {
        var _dataSource$transform;

        // If node's parent is a root source and the data source does not refer to another data source, use normal format parse
        dataSource.format = Object.assign(Object.assign({}, (_a = dataSource.format) !== null && _a !== void 0 ? _a : {}), {
          parse: node.assembleFormatParse()
        }); // add calculates for all nested fields

        (_dataSource$transform = dataSource.transform).push.apply(_dataSource$transform, _toConsumableArray(node.assembleTransforms(true)));
      } else {
        var _dataSource$transform2;

        // Otherwise use Vega expression to parse
        (_dataSource$transform2 = dataSource.transform).push.apply(_dataSource$transform2, _toConsumableArray(node.assembleTransforms()));
      }
    }

    if (node instanceof FacetNode) {
      if (!dataSource.name) {
        dataSource.name = "data_".concat(datasetIndex++);
      }

      // Only push a real data source; a transform-free reference is
      // pointed at directly instead.
      if (!dataSource.source || dataSource.transform.length > 0) {
        data.push(dataSource);
        node.data = dataSource.name;
      } else {
        node.data = dataSource.source;
      }

      var _iterator163 = _createForOfIteratorHelper(node.assemble()),
          _step163;

      try {
        for (_iterator163.s(); !(_step163 = _iterator163.n()).done;) {
          var d = _step163.value;
          data.push(d);
        } // break here because the rest of the tree has to be taken care of by the facet.

      } catch (err) {
        _iterator163.e(err);
      } finally {
        _iterator163.f();
      }

      return;
    }

    // Nodes that assemble into a single Vega transform.
    if (node instanceof GraticuleNode || node instanceof SequenceNode || node instanceof FilterInvalidNode || node instanceof FilterNode || node instanceof CalculateNode || node instanceof GeoPointNode || node instanceof GeoJSONNode || node instanceof AggregateNode || node instanceof LookupNode || node instanceof WindowTransformNode || node instanceof JoinAggregateTransformNode || node instanceof FoldTransformNode || node instanceof FlattenTransformNode || node instanceof DensityTransformNode || node instanceof LoessTransformNode || node instanceof QuantileTransformNode || node instanceof RegressionTransformNode || node instanceof IdentifierNode || node instanceof SampleTransformNode || node instanceof PivotTransformNode) {
      dataSource.transform.push(node.assemble());
    }

    // Nodes that assemble into an array of Vega transforms.
    if (node instanceof BinNode || node instanceof TimeUnitNode || node instanceof ImputeNode || node instanceof StackNode) {
      var _dataSource$transform3;

      (_dataSource$transform3 = dataSource.transform).push.apply(_dataSource$transform3, _toConsumableArray(node.assemble()));
    }

    if (node instanceof OutputNode) {
      if (dataSource.source && dataSource.transform.length === 0) {
        node.setSource(dataSource.source);
      } else if (node.parent instanceof OutputNode) {
        // Note that an output node may be required but we still do not assemble a
        // separate data source for it.
        node.setSource(dataSource.name);
      } else {
        if (!dataSource.name) {
          dataSource.name = "data_".concat(datasetIndex++);
        } // Here we set the name of the datasource we generated. From now on
        // other assemblers can use it.


        node.setSource(dataSource.name); // if this node has more than one child, we will add a datasource automatically

        if (node.numChildren() === 1) {
          data.push(dataSource);
          var _newData = {
            name: null,
            source: dataSource.name,
            transform: []
          };
          dataSource = _newData;
        }
      }
    }

    switch (node.numChildren()) {
      case 0:
        // done
        if (node instanceof OutputNode && (!dataSource.source || dataSource.transform.length > 0)) {
          // do not push empty datasources that are simply references
          data.push(dataSource);
        }

        break;

      case 1:
        // Single child continues in the same (possibly renamed) data source.
        walkTree(node.children[0], dataSource);
        break;

      default:
        {
          if (!dataSource.name) {
            dataSource.name = "data_".concat(datasetIndex++);
          }

          var _source = dataSource.name;

          if (!dataSource.source || dataSource.transform.length > 0) {
            data.push(dataSource);
          } else {
            _source = dataSource.source;
          }

          // Each child continues in a fresh data source referencing the
          // shared parent.
          var _iterator164 = _createForOfIteratorHelper(node.children),
              _step164;

          try {
            for (_iterator164.s(); !(_step164 = _iterator164.n()).done;) {
              var child = _step164.value;
              var _newData2 = {
                name: null,
                source: _source,
                transform: []
              };
              walkTree(child, _newData2);
            }
          } catch (err) {
            _iterator164.e(err);
          } finally {
            _iterator164.f();
          }

          break;
        }
    }
  }

  return walkTree;
}
|
|
/**
|
|
* Assemble data sources that are derived from faceted data.
|
|
*/
|
|
|
|
|
|
/**
 * Assemble data sources that are derived from faceted data: walks each
 * child of the facet root and collects the resulting Vega data definitions.
 *
 * @param root the facet's root data-flow node (must have a `name` and `children`)
 * @returns array of assembled Vega data definitions
 */
function assembleFacetData(root) {
  var data = [];
  var walkTree = makeWalkTree(data);

  var _iterator165 = _createForOfIteratorHelper(root.children),
      _step165;

  try {
    for (_iterator165.s(); !(_step165 = _iterator165.n()).done;) {
      var child = _step165.value;
      // Each child starts from a fresh data source referencing the root.
      walkTree(child, {
        source: root.name,
        name: null,
        transform: []
      });
    }
  } catch (err) {
    _iterator165.e(err);
  } finally {
    _iterator165.f();
  }

  return data;
}
|
|
/**
|
|
* Create Vega data array from a given compiled model and append all of them to the given array
|
|
*
|
|
* @param model
|
|
* @param data array
|
|
* @return modified data array
|
|
*/
|
|
|
|
|
|
/**
 * Assemble the Vega data array for a compiled model's root data component.
 *
 * Walks every source tree, then post-processes the collected definitions:
 * strips empty transform arrays, moves transform-free sources to the front
 * (so lookups can reference them), rewrites lookup `from` references to
 * actual source names, and inlines values for named datasets.
 *
 * @param dataComponent the model's data component (sources + outputNodes)
 * @param datasets      map of dataset name -> inline values
 * @returns the assembled Vega data array
 */
function assembleRootData(dataComponent, datasets) {
  var _a, _b;

  var data = []; // dataComponent.sources.forEach(debug);
  // draw(dataComponent.sources);

  var walkTree = makeWalkTree(data);
  var sourceIndex = 0;

  var _iterator166 = _createForOfIteratorHelper(dataComponent.sources),
      _step166;

  try {
    for (_iterator166.s(); !(_step166 = _iterator166.n()).done;) {
      var root = _step166.value;

      // assign a name if the source does not have a name yet
      if (!root.hasName()) {
        root.dataName = "source_".concat(sourceIndex++);
      }

      var newData = root.assemble();
      walkTree(root, newData);
    } // remove empty transform arrays for cleaner output

  } catch (err) {
    _iterator166.e(err);
  } finally {
    _iterator166.f();
  }

  for (var _i17 = 0, _data3 = data; _i17 < _data3.length; _i17++) {
    var d = _data3[_i17];

    if (d.transform.length === 0) {
      delete d.transform;
    }
  } // move sources without transforms (the ones that are potentially used in lookups) to the beginning

  var whereTo = 0;

  var _iterator167 = _createForOfIteratorHelper(data.entries()),
      _step167;

  try {
    for (_iterator167.s(); !(_step167 = _iterator167.n()).done;) {
      var _step167$value = _slicedToArray(_step167.value, 2),
          i = _step167$value[0],
          _d4 = _step167$value[1];

      // A plain source (no transforms, no upstream reference) is hoisted;
      // the splice pair removes it at `i` and reinserts it at `whereTo`.
      if (((_a = _d4.transform) !== null && _a !== void 0 ? _a : []).length === 0 && !_d4.source) {
        data.splice(whereTo++, 0, data.splice(i, 1)[0]);
      }
    } // now fix the from references in lookup transforms

  } catch (err) {
    _iterator167.e(err);
  } finally {
    _iterator167.f();
  }

  for (var _i18 = 0, _data4 = data; _i18 < _data4.length; _i18++) {
    var _d2 = _data4[_i18];

    var _iterator168 = _createForOfIteratorHelper((_b = _d2.transform) !== null && _b !== void 0 ? _b : []),
        _step168;

    try {
      for (_iterator168.s(); !(_step168 = _iterator168.n()).done;) {
        var t = _step168.value;

        // Lookup transforms were assembled with output-node names; resolve
        // them to the final assembled source names.
        if (t.type === 'lookup') {
          t.from = dataComponent.outputNodes[t.from].getSource();
        }
      }
    } catch (err) {
      _iterator168.e(err);
    } finally {
      _iterator168.f();
    }
  } // inline values for datasets that are in the datastore

  for (var _i19 = 0, _data5 = data; _i19 < _data5.length; _i19++) {
    var _d3 = _data5[_i19];

    if (_d3.name in datasets) {
      _d3.values = datasets[_d3.name];
    }
  }

  return data;
}
|
|
|
|
/**
 * Decide whether a facet header with the given axis orient renders as a
 * 'header' or a 'footer'. Signal-valued orients always render as 'header'
 * because a header cannot dynamically become a footer.
 */
function getHeaderType(orient) {
  var rendersAsHeader = orient === 'top' || orient === 'left' || isSignalRef(orient);
  return rendersAsHeader ? 'header' : 'footer';
}
|
|
|
|
/**
 * Parse headers for every facet channel of the model, then merge shared
 * child axes into the x/y headers.
 */
function parseFacetHeaders(model) {
  for (var idx = 0; idx < FACET_CHANNELS.length; idx++) {
    parseFacetHeader(model, FACET_CHANNELS[idx]);
  }

  mergeChildAxis(model, 'x');
  mergeChildAxis(model, 'y');
}
|
|
|
|
/**
 * Parse the header component for one facet channel of a facet model.
 *
 * Computes the header title (merging with the child's title when present),
 * resolves label visibility and orientation, and stores the result on
 * `model.component.layoutHeaders[channel]`. No-op when the channel has no field.
 */
function parseFacetHeader(model, channel) {
  var _a;

  var facet = model.facet,
      config = model.config,
      child = model.child,
      component = model.component;

  if (model.channelHasField(channel)) {
    var fieldDef = facet[channel];
    var titleConfig = getHeaderProperty('title', null, config, channel);

    var title$1 = _title3(fieldDef, config, {
      allowDisabling: true,
      includeDefault: titleConfig === undefined || !!titleConfig
    });

    if (child.component.layoutHeaders[channel].title) {
      // TODO: better handle multiline titles
      title$1 = isArray(title$1) ? title$1.join(', ') : title$1; // merge title with child to produce "Title / Subtitle / Sub-subtitle"

      title$1 += ' / ' + child.component.layoutHeaders[channel].title;
      // The child's title has been absorbed into ours; clear it.
      child.component.layoutHeaders[channel].title = null;
    }

    var labelOrient = getHeaderProperty('labelOrient', fieldDef, config, channel);
    var header = (_a = fieldDef.header) !== null && _a !== void 0 ? _a : {};

    // Labels default to true unless disabled on the field or in config.
    var _labels = getFirstDefined(header.labels, config.header.labels, true);

    // bottom/right label orientation places the header as a footer.
    var headerType = contains(['bottom', 'right'], labelOrient) ? 'footer' : 'header';
    component.layoutHeaders[channel] = _defineProperty({
      title: title$1,
      facetFieldDef: fieldDef
    }, headerType, channel === 'facet' ? [] : [makeHeaderComponent(model, channel, _labels)]);
  }
}
|
|
|
|
/**
 * Build a header component for a facet channel: rows carry the child's
 * height signal, columns its width signal (when the child has one).
 */
function makeHeaderComponent(model, channel, labels) {
  var sizeType = channel === 'row' ? 'height' : 'width';
  var childHasSize = model.child.component.layoutSize.get(sizeType);
  var sizeSignal;

  if (childHasSize) {
    sizeSignal = model.child.getSizeSignalRef(sizeType);
  }

  return {
    labels: labels,
    sizeSignal: sizeSignal,
    axes: []
  };
}
|
|
|
|
/**
 * Merge a child's axis into the facet's header or footer when the axis
 * resolution for the channel is 'shared'. x-axes go to the column header,
 * y-axes to the row header; each merged axis is marked `mainExtracted` so
 * the child does not render it again.
 */
function mergeChildAxis(model, channel) {
  var _a;

  var child = model.child;

  if (child.component.axes[channel]) {
    var _model$component2 = model.component,
        layoutHeaders = _model$component2.layoutHeaders,
        resolve = _model$component2.resolve;
    resolve.axis[channel] = parseGuideResolve(resolve, channel);

    if (resolve.axis[channel] === 'shared') {
      // For shared axis, move the axes to facet's header or footer
      var headerChannel = channel === 'x' ? 'column' : 'row';
      var layoutHeader = layoutHeaders[headerChannel];

      var _iterator170 = _createForOfIteratorHelper(child.component.axes[channel]),
          _step170;

      try {
        for (_iterator170.s(); !(_step170 = _iterator170.n()).done;) {
          var axisComponent = _step170.value;
          // Axis orient decides header vs footer placement.
          var headerType = getHeaderType(axisComponent.get('orient'));
          layoutHeader[headerType] = (_a = layoutHeader[headerType]) !== null && _a !== void 0 ? _a : [makeHeaderComponent(model, headerChannel, false)]; // FIXME: assemble shouldn't be called here, but we do it this way so we only extract the main part of the axes

          var mainAxis = assembleAxis(axisComponent, 'main', model.config, {
            header: true
          });

          if (mainAxis) {
            // LayoutHeader no longer keep track of property precedence, thus let's combine.
            layoutHeader[headerType][0].axes.push(mainAxis);
          }

          axisComponent.mainExtracted = true;
        }
      } catch (err) {
        _iterator170.e(err);
      } finally {
        _iterator170.f();
      }
    }
  }
}
|
|
|
|
/**
 * Parse layout size for a layer model: size the children first, then
 * merge width and height across them.
 */
function parseLayerLayoutSize(model) {
  parseChildrenLayoutSize(model);

  var sizeTypes = ['width', 'height'];

  for (var i = 0; i < sizeTypes.length; i++) {
    parseNonUnitLayoutSizeForChannel(model, sizeTypes[i]);
  }
}
|
|
|
|
/**
 * Parse layout size for a concat model. For columns === 1 (vconcat) width
 * can be merged completely; otherwise the merged width is only a
 * childWidth. Likewise, columns === undefined (hconcat) allows height to
 * be merged completely; otherwise it is a childHeight.
 */
function parseConcatLayoutSize(model) {
  parseChildrenLayoutSize(model);
  var columns = model.layout.columns;
  var widthType;
  var heightType;

  if (columns === 1) {
    widthType = 'width';
  } else {
    widthType = 'childWidth';
  }

  if (columns === undefined) {
    heightType = 'height';
  } else {
    heightType = 'childHeight';
  }

  parseNonUnitLayoutSizeForChannel(model, widthType);
  parseNonUnitLayoutSizeForChannel(model, heightType);
}
|
|
|
|
/**
 * Ask every child model to parse its own layout size.
 */
function parseChildrenLayoutSize(model) {
  var children = model.children;

  for (var idx = 0; idx < children.length; idx++) {
    children[idx].parseLayoutSize();
  }
}
|
|
/**
|
|
* Merge child layout size (width or height).
|
|
*/
|
|
|
|
|
|
/**
 * Merge child layout size (width or height).
 *
 * Attempts to merge the children's sizes for one dimension into the parent.
 * The merge is abandoned (mergedSize = undefined) when scales are resolved
 * independently and a child uses 'step' sizing, or when independent scales
 * have differing sizes. On success the children's size signals are renamed
 * to the parent's and the merged size is recorded; otherwise the parent
 * records an explicit-false undefined size.
 */
function parseNonUnitLayoutSizeForChannel(model, layoutSizeType) {
  /*
   * For concat, the parent width or height might not be the same as the children's shared height.
   * For example, hconcat's subviews may share width, but the shared width is not the hconcat view's width.
   *
   * layoutSizeType represents the output of the view (could be childWidth/childHeight/width/height)
   * while the sizeType represents the properties of the child.
   */
  var sizeType = getSizeTypeFromLayoutSizeType(layoutSizeType);
  var channel = getPositionScaleChannel(sizeType);
  var resolve = model.component.resolve;
  var layoutSizeCmpt = model.component.layoutSize;
  var mergedSize; // Try to merge layout size

  var _iterator172 = _createForOfIteratorHelper(model.children),
      _step172;

  try {
    for (_iterator172.s(); !(_step172 = _iterator172.n()).done;) {
      var _child7 = _step172.value;

      var childSize = _child7.component.layoutSize.getWithExplicit(sizeType);

      var scaleResolve = resolve.scale[channel];

      if (scaleResolve === 'independent' && childSize.value === 'step') {
        // Do not merge independent scales with range-step as their size depends
        // on the scale domains, which can be different between scales.
        mergedSize = undefined;
        break;
      }

      if (mergedSize) {
        if (scaleResolve === 'independent' && mergedSize.value !== childSize.value) {
          // For independent scale, only merge if all the sizes are the same.
          // If the values are different, abandon the merge!
          mergedSize = undefined;
          break;
        }

        mergedSize = mergeValuesWithExplicit(mergedSize, childSize, sizeType, '');
      } else {
        // First child seeds the merge.
        mergedSize = childSize;
      }
    }
  } catch (err) {
    _iterator172.e(err);
  } finally {
    _iterator172.f();
  }

  if (mergedSize) {
    // If merged, rename size and set size of all children.
    var _iterator173 = _createForOfIteratorHelper(model.children),
        _step173;

    try {
      for (_iterator173.s(); !(_step173 = _iterator173.n()).done;) {
        var child = _step173.value;
        model.renameSignal(child.getName(sizeType), model.getName(layoutSizeType));
        child.component.layoutSize.set(sizeType, 'merged', false);
      }
    } catch (err) {
      _iterator173.e(err);
    } finally {
      _iterator173.f();
    }

    layoutSizeCmpt.setWithExplicit(layoutSizeType, mergedSize);
  } else {
    // Merge failed: record a non-explicit undefined size.
    layoutSizeCmpt.setWithExplicit(layoutSizeType, {
      explicit: false,
      value: undefined
    });
  }
}
|
|
|
|
/**
 * Parse layout size for a unit model: for each position channel, record
 * the explicitly specified width/height (or 'step' for step sizing), or
 * fall back to the non-explicit default from defaultUnitSize().
 */
function parseUnitLayoutSize(model) {
  var size = model.size,
      component = model.component;

  var _iterator174 = _createForOfIteratorHelper(POSITION_SCALE_CHANNELS),
      _step174;

  try {
    for (_iterator174.s(); !(_step174 = _iterator174.n()).done;) {
      var channel = _step174.value;
      var sizeType = getSizeChannel(channel);

      if (size[sizeType]) {
        // Explicit size from the spec (step sizes are stored as 'step').
        var specifiedSize = size[sizeType];
        component.layoutSize.set(sizeType, isStep(specifiedSize) ? 'step' : specifiedSize, true);
      } else {
        var _defaultSize = defaultUnitSize(model, sizeType);

        component.layoutSize.set(sizeType, _defaultSize, false);
      }
    }
  } catch (err) {
    _iterator174.e(err);
  } finally {
    _iterator174.f();
  }
}
|
|
|
|
/**
 * Default width/height for a unit model when the spec does not specify one.
 * Discrete-domain scales with a range step get dynamic 'step' sizing;
 * continuous scales, projections, and arc marks use the continuous view
 * size; everything else falls back to the discrete view size.
 */
function defaultUnitSize(model, sizeType) {
  var channel = sizeType === 'width' ? 'x' : 'y';
  var config = model.config;
  var scaleComponent = model.getScaleComponent(channel);

  if (scaleComponent) {
    var scaleType = scaleComponent.get('type');
    var range = scaleComponent.get('range');

    if (!hasDiscreteDomain(scaleType)) {
      return getViewConfigContinuousSize(config.view, sizeType);
    }

    var size = getViewConfigDiscreteSize(config.view, sizeType);

    // For discrete domain with range.step, use dynamic width/height
    if (isVgRangeStep(range) || isStep(size)) {
      return 'step';
    }

    return size;
  }

  if (model.hasProjection || model.mark === 'arc') {
    // arc should use continuous size by default otherwise the pie is extremely small
    return getViewConfigContinuousSize(config.view, sizeType);
  }

  var fallback = getViewConfigDiscreteSize(config.view, sizeType);
  return isStep(fallback) ? fallback.step : fallback;
}
|
|
|
|
/**
 * Name of the derived field used to sort facets: the sort field's Vega
 * field name with a "by_<facet field>" suffix.
 */
function facetSortFieldName(fieldDef, sort, opt) {
  var options = opt !== null && opt !== void 0 ? opt : {};
  var suffix = "by_".concat(_vgField(fieldDef));
  return _vgField(sort, Object.assign({
    suffix: suffix
  }, options));
}
|
|
|
|
var FacetModel = /*#__PURE__*/function (_ModelWithField) {
|
|
_inherits(FacetModel, _ModelWithField);
|
|
|
|
var _super48 = _createSuper(FacetModel);
|
|
|
|
function FacetModel(spec, parent, parentGivenName, config) {
|
|
var _this44;
|
|
|
|
_classCallCheck(this, FacetModel);
|
|
|
|
_this44 = _super48.call(this, spec, 'facet', parent, parentGivenName, config, spec.resolve);
|
|
_this44.child = buildModel(spec.spec, _assertThisInitialized(_this44), _this44.getName('child'), undefined, config);
|
|
_this44.children = [_this44.child];
|
|
_this44.facet = _this44.initFacet(spec.facet, config);
|
|
return _this44;
|
|
}
|
|
|
|
_createClass(FacetModel, [{
|
|
key: "initFacet",
|
|
value: function initFacet(facet, config) {
|
|
// clone to prevent side effect to the original spec
|
|
if (!isFacetMapping(facet)) {
|
|
return {
|
|
facet: initChannelDef(facet, 'facet', config)
|
|
};
|
|
}
|
|
|
|
return reduce(facet, function (normalizedFacet, fieldDef, channel) {
|
|
if (!contains([ROW, COLUMN], channel)) {
|
|
// Drop unsupported channel
|
|
warn(incompatibleChannel(channel, 'facet'));
|
|
return normalizedFacet;
|
|
}
|
|
|
|
if (fieldDef.field === undefined) {
|
|
warn(emptyFieldDef(fieldDef, channel));
|
|
return normalizedFacet;
|
|
} // Convert type to full, lowercase type, or augment the fieldDef with a default type if missing.
|
|
|
|
|
|
normalizedFacet[channel] = initChannelDef(fieldDef, channel, config);
|
|
return normalizedFacet;
|
|
}, {});
|
|
}
|
|
}, {
|
|
key: "channelHasField",
|
|
value: function channelHasField(channel) {
|
|
return !!this.facet[channel];
|
|
}
|
|
}, {
|
|
key: "fieldDef",
|
|
value: function fieldDef(channel) {
|
|
return this.facet[channel];
|
|
}
|
|
}, {
|
|
key: "parseData",
|
|
value: function parseData() {
|
|
this.component.data = _parseData(this);
|
|
this.child.parseData();
|
|
}
|
|
}, {
|
|
key: "parseLayoutSize",
|
|
value: function parseLayoutSize() {
|
|
parseChildrenLayoutSize(this);
|
|
}
|
|
}, {
|
|
key: "parseSelections",
|
|
value: function parseSelections() {
|
|
// As a facet has a single child, the selection components are the same.
|
|
// The child maintains its selections to assemble signals, which remain
|
|
// within its unit.
|
|
this.child.parseSelections();
|
|
this.component.selection = this.child.component.selection;
|
|
}
|
|
}, {
|
|
key: "parseMarkGroup",
|
|
value: function parseMarkGroup() {
|
|
this.child.parseMarkGroup();
|
|
}
|
|
}, {
|
|
key: "parseAxesAndHeaders",
|
|
value: function parseAxesAndHeaders() {
|
|
this.child.parseAxesAndHeaders();
|
|
parseFacetHeaders(this);
|
|
}
|
|
}, {
|
|
key: "assembleSelectionTopLevelSignals",
|
|
value: function assembleSelectionTopLevelSignals(signals) {
|
|
return this.child.assembleSelectionTopLevelSignals(signals);
|
|
}
|
|
}, {
|
|
key: "assembleSignals",
|
|
value: function assembleSignals() {
|
|
this.child.assembleSignals();
|
|
return [];
|
|
}
|
|
}, {
|
|
key: "assembleSelectionData",
|
|
value: function assembleSelectionData(data) {
|
|
return this.child.assembleSelectionData(data);
|
|
}
|
|
}, {
|
|
key: "getHeaderLayoutMixins",
|
|
value: function getHeaderLayoutMixins() {
|
|
var _a, _b, _c;
|
|
|
|
var layoutMixins = {};
|
|
|
|
var _iterator175 = _createForOfIteratorHelper(FACET_CHANNELS),
|
|
_step175;
|
|
|
|
try {
|
|
for (_iterator175.s(); !(_step175 = _iterator175.n()).done;) {
|
|
var channel = _step175.value;
|
|
|
|
var _iterator176 = _createForOfIteratorHelper(HEADER_TYPES),
|
|
_step176;
|
|
|
|
try {
|
|
for (_iterator176.s(); !(_step176 = _iterator176.n()).done;) {
|
|
var headerType = _step176.value;
|
|
var layoutHeaderComponent = this.component.layoutHeaders[channel];
|
|
var headerComponent = layoutHeaderComponent[headerType];
|
|
var facetFieldDef = layoutHeaderComponent.facetFieldDef;
|
|
|
|
if (facetFieldDef) {
|
|
var titleOrient = getHeaderProperty('titleOrient', facetFieldDef, this.config, channel);
|
|
|
|
if (contains(['right', 'bottom'], titleOrient)) {
|
|
var headerChannel = getHeaderChannel(channel, titleOrient);
|
|
layoutMixins.titleAnchor = (_a = layoutMixins.titleAnchor) !== null && _a !== void 0 ? _a : {};
|
|
layoutMixins.titleAnchor[headerChannel] = 'end';
|
|
}
|
|
}
|
|
|
|
if (headerComponent === null || headerComponent === void 0 ? void 0 : headerComponent[0]) {
|
|
// set header/footerBand
|
|
var sizeType = channel === 'row' ? 'height' : 'width';
|
|
var bandType = headerType === 'header' ? 'headerBand' : 'footerBand';
|
|
|
|
if (channel !== 'facet' && !this.child.component.layoutSize.get(sizeType)) {
|
|
// If facet child does not have size signal, then apply headerBand
|
|
layoutMixins[bandType] = (_b = layoutMixins[bandType]) !== null && _b !== void 0 ? _b : {};
|
|
layoutMixins[bandType][channel] = 0.5;
|
|
}
|
|
|
|
if (layoutHeaderComponent.title) {
|
|
layoutMixins.offset = (_c = layoutMixins.offset) !== null && _c !== void 0 ? _c : {};
|
|
layoutMixins.offset[channel === 'row' ? 'rowTitle' : 'columnTitle'] = 10;
|
|
}
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator176.e(err);
|
|
} finally {
|
|
_iterator176.f();
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator175.e(err);
|
|
} finally {
|
|
_iterator175.f();
|
|
}
|
|
|
|
return layoutMixins;
|
|
}
|
|
}, {
|
|
key: "assembleDefaultLayout",
|
|
value: function assembleDefaultLayout() {
|
|
var _this$facet = this.facet,
|
|
column = _this$facet.column,
|
|
row = _this$facet.row;
|
|
var columns = column ? this.columnDistinctSignal() : row ? 1 : undefined;
|
|
var align = 'all'; // Do not align the cells if the scale corresponding to the direction is indepent.
|
|
// We always align when we facet into both row and column.
|
|
|
|
if (!row && this.component.resolve.scale.x === 'independent') {
|
|
align = 'none';
|
|
} else if (!column && this.component.resolve.scale.y === 'independent') {
|
|
align = 'none';
|
|
}
|
|
|
|
return Object.assign(Object.assign(Object.assign({}, this.getHeaderLayoutMixins()), columns ? {
|
|
columns: columns
|
|
} : {}), {
|
|
bounds: 'full',
|
|
align: align
|
|
});
|
|
}
|
|
}, {
|
|
key: "assembleLayoutSignals",
|
|
value: function assembleLayoutSignals() {
|
|
// FIXME(https://github.com/vega/vega-lite/issues/1193): this can be incorrect if we have independent scales.
|
|
return this.child.assembleLayoutSignals();
|
|
}
|
|
}, {
|
|
key: "columnDistinctSignal",
|
|
value: function columnDistinctSignal() {
|
|
if (this.parent && this.parent instanceof FacetModel) {
|
|
// For nested facet, we will add columns to group mark instead
|
|
// See discussion in https://github.com/vega/vega/issues/952
|
|
// and https://github.com/vega/vega-view/releases/tag/v1.2.6
|
|
return undefined;
|
|
} else {
|
|
// In facetNode.assemble(), the name is always this.getName('column') + '_layout'.
|
|
var facetLayoutDataName = this.getName('column_domain');
|
|
return {
|
|
signal: "length(data('".concat(facetLayoutDataName, "'))")
|
|
};
|
|
}
|
|
}
|
|
}, {
|
|
key: "assembleGroup",
|
|
value: function assembleGroup(signals) {
|
|
if (this.parent && this.parent instanceof FacetModel) {
|
|
// Provide number of columns for layout.
|
|
// See discussion in https://github.com/vega/vega/issues/952
|
|
// and https://github.com/vega/vega-view/releases/tag/v1.2.6
|
|
return Object.assign(Object.assign({}, this.channelHasField('column') ? {
|
|
encode: {
|
|
update: {
|
|
// TODO(https://github.com/vega/vega-lite/issues/2759):
|
|
// Correct the signal for facet of concat of facet_column
|
|
columns: {
|
|
field: _vgField(this.facet.column, {
|
|
prefix: 'distinct'
|
|
})
|
|
}
|
|
}
|
|
}
|
|
} : {}), _get(_getPrototypeOf(FacetModel.prototype), "assembleGroup", this).call(this, signals));
|
|
}
|
|
|
|
return _get(_getPrototypeOf(FacetModel.prototype), "assembleGroup", this).call(this, signals);
|
|
}
|
|
/**
|
|
* Aggregate cardinality for calculating size
|
|
*/
|
|
|
|
}, {
|
|
key: "getCardinalityAggregateForChild",
|
|
value: function getCardinalityAggregateForChild() {
|
|
var fields = [];
|
|
var ops = [];
|
|
var as = [];
|
|
|
|
if (this.child instanceof FacetModel) {
|
|
if (this.child.channelHasField('column')) {
|
|
var _field24 = _vgField(this.child.facet.column);
|
|
|
|
fields.push(_field24);
|
|
ops.push('distinct');
|
|
as.push("distinct_".concat(_field24));
|
|
}
|
|
} else {
|
|
var _iterator177 = _createForOfIteratorHelper(POSITION_SCALE_CHANNELS),
|
|
_step177;
|
|
|
|
try {
|
|
for (_iterator177.s(); !(_step177 = _iterator177.n()).done;) {
|
|
var channel = _step177.value;
|
|
var childScaleComponent = this.child.component.scales[channel];
|
|
|
|
if (childScaleComponent && !childScaleComponent.merged) {
|
|
var type = childScaleComponent.get('type');
|
|
var range = childScaleComponent.get('range');
|
|
|
|
if (hasDiscreteDomain(type) && isVgRangeStep(range)) {
|
|
var _domain6 = assembleDomain(this.child, channel);
|
|
|
|
var _field25 = getFieldFromDomain(_domain6);
|
|
|
|
if (_field25) {
|
|
fields.push(_field25);
|
|
ops.push('distinct');
|
|
as.push("distinct_".concat(_field25));
|
|
} else {
|
|
warn(unknownField(channel));
|
|
}
|
|
}
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator177.e(err);
|
|
} finally {
|
|
_iterator177.f();
|
|
}
|
|
}
|
|
|
|
return {
|
|
fields: fields,
|
|
ops: ops,
|
|
as: as
|
|
};
|
|
}
|
|
}, {
|
|
key: "assembleFacet",
|
|
value: function assembleFacet() {
|
|
var _this$component$data$ = this.component.data.facetRoot,
|
|
name = _this$component$data$.name,
|
|
data = _this$component$data$.data;
|
|
var _this$facet2 = this.facet,
|
|
row = _this$facet2.row,
|
|
column = _this$facet2.column;
|
|
|
|
var _this$getCardinalityA = this.getCardinalityAggregateForChild(),
|
|
fields = _this$getCardinalityA.fields,
|
|
ops = _this$getCardinalityA.ops,
|
|
as = _this$getCardinalityA.as;
|
|
|
|
var groupby = [];
|
|
|
|
var _iterator178 = _createForOfIteratorHelper(FACET_CHANNELS),
|
|
_step178;
|
|
|
|
try {
|
|
for (_iterator178.s(); !(_step178 = _iterator178.n()).done;) {
|
|
var channel = _step178.value;
|
|
var fieldDef = this.facet[channel];
|
|
|
|
if (fieldDef) {
|
|
groupby.push(_vgField(fieldDef));
|
|
var bin = fieldDef.bin,
|
|
sort = fieldDef.sort;
|
|
|
|
if (isBinning(bin)) {
|
|
groupby.push(_vgField(fieldDef, {
|
|
binSuffix: 'end'
|
|
}));
|
|
}
|
|
|
|
if (isSortField(sort)) {
|
|
var _field26 = sort.field,
|
|
_sort$op = sort.op,
|
|
op = _sort$op === void 0 ? DEFAULT_SORT_OP : _sort$op;
|
|
var outputName = facetSortFieldName(fieldDef, sort);
|
|
|
|
if (row && column) {
|
|
// For crossed facet, use pre-calculate field as it requires a different groupby
|
|
// For each calculated field, apply max and assign them to the same name as
|
|
// all values of the same group should be the same anyway.
|
|
fields.push(outputName);
|
|
ops.push('max');
|
|
as.push(outputName);
|
|
} else {
|
|
fields.push(_field26);
|
|
ops.push(op);
|
|
as.push(outputName);
|
|
}
|
|
} else if (isArray(sort)) {
|
|
var _outputName = sortArrayIndexField(fieldDef, channel);
|
|
|
|
fields.push(_outputName);
|
|
ops.push('max');
|
|
as.push(_outputName);
|
|
}
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator178.e(err);
|
|
} finally {
|
|
_iterator178.f();
|
|
}
|
|
|
|
var cross = !!row && !!column;
|
|
return Object.assign({
|
|
name: name,
|
|
data: data,
|
|
groupby: groupby
|
|
}, cross || fields.length > 0 ? {
|
|
aggregate: Object.assign(Object.assign({}, cross ? {
|
|
cross: cross
|
|
} : {}), fields.length ? {
|
|
fields: fields,
|
|
ops: ops,
|
|
as: as
|
|
} : {})
|
|
} : {});
|
|
}
|
|
}, {
|
|
key: "facetSortFields",
|
|
value: function facetSortFields(channel) {
|
|
var facet = this.facet;
|
|
var fieldDef = facet[channel];
|
|
|
|
if (fieldDef) {
|
|
if (isSortField(fieldDef.sort)) {
|
|
return [facetSortFieldName(fieldDef, fieldDef.sort, {
|
|
expr: 'datum'
|
|
})];
|
|
} else if (isArray(fieldDef.sort)) {
|
|
return [sortArrayIndexField(fieldDef, channel, {
|
|
expr: 'datum'
|
|
})];
|
|
}
|
|
|
|
return [_vgField(fieldDef, {
|
|
expr: 'datum'
|
|
})];
|
|
}
|
|
|
|
return [];
|
|
}
|
|
}, {
|
|
key: "facetSortOrder",
|
|
value: function facetSortOrder(channel) {
|
|
var facet = this.facet;
|
|
var fieldDef = facet[channel];
|
|
|
|
if (fieldDef) {
|
|
var sort = fieldDef.sort;
|
|
var order = (isSortField(sort) ? sort.order : !isArray(sort) && sort) || 'ascending';
|
|
return [order];
|
|
}
|
|
|
|
return [];
|
|
}
|
|
}, {
|
|
key: "assembleLabelTitle",
|
|
value: function assembleLabelTitle() {
|
|
var facet = this.facet,
|
|
config = this.config;
|
|
|
|
if (facet.facet) {
|
|
// Facet always uses title to display labels
|
|
return _assembleLabelTitle(facet.facet, 'facet', config);
|
|
}
|
|
|
|
var ORTHOGONAL_ORIENT = {
|
|
row: ['top', 'bottom'],
|
|
column: ['left', 'right']
|
|
};
|
|
|
|
var _iterator179 = _createForOfIteratorHelper(HEADER_CHANNELS),
|
|
_step179;
|
|
|
|
try {
|
|
for (_iterator179.s(); !(_step179 = _iterator179.n()).done;) {
|
|
var channel = _step179.value;
|
|
|
|
if (facet[channel]) {
|
|
var labelOrient = getHeaderProperty('labelOrient', facet[channel], config, channel);
|
|
|
|
if (contains(ORTHOGONAL_ORIENT[channel], labelOrient)) {
|
|
// Row/Column with orthogonal labelOrient must use title to display labels
|
|
return _assembleLabelTitle(facet[channel], channel, config);
|
|
}
|
|
}
|
|
}
|
|
} catch (err) {
|
|
_iterator179.e(err);
|
|
} finally {
|
|
_iterator179.f();
|
|
}
|
|
|
|
return undefined;
|
|
}
|
|
}, {
|
|
key: "assembleMarks",
|
|
value: function assembleMarks() {
|
|
var _this45 = this;
|
|
|
|
var child = this.child; // If we facet by two dimensions, we need to add a cross operator to the aggregation
|
|
// so that we create all groups
|
|
|
|
var facetRoot = this.component.data.facetRoot;
|
|
var data = assembleFacetData(facetRoot);
|
|
var encodeEntry = child.assembleGroupEncodeEntry(false);
|
|
var title = this.assembleLabelTitle() || child.assembleTitle();
|
|
var style = child.assembleGroupStyle();
|
|
var markGroup = Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({
|
|
name: this.getName('cell'),
|
|
type: 'group'
|
|
}, title ? {
|
|
title: title
|
|
} : {}), style ? {
|
|
style: style
|
|
} : {}), {
|
|
from: {
|
|
facet: this.assembleFacet()
|
|
},
|
|
// TODO: move this to after data
|
|
sort: {
|
|
field: FACET_CHANNELS.map(function (c) {
|
|
return _this45.facetSortFields(c);
|
|
}).flat(),
|
|
order: FACET_CHANNELS.map(function (c) {
|
|
return _this45.facetSortOrder(c);
|
|
}).flat()
|
|
}
|
|
}), data.length > 0 ? {
|
|
data: data
|
|
} : {}), encodeEntry ? {
|
|
encode: {
|
|
update: encodeEntry
|
|
}
|
|
} : {}), child.assembleGroup(assembleFacetSignals(this, [])));
|
|
return [markGroup];
|
|
}
|
|
}, {
|
|
key: "getMapping",
|
|
value: function getMapping() {
|
|
return this.facet;
|
|
}
|
|
}]);
|
|
|
|
return FacetModel;
|
|
}(ModelWithField);
|
|
|
|
// For a crossed (row AND column) facet whose channels use sort-by-field,
// prepends join-aggregate nodes that precompute each sort field per facet
// group. Returns the new head node, or null when nothing was added.
function makeJoinAggregateFromFacet(parent, facet) {
  var row = facet.row;
  var column = facet.column;

  if (row && column) {
    var newParent = null; // only need to make one for crossed facet

    var defs = [row, column];

    for (var i = 0; i < defs.length; i++) {
      var fieldDef = defs[i];

      if (isSortField(fieldDef.sort)) {
        var sort = fieldDef.sort;
        var op = sort.op === void 0 ? DEFAULT_SORT_OP : sort.op;
        parent = newParent = new JoinAggregateTransformNode(parent, {
          joinaggregate: [{
            op: op,
            field: sort.field,
            as: facetSortFieldName(fieldDef, sort, {
              forAs: true
            })
          }],
          groupby: [_vgField(fieldDef)]
        });
      }
    }

    return newParent;
  }

  return null;
}
|
|
|
|
// Scans existing source nodes for one that is compatible with (and can be
// merged with) the given data definition. Returns the matching source or null.
function findSource(data, sources) {
  for (var i = 0; i < sources.length; i++) {
    var other = sources[i];
    var otherData = other.data;

    // if both datasets have a name defined, we cannot merge
    if (data.name && other.hasName() && data.name !== other.dataName) {
      continue;
    }

    var format = data['format'];
    var otherFormat = otherData.format;
    var mesh = format == null ? void 0 : format.mesh;
    var feature = format == null ? void 0 : format.feature;
    var otherMesh = otherFormat == null ? void 0 : otherFormat.mesh;
    var otherFeature = otherFormat == null ? void 0 : otherFormat.feature;

    // feature and mesh are mutually exclusive
    if (mesh && otherFeature) {
      continue;
    }

    // we have to extract the same feature or mesh
    if ((feature || otherFeature) && feature !== otherFeature) {
      continue;
    }

    if ((mesh || otherMesh) && mesh !== otherMesh) {
      continue;
    }

    if (isInlineData(data) && isInlineData(otherData)) {
      if (deepEqual(data.values, otherData.values)) {
        return other;
      }
    } else if (isUrlData(data) && isUrlData(otherData)) {
      if (data.url === otherData.url) {
        return other;
      }
    } else if (isNamedData(data)) {
      if (data.name === other.dataName) {
        return other;
      }
    }
  }

  return null;
}
|
|
|
|
// Returns the dataflow root for a model: a (possibly shared) source node when
// the model defines data or is the spec root, otherwise the parent's facet
// root or main output.
function parseRoot(model, sources) {
  if (model.data || !model.parent) {
    // if the model defines a data source or is the root, create a source node
    if (model.data === null) {
      // data: null means we should ignore the parent's data, so create a
      // fresh empty data source instead.
      var emptySource = new SourceNode({
        values: []
      });
      sources.push(emptySource);
      return emptySource;
    }

    var existingSource = findSource(model.data, sources);

    if (existingSource) {
      if (!isGenerator(model.data)) {
        existingSource.data.format = mergeDeep({}, model.data.format, existingSource.data.format);
      }

      // if the new source has a name but the existing one does not, we can set it
      if (!existingSource.hasName() && model.data.name) {
        existingSource.dataName = model.data.name;
      }

      return existingSource;
    }

    var newSource = new SourceNode(model.data);
    sources.push(newSource);
    return newSource;
  }

  // No data override: reuse the parent's facet root (if faceted) or its main output.
  return model.parent.component.data.facetRoot ? model.parent.component.data.facetRoot : model.parent.component.data.main;
}
|
|
/**
|
|
* Parses a transform array into a chain of connected dataflow nodes.
|
|
*/
|
|
|
|
|
|
/**
 * Parses a transform array into a chain of connected dataflow nodes.
 * Returns the new head of the chain. Fields produced by a transform are
 * registered in `ancestorParse` with their derived type so that later
 * parse nodes do not re-parse them.
 */
function parseTransformArray(head, model, ancestorParse) {
  var lookupCounter = 0;
  var transforms = model.transforms;

  for (var i = 0; i < transforms.length; i++) {
    var t = transforms[i];
    var derivedType = undefined;
    var transformNode = void 0;

    if (isCalculate(t)) {
      transformNode = head = new CalculateNode(head, t);
      derivedType = 'derived';
    } else if (isFilter(t)) {
      // A filter may need implicit parses for the fields it references.
      var implicit = getImplicitFromFilterTransform(t);
      var implicitParse = ParseNode.makeWithAncestors(head, {}, implicit, ancestorParse);
      transformNode = head = implicitParse != null ? implicitParse : head;
      head = new FilterNode(head, model, t.filter);
    } else if (isBin(t)) {
      transformNode = head = BinNode.makeFromTransform(head, t, model);
      derivedType = 'number';
    } else if (isTimeUnit(t)) {
      derivedType = 'date';
      var parsedAs = ancestorParse.getWithExplicit(t.field);

      // Create parse node because the input to time unit is always date.
      if (parsedAs.value === undefined) {
        head = new ParseNode(head, _defineProperty({}, t.field, derivedType));
        ancestorParse.set(t.field, derivedType, false);
      }

      transformNode = head = TimeUnitNode.makeFromTransform(head, t);
    } else if (isAggregate$1(t)) {
      transformNode = head = AggregateNode.makeFromTransform(head, t);
      derivedType = 'number';

      if (requiresSelectionId(model)) {
        head = new IdentifierNode(head);
      }
    } else if (isLookup(t)) {
      transformNode = head = LookupNode.make(head, model, t, lookupCounter++);
      derivedType = 'derived';
    } else if (isWindow(t)) {
      transformNode = head = new WindowTransformNode(head, t);
      derivedType = 'number';
    } else if (isJoinAggregate(t)) {
      transformNode = head = new JoinAggregateTransformNode(head, t);
      derivedType = 'number';
    } else if (isStack(t)) {
      transformNode = head = StackNode.makeFromTransform(head, t);
      derivedType = 'derived';
    } else if (isFold(t)) {
      transformNode = head = new FoldTransformNode(head, t);
      derivedType = 'derived';
    } else if (isFlatten(t)) {
      transformNode = head = new FlattenTransformNode(head, t);
      derivedType = 'derived';
    } else if (isPivot(t)) {
      transformNode = head = new PivotTransformNode(head, t);
      derivedType = 'derived';
    } else if (isSample(t)) {
      head = new SampleTransformNode(head, t);
    } else if (isImpute(t)) {
      transformNode = head = ImputeNode.makeFromTransform(head, t);
      derivedType = 'derived';
    } else if (isDensity(t)) {
      transformNode = head = new DensityTransformNode(head, t);
      derivedType = 'derived';
    } else if (isQuantile(t)) {
      transformNode = head = new QuantileTransformNode(head, t);
      derivedType = 'derived';
    } else if (isRegression(t)) {
      transformNode = head = new RegressionTransformNode(head, t);
      derivedType = 'derived';
    } else if (isLoess(t)) {
      transformNode = head = new LoessTransformNode(head, t);
      derivedType = 'derived';
    } else {
      warn(invalidTransformIgnored(t));
      continue;
    }

    if (transformNode && derivedType !== undefined) {
      // Register all produced fields so later parse nodes skip them.
      var produced = transformNode.producedFields();

      if (produced != null) {
        // NOTE(review): produced may be a Set, so use forEach rather than indexing.
        produced.forEach(function (producedField) {
          ancestorParse.set(producedField, derivedType, false);
        });
      }
    }
  }

  return head;
}
|
|
/*
|
|
Description of the dataflow (http://asciiflow.com/):
|
|
+--------+
|
|
| Source |
|
|
+---+----+
|
|
|
|
|
v
|
|
FormatParse
|
|
(explicit)
|
|
|
|
|
v
|
|
Transforms
|
|
(Filter, Calculate, Binning, TimeUnit, Aggregate, Window, ...)
|
|
|
|
|
v
|
|
FormatParse
|
|
(implicit)
|
|
|
|
|
v
|
|
Binning (in `encoding`)
|
|
|
|
|
v
|
|
Timeunit (in `encoding`)
|
|
|
|
|
v
|
|
Formula From Sort Array
|
|
|
|
|
v
|
|
+--+--+
|
|
| Raw |
|
|
+-----+
|
|
|
|
|
v
|
|
Aggregate (in `encoding`)
|
|
|
|
|
v
|
|
Stack (in `encoding`)
|
|
|
|
|
v
|
|
Invalid Filter
|
|
|
|
|
v
|
|
+----------+
|
|
| Main |
|
|
+----------+
|
|
|
|
|
v
|
|
+-------+
|
|
| Facet |----> "column", "column-layout", and "row"
|
|
+-------+
|
|
|
|
|
v
|
|
...Child data...
|
|
*/
|
|
|
|
|
|
// Builds the dataflow pipeline for one model (see the dataflow diagram above):
// source -> explicit parse -> transforms -> implicit parse -> encoding
// bin/timeUnit -> raw output -> encoding aggregate/impute/stack -> invalid
// filter -> main output -> optional facet root. Returns the updated data
// component.
function _parseData(model) {
  var head = parseRoot(model, model.component.data.sources);
  var outputNodes = model.component.data.outputNodes;
  var outputNodeRefCounts = model.component.data.outputNodeRefCounts;
  var ancestorParse = model.parent ? model.parent.component.data.ancestorParse.clone() : new AncestorParse();
  var data = model.data;

  if (isGenerator(data)) {
    // insert generator transform
    if (isSequenceGenerator(data)) {
      head = new SequenceNode(head, data.sequence);
    } else if (isGraticuleGenerator(data)) {
      head = new GraticuleNode(head, data.graticule);
    }

    // no parsing necessary for generator
    ancestorParse.parseNothing = true;
  } else if (data != null && data.format != null && data.format.parse === null) {
    // format.parse: null means disable parsing
    ancestorParse.parseNothing = true;
  }

  var explicitParse = ParseNode.makeExplicit(head, model, ancestorParse);
  head = explicitParse != null ? explicitParse : head;

  // Default discrete selections require an identifier transform to
  // uniquely identify data points. Add this transform at the head of
  // the pipeline such that the identifier field is available for all
  // subsequent datasets. During optimization, we will remove this
  // transform if it proves to be unnecessary. Additional identifier
  // transforms will be necessary when new tuples are constructed
  // (e.g., post-aggregation).
  head = new IdentifierNode(head);

  // HACK: This is equivalent for merging bin extent for union scale.
  // FIXME(https://github.com/vega/vega-lite/issues/2270): Correctly merge extent / bin node for shared bin scale
  var parentIsLayer = model.parent && isLayerModel(model.parent);

  if (isUnitModel(model) || isFacetModel(model)) {
    if (parentIsLayer) {
      var layerBin = BinNode.makeFromEncoding(head, model);
      head = layerBin != null ? layerBin : head;
    }
  }

  if (model.transforms.length > 0) {
    head = parseTransformArray(head, model, ancestorParse);
  }

  // create parse nodes for fields that need to be parsed (or flattened) implicitly
  var implicitSelection = getImplicitFromSelection(model);
  var implicitEncoding = getImplicitFromEncoding(model);
  var implicitParse = ParseNode.makeWithAncestors(head, {}, Object.assign(Object.assign({}, implicitSelection), implicitEncoding), ancestorParse);
  head = implicitParse != null ? implicitParse : head;

  if (isUnitModel(model)) {
    head = GeoJSONNode.parseAll(head, model);
    head = GeoPointNode.parseAll(head, model);
  }

  if (isUnitModel(model) || isFacetModel(model)) {
    if (!parentIsLayer) {
      var encodingBin = BinNode.makeFromEncoding(head, model);
      head = encodingBin != null ? encodingBin : head;
    }

    var timeUnit = TimeUnitNode.makeFromEncoding(head, model);
    head = timeUnit != null ? timeUnit : head;
    head = CalculateNode.parseAllForSortIndex(head, model);
  }

  // add an output node pre aggregation
  var rawName = model.getDataName(DataSourceType.Raw);
  var raw = new OutputNode(head, rawName, DataSourceType.Raw, outputNodeRefCounts);
  outputNodes[rawName] = raw;
  head = raw;

  if (isUnitModel(model)) {
    var agg = AggregateNode.makeFromEncoding(head, model);

    if (agg) {
      head = agg;

      // Aggregation constructs new tuples, so a new identifier is needed.
      if (requiresSelectionId(model)) {
        head = new IdentifierNode(head);
      }
    }

    var impute = ImputeNode.makeFromEncoding(head, model);
    head = impute != null ? impute : head;
    var stack = StackNode.makeFromEncoding(head, model);
    head = stack != null ? stack : head;
  }

  if (isUnitModel(model)) {
    var invalidFilter = FilterInvalidNode.make(head, model);
    head = invalidFilter != null ? invalidFilter : head;
  }

  // output node for marks
  var mainName = model.getDataName(DataSourceType.Main);
  var main = new OutputNode(head, mainName, DataSourceType.Main, outputNodeRefCounts);
  outputNodes[mainName] = main;
  head = main;

  if (isUnitModel(model)) {
    materializeSelections(model, main);
  }

  // add facet marker
  var facetRoot = null;

  if (isFacetModel(model)) {
    var facetName = model.getName('facet');

    // Derive new aggregate for facet's sort field and augment the data source
    // with new fields for crossed facet.
    var joinAggregate = makeJoinAggregateFromFacet(head, model.facet);
    head = joinAggregate != null ? joinAggregate : head;
    facetRoot = new FacetNode(head, model, facetName, main.getSource());
    outputNodes[facetName] = facetRoot;
  }

  return Object.assign(Object.assign({}, model.component.data), {
    outputNodes: outputNodes,
    outputNodeRefCounts: outputNodeRefCounts,
    raw: raw,
    main: main,
    facetRoot: facetRoot,
    ancestorParse: ancestorParse
  });
}
|
|
|
|
var ConcatModel = /*#__PURE__*/function (_Model2) {
  _inherits(ConcatModel, _Model2);

  var _superConcat = _createSuper(ConcatModel);

  /**
   * Model for `concat`/`hconcat`/`vconcat` specs. Builds one child model per
   * concatenated sub-spec and delegates most parsing/assembly to the children.
   */
  function ConcatModel(spec, parent, parentGivenName, config) {
    var _this;

    _classCallCheck(this, ConcatModel);

    _this = _superConcat.call(this, spec, 'concat', parent, parentGivenName, config, spec.resolve);

    // Concatenated views cannot share axes; warn when the spec requests it.
    var axisResolve = spec.resolve == null ? void 0 : spec.resolve.axis;

    if (axisResolve != null && (axisResolve.x === 'shared' || axisResolve.y === 'shared')) {
      warn(CONCAT_CANNOT_SHARE_AXIS);
    }

    _this.children = _this.getChildren(spec).map(function (child, i) {
      return buildModel(child, _assertThisInitialized(_this), _this.getName('concat_' + i), undefined, config);
    });
    return _this;
  }

  _createClass(ConcatModel, [{
    key: "parseData",
    // Parses our own data component, then each child's.
    value: function parseData() {
      this.component.data = _parseData(this);

      for (var i = 0; i < this.children.length; i++) {
        this.children[i].parseData();
      }
    }
  }, {
    key: "parseSelections",
    value: function parseSelections() {
      // Merge selections up the hierarchy so that they may be referenced
      // across unit specs. Persist their definitions within each child
      // to assemble signals which remain within output Vega unit groups.
      this.component.selection = {};

      for (var i = 0; i < this.children.length; i++) {
        var child = this.children[i];
        child.parseSelections();
        var selectionKeys = keys(child.component.selection);

        for (var j = 0; j < selectionKeys.length; j++) {
          this.component.selection[selectionKeys[j]] = child.component.selection[selectionKeys[j]];
        }
      }
    }
  }, {
    key: "parseMarkGroup",
    value: function parseMarkGroup() {
      for (var i = 0; i < this.children.length; i++) {
        this.children[i].parseMarkGroup();
      }
    }
  }, {
    key: "parseAxesAndHeaders",
    value: function parseAxesAndHeaders() {
      // TODO(#2415): support shared axes
      for (var i = 0; i < this.children.length; i++) {
        this.children[i].parseAxesAndHeaders();
      }
    }
  }, {
    key: "getChildren",
    // The child specs live under a different property depending on the
    // concat flavor.
    value: function getChildren(spec) {
      if (isVConcatSpec(spec)) {
        return spec.vconcat;
      }

      if (isHConcatSpec(spec)) {
        return spec.hconcat;
      }

      return spec.concat;
    }
  }, {
    key: "parseLayoutSize",
    value: function parseLayoutSize() {
      parseConcatLayoutSize(this);
    }
  }, {
    key: "parseAxisGroup",
    value: function parseAxisGroup() {
      return null;
    }
  }, {
    key: "assembleSelectionTopLevelSignals",
    value: function assembleSelectionTopLevelSignals(signals) {
      // Thread the signal list through every child.
      return this.children.reduce(function (sg, child) {
        return child.assembleSelectionTopLevelSignals(sg);
      }, signals);
    }
  }, {
    key: "assembleSignals",
    value: function assembleSignals() {
      // Children assemble (and store) their own signals; the concat model
      // itself contributes none.
      this.children.forEach(function (child) {
        return child.assembleSignals();
      });
      return [];
    }
  }, {
    key: "assembleLayoutSignals",
    value: function assembleLayoutSignals() {
      var layoutSignals = _assembleLayoutSignals(this);

      for (var i = 0; i < this.children.length; i++) {
        layoutSignals = layoutSignals.concat(this.children[i].assembleLayoutSignals());
      }

      return layoutSignals;
    }
  }, {
    key: "assembleSelectionData",
    value: function assembleSelectionData(data) {
      return this.children.reduce(function (db, child) {
        return child.assembleSelectionData(db);
      }, data);
    }
  }, {
    key: "assembleMarks",
    value: function assembleMarks() {
      // only children have marks
      return this.children.map(function (child) {
        var title = child.assembleTitle();
        var style = child.assembleGroupStyle();
        var encodeEntry = child.assembleGroupEncodeEntry(false);
        return Object.assign(Object.assign(Object.assign(Object.assign({
          type: 'group',
          name: child.getName('group')
        }, title ? {
          title: title
        } : {}), style ? {
          style: style
        } : {}), encodeEntry ? {
          encode: {
            update: encodeEntry
          }
        } : {}), child.assembleGroup());
      });
    }
  }, {
    key: "assembleDefaultLayout",
    value: function assembleDefaultLayout() {
      var columns = this.layout.columns;
      return Object.assign(Object.assign({}, columns != null ? {
        columns: columns
      } : {}), {
        bounds: 'full',
        // Use align each so it can work with multiple plots with different size
        align: 'each'
      });
    }
  }]);

  return ConcatModel;
}(Model);
|
|
|
|
// Returns true when a value explicitly disables an axis part (false or null).
function isFalseOrNull(v) {
  return v === null || v === false;
}
|
|
|
|
// Index of every property an axis component may carry: the common axis
// properties plus component-only entries (disable/gridScale/scale and the
// assemble-time labelExpr/encode).
var AXIS_COMPONENT_PROPERTIES_INDEX = Object.assign(Object.assign({
  disable: 1,
  gridScale: 1,
  scale: 1
}, COMMON_AXIS_PROPERTIES_INDEX), {
  labelExpr: 1,
  encode: 1
});

// Flat list of the property names above.
var AXIS_COMPONENT_PROPERTIES = keys(AXIS_COMPONENT_PROPERTIES_INDEX);
|
|
|
var AxisComponent = /*#__PURE__*/function (_Split5) {
  _inherits(AxisComponent, _Split5);

  var _superAxis = _createSuper(AxisComponent);

  /**
   * Split component holding explicit (user-specified) and implicit (inferred)
   * axis properties, plus a flag recording whether the main axis was already
   * extracted.
   */
  function AxisComponent() {
    var _this;

    var explicit = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
    var implicit = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
    var mainExtracted = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;

    _classCallCheck(this, AxisComponent);

    _this = _superAxis.call(this);
    _this.explicit = explicit;
    _this.implicit = implicit;
    _this.mainExtracted = mainExtracted;
    return _this;
  }

  _createClass(AxisComponent, [{
    key: "clone",
    // Deep-copies both property splits; mainExtracted is a plain boolean.
    value: function clone() {
      return new AxisComponent(duplicate(this.explicit), duplicate(this.implicit), this.mainExtracted);
    }
  }, {
    key: "hasAxisPart",
    value: function hasAxisPart(part) {
      // FIXME(https://github.com/vega/vega-lite/issues/2552) this method can be wrong if users use a Vega theme.
      if (part === 'axis') {
        // always has the axis container part
        return true;
      }

      if (part === 'grid' || part === 'title') {
        return !!this.get(part);
      }

      // Other parts are enabled by default, so they should not be false or null.
      return !isFalseOrNull(this.get(part));
    }
  }, {
    key: "hasOrientSignalRef",
    value: function hasOrientSignalRef() {
      return isSignalRef(this.explicit.orient);
    }
  }]);

  return AxisComponent;
}(Split);
|
|
|
|
// Builds the `labels` encode entry for an axis. When the axis uses a custom
// format type, emits a `text` encoding that applies the custom formatter to
// `datum.value`; otherwise passes the caller-provided labels spec through.
function labels$1(model, channel, specifiedLabelsSpec) {
  var _a;

  var encoding = model.encoding,
      config = model.config;
  // Fall back to the secondary range channel (e.g. x2 for x) when the
  // primary channel has no field/datum definition.
  var fieldOrDatumDef = (_a = getFieldOrDatumDef(encoding[channel])) !== null && _a !== void 0 ? _a : getFieldOrDatumDef(encoding[getSecondaryRangeChannel(channel)]);
  var axis = model.axis(channel) || {};
  var format = axis.format,
      formatType = axis.formatType;

  if (isCustomFormatType(formatType)) {
    // The caller's spec is spread last so it can override the generated text.
    return Object.assign({
      text: formatCustomType({
        fieldOrDatumDef: fieldOrDatumDef,
        field: 'datum.value',
        format: format,
        formatType: formatType,
        config: config
      })
    }, specifiedLabelsSpec);
  }

  return specifiedLabelsSpec;
}
|
|
|
|
/**
 * Parses axes for a unit model: builds one AxisComponent (wrapped in an
 * array) per positional scale channel that actually has a scale component.
 * @param model - unit model whose axes are parsed
 * @returns index object mapping channel -> [AxisComponent]
 */
function parseUnitAxes(model) {
  var axisIndex = {};

  for (var i = 0; i < POSITION_SCALE_CHANNELS.length; i++) {
    var positionChannel = POSITION_SCALE_CHANNELS[i];

    // Channels without a scale get no axis entry at all.
    if (model.component.scales[positionChannel]) {
      axisIndex[positionChannel] = [parseAxis(positionChannel, model)];
    }
  }

  return axisIndex;
}
|
|
|
|
// Lookup table mapping each axis orient to the opposite side of the plot;
// used by parseLayerAxes to auto-flip independent axes onto free sides.
var OPPOSITE_ORIENT = {
  bottom: 'top',
  top: 'bottom',
  left: 'right',
  right: 'left'
};
|
|
|
|
// Parses axes for a layer model. Pass 1: parse each child's axes and, where
// resolve says 'shared', try to merge them (falling back to 'independent' on
// conflict). Pass 2: for independent axes, concatenate the children's
// components, auto-flip implicit orients to balance the two sides, and
// suppress implied grid lines on dual-axis charts.
function parseLayerAxes(model) {
  var _a;

  var _model$component3 = model.component,
      axes = _model$component3.axes,
      resolve = _model$component3.resolve;
  // Running count of independent axes already placed on each side;
  // drives the orient auto-flipping below.
  var axisCount = {
    top: 0,
    bottom: 0,
    right: 0,
    left: 0
  };

  var _iterator189 = _createForOfIteratorHelper(model.children),
      _step189;

  try {
    for (_iterator189.s(); !(_step189 = _iterator189.n()).done;) {
      var child = _step189.value;
      child.parseAxesAndHeaders();

      var _iterator191 = _createForOfIteratorHelper(keys(child.component.axes)),
          _step191;

      try {
        for (_iterator191.s(); !(_step191 = _iterator191.n()).done;) {
          var channel = _step191.value;
          resolve.axis[channel] = parseGuideResolve(model.component.resolve, channel);

          if (resolve.axis[channel] === 'shared') {
            // If the resolve says shared (and has not been overridden)
            // We will try to merge and see if there is a conflict
            axes[channel] = mergeAxisComponents(axes[channel], child.component.axes[channel]);

            if (!axes[channel]) {
              // If merge returns nothing, there is a conflict so we cannot make the axis shared.
              // Thus, mark axis as independent and remove the axis component.
              resolve.axis[channel] = 'independent';
              delete axes[channel];
            }
          }
        }
      } catch (err) {
        _iterator191.e(err);
      } finally {
        _iterator191.f();
      }
    } // Move axes to layer's axis component and merge shared axes

  } catch (err) {
    _iterator189.e(err);
  } finally {
    _iterator189.f();
  }

  var _iterator190 = _createForOfIteratorHelper(POSITION_SCALE_CHANNELS),
      _step190;

  try {
    for (_iterator190.s(); !(_step190 = _iterator190.n()).done;) {
      var _channel4 = _step190.value;

      var _iterator192 = _createForOfIteratorHelper(model.children),
          _step192;

      try {
        for (_iterator192.s(); !(_step192 = _iterator192.n()).done;) {
          var _child8 = _step192.value;

          if (!_child8.component.axes[_channel4]) {
            // skip if the child does not have a particular axis
            continue;
          }

          if (resolve.axis[_channel4] === 'independent') {
            // If axes are independent, concat the axisComponent array.
            axes[_channel4] = ((_a = axes[_channel4]) !== null && _a !== void 0 ? _a : []).concat(_child8.component.axes[_channel4]); // Automatically adjust orient

            var _iterator194 = _createForOfIteratorHelper(_child8.component.axes[_channel4]),
                _step194;

            try {
              for (_iterator194.s(); !(_step194 = _iterator194.n()).done;) {
                var axisComponent = _step194.value;

                var _axisComponent$getWit = axisComponent.getWithExplicit('orient'),
                    _orient5 = _axisComponent$getWit.value,
                    explicit = _axisComponent$getWit.explicit;

                if (isSignalRef(_orient5)) {
                  // Cannot auto-flip an orient driven by a signal reference.
                  continue;
                }

                if (axisCount[_orient5] > 0 && !explicit) {
                  // Change axis orient if the number do not match
                  var oppositeOrient = OPPOSITE_ORIENT[_orient5];

                  if (axisCount[_orient5] > axisCount[oppositeOrient]) {
                    // `false` records the flipped orient as implicit.
                    axisComponent.set('orient', oppositeOrient, false);
                  }
                }

                axisCount[_orient5]++; // TODO(https://github.com/vega/vega-lite/issues/2634): automatically add extra offset?
              }
            } catch (err) {
              _iterator194.e(err);
            } finally {
              _iterator194.f();
            }
          } // After merging, make sure to remove axes from child


          delete _child8.component.axes[_channel4];
        } // Suppress grid lines for dual axis charts (https://github.com/vega/vega-lite/issues/4676)

      } catch (err) {
        _iterator192.e(err);
      } finally {
        _iterator192.f();
      }

      if (resolve.axis[_channel4] === 'independent' && axes[_channel4] && axes[_channel4].length > 1) {
        var _iterator193 = _createForOfIteratorHelper(axes[_channel4]),
            _step193;

        try {
          for (_iterator193.s(); !(_step193 = _iterator193.n()).done;) {
            var axisCmpt = _step193.value;

            if (!!axisCmpt.get('grid') && !axisCmpt.explicit.grid) {
              // Only drop grids that were implied, never explicitly requested ones.
              axisCmpt.implicit.grid = false;
            }
          }
        } catch (err) {
          _iterator193.e(err);
        } finally {
          _iterator193.f();
        }
      }
    }
  } catch (err) {
    _iterator190.e(err);
  } finally {
    _iterator190.f();
  }
}
|
|
|
|
// Merges a child's axis components into the accumulated shared components,
// index by index (mutating `mergedAxisCmpts`). Returns undefined when the
// components conflict — different counts, mismatched presence at an index,
// or explicitly conflicting orients — signalling the axis cannot be shared.
// On the first child (no accumulator yet), returns clones of the child's components.
function mergeAxisComponents(mergedAxisCmpts, childAxisCmpts) {
  if (mergedAxisCmpts) {
    // FIXME: this is a bit wrong once we support multiple axes
    if (mergedAxisCmpts.length !== childAxisCmpts.length) {
      return undefined; // Cannot merge axis component with different number of axes.
    }

    var _length = mergedAxisCmpts.length;

    for (var i = 0; i < _length; i++) {
      var merged = mergedAxisCmpts[i];
      var child = childAxisCmpts[i];

      if (!!merged !== !!child) {
        // One side has an axis at this index and the other does not.
        return undefined;
      } else if (merged && child) {
        var mergedOrient = merged.getWithExplicit('orient');
        var childOrient = child.getWithExplicit('orient');

        if (mergedOrient.explicit && childOrient.explicit && mergedOrient.value !== childOrient.value) {
          // TODO: throw warning if resolve is explicit (We don't have info about explicit/implicit resolve yet.)
          // Cannot merge due to inconsistent orient
          return undefined;
        } else {
          mergedAxisCmpts[i] = mergeAxisComponent(merged, child);
        }
      }
    }
  } else {
    // For first one, return a copy of the child
    return childAxisCmpts.map(function (axisComponent) {
      return axisComponent.clone();
    });
  }

  return mergedAxisCmpts;
}
|
|
|
|
// Merges every axis property of `child` into `merged` in place and returns
// `merged`. Uses property-specific tie breakers: 'title' concatenates title
// components, 'gridScale' takes the first defined value; all other
// properties fall back to the generic defaultTieBreaker.
function mergeAxisComponent(merged, child) {
  var _iterator195 = _createForOfIteratorHelper(AXIS_COMPONENT_PROPERTIES),
      _step195;

  try {
    var _loop8 = function _loop8() {
      var prop = _step195.value;
      var mergedValueWithExplicit = mergeValuesWithExplicit(merged.getWithExplicit(prop), child.getWithExplicit(prop), prop, 'axis', // Tie breaker function
      function (v1, v2) {
        switch (prop) {
          case 'title':
            return mergeTitleComponent(v1, v2);

          case 'gridScale':
            return {
              explicit: v1.explicit,
              value: getFirstDefined(v1.value, v2.value)
            };
        }

        return defaultTieBreaker(v1, v2, prop, 'axis');
      });
      merged.setWithExplicit(prop, mergedValueWithExplicit);
    };

    for (_iterator195.s(); !(_step195 = _iterator195.n()).done;) {
      _loop8();
    }
  } catch (err) {
    _iterator195.e(err);
  } finally {
    _iterator195.f();
  }

  return merged;
} // eslint-disable-next-line @typescript-eslint/ban-types
|
|
|
|
|
|
// Determines whether a computed axis property value counts as explicitly
// user-specified (as opposed to derived), using per-property rules; the
// generic fallback is "the value equals what the axis spec literally says".
function isExplicit$1(value, property, axis, model, channel) {
  if (property === 'disable') {
    return axis !== undefined; // if axis is specified or null/false, then it's enable/disable state is explicit
  }

  axis = axis || {};

  switch (property) {
    case 'titleAngle':
    case 'labelAngle':
      // Angles are normalized before comparison unless supplied as a signal.
      return value === (isSignalRef(axis.labelAngle) ? axis.labelAngle : normalizeAngle(axis.labelAngle));

    case 'values':
      return !!axis.values;
    // specified axis.values is already respected, but may get transformed.

    case 'encode':
      // both VL axis.encoding and axis.labelAngle affect VG axis.encode
      return !!axis.encoding || !!axis.labelAngle;

    case 'title':
      // title can be explicit if fieldDef.title is set
      if (value === getFieldDefTitle(model, channel)) {
        return true;
      }

  } // Otherwise, things are explicit if the returned value matches the specified property


  return value === axis[property];
}
|
|
/**
 * Properties to always include values from config when assembling an
 * axis component, even when the config value would normally be left for
 * Vega to apply on its own.
 */
var propsToAlwaysIncludeConfig = new Set(['grid', 'translate', // the rest are not axis configs in Vega, but are in VL, so we need to set too.
'format', 'formatType', 'orient', 'labelExpr', 'tickCount', 'position', 'tickMinStep']);
|
|
|
|
// Builds the AxisComponent for one positional channel of a unit model:
// 1) resolves the disable state, 2) fills each axis property from rules /
// the axis spec / the axis config (tracking explicit vs implicit), and
// 3) assembles the guide `encode` blocks for the enabled axis parts.
function parseAxis(channel, model) {
  var _a, _b, _c;

  var axis = model.axis(channel);
  var axisComponent = new AxisComponent();
  var fieldOrDatumDef = getFieldOrDatumDef(model.encoding[channel]);
  var mark = model.mark,
      config = model.config;
  // Orient precedence: axis spec > channel config (axisX/axisY) > axis config > default.
  var orient = (axis === null || axis === void 0 ? void 0 : axis.orient) || ((_a = config[channel === 'x' ? 'axisX' : 'axisY']) === null || _a === void 0 ? void 0 : _a.orient) || ((_b = config.axis) === null || _b === void 0 ? void 0 : _b.orient) || defaultOrient(channel);
  var scaleType = model.getScaleComponent(channel).get('type');
  var axisConfigs = getAxisConfigs(channel, scaleType, orient, model.config);
  // `axis: null`/`false` in the spec disables the axis explicitly;
  // when no axis is specified at all, the config decides.
  var disable = axis !== undefined ? !axis : getAxisConfig('disable', config.style, axis === null || axis === void 0 ? void 0 : axis.style, axisConfigs).configValue;
  axisComponent.set('disable', disable, axis !== undefined);

  if (disable) {
    return axisComponent;
  }

  axis = axis || {};
  var labelAngle = getLabelAngle(fieldOrDatumDef, axis, channel, config.style, axisConfigs);
  var ruleParams = {
    fieldOrDatumDef: fieldOrDatumDef,
    axis: axis,
    channel: channel,
    model: model,
    scaleType: scaleType,
    orient: orient,
    labelAngle: labelAngle,
    mark: mark,
    config: config
  }; // 1.2. Add properties

  var _iterator196 = _createForOfIteratorHelper(AXIS_COMPONENT_PROPERTIES),
      _step196;

  try {
    for (_iterator196.s(); !(_step196 = _iterator196.n()).done;) {
      var property = _step196.value;
      // Rule-derived value if the property has a rule; otherwise the raw
      // axis-spec value for known axis properties.
      var value = property in axisRules ? axisRules[property](ruleParams) : isAxisProperty(property) ? axis[property] : undefined;
      var hasValue = value !== undefined;
      var explicit = isExplicit$1(value, property, axis, model, channel);

      if (hasValue && explicit) {
        axisComponent.set(property, value, explicit);
      } else {
        var _ref95 = isAxisProperty(property) && property !== 'values' ? getAxisConfig(property, config.style, axis.style, axisConfigs) : {},
            _ref95$configValue = _ref95.configValue,
            configValue = _ref95$configValue === void 0 ? undefined : _ref95$configValue,
            _ref95$configFrom = _ref95.configFrom,
            configFrom = _ref95$configFrom === void 0 ? undefined : _ref95$configFrom;

        var hasConfigValue = configValue !== undefined;

        if (hasValue && !hasConfigValue) {
          // only set property if it is explicitly set or has no config value (otherwise we will accidentally override config)
          axisComponent.set(property, value, explicit);
        } else if ( // Cases need implicit values
        // 1. Axis config that aren't available in Vega
        !(configFrom === 'vgAxisConfig') || // 2. Certain properties are always included (see `propsToAlwaysIncludeConfig`'s declaration for more details)
        propsToAlwaysIncludeConfig.has(property) && hasConfigValue || // 3. Conditional axis values and signals
        isConditionalAxisValue(configValue) || isSignalRef(configValue)) {
          // If a config is specified and is conditional, copy conditional value from axis config
          axisComponent.set(property, configValue, false);
        }
      }
    } // 2) Add guide encode definition groups

  } catch (err) {
    _iterator196.e(err);
  } finally {
    _iterator196.f();
  }

  var axisEncoding = (_c = axis.encoding) !== null && _c !== void 0 ? _c : {};
  var axisEncode = AXIS_PARTS.reduce(function (e, part) {
    var _a;

    if (!axisComponent.hasAxisPart(part)) {
      // No need to create encode for a disabled part.
      return e;
    }

    var axisEncodingPart = guideEncodeEntry((_a = axisEncoding[part]) !== null && _a !== void 0 ? _a : {}, model);
    // The labels part additionally gets custom-format text handling.
    var value = part === 'labels' ? labels$1(model, channel, axisEncodingPart) : axisEncodingPart;

    if (value !== undefined && !isEmpty(value)) {
      e[part] = {
        update: value
      };
    }

    return e;
  }, {}); // FIXME: By having encode as one property, we won't have fine grained encode merging.

  if (!isEmpty(axisEncode)) {
    axisComponent.set('encode', axisEncode, !!axis.encoding || axis.labelAngle !== undefined);
  }

  return axisComponent;
}
|
|
|
|
// Drops step-based width/height for channels whose encoding is a continuous
// field/datum def (steps only make sense for discrete scales), warning about
// each drop. NOTE(review): mutates the caller-provided `size` object in place
// and also returns it.
function initLayoutSize(_ref96) {
  var encoding = _ref96.encoding,
      size = _ref96.size;

  var _iterator197 = _createForOfIteratorHelper(POSITION_SCALE_CHANNELS),
      _step197;

  try {
    for (_iterator197.s(); !(_step197 = _iterator197.n()).done;) {
      var channel = _step197.value;
      var sizeType = getSizeChannel(channel);

      if (isStep(size[sizeType])) {
        if (isContinuousFieldOrDatumDef(encoding[channel])) {
          delete size[sizeType];
          warn(stepDropped(sizeType));
        }
      }
    }
  } catch (err) {
    _iterator197.e(err);
  } finally {
    _iterator197.f();
  }

  return size;
}
|
|
|
|
// Normalizes a mark definition in place: resolves the effective orient
// (warning when a user-specified orient had to be overridden), expands bar
// `cornerRadiusEnd` into concrete corner-radius properties, and fills in
// default opacity/cursor when unspecified. Returns the mutated markDef.
function initMarkdef(markDef, encoding, config) {
  // set orient, which can be overridden by rules as sometimes the specified orient is invalid.
  var specifiedOrient = getMarkPropOrConfig('orient', markDef, config);
  markDef.orient = orient(markDef.type, encoding, specifiedOrient);

  if (specifiedOrient !== undefined && specifiedOrient !== markDef.orient) {
    warn(orientOverridden(markDef.orient, specifiedOrient));
  }

  if (markDef.type === 'bar' && markDef.orient) {
    var cornerRadiusEnd = getMarkPropOrConfig('cornerRadiusEnd', markDef, config);

    if (cornerRadiusEnd !== undefined) {
      // Ranged bars (x2/y2 present on the stacked direction) round all
      // corners; otherwise only the "end" corners for this orient.
      var newProps = markDef.orient === 'horizontal' && encoding.x2 || markDef.orient === 'vertical' && encoding.y2 ? ['cornerRadius'] : BAR_CORNER_RADIUS_INDEX[markDef.orient];

      var _iterator198 = _createForOfIteratorHelper(newProps),
          _step198;

      try {
        for (_iterator198.s(); !(_step198 = _iterator198.n()).done;) {
          var newProp = _step198.value;
          markDef[newProp] = cornerRadiusEnd;
        }
      } catch (err) {
        _iterator198.e(err);
      } finally {
        _iterator198.f();
      }

      if (markDef.cornerRadiusEnd !== undefined) {
        delete markDef.cornerRadiusEnd; // no need to keep the original cap cornerRadius
      }
    }
  } // set opacity and filled if not specified in mark config


  var specifiedOpacity = getMarkPropOrConfig('opacity', markDef, config);

  if (specifiedOpacity === undefined) {
    markDef.opacity = opacity(markDef.type, encoding);
  } // set cursor, which should be pointer if href channel is present unless otherwise specified


  var specifiedCursor = getMarkPropOrConfig('cursor', markDef, config);

  if (specifiedCursor === undefined) {
    markDef.cursor = cursor(markDef, encoding, config);
  }

  return markDef;
}
|
|
|
|
/**
 * Default cursor for a mark: 'pointer' when an href is present on the
 * encoding, the mark definition, or the mark/config lookup; otherwise
 * the mark's own cursor (possibly undefined).
 */
function cursor(markDef, encoding, config) {
  var hasHref = Boolean(encoding.href) || Boolean(markDef.href) || Boolean(getMarkPropOrConfig('href', markDef, config));
  return hasHref ? 'pointer' : markDef.cursor;
}
|
|
|
|
/**
 * Default mark opacity: point-like marks (point/tick/circle/square) over
 * non-aggregated data get 0.7 so overlapping points stay visible;
 * everything else gets no default (undefined).
 */
function opacity(mark, encoding) {
  var isPointBased = contains([POINT, TICK, CIRCLE, SQUARE], mark);

  if (!isPointBased || isAggregate(encoding)) {
    return undefined;
  }

  return 0.7;
}
|
|
|
|
// Default `filled` flag for a mark: graticules are never filled; otherwise
// use the `filled` mark config, falling back to filled for every mark type
// except point, line, and rule.
function defaultFilled(markDef, config, _ref97) {
  var graticule = _ref97.graticule;

  if (graticule) {
    return false;
  }

  var filledConfig = getMarkConfig('filled', markDef, config);
  var mark = markDef.type;
  return getFirstDefined(filledConfig, mark !== POINT && mark !== LINE && mark !== RULE);
}
|
|
|
|
// Infers a mark's orientation ('horizontal' | 'vertical' | undefined) from
// its x/y/x2/y2 encodings, honoring `specifiedOrient` only where the
// orientation is genuinely ambiguous. The second switch deliberately falls
// through BAR -> RULE -> AREA -> LINE/TICK so later heuristics are reused.
function orient(mark, encoding, specifiedOrient) {
  switch (mark) {
    case POINT:
    case CIRCLE:
    case SQUARE:
    case TEXT:
    case RECT:
    case IMAGE:
      // orient is meaningless for these marks.
      return undefined;
  }

  var x = encoding.x,
      y = encoding.y,
      x2 = encoding.x2,
      y2 = encoding.y2;

  switch (mark) {
    case BAR:
      // Binned or aggregate-vs-raw pairings fix the orientation outright.
      if (isFieldDef(x) && (isBinned(x.bin) || isFieldDef(y) && y.aggregate && !x.aggregate)) {
        return 'vertical';
      }

      if (isFieldDef(y) && (isBinned(y.bin) || isFieldDef(x) && x.aggregate && !y.aggregate)) {
        return 'horizontal';
      }

      if (y2 || x2) {
        // Ranged bar does not always have clear orientation, so we allow overriding
        if (specifiedOrient) {
          return specifiedOrient;
        } // If y is range and x is non-range, non-bin Q, y is likely a prebinned field


        if (!x2) {
          if (isFieldDef(x) && x.type === QUANTITATIVE && !isBinning(x.bin) || isNumericDataDef(x)) {
            return 'horizontal';
          }
        } // If x is range and y is non-range, non-bin Q, x is likely a prebinned field


        if (!y2) {
          if (isFieldDef(y) && y.type === QUANTITATIVE && !isBinning(y.bin) || isNumericDataDef(y)) {
            return 'vertical';
          }
        }
      }

    // falls through

    case RULE:
      // return undefined for line segment rule and bar with both axis ranged
      // we have to ignore the case that the data are already binned
      if (x2 && !(isFieldDef(x) && isBinned(x.bin)) && y2 && !(isFieldDef(y) && isBinned(y.bin))) {
        return undefined;
      }

    // falls through

    case AREA:
      // If there are range for both x and y, y (vertical) has higher precedence.
      if (y2) {
        if (isFieldDef(y) && isBinned(y.bin)) {
          return 'horizontal';
        } else {
          return 'vertical';
        }
      } else if (x2) {
        if (isFieldDef(x) && isBinned(x.bin)) {
          return 'vertical';
        } else {
          return 'horizontal';
        }
      } else if (mark === RULE) {
        // Single-channel rule spans the full plot along the encoded channel.
        if (x && !y) {
          return 'vertical';
        } else if (y && !x) {
          return 'horizontal';
        }
      }

    // falls through

    case LINE:
    case TICK:
      {
        // Tick is opposite to bar, line, area and never have ranged mark.
        var xIsContinuous = isContinuousFieldOrDatumDef(x);
        var yIsContinuous = isContinuousFieldOrDatumDef(y);

        if (xIsContinuous && !yIsContinuous) {
          return mark !== 'tick' ? 'horizontal' : 'vertical';
        } else if (!xIsContinuous && yIsContinuous) {
          return mark !== 'tick' ? 'vertical' : 'horizontal';
        } else if (xIsContinuous && yIsContinuous) {
          var xDef = x; // we can cast here since they are surely fieldDef

          var yDef = y;
          var xIsTemporal = xDef.type === TEMPORAL;
          var yIsTemporal = yDef.type === TEMPORAL; // temporal without timeUnit is considered continuous, but better serves as dimension

          if (xIsTemporal && !yIsTemporal) {
            return mark !== 'tick' ? 'vertical' : 'horizontal';
          } else if (!xIsTemporal && yIsTemporal) {
            return mark !== 'tick' ? 'horizontal' : 'vertical';
          }

          // The aggregated side is treated as the measure direction.
          if (!xDef.aggregate && yDef.aggregate) {
            return mark !== 'tick' ? 'vertical' : 'horizontal';
          } else if (xDef.aggregate && !yDef.aggregate) {
            return mark !== 'tick' ? 'horizontal' : 'vertical';
          }

          if (specifiedOrient) {
            // When ambiguous, use user specified one.
            return specifiedOrient;
          }

          return 'vertical';
        } else {
          // Discrete x Discrete case
          if (specifiedOrient) {
            // When ambiguous, use user specified one.
            return specifiedOrient;
          }

          return undefined;
        }
      }
  }

  return 'vertical';
}
|
|
|
|
// Mark compiler for `arc`: mid-point x/y positions plus radius/theta
// rect-style positions for the polar extents.
var arc = {
  vgMark: 'arc',
  encodeEntry: function encodeEntry(model) {
    return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
      align: 'ignore',
      baseline: 'ignore',
      color: 'include',
      size: 'ignore',
      orient: 'ignore',
      theta: 'ignore'
    })), pointPosition('x', model, {
      defaultPos: 'mid'
    })), pointPosition('y', model, {
      defaultPos: 'mid'
    })), rectPosition(model, 'radius', 'arc')), rectPosition(model, 'theta', 'arc'));
  }
};
|
|
// Mark compiler for `area`: ranged x/y positions — the ranged side follows
// the mark's orient — plus the `defined` test for gaps over invalid values.
var area = {
  vgMark: 'area',
  encodeEntry: function encodeEntry(model) {
    return Object.assign(Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
      align: 'ignore',
      baseline: 'ignore',
      color: 'include',
      orient: 'include',
      size: 'ignore',
      theta: 'ignore'
    })), pointOrRangePosition('x', model, {
      defaultPos: 'zeroOrMin',
      defaultPos2: 'zeroOrMin',
      range: model.markDef.orient === 'horizontal'
    })), pointOrRangePosition('y', model, {
      defaultPos: 'zeroOrMin',
      defaultPos2: 'zeroOrMin',
      range: model.markDef.orient === 'vertical'
    })), defined(model));
  }
};
|
|
// Mark compiler for `bar`: rendered as a Vega `rect` with rect-style
// positions on both x and y.
var bar = {
  vgMark: 'rect',
  encodeEntry: function encodeEntry(model) {
    return Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
      align: 'ignore',
      baseline: 'ignore',
      color: 'include',
      orient: 'ignore',
      size: 'ignore',
      theta: 'ignore'
    })), rectPosition(model, 'x', 'bar')), rectPosition(model, 'y', 'bar'));
  }
};
|
|
// Mark compiler for `geoshape`: base encodings only; the geometry itself is
// produced by a post-encoding `geoshape` transform bound to the model's projection.
var geoshape = {
  vgMark: 'shape',
  encodeEntry: function encodeEntry(model) {
    return Object.assign({}, baseEncodeEntry(model, {
      align: 'ignore',
      baseline: 'ignore',
      color: 'include',
      size: 'ignore',
      orient: 'ignore',
      theta: 'ignore'
    }));
  },
  postEncodingTransform: function postEncodingTransform(model) {
    var encoding = model.encoding;
    var shapeDef = encoding.shape;
    // Only pass a `field` to the transform when shape encodes a GeoJSON field.
    var transform = Object.assign({
      type: 'geoshape',
      projection: model.projectionName()
    }, shapeDef && isFieldDef(shapeDef) && shapeDef.type === GEOJSON ? {
      field: _vgField(shapeDef, {
        expr: 'datum'
      })
    } : {});
    return [transform];
  }
};
|
|
// Mark compiler for `image`: rect-style x/y positions plus the image `url`
// channel (via the text helper with an explicit 'url' channel).
var image = {
  vgMark: 'image',
  encodeEntry: function encodeEntry(model) {
    return Object.assign(Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
      align: 'ignore',
      baseline: 'ignore',
      color: 'ignore',
      orient: 'ignore',
      size: 'ignore',
      theta: 'ignore'
    })), rectPosition(model, 'x', 'image')), rectPosition(model, 'y', 'image')), text(model, 'url'));
  }
};
|
|
// Mark compiler for `line`: mid-point positions, with VL's `size` channel
// mapped to Vega's strokeWidth, plus the `defined` test for gaps.
var line = {
  vgMark: 'line',
  encodeEntry: function encodeEntry(model) {
    return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
      align: 'ignore',
      baseline: 'ignore',
      color: 'include',
      size: 'ignore',
      orient: 'ignore',
      theta: 'ignore'
    })), pointPosition('x', model, {
      defaultPos: 'mid'
    })), pointPosition('y', model, {
      defaultPos: 'mid'
    })), nonPosition('size', model, {
      vgChannel: 'strokeWidth' // VL's line size is strokeWidth

    })), defined(model));
  }
};
|
|
// Mark compiler for `trail`: like line, but `size` stays the trail's native
// size channel (varying width along the path) instead of strokeWidth.
var trail = {
  vgMark: 'trail',
  encodeEntry: function encodeEntry(model) {
    return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
      align: 'ignore',
      baseline: 'ignore',
      color: 'include',
      size: 'include',
      orient: 'ignore',
      theta: 'ignore'
    })), pointPosition('x', model, {
      defaultPos: 'mid'
    })), pointPosition('y', model, {
      defaultPos: 'mid'
    })), nonPosition('size', model)), defined(model));
  }
};
|
|
|
|
// Shared encode-entry builder for symbol marks (point/circle/square):
// mid-point x/y positions, size, angle, and the shape (fixed or encoded).
function _encodeEntry(model, fixedShape) {
  var config = model.config;
  return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
    align: 'ignore',
    baseline: 'ignore',
    color: 'include',
    size: 'include',
    orient: 'ignore',
    theta: 'ignore'
  })), pointPosition('x', model, {
    defaultPos: 'mid'
  })), pointPosition('y', model, {
    defaultPos: 'mid'
  })), nonPosition('size', model)), nonPosition('angle', model)), shapeMixins(model, config, fixedShape));
}
|
|
|
|
/**
 * Shape encode mixins for symbol marks: a constant shape value when
 * `fixedShape` is given (the circle/square mark variants), otherwise the
 * shape channel's non-position encoding.
 */
function shapeMixins(model, config, fixedShape) {
  return fixedShape ? { shape: { value: fixedShape } } : nonPosition('shape', model);
}
|
|
|
|
// Mark compiler for `point`: a Vega symbol whose shape comes from encoding/config.
var point = {
  vgMark: 'symbol',
  encodeEntry: function encodeEntry(model) {
    return _encodeEntry(model);
  }
};
|
|
// Mark compiler for `circle`: a Vega symbol with a fixed 'circle' shape.
var circle = {
  vgMark: 'symbol',
  encodeEntry: function encodeEntry(model) {
    return _encodeEntry(model, 'circle');
  }
};
|
|
// Mark compiler for `square`: a Vega symbol with a fixed 'square' shape.
var square = {
  vgMark: 'symbol',
  encodeEntry: function encodeEntry(model) {
    return _encodeEntry(model, 'square');
  }
};
|
|
// Mark compiler for `rect`: rect-style positions on both x and y.
var rect = {
  vgMark: 'rect',
  encodeEntry: function encodeEntry(model) {
    return Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
      align: 'ignore',
      baseline: 'ignore',
      color: 'include',
      orient: 'ignore',
      size: 'ignore',
      theta: 'ignore'
    })), rectPosition(model, 'x', 'rect')), rectPosition(model, 'y', 'rect'));
  }
};
|
|
// Mark compiler for `rule`: spans the plot along the non-oriented direction
// (zeroOrMax default) and maps VL's `size` to strokeWidth. Returns an empty
// encode when no positional or geographic channel is present.
var rule = {
  vgMark: 'rule',
  encodeEntry: function encodeEntry(model) {
    var markDef = model.markDef;
    var orient = markDef.orient;

    if (!model.encoding.x && !model.encoding.y && !model.encoding.latitude && !model.encoding.longitude) {
      // Show nothing if we have none of x, y, lat, and long.
      return {};
    }

    return Object.assign(Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
      align: 'ignore',
      baseline: 'ignore',
      color: 'include',
      orient: 'ignore',
      size: 'ignore',
      theta: 'ignore'
    })), pointOrRangePosition('x', model, {
      defaultPos: orient === 'horizontal' ? 'zeroOrMax' : 'mid',
      defaultPos2: 'zeroOrMin',
      range: orient !== 'vertical' // include x2 for horizontal or line segment rule

    })), pointOrRangePosition('y', model, {
      defaultPos: orient === 'vertical' ? 'zeroOrMax' : 'mid',
      defaultPos2: 'zeroOrMin',
      range: orient !== 'horizontal' // include y2 for vertical or line segment rule

    })), nonPosition('size', model, {
      vgChannel: 'strokeWidth' // VL's rule size is strokeWidth

    }));
  }
};
|
|
// Mark compiler for `text`: x/y (plus polar radius/theta midpoint) positions,
// the text channel itself, `size` mapped to fontSize, angle, and the
// align/baseline defaults from the helpers below.
var text$1 = {
  vgMark: 'text',
  encodeEntry: function encodeEntry(model) {
    var config = model.config,
        encoding = model.encoding;
    return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
      align: 'include',
      baseline: 'include',
      color: 'include',
      size: 'ignore',
      orient: 'ignore',
      theta: 'include'
    })), pointPosition('x', model, {
      defaultPos: 'mid'
    })), pointPosition('y', model, {
      defaultPos: 'mid'
    })), text(model)), nonPosition('size', model, {
      vgChannel: 'fontSize' // VL's text size is fontSize

    })), nonPosition('angle', model)), valueIfDefined('align', align(model.markDef, encoding, config))), valueIfDefined('baseline', baseline(model.markDef, encoding, config))), pointPosition('radius', model, {
      defaultPos: null,
      isMidPoint: true
    })), pointPosition('theta', model, {
      defaultPos: null,
      isMidPoint: true
    }));
  }
};
|
|
|
|
/**
 * Default text align: 'center' when neither the mark definition nor the
 * config specifies one; otherwise undefined, because Vega's parser applies
 * the configured value itself.
 */
function align(markDef, encoding, config) {
  var specified = getMarkPropOrConfig('align', markDef, config);
  return specified === undefined ? 'center' : undefined;
}
|
|
|
|
/**
 * Default text baseline: 'middle' when neither the mark definition nor the
 * config specifies one; otherwise undefined, because Vega's parser applies
 * the configured value itself.
 */
function baseline(markDef, encoding, config) {
  var specified = getMarkPropOrConfig('baseline', markDef, config);
  return specified === undefined ? 'middle' : undefined;
}
|
|
|
|
// Mark compiler for `tick`: a centered rect (xc/yc) whose length (`size`)
// runs along the orient direction and whose `thickness` sets the cross
// dimension.
var tick = {
  vgMark: 'rect',
  encodeEntry: function encodeEntry(model) {
    var config = model.config,
        markDef = model.markDef;
    var orient = markDef.orient;
    // Length runs along the orient; thickness is the perpendicular dimension.
    var vgSizeChannel = orient === 'horizontal' ? 'width' : 'height';
    var vgThicknessChannel = orient === 'horizontal' ? 'height' : 'width';
    return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({}, baseEncodeEntry(model, {
      align: 'ignore',
      baseline: 'ignore',
      color: 'include',
      orient: 'ignore',
      size: 'ignore',
      theta: 'ignore'
    })), pointPosition('x', model, {
      defaultPos: 'mid',
      vgChannel: 'xc'
    })), pointPosition('y', model, {
      defaultPos: 'mid',
      vgChannel: 'yc'
    })), nonPosition('size', model, {
      defaultValue: defaultSize(model),
      vgChannel: vgSizeChannel
    })), _defineProperty({}, vgThicknessChannel, signalOrValueRef(getMarkPropOrConfig('thickness', markDef, config))));
  }
};
|
|
|
|
// Default tick length: the mark/config `size` (or config.tick.bandSize) when
// present; otherwise 3/4 of the band step — taken from the positional scale's
// range step when available, else from the view config's discrete step.
function defaultSize(model) {
  var _a;

  var config = model.config,
      markDef = model.markDef;
  var orient = markDef.orient;
  var vgSizeChannel = orient === 'horizontal' ? 'width' : 'height';
  // The tick's length runs along x for horizontal ticks, y for vertical.
  var scale = model.getScaleComponent(orient === 'horizontal' ? 'x' : 'y');
  var markPropOrConfig = (_a = getMarkPropOrConfig('size', markDef, config, {
    vgChannel: vgSizeChannel
  })) !== null && _a !== void 0 ? _a : config.tick.bandSize;

  if (markPropOrConfig !== undefined) {
    return markPropOrConfig;
  } else {
    var scaleRange = scale ? scale.get('range') : undefined;

    if (scaleRange && isVgRangeStep(scaleRange) && isNumber(scaleRange.step)) {
      return scaleRange.step * 3 / 4;
    }

    var defaultViewStep = getViewConfigDiscreteStep(config.view, vgSizeChannel);
    return defaultViewStep * 3 / 4;
  }
}
|
|
|
|
// Dispatch table mapping each Vega-Lite mark type to its mark compiler.
var markCompiler = {
  arc: arc,
  area: area,
  bar: bar,
  circle: circle,
  geoshape: geoshape,
  image: image,
  line: line,
  point: point,
  rect: rect,
  rule: rule,
  square: square,
  text: text$1,
  tick: tick,
  trail: trail
};
|
|
|
|
// Entry point for assembling Vega mark groups for a unit model: path marks
// (line/area/trail) with grouping fields get faceted path groups; stacked
// bars with corner radius (and no size encoding) get wrapper groups;
// everything else gets a plain mark group.
function parseMarkGroups(model) {
  if (contains([LINE, AREA, TRAIL], model.mark)) {
    var details = pathGroupingFields(model.mark, model.encoding);

    if (details.length > 0) {
      return getPathGroups(model, details);
    } // otherwise use standard mark groups

  } else if (contains([BAR], model.mark)) {
    var hasCornerRadius = VG_CORNERRADIUS_CHANNELS.some(function (prop) {
      return getMarkPropOrConfig(prop, model.markDef, model.config);
    });

    if (model.stack && !model.fieldDef('size') && hasCornerRadius) {
      return getGroupsForStackedBarWithCornerRadius(model);
    }
  }

  return getMarkGroup(model);
}
|
|
|
|
// Data-name prefix for the per-series facets of path marks.
var FACETED_PATH_PREFIX = 'faceted_path_';

// Wraps a line/area/trail mark in a facet group so each series (per the
// `details` grouping fields) is drawn as its own path.
function getPathGroups(model, details) {
  // TODO: for non-stacked plot, map order to zindex. (Maybe rename order for layer to zindex?)
  return [{
    name: model.getName('pathgroup'),
    type: 'group',
    from: {
      facet: {
        name: FACETED_PATH_PREFIX + model.requestDataName(DataSourceType.Main),
        data: model.requestDataName(DataSourceType.Main),
        groupby: details
      }
    },
    encode: {
      update: {
        // The group fills its parent so the inner marks inherit full extent.
        width: {
          field: {
            group: 'width'
          }
        },
        height: {
          field: {
            group: 'height'
          }
        }
      }
    },
    // With subfacet for line/area group, need to use faceted data from above.
    marks: getMarkGroup(model, {
      fromPrefix: FACETED_PATH_PREFIX
    })
  }];
}
|
|
|
|
// Name prefix for the facet data source that drives stacked-bar corner-radius groups.
var STACK_GROUP_PREFIX = 'stack_group_';
|
|
/**
|
|
* We need to put stacked bars into groups in order to enable cornerRadius for stacks.
|
|
* If stack is used and the model doesn't have size encoding, we put the mark into groups,
|
|
* and apply cornerRadius properties at the group.
|
|
*/
|
|
|
|
/**
 * Wrap a stacked bar mark in a group + inner group so cornerRadius can be
 * applied to the whole stack rather than each segment.
 *
 * The outer group spans the min/max pixel extent of the stack on the stacked
 * axis and carries the cornerRadius and stroke properties; the inner group
 * reverts the outer group's translation so the bar segments render at their
 * original positions; the original mark (with cornerRadius zeroed out) sits
 * inside. Returns a single-element array with the outer group.
 */
function getGroupsForStackedBarWithCornerRadius(model) {
  // Generate the mark
  var _getMarkGroup = getMarkGroup(model, {
    fromPrefix: STACK_GROUP_PREFIX
  }),
      _getMarkGroup2 = _slicedToArray(_getMarkGroup, 1),
      mark = _getMarkGroup2[0]; // Get the scale for the stacked field


  var fieldScale = model.scaleName(model.stack.fieldChannel);

  // Vega field name for the stacked field, with optional prefix/suffix/expr.
  var stackField = function stackField() {
    var opt = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
    return model.vgField(model.stack.fieldChannel, opt);
  }; // Find the min/max of the pixel value on the stacked direction


  // Builds a signal expression like `min(scale('y','min_y_start'), ...)`
  // over the four aggregated stack boundaries.
  var stackFieldGroup = function stackFieldGroup(func, expr) {
    var vgFieldMinMax = [stackField({
      prefix: 'min',
      suffix: 'start',
      expr: expr
    }), stackField({
      prefix: 'max',
      suffix: 'start',
      expr: expr
    }), stackField({
      prefix: 'min',
      suffix: 'end',
      expr: expr
    }), stackField({
      prefix: 'max',
      suffix: 'end',
      expr: expr
    })];
    return "".concat(func, "(").concat(vgFieldMinMax.map(function (field) {
      return "scale('".concat(fieldScale, "',").concat(field, ")");
    }).join(','), ")");
  };

  var groupUpdate;
  var innerGroupUpdate; // Build the encoding for group and an inner group

  if (model.stack.fieldChannel === 'x') {
    // Horizontal stacking:
    // Move cornerRadius, y/yc/y2/height properties to group
    // Group x/x2 should be the min/max of the marks within
    groupUpdate = Object.assign(Object.assign({}, pick(mark.encode.update, ['y', 'yc', 'y2', 'height'].concat(VG_CORNERRADIUS_CHANNELS))), {
      x: {
        signal: stackFieldGroup('min', 'datum')
      },
      x2: {
        signal: stackFieldGroup('max', 'datum')
      },
      clip: {
        value: true
      }
    }); // Inner group should revert the x translation, and pass height through

    innerGroupUpdate = {
      x: {
        field: {
          group: 'x'
        },
        mult: -1
      },
      height: {
        field: {
          group: 'height'
        }
      }
    }; // The marks should use the same height as group, without y/yc/y2 properties (because it's already done by group)
    // This is why size encoding is not supported yet

    mark.encode.update = Object.assign(Object.assign({}, omit(mark.encode.update, ['y', 'yc', 'y2'])), {
      height: {
        field: {
          group: 'height'
        }
      }
    });
  } else {
    // Vertical stacking: mirror of the branch above with x/y swapped.
    groupUpdate = Object.assign(Object.assign({}, pick(mark.encode.update, ['x', 'xc', 'x2', 'width'])), {
      y: {
        signal: stackFieldGroup('min', 'datum')
      },
      y2: {
        signal: stackFieldGroup('max', 'datum')
      },
      clip: {
        value: true
      }
    });
    innerGroupUpdate = {
      y: {
        field: {
          group: 'y'
        },
        mult: -1
      },
      width: {
        field: {
          group: 'width'
        }
      }
    };
    mark.encode.update = Object.assign(Object.assign({}, omit(mark.encode.update, ['x', 'xc', 'x2'])), {
      width: {
        field: {
          group: 'width'
        }
      }
    });
  } // Deal with cornerRadius properties


  var _iterator199 = _createForOfIteratorHelper(VG_CORNERRADIUS_CHANNELS),
      _step199;

  try {
    for (_iterator199.s(); !(_step199 = _iterator199.n()).done;) {
      var key = _step199.value;
      var configValue = getMarkConfig(key, model.markDef, model.config); // Move from mark to group

      if (mark.encode.update[key]) {
        groupUpdate[key] = mark.encode.update[key];
        delete mark.encode.update[key];
      } else if (configValue) {
        groupUpdate[key] = signalOrValueRef(configValue);
      } // Overwrite any cornerRadius on mark set by config --- they are already moved to the group


      if (configValue) {
        mark.encode.update[key] = {
          value: 0
        };
      }
    } // For bin and time unit, we have to add bin/timeunit -end channels.

  } catch (err) {
    _iterator199.e(err);
  } finally {
    _iterator199.f();
  }

  var groupByField = model.fieldDef(model.stack.groupbyChannel);
  var groupby = _vgField(groupByField) ? [_vgField(groupByField)] : [];

  if ((groupByField === null || groupByField === void 0 ? void 0 : groupByField.bin) || (groupByField === null || groupByField === void 0 ? void 0 : groupByField.timeUnit)) {
    groupby.push(_vgField(groupByField, {
      binSuffix: 'end'
    }));
  }

  var strokeProperties = ['stroke', 'strokeWidth', 'strokeJoin', 'strokeCap', 'strokeDash', 'strokeDashOffset', 'strokeMiterLimit', 'strokeOpacity']; // Generate stroke properties for the group

  // Copy each stroke property from the mark (preferred) or the mark config
  // onto the group, so the stack outline is drawn around the whole group.
  groupUpdate = strokeProperties.reduce(function (encode, prop) {
    if (mark.encode.update[prop]) {
      return Object.assign(Object.assign({}, encode), _defineProperty({}, prop, mark.encode.update[prop]));
    } else {
      var configValue = getMarkConfig(prop, model.markDef, model.config);

      if (configValue !== undefined) {
        return Object.assign(Object.assign({}, encode), _defineProperty({}, prop, signalOrValueRef(configValue)));
      } else {
        return encode;
      }
    }
  }, groupUpdate); // Apply strokeForeground and strokeOffset if stroke is used

  if (groupUpdate.stroke) {
    groupUpdate.strokeForeground = {
      value: true
    };
    groupUpdate.strokeOffset = {
      value: 0
    };
  }

  return [{
    type: 'group',
    from: {
      facet: {
        data: model.requestDataName(DataSourceType.Main),
        name: STACK_GROUP_PREFIX + model.requestDataName(DataSourceType.Main),
        groupby: groupby,
        // Aggregate the stack start/end so the group can span the stack's
        // full extent (fields are repeated to pair with the ops list).
        aggregate: {
          fields: [stackField({
            suffix: 'start'
          }), stackField({
            suffix: 'start'
          }), stackField({
            suffix: 'end'
          }), stackField({
            suffix: 'end'
          })],
          ops: ['min', 'max', 'min', 'max']
        }
      }
    },
    encode: {
      update: groupUpdate
    },
    marks: [{
      type: 'group',
      encode: {
        update: innerGroupUpdate
      },
      marks: [mark]
    }]
  }];
}
|
|
|
|
/**
 * Determine the Vega `sort` property for a mark.
 *
 * Returns undefined when sorting is explicitly disabled (order value of
 * null/false on the encoding or the mark/config), a sort derived from the
 * `order` channel for non-stacked marks, or — for path marks (line/area) —
 * a sort over the dimension channel (x, or y when horizontal) according to
 * that channel's `sort` definition.
 */
function getSort$1(model) {
  var encoding = model.encoding,
      stack = model.stack,
      mark = model.mark,
      markDef = model.markDef,
      config = model.config;
  var order = encoding.order;

  // Sorting disabled via order: {value: null/false} or via mark/config order.
  if (!isArray(order) && isValueDef(order) && isNullOrFalse(order.value) || !order && isNullOrFalse(getMarkPropOrConfig('order', markDef, config))) {
    return undefined;
  } else if ((isArray(order) || isFieldDef(order)) && !stack) {
    // Sort by the order field if it is specified and the field is not stacked. (For stacked field, order specify stack order.)
    return sortParams(order, {
      expr: 'datum'
    });
  } else if (isPathMark(mark)) {
    // For both line and area, we sort values based on dimension by default
    var dimensionChannel = markDef.orient === 'horizontal' ? 'y' : 'x';
    var dimensionChannelDef = encoding[dimensionChannel];

    if (isFieldDef(dimensionChannelDef)) {
      var s = dimensionChannelDef.sort;

      if (isArray(s)) {
        // Explicit sort array: sort by the precomputed sort-index field.
        return {
          field: _vgField(dimensionChannelDef, {
            prefix: dimensionChannel,
            suffix: 'sort_index',
            expr: 'datum'
          })
        };
      } else if (isSortField(s)) {
        // Sort by another field (optionally aggregated).
        return {
          field: _vgField({
            // FIXME: this op might not already exist?
            // FIXME: what if dimensionChannel (x or y) contains custom domain?
            aggregate: isAggregate(model.encoding) ? s.op : undefined,
            field: s.field
          }, {
            expr: 'datum'
          })
        };
      } else if (isSortByEncoding(s)) {
        // Sort by the field of another encoding channel.
        var fieldDefToSort = model.fieldDef(s.encoding);
        return {
          field: _vgField(fieldDefToSort, {
            expr: 'datum'
          }),
          order: s.order
        };
      } else if (s === null) {
        // sort: null explicitly opts out of sorting.
        return undefined;
      } else {
        // Default: sort by the dimension field itself.
        return {
          field: _vgField(dimensionChannelDef, {
            // For stack with imputation, we only have bin_mid
            binSuffix: model.stack && model.stack.impute ? 'mid' : undefined,
            expr: 'datum'
          })
        };
      }
    }

    return undefined;
  }

  return undefined;
}
|
|
|
|
/**
 * Assemble the standard Vega mark definition for a unit model.
 *
 * Optional properties (clip, style, key, sort, interactive, aria,
 * transform) are only added when they apply; the `opt.fromPrefix` is
 * prepended to the data-source name (used by faceted path/stack groups).
 * Returns a single-element array containing the mark.
 */
function getMarkGroup(model) {
  var opt = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {
    fromPrefix: ''
  };
  var mark = model.mark;
  var markDef = model.markDef;
  var encoding = model.encoding;
  var config = model.config;
  var compiler = markCompiler[mark];

  var clip = getFirstDefined(markDef.clip, scaleClip(model), projectionClip(model));
  var style = getStyles(markDef);
  var key = encoding.key;
  var sort = getSort$1(model);
  var interactive = interactiveFlag(model);
  var aria = getMarkPropOrConfig('aria', markDef, config);
  var postEncodingTransform = compiler.postEncodingTransform ? compiler.postEncodingTransform(model) : null;

  var markGroup = {
    name: model.getName('marks'),
    type: compiler.vgMark
  };
  if (clip) {
    markGroup.clip = true;
  }
  if (style) {
    markGroup.style = style;
  }
  if (key) {
    markGroup.key = key.field;
  }
  if (sort) {
    markGroup.sort = sort;
  }
  if (interactive) {
    Object.assign(markGroup, interactive);
  }
  // Only emit aria when explicitly disabled; the default (true) is implied.
  if (aria === false) {
    markGroup.aria = aria;
  }
  markGroup.from = {
    data: opt.fromPrefix + model.requestDataName(DataSourceType.Main)
  };
  markGroup.encode = {
    update: compiler.encodeEntry(model)
  };
  if (postEncodingTransform) {
    markGroup.transform = postEncodingTransform;
  }

  return [markGroup];
}
|
|
/**
|
|
* If scales are bound to interval selections, we want to automatically clip
|
|
* marks to account for panning/zooming interactions. We identify bound scales
|
|
* by the selectionExtent property, which gets added during scale parsing.
|
|
*/
|
|
|
|
|
|
/**
 * Returns true when either positional scale component carries a
 * `selectionExtent` (set during scale parsing when the scale is bound to an
 * interval selection), so marks get clipped during pan/zoom; otherwise
 * undefined so the `clip` property can be omitted.
 */
function scaleClip(model) {
  var boundToSelection = ['x', 'y'].some(function (channel) {
    var scaleComponent = model.getScaleComponent(channel);
    return !!(scaleComponent && scaleComponent.get('selectionExtent'));
  });
  return boundToSelection ? true : undefined;
}
|
|
/**
|
|
* If we use a custom projection with auto-fitting to the geodata extent,
|
|
* we need to clip to ensure the chart size doesn't explode.
|
|
*/
|
|
|
|
|
|
/**
 * Returns true when the model has a projection component that is not
 * auto-fit (a custom projection), so marks are clipped to keep the chart
 * size bounded; otherwise undefined so the `clip` property can be omitted.
 */
function projectionClip(model) {
  var projectionComponent = model.component.projection;
  if (projectionComponent && !projectionComponent.isFit) {
    return true;
  }
  return undefined;
}
|
|
/**
|
|
* Only output interactive flags if we have selections defined somewhere in our model hierarchy.
|
|
*/
|
|
|
|
|
|
/**
 * Compute the `interactive` flag for a unit's marks.
 *
 * Returns null when no selection component exists anywhere relevant;
 * otherwise `{interactive: <unit has its own selections>}` — ancestors are
 * consulted only to decide whether the flag should be emitted at all.
 */
function interactiveFlag(model) {
  if (!model.component.selection) {
    return null;
  }

  var ownCount = keys(model.component.selection).length;

  // When this unit has no selections of its own, walk up the hierarchy
  // until an ancestor with selections is found (or the root is reached).
  var effectiveCount = ownCount;
  for (var ancestor = model.parent; ancestor && effectiveCount === 0; ancestor = ancestor.parent) {
    effectiveCount = keys(ancestor.component.selection).length;
  }

  return effectiveCount ? {
    interactive: ownCount > 0
  } : null;
}
|
|
/**
|
|
* Internal model of Vega-Lite specification for the compiler.
|
|
*/
|
|
|
|
|
|
// Internal model for a single-view ("unit") Vega-Lite spec.
// Normalizes the mark definition and encoding at construction time and
// records the user-specified scales/axes/legends/projection/selections;
// the parse* methods fill in `this.component` and the assemble* methods
// convert those components into Vega spec fragments.
var UnitModel = /*#__PURE__*/function (_ModelWithField2) {
  _inherits(UnitModel, _ModelWithField2);

  var _super51 = _createSuper(UnitModel);

  function UnitModel(spec, parent, parentGivenName) {
    var _this48;

    // Optional trailing parameters (transpiled default/optional args).
    var parentGivenSize = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
    var config = arguments.length > 4 ? arguments[4] : undefined;

    _classCallCheck(this, UnitModel);

    _this48 = _super51.call(this, spec, 'unit', parent, parentGivenName, config, undefined, isFrameMixins(spec) ? spec.view : undefined);
    _this48.specifiedScales = {};
    _this48.specifiedAxes = {};
    _this48.specifiedLegends = {};
    _this48.specifiedProjection = {};
    _this48.selection = {};
    _this48.children = [];
    // Normalize a shorthand string mark into object form.
    var markDef = isMarkDef(spec.mark) ? Object.assign({}, spec.mark) : {
      type: spec.mark
    };
    var mark = markDef.type; // Need to init filled before other mark properties because encoding depends on filled but other mark properties depend on types inside encoding

    if (markDef.filled === undefined) {
      markDef.filled = defaultFilled(markDef, config, {
        graticule: spec.data && isGraticuleGenerator(spec.data)
      });
    }

    var encoding = _this48.encoding = initEncoding(spec.encoding || {}, mark, markDef.filled, config);
    _this48.markDef = initMarkdef(markDef, encoding, config);
    // Merge the parent-given size with the spec's own width/height.
    _this48.size = initLayoutSize({
      encoding: encoding,
      size: isFrameMixins(spec) ? Object.assign(Object.assign(Object.assign({}, parentGivenSize), spec.width ? {
        width: spec.width
      } : {}), spec.height ? {
        height: spec.height
      } : {}) : parentGivenSize
    }); // calculate stack properties

    _this48.stack = stack(mark, encoding);
    _this48.specifiedScales = _this48.initScales(mark, encoding);
    _this48.specifiedAxes = _this48.initAxes(encoding);
    _this48.specifiedLegends = _this48.initLegend(encoding);
    _this48.specifiedProjection = spec.projection; // Selections will be initialized upon parse.

    _this48.selection = spec.selection;
    return _this48;
  }

  _createClass(UnitModel, [{
    key: "scaleDomain",

    /**
     * Return specified Vega-Lite scale domain for a particular channel
     * @param channel
     */
    value: function scaleDomain(channel) {
      var scale = this.specifiedScales[channel];
      return scale ? scale.domain : undefined;
    }
  }, {
    key: "axis",
    // User-specified axis definition for a positional channel (if any).
    value: function axis(channel) {
      return this.specifiedAxes[channel];
    }
  }, {
    key: "legend",
    // User-specified legend definition for a non-positional channel (if any).
    value: function legend(channel) {
      return this.specifiedLegends[channel];
    }
  }, {
    key: "initScales",
    // Collect specified scales: one entry per scale channel that has a
    // field/datum def; `{}` stands in for "default scale" when none is given.
    value: function initScales(mark, encoding) {
      return SCALE_CHANNELS.reduce(function (scales, channel) {
        var _a;

        var fieldOrDatumDef = getFieldOrDatumDef(encoding[channel]);

        if (fieldOrDatumDef) {
          scales[channel] = (_a = fieldOrDatumDef.scale) !== null && _a !== void 0 ? _a : {};
        }

        return scales;
      }, {});
    }
  }, {
    key: "initAxes",
    // Collect specified axes for x/y; x2/y2 alone also triggers an axis entry.
    value: function initAxes(encoding) {
      return POSITION_SCALE_CHANNELS.reduce(function (_axis, channel) {
        // Position Axis
        // TODO: handle ConditionFieldDef
        var channelDef = encoding[channel];

        if (isFieldOrDatumDef(channelDef) || channel === X && isFieldOrDatumDef(encoding.x2) || channel === Y && isFieldOrDatumDef(encoding.y2)) {
          var axisSpec = isFieldOrDatumDef(channelDef) ? channelDef.axis : undefined;
          _axis[channel] = axisSpec ? Object.assign({}, axisSpec) : axisSpec; // convert truthy value to object
        }

        return _axis;
      }, {});
    }
  }, {
    key: "initLegend",
    // Collect specified legends for channels that support them.
    value: function initLegend(encoding) {
      return NONPOSITION_SCALE_CHANNELS.reduce(function (_legend, channel) {
        var fieldOrDatumDef = getFieldOrDatumDef(encoding[channel]);

        if (fieldOrDatumDef && supportLegend(channel)) {
          var legend = fieldOrDatumDef.legend;
          _legend[channel] = legend ? Object.assign({}, legend) : legend; // convert truthy value to object
        }

        return _legend;
      }, {});
    }
  }, {
    key: "parseData",
    value: function parseData() {
      this.component.data = _parseData(this);
    }
  }, {
    key: "parseLayoutSize",
    value: function parseLayoutSize() {
      parseUnitLayoutSize(this);
    }
  }, {
    key: "parseSelections",
    value: function parseSelections() {
      this.component.selection = parseUnitSelection(this, this.selection);
    }
  }, {
    key: "parseMarkGroup",
    value: function parseMarkGroup() {
      this.component.mark = parseMarkGroups(this);
    }
  }, {
    key: "parseAxesAndHeaders",
    // Unit models have no headers — only axes.
    value: function parseAxesAndHeaders() {
      this.component.axes = parseUnitAxes(this);
    }
  }, {
    key: "assembleSelectionTopLevelSignals",
    value: function assembleSelectionTopLevelSignals(signals) {
      return assembleTopLevelSignals(this, signals);
    }
  }, {
    key: "assembleSignals",
    value: function assembleSignals() {
      return [].concat(_toConsumableArray(assembleAxisSignals(this)), _toConsumableArray(assembleUnitSelectionSignals(this, [])));
    }
  }, {
    key: "assembleSelectionData",
    value: function assembleSelectionData(data) {
      return assembleUnitSelectionData(this, data);
    }
  }, {
    key: "assembleLayout",
    // Unit specs do not produce a Vega layout object.
    value: function assembleLayout() {
      return null;
    }
  }, {
    key: "assembleLayoutSignals",
    value: function assembleLayoutSignals() {
      return _assembleLayoutSignals(this);
    }
  }, {
    key: "assembleMarks",
    value: function assembleMarks() {
      var _a;

      var marks = (_a = this.component.mark) !== null && _a !== void 0 ? _a : []; // If this unit is part of a layer, selections should augment
      // all in concert rather than each unit individually. This
      // ensures correct interleaving of clipping and brushed marks.

      if (!this.parent || !isLayerModel(this.parent)) {
        marks = assembleUnitSelectionMarks(this, marks);
      }

      return marks.map(this.correctDataNames);
    }
  }, {
    key: "getMapping",
    value: function getMapping() {
      return this.encoding;
    }
  }, {
    key: "channelHasField",
    value: function channelHasField(channel) {
      return _channelHasField(this.encoding, channel);
    }
  }, {
    key: "fieldDef",
    value: function fieldDef(channel) {
      var channelDef = this.encoding[channel];
      return getFieldDef(channelDef);
    }
  }, {
    key: "typedFieldDef",
    // Like fieldDef, but returns null unless the def carries an explicit type.
    value: function typedFieldDef(channel) {
      var fieldDef = this.fieldDef(channel);

      if (isTypedFieldDef(fieldDef)) {
        return fieldDef;
      }

      return null;
    }
  }, {
    key: "hasProjection",
    // True for geoshape marks or any geo-positional encoding channel.
    get: function get() {
      var encoding = this.encoding;
      var isGeoShapeMark = this.mark === GEOSHAPE;
      var hasGeoPosition = encoding && GEOPOSITION_CHANNELS.some(function (channel) {
        return isFieldOrDatumDef(encoding[channel]);
      });
      return isGeoShapeMark || hasGeoPosition;
    }
  }, {
    key: "mark",
    get: function get() {
      return this.markDef.type;
    }
  }]);

  return UnitModel;
}(ModelWithField);
|
|
|
|
// Internal model for a `layer` Vega-Lite spec. Recursively builds child
// Layer/Unit models, then delegates or merges most parse/assemble work
// across those children.
var LayerModel = /*#__PURE__*/function (_Model3) {
  _inherits(LayerModel, _Model3);

  var _super52 = _createSuper(LayerModel);

  function LayerModel(spec, parent, parentGivenName, parentGivenSize, config) {
    var _this49;

    _classCallCheck(this, LayerModel);

    _this49 = _super52.call(this, spec, 'layer', parent, parentGivenName, config, spec.resolve, spec.view);
    // Children inherit the layer's size (parent-given, overridden by the spec).
    var layoutSize = Object.assign(Object.assign(Object.assign({}, parentGivenSize), spec.width ? {
      width: spec.width
    } : {}), spec.height ? {
      height: spec.height
    } : {});
    _this49.children = spec.layer.map(function (layer, i) {
      if (isLayerSpec(layer)) {
        return new LayerModel(layer, _assertThisInitialized(_this49), _this49.getName('layer_' + i), layoutSize, config);
      } else if (isUnitSpec(layer)) {
        return new UnitModel(layer, _assertThisInitialized(_this49), _this49.getName('layer_' + i), layoutSize, config);
      }

      // Layers may only contain layer or unit specs (normalization guarantees this).
      throw new Error(invalidSpec(layer));
    });
    return _this49;
  }

  _createClass(LayerModel, [{
    key: "parseData",
    value: function parseData() {
      this.component.data = _parseData(this);

      var _iterator200 = _createForOfIteratorHelper(this.children),
          _step200;

      try {
        for (_iterator200.s(); !(_step200 = _iterator200.n()).done;) {
          var child = _step200.value;
          child.parseData();
        }
      } catch (err) {
        _iterator200.e(err);
      } finally {
        _iterator200.f();
      }
    }
  }, {
    key: "parseLayoutSize",
    value: function parseLayoutSize() {
      parseLayerLayoutSize(this);
    }
  }, {
    key: "parseSelections",
    value: function parseSelections() {
      // Merge selections up the hierarchy so that they may be referenced
      // across unit specs. Persist their definitions within each child
      // to assemble signals which remain within output Vega unit groups.
      this.component.selection = {};

      var _iterator201 = _createForOfIteratorHelper(this.children),
          _step201;

      try {
        for (_iterator201.s(); !(_step201 = _iterator201.n()).done;) {
          var child = _step201.value;
          child.parseSelections();

          var _iterator202 = _createForOfIteratorHelper(keys(child.component.selection)),
              _step202;

          try {
            for (_iterator202.s(); !(_step202 = _iterator202.n()).done;) {
              var key = _step202.value;
              this.component.selection[key] = child.component.selection[key];
            }
          } catch (err) {
            _iterator202.e(err);
          } finally {
            _iterator202.f();
          }
        }
      } catch (err) {
        _iterator201.e(err);
      } finally {
        _iterator201.f();
      }
    }
  }, {
    key: "parseMarkGroup",
    value: function parseMarkGroup() {
      var _iterator203 = _createForOfIteratorHelper(this.children),
          _step203;

      try {
        for (_iterator203.s(); !(_step203 = _iterator203.n()).done;) {
          var child = _step203.value;
          child.parseMarkGroup();
        }
      } catch (err) {
        _iterator203.e(err);
      } finally {
        _iterator203.f();
      }
    }
  }, {
    key: "parseAxesAndHeaders",
    value: function parseAxesAndHeaders() {
      parseLayerAxes(this);
    }
  }, {
    key: "assembleSelectionTopLevelSignals",
    // Thread the accumulating signal list through every child.
    value: function assembleSelectionTopLevelSignals(signals) {
      return this.children.reduce(function (sg, child) {
        return child.assembleSelectionTopLevelSignals(sg);
      }, signals);
    } // TODO: Support same named selections across children.

  }, {
    key: "assembleSignals",
    value: function assembleSignals() {
      return this.children.reduce(function (signals, child) {
        return signals.concat(child.assembleSignals());
      }, assembleAxisSignals(this));
    }
  }, {
    key: "assembleLayoutSignals",
    value: function assembleLayoutSignals() {
      return this.children.reduce(function (signals, child) {
        return signals.concat(child.assembleLayoutSignals());
      }, _assembleLayoutSignals(this));
    }
  }, {
    key: "assembleSelectionData",
    value: function assembleSelectionData(data) {
      return this.children.reduce(function (db, child) {
        return child.assembleSelectionData(db);
      }, data);
    }
  }, {
    key: "assembleTitle",
    value: function assembleTitle() {
      var title = _get(_getPrototypeOf(LayerModel.prototype), "assembleTitle", this).call(this);

      if (title) {
        return title;
      } // If title does not provide layer, look into children


      var _iterator204 = _createForOfIteratorHelper(this.children),
          _step204;

      try {
        for (_iterator204.s(); !(_step204 = _iterator204.n()).done;) {
          var child = _step204.value;
          title = child.assembleTitle();

          if (title) {
            return title;
          }
        }
      } catch (err) {
        _iterator204.e(err);
      } finally {
        _iterator204.f();
      }

      return undefined;
    }
  }, {
    key: "assembleLayout",
    // Layer specs do not produce a Vega layout object.
    value: function assembleLayout() {
      return null;
    }
  }, {
    key: "assembleMarks",
    value: function assembleMarks() {
      return assembleLayerSelectionMarks(this, this.children.flatMap(function (child) {
        return child.assembleMarks();
      }));
    }
  }, {
    key: "assembleLegends",
    value: function assembleLegends() {
      return this.children.reduce(function (legends, child) {
        return legends.concat(child.assembleLegends());
      }, _assembleLegends(this));
    }
  }]);

  return LayerModel;
}(Model);
|
|
|
|
/**
 * Instantiate the Model subclass matching a normalized spec node
 * (facet / layer / unit / concat). `unitSize` is only forwarded to
 * layer and unit models. Throws for an unrecognized spec shape.
 */
function buildModel(spec, parent, parentGivenName, unitSize, config) {
  if (isFacetSpec(spec)) {
    return new FacetModel(spec, parent, parentGivenName, config);
  }
  if (isLayerSpec(spec)) {
    return new LayerModel(spec, parent, parentGivenName, unitSize, config);
  }
  if (isUnitSpec(spec)) {
    return new UnitModel(spec, parent, parentGivenName, unitSize, config);
  }
  if (isAnyConcatSpec(spec)) {
    return new ConcatModel(spec, parent, parentGivenName, config);
  }
  throw new Error(invalidSpec(spec));
}
|
|
/**
|
|
* Vega-Lite's main function, for compiling Vega-Lite spec into Vega spec.
|
|
*
|
|
* At a high-level, we make the following transformations in different phases:
|
|
*
|
|
* Input spec
|
|
* |
|
|
* | (Normalization)
|
|
* v
|
|
* Normalized Spec (Row/Column channels in single-view specs becomes faceted specs, composite marks becomes layered specs.)
|
|
* |
|
|
* | (Build Model)
|
|
* v
|
|
* A model tree of the spec
|
|
* |
|
|
* | (Parse)
|
|
* v
|
|
* A model tree with parsed components (intermediate structure of visualization primitives in a format that can be easily merged)
|
|
* |
|
|
* | (Optimize)
|
|
* v
|
|
* A model tree with parsed components with the data component optimized
|
|
* |
|
|
* | (Assemble)
|
|
* v
|
|
* Vega spec
|
|
*
|
|
* @param inputSpec The Vega-Lite specification.
|
|
* @param opt Optional arguments passed to the Vega-Lite compiler.
|
|
* @returns An object containing the compiled Vega spec and normalized Vega-Lite spec.
|
|
*/
|
|
|
|
|
|
/**
 * Compile a Vega-Lite spec into a Vega spec.
 *
 * @param inputSpec The Vega-Lite specification.
 * @param opt Optional compiler options (config, logger, fieldTitle, ...).
 * @returns {spec, normalized}: the compiled Vega spec and the normalized
 *          Vega-Lite spec.
 */
function compile(inputSpec) {
  var opt = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

  // 0. Augment opt with default opts
  if (opt.logger) {
    // set the singleton logger to the provided logger
    set(opt.logger);
  }

  if (opt.fieldTitle) {
    // set the singleton field title formatter
    setTitleFormatter(opt.fieldTitle);
  }

  // try/finally guarantees the singletons above are restored even if any
  // compilation phase throws.
  try {
    // 1. Initialize config by deep merging default config with the config provided via option and the input spec.
    var config = initConfig(mergeConfig(opt.config, inputSpec.config)); // 2. Normalize: Convert input spec -> normalized spec
    // - Decompose all extended unit specs into composition of unit spec. For example, a box plot get expanded into multiple layers of bars, ticks, and rules. The shorthand row/column channel is also expanded to a facet spec.
    // - Normalize autosize and width or height spec

    var spec = normalize(inputSpec, config); // 3. Build Model: normalized spec -> Model (a tree structure)
    // This phases instantiates the models with default config by doing a top-down traversal. This allows us to pass properties that child models derive from their parents via their constructors.
    // See the abstract `Model` class and its children (UnitModel, LayerModel, FacetModel, ConcatModel) for different types of models.

    var model = buildModel(spec, null, '', undefined, config); // 4 Parse: Model --> Model with components
    // Note that components = intermediate representations that are equivalent to Vega specs.
    // We need these intermediate representation because we need to merge many visualization "components" like projections, scales, axes, and legends.
    // We will later convert these components into actual Vega specs in the assemble phase.
    // In this phase, we do a bottom-up traversal over the whole tree to
    // parse for each type of components once (e.g., data, layout, mark, scale).
    // By doing bottom-up traversal, we start parsing components of unit specs and
    // then merge child components of parent composite specs.
    //
    // Please see inside model.parse() for order of different components parsed.

    model.parse(); // drawDataflow(model.component.data.sources);
    // 5. Optimize the dataflow. This will modify the data component of the model.

    optimizeDataflow(model.component.data, model); // drawDataflow(model.component.data.sources);
    // 6. Assemble: convert model components --> Vega Spec.

    var vgSpec = assembleTopLevelModel(model, getTopLevelProperties(inputSpec, spec.autosize, config, model), inputSpec.datasets, inputSpec.usermeta);
    return {
      spec: vgSpec,
      normalized: spec
    };
  } finally {
    // Reset the singleton logger if a logger is provided
    if (opt.logger) {
      reset();
    } // Reset the singleton field title formatter if provided


    if (opt.fieldTitle) {
      resetTitleFormatter();
    }
  }
}
|
|
|
|
/**
 * Compute top-level Vega spec properties: a normalized `autosize`
 * (downgraded from "fit" when step-based sizing makes fitting impossible)
 * merged with top-level properties extracted from the config and the input
 * spec (later sources win).
 */
function getTopLevelProperties(inputSpec, autosize, config, model) {
  var width = model.component.layoutSize.get('width');
  var height = model.component.layoutSize.get('height');

  // Normalize autosize into object form; the default type is 'pad'.
  if (autosize === undefined) {
    autosize = {
      type: 'pad'
    };
    if (model.hasAxisOrientSignalRef()) {
      // Dynamic axis orients can change padding, so ask Vega to re-run layout.
      autosize.resize = true;
    }
  } else if (isString(autosize)) {
    autosize = {
      type: autosize
    };
  }

  // "fit" autosize is incompatible with step-based sizing.
  if (width && height && isFitType(autosize.type)) {
    var widthIsStep = width === 'step';
    var heightIsStep = height === 'step';

    if (widthIsStep && heightIsStep) {
      // Both dimensions are step-sized: fit is impossible entirely.
      warn(droppingFit());
      autosize.type = 'pad';
    } else if (widthIsStep || heightIsStep) {
      // Exactly one dimension is step-sized (effectively XOR, because else if):
      // drop fit for that dimension only and keep fitting the other one.
      var stepSizeType = widthIsStep ? 'width' : 'height';
      warn(droppingFit(getPositionScaleChannel(stepSizeType)));
      var fittedSizeType = stepSizeType === 'width' ? 'height' : 'width';
      autosize.type = getFitType(fittedSizeType);
    }
  }

  // Emit the compact string form when autosize only carries a type; omit it
  // entirely when it is the default 'pad'.
  var autosizeProps;
  if (keys(autosize).length === 1 && autosize.type) {
    autosizeProps = autosize.type === 'pad' ? {} : {
      autosize: autosize.type
    };
  } else {
    autosizeProps = {
      autosize: autosize
    };
  }

  return Object.assign(Object.assign(Object.assign({}, autosizeProps), extractTopLevelProperties(config)), extractTopLevelProperties(inputSpec));
}
|
|
/*
|
|
* Assemble the top-level model to a Vega spec.
|
|
*
|
|
* Note: this couldn't be `model.assemble()` since the top-level model
|
|
* needs some special treatment to generate top-level properties.
|
|
*/
|
|
|
|
|
|
/**
 * Assemble the root model into the final Vega spec.
 *
 * @param model The root (already parsed and optimized) model.
 * @param topLevelProperties Top-level properties to spread into the spec;
 *        NOTE: mutated in place — constant width/height layout signals are
 *        moved onto it.
 * @param datasets Named inline datasets to merge into the root data.
 * @param usermeta Opaque user metadata passed through to the output spec.
 */
function assembleTopLevelModel(model, topLevelProperties) {
  var datasets = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  var usermeta = arguments.length > 3 ? arguments[3] : undefined;
  // Config with Vega-Lite only config removed.
  var vgConfig = model.config ? stripAndRedirectConfig(model.config) : undefined;
  var data = [].concat(model.assembleSelectionData([]), // only assemble data in the root
  assembleRootData(model.component.data, datasets));
  var projections = model.assembleProjections();
  var title = model.assembleTitle();
  var style = model.assembleGroupStyle();
  var encodeEntry = model.assembleGroupEncodeEntry(true);
  var layoutSignals = model.assembleLayoutSignals(); // move width and height signals with values to top level

  layoutSignals = layoutSignals.filter(function (signal) {
    if ((signal.name === 'width' || signal.name === 'height') && signal.value !== undefined) {
      // Constant size: promote it to a top-level property and drop the signal.
      topLevelProperties[signal.name] = +signal.value;
      return false;
    }

    return true;
  });
  // Build the output spec; optional sections are only included when present.
  return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({
    $schema: 'https://vega.github.io/schema/vega/v5.json'
  }, model.description ? {
    description: model.description
  } : {}), topLevelProperties), title ? {
    title: title
  } : {}), style ? {
    style: style
  } : {}), encodeEntry ? {
    encode: {
      update: encodeEntry
    }
  } : {}), {
    data: data
  }), projections.length > 0 ? {
    projections: projections
  } : {}), model.assembleGroup([].concat(_toConsumableArray(layoutSignals), _toConsumableArray(model.assembleSelectionTopLevelSignals([]))))), vgConfig ? {
    config: vgConfig
  } : {}), usermeta ? {
    usermeta: usermeta
  } : {});
}
|
|
|
|
// Public API of this bundle (UMD module exports).
exports.compile = compile;
exports.normalize = normalize;
exports.version = version;

// Mark the export object as an ES module for interop with bundlers.
Object.defineProperty(exports, '__esModule', {
  value: true
});
|
|
}); |