Bar graph added.
node_modules/vega-loader/build/vega-loader.js (443 additions, generated, vendored, normal file)
@@ -0,0 +1,443 @@
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('vega-util'), require('d3-dsv'), require('topojson-client'), require('vega-format')) :
  typeof define === 'function' && define.amd ? define(['exports', 'vega-util', 'd3-dsv', 'topojson-client', 'vega-format'], factory) :
  (global = global || self, factory(global.vega = {}, global.vega, global.d3, global.topojson, global.vega));
}(this, (function (exports, vegaUtil, d3Dsv, topojsonClient, vegaFormat) { 'use strict';

// Matches absolute URLs with optional protocol
// https://... file://... //...
const protocol_re = /^([A-Za-z]+:)?\/\//;

// Matches allowed URIs. From https://github.com/cure53/DOMPurify/blob/master/src/regexp.js with added file://
const allowed_re = /^(?:(?:(?:f|ht)tps?|mailto|tel|callto|cid|xmpp|file|data):|[^a-z]|[a-z+.\-]+(?:[^a-z+.\-:]|$))/i; // eslint-disable-line no-useless-escape
const whitespace_re = /[\u0000-\u0020\u00A0\u1680\u180E\u2000-\u2029\u205f\u3000]/g; // eslint-disable-line no-control-regex

// Special treatment in node.js for the file: protocol
const fileProtocol = 'file://';

/**
 * Factory for a loader constructor that provides methods for requesting
 * files from either the network or disk, and for sanitizing request URIs.
 * @param {function} fetch - The Fetch API for HTTP network requests.
 *   If null or undefined, HTTP loading will be disabled.
 * @param {object} fs - The file system interface for file loading.
 *   If null or undefined, local file loading will be disabled.
 * @return {function} A loader constructor with the following signature:
 *   param {object} [options] - Optional default loading options to use.
 *   return {object} - A new loader instance.
 */
function loaderFactory(fetch, fs) {
  return function(options) {
    return {
      options: options || {},
      sanitize: sanitize,
      load: load,
      fileAccess: !!fs,
      file: fileLoader(fs),
      http: httpLoader(fetch)
    };
  };
}

/**
 * Load an external resource, typically either from the web or from the local
 * filesystem. This function uses {@link sanitize} to first sanitize the uri,
 * then calls either {@link http} (for web requests) or {@link file} (for
 * filesystem loading).
 * @param {string} uri - The resource indicator (e.g., URL or filename).
 * @param {object} [options] - Optional loading options. These options will
 *   override any existing default options.
 * @return {Promise} - A promise that resolves to the loaded content.
 */
async function load(uri, options) {
  const opt = await this.sanitize(uri, options),
        url = opt.href;

  return opt.localFile
    ? this.file(url)
    : this.http(url, options);
}

/**
 * URI sanitizer function.
 * @param {string} uri - The uri (url or filename) to sanity check.
 * @param {object} options - An options hash.
 * @return {Promise} - A promise that resolves to an object containing
 *   sanitized uri data, or rejects if the input uri is deemed invalid.
 *   The properties of the resolved object are assumed to be
 *   valid attributes for an HTML 'a' tag. The sanitized uri *must* be
 *   provided by the 'href' property of the returned object.
 */
async function sanitize(uri, options) {
  options = vegaUtil.extend({}, this.options, options);

  const fileAccess = this.fileAccess,
        result = {href: null};

  let isFile, loadFile, base;

  const isAllowed = allowed_re.test(uri.replace(whitespace_re, ''));

  if (uri == null || typeof uri !== 'string' || !isAllowed) {
    vegaUtil.error('Sanitize failure, invalid URI: ' + vegaUtil.stringValue(uri));
  }

  const hasProtocol = protocol_re.test(uri);

  // if relative url (no protocol/host), prepend baseURL
  if ((base = options.baseURL) && !hasProtocol) {
    // Ensure that there is a slash between the baseURL (e.g. hostname) and url
    if (!uri.startsWith('/') && base[base.length-1] !== '/') {
      uri = '/' + uri;
    }
    uri = base + uri;
  }

  // should we load from file system?
  loadFile = (isFile = uri.startsWith(fileProtocol))
    || options.mode === 'file'
    || options.mode !== 'http' && !hasProtocol && fileAccess;

  if (isFile) {
    // strip file protocol
    uri = uri.slice(fileProtocol.length);
  } else if (uri.startsWith('//')) {
    if (options.defaultProtocol === 'file') {
      // if is file, strip protocol and set loadFile flag
      uri = uri.slice(2);
      loadFile = true;
    } else {
      // if relative protocol (starts with '//'), prepend default protocol
      uri = (options.defaultProtocol || 'http') + ':' + uri;
    }
  }

  // set non-enumerable mode flag to indicate local file load
  Object.defineProperty(result, 'localFile', {value: !!loadFile});

  // set uri
  result.href = uri;

  // set default result target, if specified
  if (options.target) {
    result.target = options.target + '';
  }

  // set default result rel, if specified (#1542)
  if (options.rel) {
    result.rel = options.rel + '';
  }

  // provide control over cross-origin image handling (#2238)
  // https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image
  if (options.context === 'image' && options.crossOrigin) {
    result.crossOrigin = options.crossOrigin + '';
  }

  // return
  return result;
}

/**
 * File system loader factory.
 * @param {object} fs - The file system interface.
 * @return {function} - A file loader with the following signature:
 *   param {string} filename - The file system path to load.
 *   return {Promise} A promise that resolves to the file contents.
 */
function fileLoader(fs) {
  return fs
    ? function(filename) {
        return new Promise(function(accept, reject) {
          fs.readFile(filename, function(error, data) {
            if (error) reject(error);
            else accept(data);
          });
        });
      }
    : fileReject;
}

/**
 * Default file system loader that simply rejects.
 */
async function fileReject() {
  vegaUtil.error('No file system access.');
}

/**
 * HTTP request handler factory.
 * @param {function} fetch - The Fetch API method.
 * @return {function} - An http loader with the following signature:
 *   param {string} url - The url to request.
 *   param {object} options - An options hash.
 *   return {Promise} - A promise that resolves to the file contents.
 */
function httpLoader(fetch) {
  return fetch
    ? async function(url, options) {
        const opt = vegaUtil.extend({}, this.options.http, options),
              type = options && options.response,
              response = await fetch(url, opt);

        return !response.ok
          ? vegaUtil.error(response.status + '' + response.statusText)
          : vegaUtil.isFunction(response[type]) ? response[type]()
          : response.text();
      }
    : httpReject;
}

/**
 * Default http request handler that simply rejects.
 */
async function httpReject() {
  vegaUtil.error('No HTTP fetch method available.');
}

var typeParsers = {
  boolean: vegaUtil.toBoolean,
  integer: vegaUtil.toNumber,
  number: vegaUtil.toNumber,
  date: vegaUtil.toDate,
  string: vegaUtil.toString,
  unknown: vegaUtil.identity
};

var typeTests = [
  isBoolean,
  isInteger,
  isNumber,
  isDate
];

var typeList = [
  'boolean',
  'integer',
  'number',
  'date'
];

function inferType(values, field) {
  if (!values || !values.length) return 'unknown';

  const n = values.length,
        m = typeTests.length,
        a = typeTests.map((_, i) => i + 1);

  for (let i = 0, t = 0, j, value; i < n; ++i) {
    value = field ? values[i][field] : values[i];
    for (j = 0; j < m; ++j) {
      if (a[j] && isValid(value) && !typeTests[j](value)) {
        a[j] = 0;
        ++t;
        if (t === typeTests.length) return 'string';
      }
    }
  }

  return typeList[
    a.reduce((u, v) => u === 0 ? v : u, 0) - 1
  ];
}

function inferTypes(data, fields) {
  return fields.reduce(function(types, field) {
    types[field] = inferType(data, field);
    return types;
  }, {});
}

// -- Type Checks ----

function isValid(_) {
  return _ != null && _ === _;
}

function isBoolean(_) {
  return _ === 'true' || _ === 'false' || _ === true || _ === false;
}

function isDate(_) {
  return !Number.isNaN(Date.parse(_));
}

function isNumber(_) {
  return !Number.isNaN(+_) && !(_ instanceof Date);
}

function isInteger(_) {
  return isNumber(_) && Number.isInteger(+_);
}

function delimitedFormat(delimiter) {
  const parse = function(data, format) {
    const delim = {delimiter: delimiter};
    return dsv(data, format ? vegaUtil.extend(format, delim) : delim);
  };

  parse.responseType = 'text';

  return parse;
}

function dsv(data, format) {
  if (format.header) {
    data = format.header
      .map(vegaUtil.stringValue)
      .join(format.delimiter) + '\n' + data;
  }
  return d3Dsv.dsvFormat(format.delimiter).parse(data + '');
}

dsv.responseType = 'text';

function isBuffer(_) {
  return (typeof Buffer === 'function' && vegaUtil.isFunction(Buffer.isBuffer))
    ? Buffer.isBuffer(_) : false;
}

function json(data, format) {
  const prop = (format && format.property) ? vegaUtil.field(format.property) : vegaUtil.identity;
  return vegaUtil.isObject(data) && !isBuffer(data)
    ? parseJSON(prop(data))
    : prop(JSON.parse(data));
}

json.responseType = 'json';

function parseJSON(data, format) {
  return (format && format.copy)
    ? JSON.parse(JSON.stringify(data))
    : data;
}

const filters = {
  interior: (a, b) => a !== b,
  exterior: (a, b) => a === b
};

function topojson(data, format) {
  let method, object, property, filter;
  data = json(data, format);

  if (format && format.feature) {
    method = topojsonClient.feature;
    property = format.feature;
  } else if (format && format.mesh) {
    method = topojsonClient.mesh;
    property = format.mesh;
    filter = filters[format.filter];
  } else {
    vegaUtil.error('Missing TopoJSON feature or mesh parameter.');
  }

  object = (object = data.objects[property])
    ? method(data, object, filter)
    : vegaUtil.error('Invalid TopoJSON object: ' + property);

  return object && object.features || [object];
}

topojson.responseType = 'json';

const format = {
  dsv: dsv,
  csv: delimitedFormat(','),
  tsv: delimitedFormat('\t'),
  json: json,
  topojson: topojson
};

function formats(name, reader) {
  if (arguments.length > 1) {
    format[name] = reader;
    return this;
  } else {
    return vegaUtil.hasOwnProperty(format, name) ? format[name] : null;
  }
}

function responseType(type) {
  const f = formats(type);
  return f && f.responseType || 'text';
}

function read(data, schema, timeParser, utcParser) {
  schema = schema || {};

  const reader = formats(schema.type || 'json');
  if (!reader) vegaUtil.error('Unknown data format type: ' + schema.type);

  data = reader(data, schema);
  if (schema.parse) parse(data, schema.parse, timeParser, utcParser);

  if (vegaUtil.hasOwnProperty(data, 'columns')) delete data.columns;
  return data;
}

function parse(data, types, timeParser, utcParser) {
  if (!data.length) return; // early exit for empty data

  const locale = vegaFormat.timeFormatDefaultLocale();
  timeParser = timeParser || locale.timeParse;
  utcParser = utcParser || locale.utcParse;

  var fields = data.columns || Object.keys(data[0]),
      parsers, datum, field, i, j, n, m;

  if (types === 'auto') types = inferTypes(data, fields);

  fields = Object.keys(types);
  parsers = fields.map(function(field) {
    var type = types[field],
        parts, pattern;

    if (type && (type.startsWith('date:') || type.startsWith('utc:'))) {
      parts = type.split(/:(.+)?/, 2); // split on first :
      pattern = parts[1];

      if ((pattern[0] === '\'' && pattern[pattern.length-1] === '\'') ||
          (pattern[0] === '"' && pattern[pattern.length-1] === '"')) {
        pattern = pattern.slice(1, -1);
      }

      const parse = parts[0] === 'utc' ? utcParser : timeParser;
      return parse(pattern);
    }

    if (!typeParsers[type]) {
      throw Error('Illegal format pattern: ' + field + ':' + type);
    }

    return typeParsers[type];
  });

  for (i=0, n=data.length, m=fields.length; i<n; ++i) {
    datum = data[i];
    for (j=0; j<m; ++j) {
      field = fields[j];
      datum[field] = parsers[j](datum[field]);
    }
  }
}

var loader = loaderFactory(
  typeof fetch !== 'undefined' && fetch, // use built-in fetch API
  null // no file system access
);

exports.format = format;
exports.formats = formats;
exports.inferType = inferType;
exports.inferTypes = inferTypes;
exports.loader = loader;
exports.read = read;
exports.responseType = responseType;
exports.typeParsers = typeParsers;

Object.defineProperty(exports, '__esModule', { value: true });

})));
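For orientation, the UMD build above exposes loader, read, inferTypes, responseType, and the format registry on the global vega object (or as module exports). A minimal usage sketch, assuming a browser-like environment where the built-in fetch API is available and 'data/cars.csv' is a hypothetical URL:

// Usage sketch only; names other than the vega-loader exports are illustrative.
const load = vega.loader();                 // HTTP enabled; no file system access in this build
load.load('data/cars.csv').then(function(csvText) {
  // parse:'auto' runs inferTypes over the columns before applying typeParsers
  const rows = vega.read(csvText, {type: 'csv', parse: 'auto'});
  console.log(rows.length + ' rows parsed');
});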
node_modules/vega-loader/build/vega-loader.min.js (1 addition, generated, vendored, normal file)
@@ -0,0 +1 @@
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("vega-util"),require("d3-dsv"),require("topojson-client"),require("vega-format")):"function"==typeof define&&define.amd?define(["exports","vega-util","d3-dsv","topojson-client","vega-format"],t):t((e=e||self).vega={},e.vega,e.d3,e.topojson,e.vega)}(this,(function(e,t,n,r,o){"use strict";const i=/^([A-Za-z]+:)?\/\//,s=/^(?:(?:(?:f|ht)tps?|mailto|tel|callto|cid|xmpp|file|data):|[^a-z]|[a-z+.\-]+(?:[^a-z+.\-:]|$))/i,u=/[\u0000-\u0020\u00A0\u1680\u180E\u2000-\u2029\u205f\u3000]/g;async function a(e,t){const n=await this.sanitize(e,t),r=n.href;return n.localFile?this.file(r):this.http(r,t)}async function f(e,n){n=t.extend({},this.options,n);const r=this.fileAccess,o={href:null};let a,f,c;const l=s.test(e.replace(u,""));null!=e&&"string"==typeof e&&l||t.error("Sanitize failure, invalid URI: "+t.stringValue(e));const p=i.test(e);return(c=n.baseURL)&&!p&&(e.startsWith("/")||"/"===c[c.length-1]||(e="/"+e),e=c+e),f=(a=e.startsWith("file://"))||"file"===n.mode||"http"!==n.mode&&!p&&r,a?e=e.slice("file://".length):e.startsWith("//")&&("file"===n.defaultProtocol?(e=e.slice(2),f=!0):e=(n.defaultProtocol||"http")+":"+e),Object.defineProperty(o,"localFile",{value:!!f}),o.href=e,n.target&&(o.target=n.target+""),n.rel&&(o.rel=n.rel+""),"image"===n.context&&n.crossOrigin&&(o.crossOrigin=n.crossOrigin+""),o}function c(e){return e?function(t){return new Promise((function(n,r){e.readFile(t,(function(e,t){e?r(e):n(t)}))}))}:l}async function l(){t.error("No file system access.")}function p(e){return e?async function(n,r){const o=t.extend({},this.options.http,r),i=r&&r.response,s=await e(n,o);return s.ok?t.isFunction(s[i])?s[i]():s.text():t.error(s.status+""+s.statusText)}:d}async function d(){t.error("No HTTP fetch method available.")}var h={boolean:t.toBoolean,integer:t.toNumber,number:t.toNumber,date:t.toDate,string:t.toString,unknown:t.identity},m=[function(e){return"true"===e||"false"===e||!0===e||!1===e},function(e){return b(e)&&Number.isInteger(+e)},b,function(e){return!Number.isNaN(Date.parse(e))}],g=["boolean","integer","number","date"];function y(e,t){if(!e||!e.length)return"unknown";const n=e.length,r=m.length,o=m.map((e,t)=>t+1);for(let s,u,a=0,f=0;a<n;++a)for(u=t?e[a][t]:e[a],s=0;s<r;++s)if(o[s]&&(null!=(i=u)&&i==i)&&!m[s](u)&&(o[s]=0,++f,f===m.length))return"string";var i;return g[o.reduce((e,t)=>0===e?t:e,0)-1]}function v(e,t){return t.reduce((function(t,n){return t[n]=y(e,n),t}),{})}function b(e){return!(Number.isNaN(+e)||e instanceof Date)}function j(e){const n=function(n,r){const o={delimiter:e};return N(n,r?t.extend(r,o):o)};return n.responseType="text",n}function N(e,r){return r.header&&(e=r.header.map(t.stringValue).join(r.delimiter)+"\n"+e),n.dsvFormat(r.delimiter).parse(e+"")}function O(e,n){const r=n&&n.property?t.field(n.property):t.identity;return!t.isObject(e)||(o=e,"function"==typeof Buffer&&t.isFunction(Buffer.isBuffer)&&Buffer.isBuffer(o))?r(JSON.parse(e)):function(e,t){return t&&t.copy?JSON.parse(JSON.stringify(e)):e}(r(e));var o}N.responseType="text",O.responseType="json";const x={interior:(e,t)=>e!==t,exterior:(e,t)=>e===t};function T(e,n){let o,i,s,u;return e=O(e,n),n&&n.feature?(o=r.feature,s=n.feature):n&&n.mesh?(o=r.mesh,s=n.mesh,u=x[n.filter]):t.error("Missing TopoJSON feature or mesh parameter."),i=(i=e.objects[s])?o(e,i,u):t.error("Invalid TopoJSON object: "+s),i&&i.features||[i]}T.responseType="json";const P={dsv:N,csv:j(","),tsv:j("\t"),json:O,topojson:T};function w(e,n){return 
arguments.length>1?(P[e]=n,this):t.hasOwnProperty(P,e)?P[e]:null}var z=function(e,t){return function(n){return{options:n||{},sanitize:f,load:a,fileAccess:!!t,file:c(t),http:p(e)}}}("undefined"!=typeof fetch&&fetch,null);e.format=P,e.formats=w,e.inferType=y,e.inferTypes=v,e.loader=z,e.read=function(e,n,r,i){const s=w((n=n||{}).type||"json");return s||t.error("Unknown data format type: "+n.type),e=s(e,n),n.parse&&function(e,t,n,r){if(!e.length)return;const i=o.timeFormatDefaultLocale();n=n||i.timeParse,r=r||i.utcParse;var s,u,a,f,c,l,p,d=e.columns||Object.keys(e[0]);"auto"===t&&(t=v(e,d));for(d=Object.keys(t),s=d.map((function(e){var o,i,s=t[e];if(s&&(s.startsWith("date:")||s.startsWith("utc:"))){return("'"===(i=(o=s.split(/:(.+)?/,2))[1])[0]&&"'"===i[i.length-1]||'"'===i[0]&&'"'===i[i.length-1])&&(i=i.slice(1,-1)),("utc"===o[0]?r:n)(i)}if(!h[s])throw Error("Illegal format pattern: "+e+":"+s);return h[s]})),f=0,l=e.length,p=d.length;f<l;++f)for(u=e[f],c=0;c<p;++c)a=d[c],u[a]=s[c](u[a])}(e,n.parse,r,i),t.hasOwnProperty(e,"columns")&&delete e.columns,e},e.responseType=function(e){const t=w(e);return t&&t.responseType||"text"},e.typeParsers=h,Object.defineProperty(e,"__esModule",{value:!0})}));
node_modules/vega-loader/build/vega-loader.node.js (442 additions, generated, vendored, normal file)
@@ -0,0 +1,442 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var vegaUtil = require('vega-util');
var d3Dsv = require('d3-dsv');
var topojsonClient = require('topojson-client');
var vegaFormat = require('vega-format');

// Matches absolute URLs with optional protocol
// https://... file://... //...
const protocol_re = /^([A-Za-z]+:)?\/\//;

// Matches allowed URIs. From https://github.com/cure53/DOMPurify/blob/master/src/regexp.js with added file://
const allowed_re = /^(?:(?:(?:f|ht)tps?|mailto|tel|callto|cid|xmpp|file|data):|[^a-z]|[a-z+.\-]+(?:[^a-z+.\-:]|$))/i; // eslint-disable-line no-useless-escape
const whitespace_re = /[\u0000-\u0020\u00A0\u1680\u180E\u2000-\u2029\u205f\u3000]/g; // eslint-disable-line no-control-regex

// Special treatment in node.js for the file: protocol
const fileProtocol = 'file://';

/**
 * Factory for a loader constructor that provides methods for requesting
 * files from either the network or disk, and for sanitizing request URIs.
 * @param {function} fetch - The Fetch API for HTTP network requests.
 *   If null or undefined, HTTP loading will be disabled.
 * @param {object} fs - The file system interface for file loading.
 *   If null or undefined, local file loading will be disabled.
 * @return {function} A loader constructor with the following signature:
 *   param {object} [options] - Optional default loading options to use.
 *   return {object} - A new loader instance.
 */
function loaderFactory(fetch, fs) {
  return function(options) {
    return {
      options: options || {},
      sanitize: sanitize,
      load: load,
      fileAccess: !!fs,
      file: fileLoader(fs),
      http: httpLoader(fetch)
    };
  };
}

/**
 * Load an external resource, typically either from the web or from the local
 * filesystem. This function uses {@link sanitize} to first sanitize the uri,
 * then calls either {@link http} (for web requests) or {@link file} (for
 * filesystem loading).
 * @param {string} uri - The resource indicator (e.g., URL or filename).
 * @param {object} [options] - Optional loading options. These options will
 *   override any existing default options.
 * @return {Promise} - A promise that resolves to the loaded content.
 */
async function load(uri, options) {
  const opt = await this.sanitize(uri, options),
        url = opt.href;

  return opt.localFile
    ? this.file(url)
    : this.http(url, options);
}

/**
 * URI sanitizer function.
 * @param {string} uri - The uri (url or filename) to sanity check.
 * @param {object} options - An options hash.
 * @return {Promise} - A promise that resolves to an object containing
 *   sanitized uri data, or rejects if the input uri is deemed invalid.
 *   The properties of the resolved object are assumed to be
 *   valid attributes for an HTML 'a' tag. The sanitized uri *must* be
 *   provided by the 'href' property of the returned object.
 */
async function sanitize(uri, options) {
  options = vegaUtil.extend({}, this.options, options);

  const fileAccess = this.fileAccess,
        result = {href: null};

  let isFile, loadFile, base;

  const isAllowed = allowed_re.test(uri.replace(whitespace_re, ''));

  if (uri == null || typeof uri !== 'string' || !isAllowed) {
    vegaUtil.error('Sanitize failure, invalid URI: ' + vegaUtil.stringValue(uri));
  }

  const hasProtocol = protocol_re.test(uri);

  // if relative url (no protocol/host), prepend baseURL
  if ((base = options.baseURL) && !hasProtocol) {
    // Ensure that there is a slash between the baseURL (e.g. hostname) and url
    if (!uri.startsWith('/') && base[base.length-1] !== '/') {
      uri = '/' + uri;
    }
    uri = base + uri;
  }

  // should we load from file system?
  loadFile = (isFile = uri.startsWith(fileProtocol))
    || options.mode === 'file'
    || options.mode !== 'http' && !hasProtocol && fileAccess;

  if (isFile) {
    // strip file protocol
    uri = uri.slice(fileProtocol.length);
  } else if (uri.startsWith('//')) {
    if (options.defaultProtocol === 'file') {
      // if is file, strip protocol and set loadFile flag
      uri = uri.slice(2);
      loadFile = true;
    } else {
      // if relative protocol (starts with '//'), prepend default protocol
      uri = (options.defaultProtocol || 'http') + ':' + uri;
    }
  }

  // set non-enumerable mode flag to indicate local file load
  Object.defineProperty(result, 'localFile', {value: !!loadFile});

  // set uri
  result.href = uri;

  // set default result target, if specified
  if (options.target) {
    result.target = options.target + '';
  }

  // set default result rel, if specified (#1542)
  if (options.rel) {
    result.rel = options.rel + '';
  }

  // provide control over cross-origin image handling (#2238)
  // https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image
  if (options.context === 'image' && options.crossOrigin) {
    result.crossOrigin = options.crossOrigin + '';
  }

  // return
  return result;
}

/**
 * File system loader factory.
 * @param {object} fs - The file system interface.
 * @return {function} - A file loader with the following signature:
 *   param {string} filename - The file system path to load.
 *   return {Promise} A promise that resolves to the file contents.
 */
function fileLoader(fs) {
  return fs
    ? function(filename) {
        return new Promise(function(accept, reject) {
          fs.readFile(filename, function(error, data) {
            if (error) reject(error);
            else accept(data);
          });
        });
      }
    : fileReject;
}

/**
 * Default file system loader that simply rejects.
 */
async function fileReject() {
  vegaUtil.error('No file system access.');
}

/**
 * HTTP request handler factory.
 * @param {function} fetch - The Fetch API method.
 * @return {function} - An http loader with the following signature:
 *   param {string} url - The url to request.
 *   param {object} options - An options hash.
 *   return {Promise} - A promise that resolves to the file contents.
 */
function httpLoader(fetch) {
  return fetch
    ? async function(url, options) {
        const opt = vegaUtil.extend({}, this.options.http, options),
              type = options && options.response,
              response = await fetch(url, opt);

        return !response.ok
          ? vegaUtil.error(response.status + '' + response.statusText)
          : vegaUtil.isFunction(response[type]) ? response[type]()
          : response.text();
      }
    : httpReject;
}

/**
 * Default http request handler that simply rejects.
 */
async function httpReject() {
  vegaUtil.error('No HTTP fetch method available.');
}

var typeParsers = {
  boolean: vegaUtil.toBoolean,
  integer: vegaUtil.toNumber,
  number: vegaUtil.toNumber,
  date: vegaUtil.toDate,
  string: vegaUtil.toString,
  unknown: vegaUtil.identity
};

var typeTests = [
  isBoolean,
  isInteger,
  isNumber,
  isDate
];

var typeList = [
  'boolean',
  'integer',
  'number',
  'date'
];

function inferType(values, field) {
  if (!values || !values.length) return 'unknown';

  const n = values.length,
        m = typeTests.length,
        a = typeTests.map((_, i) => i + 1);

  for (let i = 0, t = 0, j, value; i < n; ++i) {
    value = field ? values[i][field] : values[i];
    for (j = 0; j < m; ++j) {
      if (a[j] && isValid(value) && !typeTests[j](value)) {
        a[j] = 0;
        ++t;
        if (t === typeTests.length) return 'string';
      }
    }
  }

  return typeList[
    a.reduce((u, v) => u === 0 ? v : u, 0) - 1
  ];
}

function inferTypes(data, fields) {
  return fields.reduce(function(types, field) {
    types[field] = inferType(data, field);
    return types;
  }, {});
}

// -- Type Checks ----

function isValid(_) {
  return _ != null && _ === _;
}

function isBoolean(_) {
  return _ === 'true' || _ === 'false' || _ === true || _ === false;
}

function isDate(_) {
  return !Number.isNaN(Date.parse(_));
}

function isNumber(_) {
  return !Number.isNaN(+_) && !(_ instanceof Date);
}

function isInteger(_) {
  return isNumber(_) && Number.isInteger(+_);
}

function delimitedFormat(delimiter) {
  const parse = function(data, format) {
    const delim = {delimiter: delimiter};
    return dsv(data, format ? vegaUtil.extend(format, delim) : delim);
  };

  parse.responseType = 'text';

  return parse;
}

function dsv(data, format) {
  if (format.header) {
    data = format.header
      .map(vegaUtil.stringValue)
      .join(format.delimiter) + '\n' + data;
  }
  return d3Dsv.dsvFormat(format.delimiter).parse(data + '');
}

dsv.responseType = 'text';

function isBuffer(_) {
  return (typeof Buffer === 'function' && vegaUtil.isFunction(Buffer.isBuffer))
    ? Buffer.isBuffer(_) : false;
}

function json(data, format) {
  const prop = (format && format.property) ? vegaUtil.field(format.property) : vegaUtil.identity;
  return vegaUtil.isObject(data) && !isBuffer(data)
    ? parseJSON(prop(data))
    : prop(JSON.parse(data));
}

json.responseType = 'json';

function parseJSON(data, format) {
  return (format && format.copy)
    ? JSON.parse(JSON.stringify(data))
    : data;
}

const filters = {
  interior: (a, b) => a !== b,
  exterior: (a, b) => a === b
};

function topojson(data, format) {
  let method, object, property, filter;
  data = json(data, format);

  if (format && format.feature) {
    method = topojsonClient.feature;
    property = format.feature;
  } else if (format && format.mesh) {
    method = topojsonClient.mesh;
    property = format.mesh;
    filter = filters[format.filter];
  } else {
    vegaUtil.error('Missing TopoJSON feature or mesh parameter.');
  }

  object = (object = data.objects[property])
    ? method(data, object, filter)
    : vegaUtil.error('Invalid TopoJSON object: ' + property);

  return object && object.features || [object];
}

topojson.responseType = 'json';

const format = {
  dsv: dsv,
  csv: delimitedFormat(','),
  tsv: delimitedFormat('\t'),
  json: json,
  topojson: topojson
};

function formats(name, reader) {
  if (arguments.length > 1) {
    format[name] = reader;
    return this;
  } else {
    return vegaUtil.hasOwnProperty(format, name) ? format[name] : null;
  }
}

function responseType(type) {
  const f = formats(type);
  return f && f.responseType || 'text';
}

function read(data, schema, timeParser, utcParser) {
  schema = schema || {};

  const reader = formats(schema.type || 'json');
  if (!reader) vegaUtil.error('Unknown data format type: ' + schema.type);

  data = reader(data, schema);
  if (schema.parse) parse(data, schema.parse, timeParser, utcParser);

  if (vegaUtil.hasOwnProperty(data, 'columns')) delete data.columns;
  return data;
}

function parse(data, types, timeParser, utcParser) {
  if (!data.length) return; // early exit for empty data

  const locale = vegaFormat.timeFormatDefaultLocale();
  timeParser = timeParser || locale.timeParse;
  utcParser = utcParser || locale.utcParse;

  var fields = data.columns || Object.keys(data[0]),
      parsers, datum, field, i, j, n, m;

  if (types === 'auto') types = inferTypes(data, fields);

  fields = Object.keys(types);
  parsers = fields.map(function(field) {
    var type = types[field],
        parts, pattern;

    if (type && (type.startsWith('date:') || type.startsWith('utc:'))) {
      parts = type.split(/:(.+)?/, 2); // split on first :
      pattern = parts[1];

      if ((pattern[0] === '\'' && pattern[pattern.length-1] === '\'') ||
          (pattern[0] === '"' && pattern[pattern.length-1] === '"')) {
        pattern = pattern.slice(1, -1);
      }

      const parse = parts[0] === 'utc' ? utcParser : timeParser;
      return parse(pattern);
    }

    if (!typeParsers[type]) {
      throw Error('Illegal format pattern: ' + field + ':' + type);
    }

    return typeParsers[type];
  });

  for (i=0, n=data.length, m=fields.length; i<n; ++i) {
    datum = data[i];
    for (j=0; j<m; ++j) {
      field = fields[j];
      datum[field] = parsers[j](datum[field]);
    }
  }
}

var loader = loaderFactory(
  require('node-fetch'),
  require('fs')
);

exports.format = format;
exports.formats = formats;
exports.inferType = inferType;
exports.inferTypes = inferTypes;
exports.loader = loader;
exports.read = read;
exports.responseType = responseType;
exports.typeParsers = typeParsers;
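Unlike the browser build above, this node build passes require('node-fetch') and require('fs') to loaderFactory, so its default loader can read local files as well as HTTP URLs. A minimal usage sketch, assuming a CommonJS project in which the package's main entry resolves to this build and './data/summary.json' is a hypothetical local path:

// Usage sketch only; the path and variable names are illustrative.
const vegaLoader = require('vega-loader');
const load = vegaLoader.loader();           // node-fetch for HTTP, fs for local files

load.load('./data/summary.json')            // no protocol + file access, so fs.readFile is used
  .then(function(content) {
    const data = vegaLoader.read(content, {type: 'json'});
    console.log(Array.isArray(data) ? data.length : 1, 'record(s) loaded');
  })
  .catch(function(err) { console.error(err); });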