diff --git a/.travis.yml b/.travis.yml
index 797b21d..49d82f7 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,11 +1,12 @@
 language: node_js
 node_js:
-  - "8"
+- '8'
 before_script:
-  - npm install
-  - npm install -g codecov
+- npm install
+- npm install -g codecov
 script:
-  - npm test
-  - codecov -f coverage/lcov.info
+- npm test
+- codecov -f coverage/lcov.info
 notifications:
-  slack: fusioncharts:JmooWfzCnyxe4p7KTTJU5xzP
+  slack:
+    secure: E/O+6gcjD2oTwLnt10w6qEpNQp9AkcuZsy6xsK/Hxw7z47IZwM5BHkUAJhTX0QcJyamZbFaMU9mJ5P4ClLnMPBBj4KV+mznu8yTSySfCub2LKpuGyKIqH3BHqepHbSZiAlMhQkq5OUfW8tOo2p8j6kc5AbvPx6pHCr/nQc0HkjwBQY4SwLb60LGXbPkYsoZhYnXmJmRg/iowu00qakjXH7FInsGist//ZlJp9MiaZH3Cfdo4l3rZn4AJ1naFBD4bNb+Wqqh6zVO4DdOiBVTsq3bZ6vcNZVb2IqlYZvCLODuwhvHiO4wKsQ9QAAhm1TXrraXfs9kR9pMeZeUtnlEeZURu/m7J1Wz2PkBOGGUjCb2xGucppgyg3/1eG3esEL6M6pqVGzuGH1CKjh4aRVZoq47UHDoN+N8Q4ix+TZMqztCFQV47bs56dlmc8hnlluANJsrlJha0p3myYQknv2qjgLjfbZDHoKKybpdAsfaZsDQ3aShw+EtdB38FA0YjsWYQNWh1YmKWBEz1W+jMZ7AlLrkNi20+JHmNngZPW4MQy1Mi0xN905Rlr4RwnzUt5o+pbP78zIlCnZFU5KIIoMDdlKXoZ9hC6gptTajhXkcIjm+FSezy6VUtNpMS0dCSM3RomYWx1MSEpo1XIMzzb6bujBD/XLckejJWAJcTF+PEzz8=
diff --git a/LICENSE b/LICENSE
index d8c182b..b84531d 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2018 Charts.com
+Copyright (c) 2018 Muzejs.org
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
diff --git a/README.md b/README.md
index 8e8df01..b9f046f 100644
--- a/README.md
+++ b/README.md
@@ -39,7 +39,7 @@ DataModel can be used if you need an in-browser tabular data store for data anal
 Insert the DataModel build into the `<head>`:
 
 ```html
-
+
 ```
 
 ### NPM
@@ -144,7 +144,7 @@ console.log(projectDm.getData().schema);
 
 ## Documentation
 
-Find detailed documentation and API reference from [here](https://www.charts.com/muze/docs/introduction-to-datamodel).
+Find detailed documentation and API reference from [here](https://muzejs.org/docs/introduction-to-datamodel).
## Contributing diff --git a/dist/datamodel.js b/dist/datamodel.js index ea3c2ae..8cab24b 100644 --- a/dist/datamodel.js +++ b/dist/datamodel.js @@ -1,2 +1,2 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var a=t[r]={i:r,l:!1,exports:{}};return e[r].call(a.exports,a,a.exports,n),a.l=!0,a.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)n.d(r,a,function(t){return e[t]}.bind(null,a));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports={name:"datamodel",description:"Relational algebra compliant in-memory tabular data store",homepage:"https://github.com/chartshq/datamodel",version:"2.1.0",license:"MIT",main:"dist/datamodel.js",author:"Charts.com ",keywords:["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],repository:{type:"git",url:"https://github.com/chartshq/datamodel.git"},contributors:[{name:"Akash Goswami",email:"akash@charts.com"},{name:"Subhash Haldar",email:"subhash@charts.com"},{name:"Rousan Ali",email:"rousan@charts.com",url:"https://rousan.io"},{name:"Ujjal Kumar Dutta",email:"ujjal@charts.com"}],dependencies:{"d3-dsv":"^1.0.8"},devDependencies:{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0",chai:"3.5.0","cross-env":"^5.0.5",eslint:"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0",jsdoc:"3.5.5",json2yaml:"^1.1.0",karma:"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3",marked:"^0.5.0",mocha:"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0",webpack:"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},scripts:{test:"npm run lint && npm run ut",ut:"karma start karma.conf.js",utd:"karma start --single-run false --browsers Chrome karma.conf.js ",build:"webpack --mode production",start:"webpack-dev-server --config webpack.config.dev.js --mode development --open",lint:"eslint ./src","lint-errors":"eslint --quiet ./src",docs:"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l});var 
a={};n.r(a),n.d(a,"DSVArr",function(){return Ge}),n.d(a,"DSVStr",function(){return tt}),n.d(a,"FlatJSON",function(){return nt}),n.d(a,"Auto",function(){return rt});var i={};n.r(i),n.d(i,"sum",function(){return jt}),n.d(i,"avg",function(){return St}),n.d(i,"min",function(){return Nt}),n.d(i,"max",function(){return kt}),n.d(i,"first",function(){return Ft}),n.d(i,"last",function(){return Dt}),n.d(i,"count",function(){return Tt}),n.d(i,"sd",function(){return Rt});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"};function s(e){return e instanceof Date?e:new Date(e)}function d(e){return e<10?"0"+e:e}function p(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},p.TOKEN_PREFIX="%",p.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},p.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},p.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;aa.getFullYear()&&(t=""+(i-1)+r),s(t).getFullYear()},formatter:function(e){var t=s(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:p.defaultNumberParser(),formatter:function(e){return s(e).getFullYear().toString()}}}},p.getTokenFormalNames=function(){var e=p.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},p.tokenResolver=function(){var e=p.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},p.formatAs=function(e,t){var n,r=s(e),a=p.findTokens(t),i=p.getTokenDefinitions(),o=String(t),u=p.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var T=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:J.CROSS,i=[],o=[],u=n||K,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=C(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(n){var d=!1,h=void 0;D(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var 
g=ot(y[l]),b=ot(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var w={};m.forEach(function(e,t){w[i[t].name]=e}),d&&J.CROSS!==a?o[h]=w:(o.push(w),d=!0,h=n)}else if((a===J.LEFTOUTER||a===J.RIGHTOUTER)&&!d){var _={},O=c.fields.length-1;m.forEach(function(e,t){_[i[t].name]=t<=O?e:null}),d=!0,h=n,o.push(_)}})}),new At(o,i,{name:d})}function z(e,t){var n=""+e,r=""+t;return nr?1:0}function q(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:z;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function X(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function Q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function Z(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function ee(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=vt(r,a))&&("function"==typeof i?q(n,function(e,t){return i(e[o.index],t[o.index])}):O(i)?function(){var e=Q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return vt(r,e)});e.forEach(function(e){e.push(Z(e,a,u))}),q(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,X(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",q(n,$(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,X(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function te(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new At(i,r,{name:l})}function ne(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=le.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=le.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=ue)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=se(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 
0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new Mt(h,d,{name:c}),v}function pe(e,t){var n=C(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function he(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new Mt(i,r,{name:l})}function ve(e,t,n){return W(e,t,n,!1,J.LEFTOUTER)}function me(e,t,n){return W(t,e,n,!1,J.RIGHTOUTER)}var ye=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),Te=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===qe?f=!0:r===Xe&&(f=!0,e.charCodeAt(o)===qe&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3],i=void 0;t!==H?(i={op:t,meta:r,criteria:a},e._derivation.push(i)):(i=[].concat(it(a)),e._derivation.length=0,(n=e._derivation).push.apply(n,it(i)))},ft=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new F(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},D(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},lt=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||G,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return n=(t=e).getData(),r=n.schema,i=t.getFieldsConfig(),o=t.getFieldspace().fieldsObj(),u=n.data,c=Object.values(i).reduce(function(e,t){return e[t.def.name]=o[t.def.name].domain(),e},{}),function(e){return!!u.length&&u.some(function(t){return r.every(function(n){if(!(n.name in e))return!0;var r=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return r>=c[n.name][0]&&r<=c[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=i[n.name].index;return t[o]===e[n.name].valueOf()})})};var t,n,r,i,o,u,c}):[function(){return!1}];var o=void 0;r===G?o=e.clone(!1,!1).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):o=e.clone(!1,!1).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1});return o},st=function(e,t,n,r){var a=e.clone(r.saveChild),i=ft(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),ct(a,L,{config:n},t),a},dt=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return 
n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),ct(a,U,{projField:t,config:n,actualProjField:i},null),a},pt=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},ht=function(e,t,n,r){n=function(e){return e.map(function(e){return pt(e)})}(n),r=Object.assign(Object.assign({},Je),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=at(u,2),f=c[0],l=c[1],s=Be(l,n,f),d=N.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?S(t):r.dataFormat,e},vt=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=mt(n,t),o=at(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},gt=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},bt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:gt(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(it(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,it(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=lt(g,a,{filterByMeasure:f}),yt(g,i,y)),l.forEach(function(e){var t=lt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=void 0;if(!1===(!(arguments.length>1&&void 0!==arguments[1])||arguments[1])){var n=this.getData({getAllFields:!0}),r=n.data,a=n.schema,i=r.map(function(e){var t={};return a.forEach(function(n,r){t[n.name]=e[r]}),t});t=new this.constructor(i,a)}else t=new this.constructor(this);return e&&this._children.push(t),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:[];ct(this,H,null,t),this._parent=e,e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}}]),e}(),Ot=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return 
function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}(),Et=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=de.apply(void 0,a);return ct(i,V,{fieldsArr:e,groupByString:r,defaultReducer:le.defaultReducer()},t),n.saveChild&&this._children.push(i),i._parent=this,i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=pt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];D(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return e}(this),c=u._propagationNameSpace,f={groupByModel:function e(t){return t._parent&&t._derivation.find(function(e){return"group"!==e.op})?e(t._parent):t}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),bt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;bt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var 
a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=M(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[],o=n.saveChild;return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&a.dispose(),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;na.getFullYear()&&(t=""+(i-1)+r),d(t).getFullYear()},formatter:function(e){var t=d(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return d(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},h.formatAs=function(e,t){var n,r=d(e),a=h.findTokens(t),i=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var R=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:U.CROSS,i=[],o=[],u=n||B,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=H(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(n){var d=!1,h=void 0;D(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var g=vt(y[l]),b=vt(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var O={};m.forEach(function(e,t){O[i[t].name]=e}),d&&U.CROSS!==a?o[h]=O:(o.push(O),d=!0,h=n)}else if((a===U.LEFTOUTER||a===U.RIGHTOUTER)&&!d){var w={},_=c.fields.length-1;m.forEach(function(e,t){w[i[t].name]=t<=_?e:null}),d=!0,h=n,o.push(w)}})}),new It(o,i,{name:d})}function J(e,t){var n=""+e,r=""+t;return nr?1:0}function z(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:J;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var 
c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function K(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function X(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function q(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function Z(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=jt(r,a))&&("function"==typeof i?z(n,function(e,t){return i(e[o.index],t[o.index])}):E(i)?function(){var e=X(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return jt(r,e)});e.forEach(function(e){e.push(q(e,a,u))}),z(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,K(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",z(n,W(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,K(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function $(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!S(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new It(i,r,{name:l})}function Q(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function ee(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=me.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=me.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=pe)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=ye(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new zt(h,d,{name:c}),v}function be(e,t){var n=H(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function Oe(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union 
"+u.name;if(!S(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new zt(i,r,{name:l})}function we(e,t,n){return G(e,t,n,!1,U.LEFTOUTER)}function _e(e,t,n){return G(t,e,n,!1,U.RIGHTOUTER)}var Ee=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),xe=function(){function e(e,t){for(var n=0;n9999?"+"+it(t,6):it(t,4))+"-"+it(e.getUTCMonth()+1,2)+"-"+it(e.getUTCDate(),2)+(i?"T"+it(n,2)+":"+it(r,2)+":"+it(a,2)+"."+it(i,3)+"Z":a?"T"+it(n,2)+":"+it(r,2)+":"+it(a,2)+"Z":r||n?"T"+it(n,2)+":"+it(r,2)+"Z":"")}var ut=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,a=[],i=e.length,o=0,u=0,c=i<=0,f=!1;function l(){if(c)return Qe;if(f)return f=!1,$e;var t,r,a=o;if(e.charCodeAt(a)===et){for(;o++=i?c=!0:(r=e.charCodeAt(o++))===tt?f=!0:r===nt&&(f=!0,e.charCodeAt(o)===tt&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3];t===L.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,ht(a))):e._derivation.push({op:t,meta:r,criteria:a})}(t,n,arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},arguments[4]),function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,ht(e._ancestorDerivation).concat(ht(e._derivation)))}(e,t)},gt=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new k(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},D(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},bt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map(function(e){return e.name()}).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},Ot=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||V,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return n=(t=e).getData(),r=n.schema,i=t.getFieldsConfig(),o=t.getFieldspace().fieldsObj(),u=n.data,c=Object.values(i).reduce(function(e,t){return e[t.def.name]=o[t.def.name].domain(),e},{}),function(e){return!!u.length&&u.some(function(t){return r.every(function(n){if(!(n.name in e))return!0;var r=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return r>=c[n.name][0]&&r<=c[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=i[n.name].index;return t[o]===e[n.name].valueOf()})})};var t,n,r,i,o,u,c}):[function(){return!1}];return r===V?bt(e).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):bt(e).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1})},wt=function(e,t,n,r){var a=e.clone(r.saveChild),i=gt(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),yt(e,a,L.SELECT,{config:n},t),a},_t=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return 
n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),yt(e,a,L.PROJECT,{projField:t,config:n,actualProjField:i},null),a},Et=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},At=function(e){return e.map(function(e){return function(e){var t=[c.CONTINUOUS],n=[u.CATEGORICAL,u.BINNED,u.TEMPORAL,u.GEO],r=e.type,a=e.subtype,i=e.name;switch(r){case f.DIMENSION:if(-1===n.indexOf(a))throw new Error("DataModel doesn't support dimension field subtype "+a+" used for "+i+" field");break;case f.MEASURE:if(-1===t.indexOf(a))throw new Error("DataModel doesn't support measure field subtype "+a+" used for "+i+" field");break;default:throw new Error("DataModel doesn't support field type "+r+" used for "+i+" field")}}(e=Et(e)),e})},St=function(e,t,n,r){n=At(n),r=Object.assign(Object.assign({},qe),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=pt(u,2),f=c[0],l=c[1];!function(e,t){e.forEach(function(e){var n=e.as;if(n){var r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}})}(n,f);var s=Xe(l,n,f),d=T.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?N(t):r.dataFormat,e},jt=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=Nt(n,t),o=pt(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},Ft=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},kt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:Ft(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(ht(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,ht(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=Ot(g,a,{filterByMeasure:f}),Tt(g,i,y)),l.forEach(function(e){var t=Ot(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 
0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=ge.apply(void 0,a);return yt(this,i,L.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:me.defaultReducer()},t),n.saveChild?i.setParent(this):i.setParent(null),i}},{key:"sort",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=[n.schema.map(function(e){return e.name})].concat(n.data),a=new this.constructor(r,n.schema,{dataFormat:"DSVArr"});return yt(this,a,L.SORT,t,e),t.saveChild?a.setParent(this):a.setParent(null),a}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=Et(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];D(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return e}(this),c=u._propagationNameSpace,f={groupByModel:function(e){for(;e._parent&&e._derivation.find(function(e){return e.op!==L.GROUPBY});)e=e._parent;return e}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),kt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;kt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=I(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var 
d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[];return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return 
convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = 
Object.keys(definitions);\n const occurrence = [];\n let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 
=== regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === 
undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? [] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = 
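/*
 * Simplified sketch of the deep-merge idea behind extend2/merge above: source objects and
 * arrays are merged into the target recursively, while a "seen" list of visited source
 * objects guards against infinite recursion on cyclic references. Illustrative only; the
 * original additionally handles skipUndef and the IE < 9 null-to-Object quirk.
 */
function deepMergeSketch (target, source, seen = []) {
    if (seen.indexOf(source) !== -1) {
        return target;                      // cyclic reference: stop descending
    }
    seen.push(source);

    Object.keys(source).forEach((key) => {
        const srcVal = source[key];
        if (srcVal !== null && typeof srcVal === 'object') {
            if (target[key] === null || typeof target[key] !== 'object') {
                target[key] = Array.isArray(srcVal) ? [] : {};
            }
            deepMergeSketch(target[key], srcVal, seen);
        } else {
            target[key] = srcVal;
        }
    });
    return target;
}

// e.g. deepMergeSketch({ a: 1 }, { b: { c: 2 } }) -> { a: 1, b: { c: 2 } }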
this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (val, field) {\n Object.defineProperty(this, '_value', {\n enumerable: false,\n configurable: false,\n writable: false,\n value: val\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. '0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
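/*
 * Usage sketch for the row diffset format handled by rowDiffsetIterator above
 * ('0-4,6,10-13' style: comma-separated single indices and inclusive ranges).
 * Assumes the rowDiffsetIterator export above is in scope.
 */
const visited = [];
rowDiffsetIterator('0-2,4,6-7', (i) => { visited.push(i); });
// visited -> [0, 1, 2, 4, 6, 7]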
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = (start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? 
(dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = 
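/*
 * Illustrative sketch of the binning flow above: equal-width bucket boundaries are
 * generated from start/end/binSize, then each value is labelled with its bucket via a
 * binary search over the sorted ranges. Simplified; the original also widens the buckets
 * to cover the data domain and passes invalid values through untouched.
 */
function makeBuckets (start, end, binSize) {
    const buckets = [];
    for (let next = start; next < end; next += binSize) {
        buckets.push(next);
    }
    buckets.push(end);
    return buckets;
}

function bucketLabel (buckets, value) {
    let lo = 0;
    let hi = buckets.length - 2;            // index of the last range start
    while (lo <= hi) {
        const mid = lo + Math.floor((hi - lo) / 2);
        if (value >= buckets[mid] && value < buckets[mid + 1]) {
            return `${buckets[mid]}-${buckets[mid + 1]}`;
        } else if (value >= buckets[mid + 1]) {
            lo = mid + 1;
        } else {
            hi = mid - 1;
        }
    }
    return null;
}

// e.g. bucketLabel(makeBuckets(0, 100, 25), 37) === '25-50'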
`${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = field.partialField.data[i];\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = field.partialField.data[ii];\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 
1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray, } from '../utils';\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @param {integer} index - The index of the data which will be sorted.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType, index) {\n let retFunc;\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'desc') {\n retFunc = (a, b) => b[index] - a[index];\n } else {\n retFunc = (a, b) => a[index] - b[index];\n }\n break;\n default:\n retFunc = (a, b) => {\n const a1 = `${a[index]}`;\n const b1 = `${b[index]}`;\n if (a1 < b1) {\n return sortType === 'desc' ? 1 : -1;\n }\n if (a1 > b1) {\n return sortType === 'desc' ? 
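/*
 * Usage sketch for the mergeSort export above: unlike Array.prototype.sort, whose stability
 * was not guaranteed across older browsers, this merge sort keeps equal elements in their
 * original order. Assumes the mergeSort export above is in scope.
 */
const rows = [
    { city: 'B', sales: 10 },
    { city: 'A', sales: 20 },
    { city: 'B', sales: 5 }
];
mergeSort(rows, (a, b) => (a.city < b.city ? -1 : (a.city > b.city ? 1 : 0)));
// -> A/20 first, then B/10 before B/5 (the two 'B' rows keep their relative order)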
-1 : 1;\n }\n return 0;\n };\n }\n return retFunc;\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData(data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data before return in dataBuilder.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction sortData(dataObj, sortingDetails) {\n const { data, schema } = dataObj;\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n sortMeta = String(sortMeta).toLowerCase() === 'desc' ? 'desc' : 'asc';\n mergeSort(data, getSortFn(fDetails.type, sortMeta, fDetails.index));\n }\n }\n\n dataObj.uids = [];\n data.forEach((value) => {\n dataObj.uids.push(value.pop());\n });\n}\n\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n 
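/*
 * Sketch of the columnWise step at the end of dataBuilder above: the row-major output is
 * transposed into one array per schema entry. Standalone illustration only.
 */
function toColumnWise (rows, columnCount) {
    const columns = Array.from({ length: columnCount }, () => []);
    rows.forEach((tuple) => {
        tuple.forEach((cell, i) => columns[i].push(cell));
    });
    return columns;
}

// e.g. toColumnWise([[1, 'a'], [2, 'b']], 2) -> [[1, 2], ['a', 'b']]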
(dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
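/*
 * Sketch of the hashing idea used by the union/difference operators above: each tuple is
 * reduced to a string key built from its field values, and a lookup table drops duplicates.
 * Illustrative only; the real operators work on fieldStore data and row diffsets.
 */
function dedupeRows (rows, fieldNames) {
    const seen = {};
    const out = [];
    rows.forEach((row) => {
        const key = fieldNames.map(name => `-${row[name]}`).join('');
        if (!seen[key]) {
            seen[key] = true;
            out.push(row);
        }
    });
    return out;
}

// e.g. dedupeRows([{ a: 1 }, { a: 1 }, { a: 2 }], ['a']) -> [{ a: 1 }, { a: 2 }]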
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n sum,\n avg,\n min,\n max,\n first,\n last,\n count,\n std\n};\n\nconst defaultReducerName = 'sum';\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. 
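/*
 * Sketch of how the statistical reducers above compose: variance reuses avg, and std is the
 * square root of variance. Invalid-value filtering is omitted here for brevity.
 */
const avgSketch = arr => arr.reduce((acc, v) => acc + v, 0) / (arr.length || 1);
const varianceSketch = arr => avgSketch(arr.map(v => (v - avgSketch(arr)) ** 2));
const stdSketch = arr => Math.sqrt(varianceSketch(arr));

// e.g. avgSketch([2, 4, 6]) === 4, varianceSketch([2, 4, 6]) ~ 2.67, stdSketch([2, 4, 6]) ~ 1.63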
If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * @param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = 
dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = 
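/*
 * Sketch of the group-by flow above: rows sharing the same dimension hash are collected,
 * measure values accumulate per group, and a reducer collapses each accumulated array.
 * Standalone illustration with a plain sum reducer; the real groupBy resolves reducers
 * through the reducer store and works on fieldStore data.
 */
function groupBySketch (rows, dimensions, measure, reducer) {
    const hashMap = {};
    const grouped = [];
    rows.forEach((row) => {
        const hash = dimensions.map(d => `-${row[d]}`).join('');
        if (hashMap[hash] === undefined) {
            hashMap[hash] = grouped.length;
            const entry = {};
            dimensions.forEach((d) => { entry[d] = row[d]; });
            entry[measure] = [row[measure]];
            grouped.push(entry);
        } else {
            grouped[hashMap[hash]][measure].push(row[measure]);
        }
    });
    grouped.forEach((entry) => { entry[measure] = reducer(entry[measure]); });
    return grouped;
}

// e.g. groupBySketch(
//     [{ origin: 'EU', sales: 2 }, { origin: 'EU', sales: 3 }, { origin: 'US', sales: 5 }],
//     ['origin'], 'sales', arr => arr.reduce((a, b) => a + b, 0)
// ) -> [{ origin: 'EU', sales: 5 }, { origin: 'US', sales: 5 }]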
dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return (dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].value ===\n dm2Fields[fieldName].value && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple 
attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field subtype.\n *\n * 
@public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n data.push(datum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, this.format()));\n }\n });\n return data;\n 
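/*
 * Sketch of minimumConsecutiveDifference above: the temporal data is sorted and the smallest
 * non-zero gap between consecutive timestamps is returned (null when every value is equal).
 */
function minConsecutiveDiff (timestamps) {
    const sorted = timestamps.slice().sort((a, b) => a - b);
    let minDiff = Number.POSITIVE_INFINITY;
    let found = false;
    for (let i = 1; i < sorted.length; i++) {
        if (sorted[i] === sorted[i - 1]) { continue; }
        minDiff = Math.min(minDiff, sorted[i] - sorted[i - 1]);
        found = true;
    }
    return found ? minDiff : null;
}

// e.g. minConsecutiveDiff([0, 86400000, 172800000]) === 86400000 (one day in ms)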
}\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? 
numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate 
= this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
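/*
 * Sketch of the per-cell parsing done by the parsers above: a binned value such as ' 10 - 20 '
 * is normalized to '10-20', and a continuous value is coerced to a number. The real parsers
 * map unparseable input to the InvalidAwareTypes registry instead of null.
 */
const BIN_REGEX = /^\s*([+-]?\d+(?:\.\d+)?)\s*-\s*([+-]?\d+(?:\.\d+)?)\s*$/;

function parseBinnedSketch (val) {
    const matched = String(val).match(BIN_REGEX);
    return matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}` : null;
}

function parseContinuousSketch (val) {
    const parsed = Number.parseFloat(val);
    return Number.isNaN(parsed) ? null : parsed;
}

// e.g. parseBinnedSketch(' 10 - 20 ') === '10-20', parseContinuousSketch('42.5') === 42.5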
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n 
return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr (arr, options) {\n const defaultOption = {\n firstRowHeader: true,\n };\n options = Object.assign({}, defaultOption, options);\n\n let header;\n const columns = [];\n const push = columnMajor(columns);\n\n if (options.firstRowHeader) {\n // If header present then mutate the array.\n // Do in-place mutation to save space.\n header = arr.splice(0, 1)[0];\n } else {\n header = [];\n }\n\n arr.forEach(field => push(...field));\n\n return [header, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? 
customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n })).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(text) {\n return text == null ? \"\"\n : reFormat.test(text += \"\") ? 
\"\\\"\" + text.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : text;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr) {\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n\n arr.forEach((item) => {\n const fields = [];\n for (let key in item) {\n if (key in header) {\n insertionIndex = header[key];\n } else {\n header[key] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[key];\n }\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from 
'./field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, i) {\n const resp = {};\n for (let field of fields) {\n resp[field.name()] = new Value(field.partialField.data[i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n Object.keys(fields).forEach((key) => { resp[key] = new Value(fields[key], key); });\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistDerivation = (model, operation, config = {}, criteriaFn) => {\n let derivative;\n if (operation !== DM_DERIVATIVES.COMPOSE) {\n derivative = {\n op: operation,\n meta: config,\n criteria: criteriaFn\n };\n model._derivation.push(derivative);\n }\n else {\n derivative = [...criteriaFn];\n model._derivation.length = 0;\n model._derivation.push(...derivative);\n }\n};\n\nexport const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => {\n const newRowDiffSet = [];\n let lastInsertedValue = -1;\n let { mode } = config;\n let li;\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n let checker;\n if (mode === FilteringMode.INVERSE) {\n checker = index => !selectorHelperFn(index);\n } else {\n checker = index => selectorHelperFn(index);\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n li = newRowDiffSet.length - 1;\n newRowDiffSet[li] = `${newRowDiffSet[li].split('-')[0]}-${i}`;\n } else {\n newRowDiffSet.push(`${i}`);\n }\n lastInsertedValue = i;\n }\n });\n return newRowDiffSet.join(',');\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n let fns = [];\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n const dataObj = dataModel.getData();\n const schema = dataObj.schema;\n const fieldsConfig = dataModel.getFieldsConfig();\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = Object.values(fieldsConfig).reduce((acc, v) => {\n acc[v.def.name] = fieldsSpace[v.def.name].domain();\n return acc;\n }, {});\n\n return (fields) => {\n const include = !data.length ? 
false : data.some(row => schema.every((propField) => {\n if (!(propField.name in fields)) {\n return true;\n }\n const value = fields[propField.name].valueOf();\n if (filterByMeasure && propField.type === FieldType.MEASURE) {\n return value >= domain[propField.name][0] && value <= domain[propField.name][1];\n }\n\n if (propField.type !== FieldType.DIMENSION) {\n return true;\n }\n const idx = fieldsConfig[propField.name].index;\n return row[idx] === fields[propField.name].valueOf();\n }));\n return include;\n };\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n const clonedModel = model.clone(false, false);\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false,\n mode: FilteringMode.ALL\n });\n } else {\n filteredModel = model.clone(false, false).select(fields => fns.some(fn => fn(fields)), {\n mode: FilteringMode.ALL,\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const rowDiffset = selectHelper(\n cloned._rowDiffset,\n cloned.getPartialFieldspace().fields,\n selectFn,\n selectConfig,\n sourceDm\n );\n cloned._rowDiffset = rowDiffset;\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn);\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema));\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, options);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = 
formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n type: schema[i].subtype || schema[i].type,\n index: i\n };\n }\n }\n return null;\n};\n\n\nexport const getOperationArguments = (child) => {\n const derivation = child._derivation;\n let params = [];\n let operation;\n if (derivation && derivation.length === 1) {\n operation = derivation[0].op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation[0].criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation[0].meta.actualProjField];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation[0].meta.groupByString.split(','), derivation[0].criteria];\n break;\n default:\n break;\n }\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const { operation, params } = getOperationArguments(dataModel);\n let selectionModel = propModel[0];\n let rejectionModel = propModel[1];\n if (operation && params.length) {\n selectionModel = propModel[0][operation](...params, {\n saveChild: false\n });\n rejectionModel = propModel[1][operation](...params, {\n saveChild: false\n });\n }\n return [selectionModel, rejectionModel];\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? 
excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n let [selectionModel, rejectionModel] = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, [selectionModel, rejectionModel], config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n if (model._parent && model._derivation.find(d => d.op !== 'group')) {\n return getRootGroupByModel(model._parent);\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const 
propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport { persistDerivation, updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\nimport { DM_DERIVATIVES } from './constants';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' 
},\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. `join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... }\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n\n const cloneConfig = { saveChild: config.saveChild };\n let oDm;\n\n if (config.mode === FilteringMode.ALL) {\n const selectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.NORMAL },\n cloneConfig\n );\n const rejectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.INVERSE },\n cloneConfig\n );\n oDm = [selectDm, rejectDm];\n } else {\n oDm = cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n return oDm;\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true, linkParent = true) {\n let retDataModel;\n if (linkParent === false) {\n const dataObj = this.getData({\n getAllFields: true\n });\n const data = dataObj.data;\n const schema = dataObj.schema;\n const jsonData = data.map((row) => {\n const rowObj = {};\n schema.forEach((field, i) => {\n rowObj[field.name] = row[i];\n });\n return rowObj;\n });\n retDataModel = new this.constructor(jsonData, schema);\n }\n else {\n retDataModel = new this.constructor(this);\n }\n\n if (saveChild) {\n this._children.push(retDataModel);\n }\n return retDataModel;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. 
All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n\n let normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n\n normalizedProjField = Array.from(new Set(normalizedProjField)).map(field => field.trim());\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldDef, i) => {\n acc[fieldDef.name()] = {\n index: i,\n def: { name: fieldDef.name(), type: fieldDef.type(), subtype: fieldDef.subtype() }\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent.removeChild(this);\n this._parent = null;\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: 
\"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Adds the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * The optional criteriaQueue is an array containing the history of transaction performed on parent\n * {@link DataModel} to get the current one.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n * @param {Array} criteriaQueue - Queue contains in-between operation meta-data.\n */\n addParent (parent, criteriaQueue = []) {\n persistDerivation(this, DM_DERIVATIVES.COMPOSE, null, criteriaQueue);\n this._parent = parent;\n parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren() {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", 
Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations() {\n return this._derivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat } from './enums';\nimport {\n persistDerivation,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. 
If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n this._sortingDetails = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivation(\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n this._children.push(newDataModel);\n }\n newDataModel._parent = this;\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n sortedDm._sortingDetails = sortingDetails;\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. 
When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) {\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone();\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn);\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n 
model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n /**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. 
It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone();\n clone.addField(binField);\n\n persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null);\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return new DataModel(data, schema);\n }\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nDataModel.Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\nDataModel.Stats = Stats;\nObject.assign(DataModel, enums);\nDataModel.DateTimeFormatter = DateTimeFormatter;\nDataModel.DataFormat = DataFormat;\nDataModel.FilteringMode = FilteringMode;\nDataModel.InvalidAwareTypes = InvalidAwareTypes;\nDataModel.version = pkg.version;\n\nexport default DataModel;\n","\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. 
Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let frstChild;\n const derivations = [];\n const saveChild = config.saveChild;\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!frstChild) {\n frstChild = currentDM;\n }\n });\n\n saveChild && currentDM.addParent(dm, derivations);\n if (derivations.length > 1) {\n frstChild.dispose();\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, 
naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file +{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./src/index.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/enums/group-by-functions.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/value.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataModel/./src/operator/union.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/converter/dsv-arr.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/converter/dsv-str.js","webpack://DataModel/./src/converter/flat-json.js","webpack://DataModel/./src/converter/auto-resolver.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/export.js","webpack://DataModel/./src/operator/pure-operators.js","webpack://DataModel/./src/operator/natural-join.js"],"names":["root","factory","exports","module","define","amd","window","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","o
bject","property","prototype","hasOwnProperty","p","s","DataModel","require","default","DataFormat","FLAT_JSON","DSV_STR","DSV_ARR","AUTO","DimensionSubtype","CATEGORICAL","TEMPORAL","GEO","BINNED","MeasureSubtype","CONTINUOUS","FieldType","MEASURE","DIMENSION","FilteringMode","NORMAL","INVERSE","ALL","GROUP_BY_FUNCTIONS","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","convertToNativeDate","date","Date","pad","DateTimeFormatter","format","this","dtParams","undefined","nativeDate","RegExp","escape","text","replace","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","val","parsedVal","isFinite","parseInt","defaultRangeParser","range","nVal","toLowerCase","length","getTokenDefinitions","daysDef","short","long","monthsDef","H","index","extract","parser","formatter","getHours","toString","hours","P","M","getMinutes","S","getSeconds","K","getMilliseconds","a","join","day","getDay","A","e","getDate","b","month","getMonth","B","y","result","substring","presentDate","presentYear","Math","trunc","getFullYear","year","Y","getTokenFormalNames","definitions","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","arg","targetParam","arguments","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","keys","occurrence","forwardChar","indexOf","push","token","formatAs","nDate","formattedStr","String","formattedVal","parse","dateTimeStamp","options","extractTokenValue","dtParamSeq","noBreak","dtParamArr","args","resolverKey","resolverParams","resolverFn","param","resolvedVal","splice","apply","checkIfOnlyYear","unshift","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","map","obj","occurrenceLength","extractValues","match","shift","getNativeDate","Number","Function","concat","_toConsumableArray","len","column_major","store","_len","fields","Array","_key","forEach","fieldIndex","from","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","extend2","obj1","obj2","skipUndef","_typeof","merge","tgtArr","srcArr","item","srcVal","tgtVal","str","cRef","isArray","getUniqueId","getTime","round","random","isArrEqual","arr1","arr2","formatNumber","detectDataFormat","data","isObject","fieldStore","createNamespace","fieldArr","dataId","fieldsObj","_cachedFieldsObj","field","getMeasure","measureFields","_cachedMeasure","schema","type","getDimension","dimensionFields","_cachedDimension","Value","_classCallCheck","configurable","writable","_value","rowDiffsetIterator","rowDiffset","callback","split","diffStr","diffStsArr","start","end","InvalidAwareTypes","invalid_aware_types_classCallCheck","config","assign","_invalidAwareValsMap","invalidAwareVals","NULL","NA","NIL","invalid","nil","null","generateBuckets","binSize","buckets","next","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","SORT","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","getCommonSchema","fs1","fs2","retArr","fs1Arr","defaultFilterFn","crossProduct","dm1","dm2","filterFn","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","getFieldspace","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","Error","tmpSchema",
"_rowDiffset","rowAdded","rowPosition","ii","tuple","userArg","partialField","dm1Fields","prepareJoinData","dm2Fields","detachedRoot","tupleObj","cellVal","iii","defSortFn","a1","b1","mergeSort","arr","sortFn","sort","lo","hi","mid","mainArr","auxArr","getSortFn","dataType","sortType","retFunc","groupData","hashMap","Map","groupedData","datum","fieldVal","has","set","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","label","reduce","acc","idx","dataBuilder","colIdentifier","sortingDetails","addUid","columnWise","retObj","uids","reqSorting","tmpDataArr","colName","insertInd","dataObj","fieldName","sortMeta","fDetails","fieldInSchema","sortingFn","slice","f","data_builder_toConsumableArray","pop","sortData","tmpData","difference","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","_colIdentifier","prepareDataHelper","dm","addData","hashData","schemaName","getFilteredValues","filter","sum","filteredNumber","curr","avg","totalSum","isNaN","fnList","_defineProperty","_fnList","filteredValues","min","group_by_function_toConsumableArray","max","sqrt","mean","num","pow","variance","defaultReducerName","ReducerStore","_this","reducer_store_classCallCheck","defReducer","entries","reducer","_this2","__unregister","delete","reducerStore","groupBy","dataModel","reducers","existingDataModel","sFieldArr","dimensions","_ref","group_by_slicedToArray","getFieldArr","reducerObj","measures","defaultReducer","measureName","defAggFn","reducerFn","resolve","getReducerObj","fieldStoreObj","dbName","dimensionArr","measureArr","newDataModel","_ref3","_ref4","rowCount","hash","_","cachedStore","cloneProvider","row","__calculateFieldspace","naturalJoinFilter","commonSchemaArr","retainTuple","union","leftOuterJoin","dataModel1","dataModel2","rightOuterJoin","Field","field_classCallCheck","subtype","description","displayName","Dimension","_cachedDomain","calculateDataDomain","Categorical","Set","domain","add","Temporal","temporal_classCallCheck","temporal_possibleConstructorReturn","__proto__","getPrototypeOf","_cachedMinDiff","sortedData","arrLn","minDiff","POSITIVE_INFINITY","prevDatum","nextDatum","processedCount","_this3","Binned","binsArr","bins","Measure","unit","numberFormat","Continuous","NEGATIVE_INFINITY","FieldParser","CategoricalParser","isInvalid","getInvalidType","trim","TemporalParser","temporal_parser_classCallCheck","temporal_parser_possibleConstructorReturn","_dtf","BinnedParser","matched","parseFloat","ContinuousParser","PartialField","partial_field_classCallCheck","_sanitize","createFields","dataColumn","headers","headersObj","header","createUnitField","default_config","dataFormat","DSVArr","firstRowHeader","columns","columnMajor","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","JSON","stringify","inferColumns","rows","columnSet","column","width","formatDate","getUTCHours","minutes","getUTCMinutes","seconds","getUTCSeconds","milliseconds","getUTCMilliseconds","getUTCFullYear","getUTCMonth","getUTCDate","src_dsv","delimiter","reFormat","DELIMITER","charCodeAt","parseRows","N","I","eof","eol","j","preformatBody","formatValue","formatRow","test","convert","customConverter","formatBody","formatRows","csv","dsv","tsv","DSVStr","fieldSeparator","d3Dsv","FlatJSON","insertionIndex","Auto","converters","resp","updateFields","partialFieldspace","fieldStoreName","_ref2","helper_slicedToArray","collID","partialFieldMap","newFields","coll","createUnitFieldFromPartial","persistDerivations","sourceDm","model","operation","_model$_derivation","criteriaFn","_der
ivation","src_helper_toConsumableArray","op","meta","criteria","persistCurrentDerivation","newDm","_newDm$_ancestorDeriv","_ancestorDerivation","persistAncestorDerivation","selectHelper","selectFn","newRowDiffSet","lastInsertedValue","li","selectorHelperFn","_iteratorNormalCompletion","_didIteratorError","_iteratorError","_step","_iterator","iterator","done","err","return","prepareSelectionData","checker","cloneWithAllFields","clonedDm","clone","getPartialFieldspace","calculateFieldsConfig","filterPropagationModel","propModels","filterByMeasure","fns","propModel","getData","fieldsConfig","getFieldsConfig","fieldsSpace","values","v","def","some","every","propField","valueOf","select","fn","saveChild","cloneWithSelect","selectConfig","cloneConfig","cloned","cloneWithProject","projField","allFields","projectionSet","actualProjField","sanitizeUnitSchema","unitSchema","sanitizeAndValidateSchema","supportedMeasureSubTypes","supportedDimSubTypes","validateUnitSchema","updateData","relation","defaultConfig","converterFn","converter","_converterFn","_converterFn2","formattedData","dataHeader","fieldNameAs","as","resolveFieldName","nameSpace","_partialFieldspace","_dataFormat","applyExistingOperationOnModel","derivations","getDerivations","selectionModel","rejectionModel","derivation","_selectionModel","_rejectionModel","_getDerivationArgumen","params","groupByString","getDerivationArguments","propagateIdentifiers","propModelInf","nonTraversingModel","excludeModels","handlePropagation","_children","child","_applyExistingOperati","_applyExistingOperati2","getPathToRootModel","path","_parent","propagateToAllDataModels","identifiers","rootModels","propagationInf","propagationNameSpace","propagateToSource","propagationSourceId","sourceId","propagateInterpolatedValues","criterias","persistent","actionCriterias","mutableActions","filteredCriteria","entry","action","sourceActionCriterias","actionInf","actionConf","applyOnSource","models","rootModel","propConfig","sourceIdentifiers","rootGroupByModel","groupByModel","inf","propagationModel","filteredModel","getFilteredModel","reverse","Relation","relation_classCallCheck","source","_fieldStoreName","_propagationNameSpace","immutableActions","_fieldspace","joinWith","unionWith","differenceWith","defConfig","oDm","constructor","setParent","fieldConfig","normalizedProjField","relation_toConsumableArray","search","_fieldConfig","fieldObj","removeChild","findIndex","sibling","parent","datamodel_classCallCheck","datamodel_possibleConstructorReturn","_onPropagation","order","withUid","getAllFields","dataGenerated","fieldNames","fmtFieldIdx","elem","fIdx","fmtFn","datumIdx","fieldsArr","rawData","dataInCSVArr","sortedDm","colData","rowsCount","serializedData","rowIdx","colIdx","fieldinst","dependency","replaceVar","depVars","retrieveFn","depFieldIndices","fieldSpec","fs","suppliedFields","computedValues","fieldsData","_createFields","datamodel_slicedToArray","addField","addToNameSpace","isMutableAction","payload","getRootDataModel","find","getRootGroupByModel","sourceNamespace","addToPropNamespace","filterImmutableAction","criteriaModel","propagateImmutableActions","eventName","measureFieldName","binFieldName","_createBinnedFieldDat","measureField","binsCount","_measureField$domain","_measureField$domain2","_slicedToArray","dMin","dMax","ceil","abs","binnedData","createBinnedFieldData","binField","serialize","getSchema","first","last","count","sd","std","Operators","compose","_len5","operations","_key5","currentDM","firstChild","compose_toConsumableArray","dispose","
bin","_len3","_key3","project","_len2","_key2","_len4","_key4","calculateVariable","naturalJoin","fullOuterJoin","version","Stats","enums"],"mappings":"CAAA,SAAAA,EAAAC,GACA,iBAAAC,SAAA,iBAAAC,OACAA,OAAAD,QAAAD,IACA,mBAAAG,eAAAC,IACAD,OAAA,eAAAH,GACA,iBAAAC,QACAA,QAAA,UAAAD,IAEAD,EAAA,UAAAC,IARA,CASCK,OAAA,WACD,mBCTA,IAAAC,EAAA,GAGA,SAAAC,EAAAC,GAGA,GAAAF,EAAAE,GACA,OAAAF,EAAAE,GAAAP,QAGA,IAAAC,EAAAI,EAAAE,GAAA,CACAC,EAAAD,EACAE,GAAA,EACAT,QAAA,IAUA,OANAU,EAAAH,GAAAI,KAAAV,EAAAD,QAAAC,IAAAD,QAAAM,GAGAL,EAAAQ,GAAA,EAGAR,EAAAD,QA0DA,OArDAM,EAAAM,EAAAF,EAGAJ,EAAAO,EAAAR,EAGAC,EAAAQ,EAAA,SAAAd,EAAAe,EAAAC,GACAV,EAAAW,EAAAjB,EAAAe,IACAG,OAAAC,eAAAnB,EAAAe,EAAA,CAA0CK,YAAA,EAAAC,IAAAL,KAK1CV,EAAAgB,EAAA,SAAAtB,GACA,oBAAAuB,eAAAC,aACAN,OAAAC,eAAAnB,EAAAuB,OAAAC,YAAA,CAAwDC,MAAA,WAExDP,OAAAC,eAAAnB,EAAA,cAAiDyB,OAAA,KAQjDnB,EAAAoB,EAAA,SAAAD,EAAAE,GAEA,GADA,EAAAA,IAAAF,EAAAnB,EAAAmB,IACA,EAAAE,EAAA,OAAAF,EACA,KAAAE,GAAA,iBAAAF,QAAAG,WAAA,OAAAH,EACA,IAAAI,EAAAX,OAAAY,OAAA,MAGA,GAFAxB,EAAAgB,EAAAO,GACAX,OAAAC,eAAAU,EAAA,WAAyCT,YAAA,EAAAK,UACzC,EAAAE,GAAA,iBAAAF,EAAA,QAAAM,KAAAN,EAAAnB,EAAAQ,EAAAe,EAAAE,EAAA,SAAAA,GAAgH,OAAAN,EAAAM,IAAqBC,KAAA,KAAAD,IACrI,OAAAF,GAIAvB,EAAA2B,EAAA,SAAAhC,GACA,IAAAe,EAAAf,KAAA2B,WACA,WAA2B,OAAA3B,EAAA,SAC3B,WAAiC,OAAAA,GAEjC,OADAK,EAAAQ,EAAAE,EAAA,IAAAA,GACAA,GAIAV,EAAAW,EAAA,SAAAiB,EAAAC,GAAsD,OAAAjB,OAAAkB,UAAAC,eAAA1B,KAAAuB,EAAAC,IAGtD7B,EAAAgC,EAAA,GAIAhC,IAAAiC,EAAA,u5DClFA,IAAMC,EAAYC,EAAQ,GAE1BxC,EAAOD,QAAUwC,EAAUE,QAAUF,EAAUE,QAAUF,qxBCKzD,IAOeG,EAPI,CACfC,UAAW,WACXC,QAAS,SACTC,QAAS,SACTC,KAAM,QCEKC,EAPU,CACrBC,YAAa,cACbC,SAAU,WACVC,IAAK,MACLC,OAAQ,UCAGC,EAJQ,CACnBC,WAAY,cCKDC,EALG,CACdC,QAAS,UACTC,UAAW,aCGAC,EANO,CAClBC,OAAQ,SACRC,QAAS,UACTC,IAAK,OCQMC,EAXY,CACvBC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,MAAO,QACPC,KAAM,OACNC,MAAO,QACPC,IAAK,OCRT,SAASC,EAAqBC,GAC1B,OAAIA,aAAgBC,KACTD,EAGJ,IAAIC,KAAKD,GASpB,SAASE,EAAKzC,GACV,OAAQA,EAAI,GAAL,IAAgBA,EAAOA,EA8BP,SAAS0C,EAAmBC,GACnDC,KAAKD,OAASA,EACdC,KAAKC,cAAWC,EAChBF,KAAKG,gBAAaD,EAftBE,OAAOC,OAAS,SAAUC,GACtB,OAAOA,EAAKC,QAAQ,2BAA4B,SAkBpDT,EAAkBU,aAAe,IAIjCV,EAAkBW,wBAA0B,CACxCC,KAAM,EACNC,MAAO,EACPC,IAAK,EACLC,KAAM,EACNC,OAAQ,EACRC,OAAQ,EACRC,YAAa,GAUjBlB,EAAkBmB,oBAAsB,SAAUC,GAC9C,OAAO,SAAUC,GACb,IAAIC,EACJ,OAAIC,SAASD,EAAYE,SAASH,EAAK,KAC5BC,EAGJF,IAYfpB,EAAkByB,mBAAqB,SAAUC,EAAON,GACpD,OAAO,SAACC,GACJ,IACIvF,EADAD,SAGJ,IAAKwF,EAAO,OAAOD,EAEnB,IAAMO,EAAON,EAAIO,cAEjB,IAAK/F,EAAI,EAAGC,EAAI4F,EAAMG,OAAQhG,EAAIC,EAAGD,IACjC,GAAI6F,EAAM7F,GAAG+F,gBAAkBD,EAC3B,OAAO9F,EAIf,YAAUuE,IAANvE,EACOuF,EAEJ,OAqBfpB,EAAkB8B,oBAAsB,WACpC,IAAMC,EAAU,CACZC,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,SACA,SACA,UACA,YACA,WACA,SACA,aAGFC,EAAY,CACdF,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,UACA,WACA,QACA,QACA,MACA,OACA,OACA,SACA,YACA,UACA,WACA,aAsPR,MAlPoB,CAChBE,EAAG,CAEC/F,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAGP,OAFUzB,EAAoByB,GAErBmB,WAAWC,aAG5B3G,EAAG,CAECM,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GACP,IACMqB,EADI9C,EAAoByB,GACdmB,WAAa,GAE7B,OAAkB,IAAVE,EAAc,GAAKA,GAAOD,aAG1C9E,EAAG,CAECvB,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCG,EAAG,CAECvG,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCI,EAAG,CAECxG,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAM
YlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACfwB,gBAKvBC,EAAG,CAEC1G,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACZ0B,gBAK1BC,EAAG,CAEC5G,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACjB4B,kBAEHR,aAGlBS,EAAG,CAEC9G,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQC,MAAMmB,KAAK,KAA9B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQC,OACrDO,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQC,MAAMoB,GAAMX,aAGpCa,EAAG,CAEClH,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQE,KAAKkB,KAAK,KAA7B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQE,MACrDM,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQE,KAAKmB,GAAMX,aAGnCc,EAAG,CAECnH,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GAChBmC,UAEHf,aAGnBtG,EAAG,CAECC,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GAChBmC,aAKtBC,EAAG,CAECrH,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUF,MAAMmB,KAAK,KAAhC,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUF,OACvDO,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUF,MAAM0B,GAAQjB,aAGxCmB,EAAG,CAECxH,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUD,KAAKkB,KAAK,KAA/B,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUD,MACvDM,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUD,KAAKyB,GAAQjB,aAGvCxG,EAAG,CAECG,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OALD,SAKSjB,GAAO,OAAOrB,EAAkBmB,qBAAlBnB,CAAwCqB,GAAO,GACrEkB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACdsC,WAEG,KAG3BE,EAAG,CAECzH,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OALD,SAKSjB,GACJ,IAAIyC,SACJ,GAAIzC,EAAK,CACL,IAAMvF,EAAIuF,EAAIQ,OACdR,EAAMA,EAAI0C,UAAUjI,EAAI,EAAGA,GAE/B,IAAIwF,EAAYtB,EAAkBmB,qBAAlBnB,CAAwCqB,GACpD2C,EAAc,IAAIlE,KAClBmE,EAAcC,KAAKC,MAAOH,EAAYI,cAAiB,KAO3D,OAHIxE,EAFJkE,KAAYG,EAAc3C,GAEM8C,cAAgBJ,EAAYI,gBACxDN,MAAYG,EAAc,GAAI3C,GAE3B1B,EAAoBkE,GAAQM,eAEvC7B,UAtBD,SAsBYlB,GACP,IACIgD,EADMzE,EAAoByB,GACjB+C,cAAc3B,WACvB3G,SAOJ,OALIuI,IACAvI,EAAIuI,EAAKxC,OACTwC,EAAOA,EAAKN,UAAUjI,EAAI,EAAGA,IAG1BuI,IAGfC,EAAG,CAEClI,KAAM,IACNgG,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACf+C,cAAc3B,eAgB7CzC,EAAkBuE,oBAAsB,WACpC,IAAMC,EAAcxE,EAAkB8B,sBAEtC,MAAO,CACHf,KAAMyD,EAAYrC,EAClBsC,QAASD,EAAY1I,EACrB4I,eAAgBF,EAAY7G,EAC5BgH,eAAgBH,EAAY7B,EAC5B3B,OAAQwD,EAAY5B,EACpB3B,OAAQuD,EAAY1B,EACpB8B,UAAWJ,EAAYtB,EACvB2B,SAAUL,EAAYlB,EACtBwB,aAAcN,EAAYjB,EAC1BwB,4BAA6BP,EAAYrI,EACzC6I,YAAaR,EAAYf,EACzBwB,WAAYT,EAAYZ,EACxBsB,cAAeV,EAAYvI,EAC3BkJ,WAAYX,EAAYX,EACxBuB,UAAWZ,EAAYF,IAW/BtE,EAAkBqF,cAAgB,WAC9B,IAAMb,EAAcxE,EAAkB8B,sBAChCwD,EAAkB,WAMpB,IALA,IAAIzJ,EAAI,EACJ0J,SACAC,SACE1J,EAAI2J,UAAK5D,OAERhG,EAAIC,EAAGD,IACV0J,oBAAW1J,OAAXuE,EAAAqF,UAAW5J,IACX4J,UAAA5D,QAAShG,OAATuE,EAAAqF,UAAS5J,MACL2J,EAAcD,GAItB,OAAKC,EAEEA,EAAY,GAAGlD,OAAOkD,EAAY,IAFd,MAK/B,MAAO,CACH5E,KAAM,CAAC4D,EAAYX,EAAGW,EAAYF,EAC9BgB,GAEJzE,MAAO,CAAC2D,EAAYf,EAAGe,EAAYZ,EAAGY,EAAYvI,EAC9CqJ,GAEJxE,IAAK,CAAC0D,EAAYtB,EAAGsB,EAAYlB,EAAGkB,EAAYjB,EAAGiB,EAAYrI,EAC3DmJ,GAEJvE,KAAM,CAACyD,EAAYrC,EAAGqC,EAAY1I,EAAG0I,EAAY7G,EAAG6G,EAAY7B,EAC5D,SAAU+C,EAAcC,EAAcC,EAAWC,GAC7C,IAAIL,SACAM,SACAC,SACA1E,SAcJ,OAZIsE,IAAiBG,EAAUF,GAAaC,IACJ,OAAhCC,EAAO,GAAGxD,OAAOwD,EAAO,MACxBC,GAAO,GAGXP,EAAcG,GAEdH,EADOG,GAGOD,EAGbF,GAELnE,EAAMmE,EAAY,GAAGlD,OAAOkD,EAAY,IACpCO,IACA1E,GAAO,IAEJA,GANoB,OASnCL,OAAQ,CAACwD,EAAY5B,EACjB0C,GAEJrE,OAAQ,CAACuD,EAAY1B,EACjBwC,KAUZtF,EAAkBgG,WAAa,SAAU/F,GAQrC,IAPA,IAAMgG,EAAcjG,EAAkBU,aAChC8D,EAAcxE,EAAkB8B,sBAChCoE,EAAgB3J,OAAO4J,KAAK
3B,GAC5B4B,EAAa,GACfvK,SACAwK,UAEIxK,EAAIoE,EAAOqG,QAAQL,EAAapK,EAAI,KAAO,GAC/CwK,EAAcpG,EAAOpE,EAAI,IACmB,IAAxCqK,EAAcI,QAAQD,IAE1BD,EAAWG,KAAK,CACZnE,MAAOvG,EACP2K,MAAOH,IAIf,OAAOD,GASXpG,EAAkByG,SAAW,SAAU5G,EAAMI,GACzC,IAQInE,EARE4K,EAAQ9G,EAAoBC,GAC5BuG,EAAapG,EAAkBgG,WAAW/F,GAC1CuE,EAAcxE,EAAkB8B,sBAClC6E,EAAeC,OAAO3G,GACpBgG,EAAcjG,EAAkBU,aAClC8F,SACAK,SACAhL,SAGJ,IAAKA,EAAI,EAAGC,EAAIsK,EAAWvE,OAAQhG,EAAIC,EAAGD,IAEtCgL,EAAerC,EADfgC,EAAQJ,EAAWvK,GAAG2K,OACYjE,UAAUmE,GAC5CC,EAAeA,EAAalG,QAAQ,IAAIH,OAAO2F,EAAcO,EAAO,KAAMK,GAG9E,OAAOF,GAQX3G,EAAkBvC,UAAUqJ,MAAQ,SAAUC,EAAeC,GACzD,IAAM3B,EAAgBrF,EAAkBqF,gBAClClF,EAAWD,KAAK+G,kBAAkBF,GAClCG,EAAalH,EAAkBW,wBAC/BwG,EAAUH,GAAWA,EAAQG,QAC7BC,EAAa,GACbC,EAAO,GACTC,SACAC,SACAC,SACAnG,SACAxF,SACA4L,SACAC,SACA5L,SACAgI,EAAS,GAEb,IAAKwD,KAAejC,EAChB,GAAK,GAAG3H,eAAe1B,KAAKqJ,EAAeiC,GAA3C,CAMA,IAJAD,EAAKxF,OAAS,EAEd2F,GADAD,EAAiBlC,EAAciC,IACHK,OAAOJ,EAAe1F,OAAS,EAAG,GAAG,GAE5DhG,EAAI,EAAGC,EAAIyL,EAAe1F,OAAQhG,EAAIC,EAAGD,SAI9BuE,KAFZiB,EAAMlB,GADNsH,EAAQF,EAAe1L,IACFO,OAGjBiL,EAAKd,KAAK,MAEVc,EAAKd,KAAK,CAACkB,EAAOpG,IAM1B,GAAI,OAFJqG,EAAcF,EAAWI,MAAM1H,KAAMmH,MAEuBF,EACxD,MAGJC,EAAWF,EAAWI,IAAgBI,EAU1C,OAPIN,EAAWvF,QAAU3B,KAAK2H,gBAAgBT,EAAWvF,QAErDiC,EAAOgE,QAAQV,EAAW,GAAI,EAAG,GAEjCtD,EAAOgE,QAAPF,MAAA9D,EAAkBsD,GAGftD,GAQX9D,EAAkBvC,UAAUwJ,kBAAoB,SAAUF,GACtD,IAYIjL,EAZEmE,EAASC,KAAKD,OACduE,EAAcxE,EAAkB8B,sBAChCmE,EAAcjG,EAAkBU,aAChC0F,EAAapG,EAAkBgG,WAAW/F,GAC1C8H,EAAW,GAEbC,SACAC,SACAC,SACAC,SACAC,SAGAvM,SAEJuM,EAAcxB,OAAO3G,GAErB,IAAMoI,EAAWjC,EAAWkC,IAAI,SAAAC,GAAA,OAAOA,EAAI/B,QACrCgC,EAAmBpC,EAAWvE,OACpC,IAAKhG,EAAI2M,EAAmB,EAAG3M,GAAK,EAAGA,KACnCqM,EAAW9B,EAAWvK,GAAGuG,OAEV,IAAMgG,EAAYvG,OAAS,QAKdzB,IAAxB4H,IACAA,EAAsBI,EAAYvG,QAGtCsG,EAAaC,EAAYrE,UAAUmE,EAAW,EAAGF,GACjDI,EAAcA,EAAYrE,UAAU,EAAGmE,EAAW,GAC9C5H,OAAOC,OAAO4H,GACdC,EAAYrE,UAAUiE,EAAqBI,EAAYvG,QAE3DmG,EAAsBE,GAblBF,EAAsBE,EAgB9B,IAAKrM,EAAI,EAAGA,EAAI2M,EAAkB3M,IAC9BoM,EAAS7B,EAAWvK,GACpBuM,EAAcA,EAAY3H,QAAQwF,EAAcgC,EAAOzB,MAAOhC,EAAYyD,EAAOzB,OAAOnE,WAG5F,IAAMoG,EAAgB1B,EAAc2B,MAAM,IAAIpI,OAAO8H,KAAiB,GAGtE,IAFAK,EAAcE,QAET9M,EAAI,EAAGC,EAAIuM,EAASxG,OAAQhG,EAAIC,EAAGD,IACpCkM,EAASM,EAASxM,IAAM4M,EAAc5M,GAE1C,OAAOkM,GAQX/H,EAAkBvC,UAAUmL,cAAgB,SAAU7B,GAClD,IAAIlH,EAAO,KACX,GAAIgJ,OAAOtH,SAASwF,GAChBlH,EAAO,IAAIC,KAAKiH,QACb,IAAK7G,KAAKD,QAAUH,KAAKgH,MAAMC,GAClClH,EAAO,IAAIC,KAAKiH,OAEf,CACD,IAAM5G,EAAWD,KAAKC,SAAWD,KAAK4G,MAAMC,GACxC5G,EAAS0B,SACT3B,KAAKG,WAAL,IAAAyI,SAAArL,UAAAJ,KAAAuK,MAAsB9H,KAAtB,OAAAiJ,6HAAAC,CAA8B7I,MAC9BN,EAAOK,KAAKG,YAGpB,OAAOR,GAGXG,EAAkBvC,UAAUoK,gBAAkB,SAASoB,GACnD,OAAe,IAARA,GAAa/I,KAAKD,OAAOyI,MAAM,QAAQ7G,QASlD7B,EAAkBvC,UAAUgJ,SAAW,SAAUxG,EAAQ8G,GACrD,IAAI1G,SAQJ,OANI0G,EACA1G,EAAaH,KAAKG,WAAaH,KAAK0I,cAAc7B,IACzC1G,EAAaH,KAAKG,cAC3BA,EAAaH,KAAK0I,cAAc7B,IAG7B/G,EAAkByG,SAASpG,EAAYJ,ICruBnC,IAAAiJ,EAAA,SAACC,GACZ,IAAItN,EAAI,EACR,OAAO,WAAe,QAAAuN,EAAA3D,UAAA5D,OAAXwH,EAAWC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAXF,EAAWE,GAAA9D,UAAA8D,GAClBF,EAAOG,QAAQ,SAACnI,EAAKoI,GACXN,EAAMM,aAAuBH,QAC/BH,EAAMM,GAAcH,MAAMI,KAAK,CAAE7H,OAAQhG,KAE7CsN,EAAMM,GAAYlD,KAAKlF,KAE3BxF,kNCdF8N,EAAe,SACfC,EAAgBrN,OAAOkB,UAAUgF,SACjCoH,EAAc,kBACdC,EAAa,iBAEnB,SAASC,EAAexB,EAAKyB,GAIzB,IAHA,IAAInO,EAAImO,EAAUnI,OACdoI,GAAU,EAEPpO,GAAG,CACN,GAAI0M,IAAQyB,EAAUnO,GAElB,OADAoO,EAASpO,EAGbA,GAAK,EAGT,OAAOoO,EA2GX,SAASC,EAASC,EAAMC,EAAMC,GAE1B,YAAI,IAAOF,EAAP,YAAAG,EAAOH,MAASR,SAAgB,IAAOS,EAAP,YAAAE,EAAOF,MAAST,EACzC,WAGP,IAAOS,EAAP,YAAAE,EAAOF,MAAST,GAAyB,OAATS,EACzBD,SAGP,IAAOA,EAAP,YAAAG,EAAOH,MAASR,IAChBQ,EAAOC,aAAgBd,MAAQ,GAAK,IAnH5C,SAASiB,EAAMJ,EAAMC,EAAMC,EAAWG,EAAQC
,GAC1C,IAAIC,EACAC,EACAC,EACAC,EACAC,EAcJ,GATKL,GAKDD,EAAOjE,KAAK4D,GACZM,EAAOlE,KAAK6D,KALZI,EAAS,CAACL,GACVM,EAAS,CAACL,IAOVA,aAAgBd,MAChB,IAAKoB,EAAO,EAAGA,EAAON,EAAKvI,OAAQ6I,GAAQ,EAAG,CAC1C,IACIC,EAASR,EAAKO,GACdE,EAASR,EAAKM,GAElB,MAAOnH,GACH,eAGA,IAAOqH,EAAP,YAAAN,EAAOM,MAAWjB,EACZU,QAAwBjK,IAAXwK,IACfT,EAAKO,GAAQE,IAIF,OAAXD,SAAmB,IAAOA,EAAP,YAAAL,EAAOK,MAAWhB,IACrCgB,EAASR,EAAKO,GAAQE,aAAkBtB,MAAQ,GAAK,KAG3C,KADdwB,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,SAMrD,IAAKC,KAAQN,EAAM,CACf,IACIO,EAASR,EAAKO,GACdE,EAASR,EAAKM,GAElB,MAAOnH,GACH,SAGJ,GAAe,OAAXqH,SAAmB,IAAOA,EAAP,YAAAN,EAAOM,MAAWjB,GAKrCkB,EAAMjB,EAAc5N,KAAK4O,MACbf,GACO,OAAXc,SAAmB,IAAOA,EAAP,YAAAL,EAAOK,MAAWhB,IACrCgB,EAASR,EAAKO,GAAQ,KAGZ,KADdI,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,IAGxCI,IAAQf,GACE,OAAXa,GAAqBA,aAAkBrB,QACvCqB,EAASR,EAAKO,GAAQ,KAGZ,KADdI,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,IAI7CN,EAAKO,GAAQE,MAGhB,CACD,GAAIP,QAAwBjK,IAAXwK,EACb,SAEJT,EAAKO,GAAQE,GAIzB,OAAOT,EAiBPI,CAAMJ,EAAMC,EAAMC,GACXF,GCnIJ,SAASY,EAAS1J,GACrB,OAAOiI,MAAMyB,QAAQ1J,GA2ClB,IAAM2J,EAAc,wBAAY,IAAIlL,MAAOmL,UAAY/G,KAAKgH,MAAsB,IAAhBhH,KAAKiH,WASvE,SAASC,EAAWC,EAAMC,GAC7B,IAAKP,EAAQM,KAAUN,EAAQO,GAC3B,OAAOD,IAASC,EAGpB,GAAID,EAAKxJ,SAAWyJ,EAAKzJ,OACrB,OAAO,EAGX,IAAK,IAAIhG,EAAI,EAAGA,EAAIwP,EAAKxJ,OAAQhG,IAC7B,GAAIwP,EAAKxP,KAAOyP,EAAKzP,GACjB,OAAO,EAIf,OAAO,EASJ,SAAS0P,EAAalK,GACzB,OAAOA,EASJ,IAAMmK,EAAmB,SAACC,GAC7B,MAnEsB,iBAmETA,EACFzN,EAAWE,QACX6M,EAAQU,IAASV,EAAQU,EAAK,IAC9BzN,EAAWG,QACX4M,EAAQU,KAA0B,IAAhBA,EAAK5J,QAlF/B,SAAmBR,GACtB,OAAOA,IAAQ9E,OAAO8E,GAiF4BqK,CAASD,EAAK,KACrDzN,EAAWC,UAEf,MChDI0N,EApDI,CACfF,KAAM,GAENG,gBAHe,SAGEC,EAAUzP,GACvB,IAAM0P,EAAS1P,GAAQ4O,IA4CvB,OA1CA9K,KAAKuL,KAAKK,GAAU,CAChB1P,KAAM0P,EACNzC,OAAQwC,EAERE,UAJgB,WAKZ,IAAIA,EAAY7L,KAAK8L,iBAQrB,OANKD,IACDA,EAAY7L,KAAK8L,iBAAmB,GACpC9L,KAAKmJ,OAAOG,QAAQ,SAACyC,GACjBF,EAAUE,EAAM7P,QAAU6P,KAG3BF,GAEXG,WAfgB,WAgBZ,IAAIC,EAAgBjM,KAAKkM,eAUzB,OARKD,IACDA,EAAgBjM,KAAKkM,eAAiB,GACtClM,KAAKmJ,OAAOG,QAAQ,SAACyC,GACbA,EAAMI,SAASC,OAAS1N,EAAUC,UAClCsN,EAAcF,EAAM7P,QAAU6P,MAInCE,GAEXI,aA5BgB,WA6BZ,IAAIC,EAAkBtM,KAAKuM,iBAU3B,OARKvM,KAAKuM,mBACND,EAAkBtM,KAAKuM,iBAAmB,GAC1CvM,KAAKmJ,OAAOG,QAAQ,SAACyC,GACbA,EAAMI,SAASC,OAAS1N,EAAUE,YAClC0N,EAAgBP,EAAM7P,QAAU6P,MAIrCO,IAGRtM,KAAKuL,KAAKK,8PCKVY,aA1CX,SAAAA,EAAarL,EAAK4K,gGAAOU,CAAAzM,KAAAwM,GACrBnQ,OAAOC,eAAe0D,KAAM,SAAU,CAClCzD,YAAY,EACZmQ,cAAc,EACdC,UAAU,EACV/P,MAAOuE,IAGXnB,KAAK+L,MAAQA,+CAoBb,OAAOrF,OAAO1G,KAAKpD,yCAUnB,OAAOoD,KAAKpD,oCArBZ,OAAOoD,KAAK4M,gBCxBb,SAASC,EAAoBC,EAAYC,GACxCD,EAAWnL,OAAS,GACDmL,EAAWE,MAAM,KACzB1D,QAAQ,SAAC2D,GAChB,IAAMC,EAAaD,EAAQD,MAAM,KAC3BG,GAAUD,EAAW,GACrBE,IAAQF,EAAW,IAAMA,EAAW,IAC1C,GAAIE,GAAOD,EACP,IAAK,IAAIxR,EAAIwR,EAAOxR,GAAKyR,EAAKzR,GAAK,EAC/BoR,EAASpR,kQCVvB0R,aAqBF,SAAAA,EAAazQ,gGAAO0Q,CAAAtN,KAAAqN,GAChBrN,KAAK4M,OAAShQ,0DAdO2Q,GACrB,OAAKA,EAGElR,OAAOmR,OAAOH,EAAkBI,qBAAsBF,GAFlDF,EAAkBI,4DAsB7B,OAAOzN,KAAK4M,0CAUZ,OAAOlG,OAAO1G,KAAK4M,4CAGNzL,GACb,OAAQA,aAAekM,KAAwBA,EAAkBK,mBAAmBvM,0CAGlEA,GAClB,OAAOA,aAAekM,EAAoBlM,EAAMkM,EAAkBK,mBAAmBvM,YAO7FkM,EAAkBM,KAAO,IAAIN,EAAkB,QAC/CA,EAAkBO,GAAK,IAAIP,EAAkB,MAC7CA,EAAkBQ,IAAM,IAAIR,EAAkB,OAO9CA,EAAkBI,qBAAuB,CACrCK,QAAST,EAAkBO,GAC3BG,IAAKV,EAAkBQ,IACvBG,KAAMX,EAAkBM,KACxBzN,UAAWmN,EAAkBO,IAGlBP,2aC5ETY,EAAkB,SAACC,EAASf,EAAOC,GAIrC,IAHA,IAAMe,EAAU,GACZC,EAAOjB,EAEJiB,EAAOhB,GACVe,EAAQ9H,KAAK+H,GACbA,GAAQF,EAIZ,OAFAC,EAAQ9H,KAAK+H,GAEND,GAGLE,EAAkB,SAACC,EAAc1R,GAOnC,IANA,IAAI2R,EAAU,EACVC,EAA
WF,EAAa3M,OAAS,EACjC8M,SACAjN,SAGG+M,GAAWC,GAAU,CAIxB,GAAI5R,IAFJ4E,EAAQ8M,EADRG,EAASF,EAAUvK,KAAK0K,OAAOF,EAAWD,GAAW,KAGlCpB,OAASvQ,EAAQ4E,EAAM4L,IACtC,OAAO5L,EACA5E,GAAS4E,EAAM4L,IACtBmB,EAAUE,EAAS,EACZ7R,EAAQ4E,EAAM2L,QACrBqB,EAAWC,EAAS,GAI5B,OAAO,MChCJ,IAUME,EAAiB,CAC1BC,OAAQ,SACRC,QAAS,UACTC,QAAS,QACTC,QAAS,UACTC,QAAS,qBACTC,IAAK,MACLC,KAAM,QAGGC,EAAQ,CACjBC,MAAO,QACPC,UAAW,YACXC,WAAY,aACZC,QAAS,UACTC,UAAW,aAGFC,EACJ,MC0BF,MCnDA,SAASC,EAAiBC,EAAKC,GAClC,IAAMC,EAAS,GACTC,EAAS,GASf,OARAH,EAAIxG,OAAOG,QAAQ,SAACyC,GAChB+D,EAAOzJ,KAAK0F,EAAMI,SAASjQ,QAE/B0T,EAAIzG,OAAOG,QAAQ,SAACyC,IAC6B,IAAzC+D,EAAO1J,QAAQ2F,EAAMI,SAASjQ,OAC9B2T,EAAOxJ,KAAK0F,EAAMI,SAASjQ,QAG5B2T,ECRX,SAASE,IAAoB,OAAO,EAY7B,SAASC,EAAcC,EAAKC,EAAKC,GAA+D,IAArDC,EAAqD7K,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,IAAAA,UAAA,GAAxB8K,EAAwB9K,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAb4J,EAAMC,MACtFjD,EAAS,GACTZ,EAAO,GACP+E,EAAqBH,GAAYJ,EACjCQ,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBE,EAAoBH,EAAcrU,KAClCyU,EAAoBF,EAAcvU,KAClCA,EAAUqU,EAAcrU,KAAxB,IAAgCuU,EAAcvU,KAC9C0U,EAAmBlB,EAAgBa,EAAeE,GAExD,GAAIC,IAAsBC,EACtB,MAAM,IAAIE,MAAM,8CA+EpB,OA5EAN,EAAcpH,OAAOG,QAAQ,SAACyC,GAC1B,IAAM+E,EAAY9G,EAAQ,GAAI+B,EAAMI,WACc,IAA9CyE,EAAiBxK,QAAQ0K,EAAU5U,OAAiBkU,IACpDU,EAAU5U,KAAUqU,EAAcrU,KAAlC,IAA0C4U,EAAU5U,MAExDiQ,EAAO9F,KAAKyK,KAEhBL,EAActH,OAAOG,QAAQ,SAACyC,GAC1B,IAAM+E,EAAY9G,EAAQ,GAAI+B,EAAMI,WACc,IAA9CyE,EAAiBxK,QAAQ0K,EAAU5U,MAC9BkU,IACDU,EAAU5U,KAAUuU,EAAcvU,KAAlC,IAA0C4U,EAAU5U,KACpDiQ,EAAO9F,KAAKyK,IAGhB3E,EAAO9F,KAAKyK,KAKpBjE,EAAmBoD,EAAIc,YAAa,SAACpV,GACjC,IAAIqV,GAAW,EACXC,SACJpE,EAAmBqD,EAAIa,YAAa,SAACG,GACjC,IAAMC,EAAQ,GACRC,EAAU,GAChBA,EAAQV,GAAqB,GAC7BU,EAAQT,GAAqB,GAC7BJ,EAAcpH,OAAOG,QAAQ,SAACyC,GAC1BoF,EAAM9K,KAAK0F,EAAMsF,aAAa9F,KAAK5P,IACnCyV,EAAQV,GAAmB3E,EAAM7P,QAAU6P,EAAMsF,aAAa9F,KAAK5P,KAEvE8U,EAActH,OAAOG,QAAQ,SAACyC,IAC+B,IAAnD6E,EAAiBxK,QAAQ2F,EAAMI,SAASjQ,OAAgBkU,GAC1De,EAAM9K,KAAK0F,EAAMsF,aAAa9F,KAAK2F,IAEvCE,EAAQT,GAAmB5E,EAAM7P,QAAU6P,EAAMsF,aAAa9F,KAAK2F,KAGvE,IAIMI,EAAYC,GAAgBH,EAAQV,IACpCc,EAAYD,GAAgBH,EAAQT,IAC1C,GAAIL,EAAmBgB,EAAWE,EALb,kBAAMvB,EAAIwB,gBACV,kBAAMvB,EAAIuB,gBAFb,IAMyE,CACvF,IAAMC,EAAW,GACjBP,EAAM7H,QAAQ,SAACqI,EAASC,GACpBF,EAASvF,EAAOyF,GAAK1V,MAAQyV,IAE7BX,GAAY7B,EAAMC,QAAUiB,EAC5B9E,EAAK0F,GAAeS,GAGpBnG,EAAKlF,KAAKqL,GACVV,GAAW,EACXC,EAActV,QAEf,IAAK0U,IAAalB,EAAME,WAAagB,IAAalB,EAAMG,cAAgB0B,EAAU,CACrF,IAAMU,EAAW,GACb3I,EAAMwH,EAAcpH,OAAOxH,OAAS,EACxCwP,EAAM7H,QAAQ,SAACqI,EAASC,GAEhBF,EAASvF,EAAOyF,GAAK1V,MADrB0V,GAAO7I,EACsB4I,EAGA,OAGrCX,GAAW,EACXC,EAActV,EACd4P,EAAKlF,KAAKqL,QAKf,IAAI/T,GAAU4N,EAAMY,EAAQ,CAAEjQ,SC3GzC,SAAS2V,EAAW7O,EAAGO,GACnB,IAAMuO,KAAQ9O,EACR+O,KAAQxO,EACd,OAAIuO,EAAKC,GACG,EAERD,EAAKC,EACE,EAEJ,EAqEJ,SAASC,EAAWC,GAAyB,IAApBC,EAAoB3M,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAXsM,EAIrC,OAHII,EAAItQ,OAAS,GArBrB,SAASwQ,EAAMF,EAAKG,EAAIC,EAAIH,GACxB,GAAIG,IAAOD,EAAM,OAAOH,EAExB,IAAMK,EAAMF,EAAKpO,KAAK0K,OAAO2D,EAAKD,GAAM,GAKxC,OAJAD,EAAKF,EAAKG,EAAIE,EAAKJ,GACnBC,EAAKF,EAAKK,EAAM,EAAGD,EAAIH,GAzC3B,SAAgBD,EAAKG,EAAIE,EAAKD,EAAIH,GAG9B,IAFA,IAAMK,EAAUN,EACVO,EAAS,GACN7W,EAAIyW,EAAIzW,GAAK0W,EAAI1W,GAAK,EAC3B6W,EAAO7W,GAAK4W,EAAQ5W,GAKxB,IAHA,IAAIqH,EAAIoP,EACJ7O,EAAI+O,EAAM,EAEL3W,EAAIyW,EAAIzW,GAAK0W,EAAI1W,GAAK,EACvBqH,EAAIsP,GACJC,EAAQ5W,GAAK6W,EAAOjP,GACpBA,GAAK,GACEA,EAAI8O,GACXE,EAAQ5W,GAAK6W,EAAOxP,GACpBA,GAAK,GACEkP,EAAOM,EAAOxP,GAAIwP,EAAOjP,KAAO,GACvCgP,EAAQ5W,GAAK6W,EAAOxP,GACpBA,GAAK,IAELuP,EAAQ5W,GAAK6W,EAAOjP,GACpBA,GAAK,GAqBb8G,CAAM4H,EAAKG,EAAIE,EAAKD,EAAIH,GAEjBD,EAcHE,CAAKF,EAAK,EAAGA,EAAItQ,OAAS,EAAGuQ,GAE1BD,0HC3EX,SAASQ,EAAWC,EAAUC,EAAUzQ,GACpC,IAAI
0Q,SACJ,OAAQF,GACR,KAAKlU,EAAeC,WACpB,KAAKN,EAAiBE,SAEduU,EADa,SAAbD,EACU,SAAC3P,EAAGO,GAAJ,OAAUA,EAAErB,GAASc,EAAEd,IAEvB,SAACc,EAAGO,GAAJ,OAAUP,EAAEd,GAASqB,EAAErB,IAErC,MACJ,QACI0Q,EAAU,SAAC5P,EAAGO,GACV,IAAMuO,KAAQ9O,EAAEd,GACV6P,KAAQxO,EAAErB,GAChB,OAAI4P,EAAKC,EACe,SAAbY,EAAsB,GAAK,EAElCb,EAAKC,EACe,SAAbY,GAAuB,EAAI,EAE/B,GAGf,OAAOC,EAUX,SAASC,EAAUtH,EAAMhC,GACrB,IAAMuJ,EAAU,IAAIC,IACdC,EAAc,GAYpB,OAVAzH,EAAKjC,QAAQ,SAAC2J,GACV,IAAMC,EAAWD,EAAM1J,GACnBuJ,EAAQK,IAAID,GACZF,EAAYF,EAAQtW,IAAI0W,IAAW,GAAG7M,KAAK4M,IAE3CD,EAAY3M,KAAK,CAAC6M,EAAU,CAACD,KAC7BH,EAAQM,IAAIF,EAAUF,EAAYrR,OAAS,MAI5CqR,EAYX,SAASK,EAAmBC,EAAcC,EAAcC,GACpD,IAAMnO,EAAM,CACRoO,MAAOH,EAAa,IAQxB,OALAC,EAAaG,OAAO,SAACC,EAAKvF,EAAMwF,GAE5B,OADAD,EAAIvF,GAAQkF,EAAa,GAAGlL,IAAI,SAAA6K,GAAA,OAASA,EAAMO,EAAmBI,GAAK1R,SAChEyR,GACRtO,GAEIA,EA0EJ,SAASwO,EAAapI,EAAYqB,EAAYgH,EAAeC,EAAgBjN,GAKhFA,EAAUzK,OAAOmR,OAAO,GAJL,CACfwG,QAAQ,EACRC,YAAY,GAEwBnN,GAExC,IAAMoN,EAAS,CACX/H,OAAQ,GACRZ,KAAM,GACN4I,KAAM,IAEJH,EAASlN,EAAQkN,OACjBI,EAAaL,GAAkBA,EAAepS,OAAS,EAEvD0S,EAAa,GAiDnB,GA/CgBP,EAAc9G,MAAM,KAE5B1D,QAAQ,SAACgL,GACb,IAAK,IAAI3Y,EAAI,EAAGA,EAAI8P,EAAW9J,OAAQhG,GAAK,EACxC,GAAI8P,EAAW9P,GAAGO,SAAWoY,EAAS,CAClCD,EAAWhO,KAAKoF,EAAW9P,IAC3B,SAMZ0Y,EAAW/K,QAAQ,SAACyC,GAEhBmI,EAAO/H,OAAO9F,KAAK0F,EAAMI,YAGzB6H,GACAE,EAAO/H,OAAO9F,KAAK,CACfnK,KAAM,MACNkQ,KAAM,eAIdS,EAAmBC,EAAY,SAACnR,GAC5BuY,EAAO3I,KAAKlF,KAAK,IACjB,IAAMkO,EAAYL,EAAO3I,KAAK5J,OAAS,EAEvC0S,EAAW/K,QAAQ,SAACyC,EAAOmF,GACvBgD,EAAO3I,KAAKgJ,GAAWrD,EAFf,GAE6BnF,EAAMsF,aAAa9F,KAAK5P,KAE7DqY,IACAE,EAAO3I,KAAKgJ,GAAWF,EAAW1S,QAAUhG,GAGhDuY,EAAOC,KAAK9N,KAAK1K,GAIbyY,GAAcF,EAAO3I,KAAKgJ,GAAWlO,KAAK1K,KAI9CyY,GA7HR,SAAkBI,EAAST,GAOvB,IAPuC,IAC/BxI,EAAiBiJ,EAAjBjJ,KAAMY,EAAWqI,EAAXrI,OACVsI,SACAC,SACAC,SACAhZ,EAAIoY,EAAepS,OAAS,EAEzBhG,GAAK,EAAGA,IACX8Y,EAAYV,EAAepY,GAAG,GAC9B+Y,EAAWX,EAAepY,GAAG,IAC7BgZ,EAAWC,GAAczI,EAAQsI,MXhEf,mBWuEHC,EAEX1C,EAAUzG,EAAM,SAACvI,EAAGO,GAAJ,OAAUmR,EAAS1R,EAAE2R,EAASzS,OAAQqB,EAAEoR,EAASzS,UAC1D2I,EAAQ6J,GAAW,WAC1B,IAAM1B,EAAcH,EAAUtH,EAAMoJ,EAASzS,OACvC2S,EAAYH,EAASA,EAAS/S,OAAS,GACvC4R,EAAemB,EAASI,MAAM,EAAGJ,EAAS/S,OAAS,GACnD6R,EAAqBD,EAAanL,IAAI,SAAA2M,GAAA,OAAKH,GAAczI,EAAQ4I,KAEvE/B,EAAY1J,QAAQ,SAACgK,GACjBA,EAAajN,KAAKgN,EAAmBC,EAAcC,EAAcC,MAGrExB,EAAUgB,EAAa,SAAChQ,EAAGO,GACvB,IAAMxH,EAAIiH,EAAE,GACN5F,EAAImG,EAAE,GACZ,OAAOsR,EAAU9Y,EAAGqB,KAIxBmO,EAAK5J,OAAS,EACdqR,EAAY1J,QAAQ,SAAC2J,GACjB1H,EAAKlF,KAALqB,MAAA6D,EAAAyJ,EAAa/B,EAAM,OAnBG,IAsB1ByB,EAA8C,SAAnChO,OAAOgO,GAAUhT,cAA2B,OAAS,MAChEsQ,EAAUzG,EAAMkH,EAAUkC,EAASvI,KAAMsI,EAAUC,EAASzS,UAIpEsS,EAAQL,KAAO,GACf5I,EAAKjC,QAAQ,SAAC1M,GACV4X,EAAQL,KAAK9N,KAAKzJ,EAAMqY,SA6ExBC,CAAShB,EAAQH,GAGjBjN,EAAQmN,WAAY,CACpB,IAAMkB,EAAU/L,qBAASA,MAAM8K,EAAO/H,OAAOxK,UAASyG,IAAI,iBAAM,KAChE8L,EAAO3I,KAAKjC,QAAQ,SAAC6H,GACjBA,EAAM7H,QAAQ,SAACiC,EAAM5P,GACjBwZ,EAAQxZ,GAAG0K,KAAKkF,OAGxB2I,EAAO3I,KAAO4J,EAGlB,OAAOjB,EC1NJ,SAASkB,EAAYnF,EAAKC,GAC7B,IAAMmF,EAAY,GACZlJ,EAAS,GACTmJ,EAAgB,GAChB/J,EAAO,GACPgF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpB+E,EAAwBhF,EAAc1E,YACtC2J,EAAwB/E,EAAc5E,YACtC3P,EAAUqU,EAAcrU,KAAxB,UAAsCuU,EAAcvU,KAG1D,IAAKgP,EAAW+E,EAAIwF,eAAezI,MAAM,KAAKmF,OAAQjC,EAAIuF,eAAezI,MAAM,KAAKmF,QAChF,OAAO,KAiBX,SAASuD,EAAkBC,EAAI9J,EAAW+J,GACtC/I,EAAmB8I,EAAG5E,YAAa,SAACpV,GAChC,IAAMwV,EAAQ,GACV0E,EAAW,GACfP,EAAchM,QAAQ,SAACwM,GACnB,IAAMlZ,EAAQiP,EAAUiK,GAAYzE,aAAa9F,KAAK5P,GACtDka,OAAgBjZ,EAChBuU,EAAM2E,GAAclZ,IAEnByY,EAAUQ,KACPD,GAAWrK,EAAKlF,KAAK8K,GACzBkE,EAAUQ,IAAY,KASlC,OAjCC5F,EAAIwF,eAAezI,MAAM,KAAM1D,QAAQ,SAACmL,GACrC,IAAM1I,EAAQwJ,EAAsBd,GACpCtI,EAAO9F,KAAK2D,EAAQ,GAAI+B,EAAMI,WAC9BmJ,EAAcj
P,KAAK0F,EAAMI,SAASjQ,QA2BtCwZ,EAAkBxF,EAAKsF,GAAuB,GAC9CE,EAAkBzF,EAAKsF,GAAuB,GAEvC,IAAI5X,GAAU4N,EAAMY,EAAQ,CAAEjQ,8PC5DjCgD,GAAgDD,EAAhDC,IAAKC,GAA2CF,EAA3CE,IAAKG,GAAsCL,EAAtCK,MAAOC,GAA+BN,EAA/BM,KAAMC,GAAyBP,EAAzBO,MAAOC,GAAkBR,EAAlBQ,IAAKL,GAAaH,EAAbG,IAAKC,GAAQJ,EAARI,IAEhD,SAAS0W,GAAkB9D,GACvB,OAAOA,EAAI+D,OAAO,SAAAxL,GAAA,QAAUA,aAAgB6C,KAShD,SAAS4I,GAAKhE,GACV,GAAIpH,EAAQoH,MAAUA,EAAI,aAAc7I,OAAQ,CAC5C,IAAM8M,EAAiBH,GAAkB9D,GAIzC,OAHiBiE,EAAevU,OACZuU,EAAexC,OAAO,SAACC,EAAKwC,GAAN,OAAexC,EAAMwC,GAAM,GAC/C9I,EAAkBM,KAG5C,OAAON,EAAkBM,KAU7B,SAASyI,GAAKnE,GACV,GAAIpH,EAAQoH,MAAUA,EAAI,aAAc7I,OAAQ,CAC5C,IAAMiN,EAAWJ,GAAIhE,GACflJ,EAAMkJ,EAAItQ,QAAU,EAC1B,OAAQgH,OAAO2N,MAAMD,IAAaA,aAAoBhJ,EAC7CA,EAAkBM,KAAO0I,EAAWtN,EAEjD,OAAOsE,EAAkBM,KAgG7B,IAAM4I,WACDrX,GAAM+W,IADLO,EAAAC,EAEDtX,GAAMiX,IAFLI,EAAAC,EAGDrX,GAzFL,SAAc6S,GACV,GAAIpH,EAAQoH,MAAUA,EAAI,aAAc7I,OAAQ,CAE5C,IAAMsN,EAAiBX,GAAkB9D,GAEzC,OAAQyE,EAAe/U,OAAUqC,KAAK2S,IAALjP,MAAA1D,KAAA4S,GAAYF,IAAkBrJ,EAAkBM,KAErF,OAAON,EAAkBM,OA+EvB6I,EAAAC,EAIDpX,GAzEL,SAAc4S,GACV,GAAIpH,EAAQoH,MAAUA,EAAI,aAAc7I,OAAQ,CAE5C,IAAMsN,EAAiBX,GAAkB9D,GAEzC,OAAQyE,EAAe/U,OAAUqC,KAAK6S,IAALnP,MAAA1D,KAAA4S,GAAYF,IAAkBrJ,EAAkBM,KAErF,OAAON,EAAkBM,OA8DvB6I,EAAAC,EAKDnX,GAzDL,SAAgB2S,GACZ,OAAOA,EAAI,KAmDTuE,EAAAC,EAMDlX,GA/CL,SAAe0S,GACX,OAAOA,EAAIA,EAAItQ,OAAS,KAwCtB6U,EAAAC,EAODjX,GArCL,SAAgByS,GACZ,OAAIpH,EAAQoH,GACDA,EAAItQ,OAER0L,EAAkBM,OA0BvB6I,EAAAC,EAQDhX,GAbL,SAAcwS,GACV,OAAOjO,KAAK8S,KAbhB,SAAmB7E,GACf,IAAI8E,EAAOX,GAAInE,GACf,OAAOmE,GAAInE,EAAI7J,IAAI,SAAA4O,GAAA,OAAAhT,KAAAiT,IAAQD,EAAMD,EAAS,MAWzBG,CAASjF,MAIxBwE,GAWAU,GAAqBjY,6PC1IrBkY,cACF,SAAAA,IAAe,IAAAC,EAAArX,kGAAAsX,CAAAtX,KAAAoX,GACXpX,KAAKiJ,MAAQ,IAAI8J,IACjB/S,KAAKiJ,MAAMmK,IAAI,aAAcmE,IAE7Blb,OAAOmb,QAAQjB,IAAQjN,QAAQ,SAACpM,GAC5Bma,EAAKpO,MAAMmK,IAAIlW,EAAI,GAAIA,EAAI,0DAc/B,IAAKqI,UAAO5D,OACR,OAAO3B,KAAKiJ,MAAMzM,IAAI,cAG1B,IAAIib,0CAEJ,GAAuB,mBAAZA,EACPzX,KAAKiJ,MAAMmK,IAAI,aAAcqE,OAC1B,CAEH,GADAA,EAAU/Q,OAAO+Q,IAC6B,IAA1Cpb,OAAO4J,KAAKsQ,IAAQnQ,QAAQqR,GAG5B,MAAM,IAAI5G,MAAJ,WAAqB4G,EAArB,0BAFNzX,KAAKiJ,MAAMmK,IAAI,aAAcmD,GAAOkB,IAK5C,OAAOzX,sCAmCD9D,EAAMub,GAAS,IAAAC,EAAA1X,KACrB,GAAuB,mBAAZyX,EACP,MAAM,IAAI5G,MAAM,gCAMpB,OAHA3U,EAAOwK,OAAOxK,GACd8D,KAAKiJ,MAAMmK,IAAIlX,EAAMub,GAEd,WAAQC,EAAKC,aAAazb,yCAGvBA,GACN8D,KAAKiJ,MAAMkK,IAAIjX,IACf8D,KAAKiJ,MAAM2O,OAAO1b,mCAIjBA,GACL,OAAIA,aAAgB0M,SACT1M,EAEJ8D,KAAKiJ,MAAMzM,IAAIN,YAgBf2b,GAZO,WAClB,IAAI5O,EAAQ,KAQZ,OALkB,OAAVA,IACAA,EAAQ,IAAImO,IAETnO,EAPO,uaCrCtB,SAAS6O,GAASC,EAAWpM,EAAUqM,EAAUC,GAC7C,IAAMC,EAxDV,SAAsBH,EAAWpM,GAC7B,IAAMkE,EAAS,GAETsI,EADaJ,EAAUvH,gBACCnE,eAY9B,OAVAhQ,OAAOmb,QAAQW,GAAY7O,QAAQ,SAAA8O,GAAW,IAATlb,EAASmb,GAAAD,EAAA,MACtCzM,GAAYA,EAAShK,QACU,IAA3BgK,EAASvF,QAAQlJ,IACjB2S,EAAOxJ,KAAKnJ,GAGhB2S,EAAOxJ,KAAKnJ,KAIb2S,EAyCWyI,CAAYP,EAAWpM,GACnC4M,EAhCV,SAAwBR,GAA0B,IAAfC,EAAezS,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAJ,GACpC2O,EAAS,GAETsE,EADaT,EAAUvH,gBACDxE,aACtBuL,EAAaM,GAAaY,iBAchC,OAZApc,OAAO4J,KAAKuS,GAAUlP,QAAQ,SAACoP,GACU,iBAA1BV,EAASU,KAChBV,EAASU,GAAeF,EAASE,GAAaC,YAElD,IAAMC,EAAYf,GAAagB,QAAQb,EAASU,IAC5CE,EACA1E,EAAOwE,GAAeE,GAEtB1E,EAAOwE,GAAenB,EACtBS,EAASU,GAAevB,MAGzBjD,EAcY4E,CAAcf,EAAWC,GACtCvM,EAAasM,EAAUvH,gBACvBuI,EAAgBtN,EAAWI,YAC3BmN,EAASvN,EAAWvP,KACpB+c,EAAe,GACfC,EAAa,GACb/M,EAAS,GACT2G,EAAU,GACVvH,EAAO,GACT4N,SAGJ9c,OAAOmb,QAAQuB,GAAezP,QAAQ,SAAA8P,GAAkB,IAAAC,EAAAhB,GAAAe,EAAA,GAAhBlc,EAAgBmc,EAAA,GAAXzc,EAAWyc,EAAA,GACpD,IAAgC,IAA5BnB,EAAU9R,QAAQlJ,IAAeqb,EAAWrb,GAG5C,OAFAiP,EAAO9F,KAAK2D,EAAQ,GAAIpN,EAAMuP,WAEtBvP,EAAMuP,SAASC,MACvB,KAAK1N,EAAUC,QACXua,EAAW7S,KAAKnJ,GAChB,MA
CJ,QACA,KAAKwB,EAAUE,UACXqa,EAAa5S,KAAKnJ,MAK9B,IAAIoc,EAAW,EACfzM,EAAmBkL,EAAUhH,YAAa,SAACpV,GACvC,IAAI4d,EAAO,GACXN,EAAa3P,QAAQ,SAACkQ,GAClBD,EAAUA,EAAV,IAAkBR,EAAcS,GAAGnI,aAAa9F,KAAK5P,UAEnCuE,IAAlB4S,EAAQyG,IACRzG,EAAQyG,GAAQD,EAChB/N,EAAKlF,KAAK,IACV4S,EAAa3P,QAAQ,SAACkQ,GAClBjO,EAAK+N,GAAUE,GAAKT,EAAcS,GAAGnI,aAAa9F,KAAK5P,KAE3Dud,EAAW5P,QAAQ,SAACkQ,GAChBjO,EAAK+N,GAAUE,GAAK,CAACT,EAAcS,GAAGnI,aAAa9F,KAAK5P,MAE5D2d,GAAY,GAEZJ,EAAW5P,QAAQ,SAACkQ,GAChBjO,EAAKuH,EAAQyG,IAAOC,GAAGnT,KAAK0S,EAAcS,GAAGnI,aAAa9F,KAAK5P,QAM3E,IAAI8d,EAAc,GACdC,EAAgB,kBAAM3B,EAAUtG,gBAcpC,OAbAlG,EAAKjC,QAAQ,SAACqQ,GACV,IAAMxI,EAAQwI,EACdT,EAAW5P,QAAQ,SAACkQ,GAChBrI,EAAMqI,GAAKjB,EAAWiB,GAAGG,EAAIH,GAAIE,EAAeD,OAGpDxB,GACAA,EAAkB2B,wBAClBT,EAAelB,GAGfkB,EAAe,IAAIxb,GAAU4N,EAAMY,EAAQ,CAAEjQ,KAAM8c,IAEhDG,EC9HJ,SAASU,GAAmB5J,EAAKC,GACpC,IAIM4J,EAAkBpK,EAJFO,EAAIO,gBACJN,EAAIM,iBAK1B,OAAO,SAACc,EAAWE,GACf,IAAIuI,GAAc,EASlB,OARAD,EAAgBxQ,QAAQ,SAACmL,GAGjBsF,IAFAzI,EAAUmD,GAAW7X,QACrB4U,EAAUiD,GAAW7X,QAASmd,KAM/BA,GCjBR,SAASC,GAAO/J,EAAKC,GACxB,IAAMmF,EAAY,GACZlJ,EAAS,GACTmJ,EAAgB,GAChB/J,EAAO,GACPgF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpB+E,EAAwBhF,EAAc1E,YACtC2J,EAAwB/E,EAAc5E,YACtC3P,EAAUqU,EAAcrU,KAAxB,UAAsCuU,EAAcvU,KAG1D,IAAKgP,EAAW+E,EAAIwF,eAAezI,MAAM,KAAKmF,OAAQjC,EAAIuF,eAAezI,MAAM,KAAKmF,QAChF,OAAO,KAgBX,SAASuD,EAAmBC,EAAI9J,GAC5BgB,EAAmB8I,EAAG5E,YAAa,SAACpV,GAChC,IAAMwV,EAAQ,GACV0E,EAAW,GACfP,EAAchM,QAAQ,SAACwM,GACnB,IAAMlZ,EAAQiP,EAAUiK,GAAYzE,aAAa9F,KAAK5P,GACtDka,OAAgBjZ,EAChBuU,EAAM2E,GAAclZ,IAEnByY,EAAUQ,KACXtK,EAAKlF,KAAK8K,GACVkE,EAAUQ,IAAY,KASlC,OAhCC5F,EAAIwF,eAAezI,MAAM,KAAM1D,QAAQ,SAACmL,GACrC,IAAM1I,EAAQwJ,EAAsBd,GACpCtI,EAAO9F,KAAK2D,EAAQ,GAAI+B,EAAMI,WAC9BmJ,EAAcjP,KAAK0F,EAAMI,SAASjQ,QA0BtCwZ,EAAkBzF,EAAKsF,GACvBG,EAAkBxF,EAAKsF,GAEhB,IAAI7X,GAAU4N,EAAMY,EAAQ,CAAEjQ,SCvDlC,SAAS+d,GAAeC,EAAYC,EAAYhK,GACnD,OAAOH,EAAakK,EAAYC,EAAYhK,GAAU,EAAOhB,EAAME,WAGhE,SAAS+K,GAAgBF,EAAYC,EAAYhK,GACpD,OAAOH,EAAamK,EAAYD,EAAY/J,GAAU,EAAOhB,EAAMG,0QCWlD+K,cAQjB,SAAAA,EAAahJ,EAAcvE,gGAAYwN,CAAAta,KAAAqa,GACnCra,KAAKqR,aAAeA,EACpBrR,KAAK8M,WAAaA,8CAUlB,MAAM,IAAI+D,MAAM,wDAUhB,OAAO7Q,KAAKqR,aAAalF,sCAUzB,OAAOnM,KAAKqR,aAAanV,oCAUzB,OAAO8D,KAAKqR,aAAalF,OAAOC,uCAUhC,OAAOpM,KAAKqR,aAAalF,OAAOoO,8CAUhC,OAAOva,KAAKqR,aAAalF,OAAOqO,kDAUhC,OAAOxa,KAAKqR,aAAalF,OAAOsO,aAAeza,KAAKqR,aAAalF,OAAOjQ,oCASpE,IAAAmb,EAAArX,KACEuL,EAAO,GAIb,OAHAsB,EAAmB7M,KAAK8M,WAAY,SAACnR,GACjC4P,EAAKlF,KAAKgR,EAAKhG,aAAa9F,KAAK5P,MAE9B4P,0CAUP,MAAM,IAAIsF,MAAM,0RCpHH6J,irBAAkBL,yCAY/B,OAHKra,KAAK2a,gBACN3a,KAAK2a,cAAgB3a,KAAK4a,uBAEvB5a,KAAK2a,4DAUZ,MAAM,IAAI9J,MAAM,+DAWhB,OAAO7Q,KAAKuL,0QChCCsP,irBAAoBH,0CASjC,OAAOvc,EAAiBC,0DAUL,IAAAsZ,EAAA1X,KACbuZ,EAAO,IAAIuB,IACXC,EAAS,GAUf,OAPAlO,EAAmB7M,KAAK8M,WAAY,SAACnR,GACjC,IAAMsX,EAAQyE,EAAKrG,aAAa9F,KAAK5P,GAChC4d,EAAKpG,IAAIF,KACVsG,EAAKyB,IAAI/H,GACT8H,EAAO1U,KAAK4M,MAGb8H,qQC7BME,eAQjB,SAAAA,EAAa5J,EAAcvE,gGAAYoO,CAAAlb,KAAAib,GAAA,IAAA5D,mKAAA8D,CAAAnb,MAAAib,EAAAG,WAAA/e,OAAAgf,eAAAJ,IAAAnf,KAAAkE,KAC7BqR,EAAcvE,IADe,OAGnCuK,EAAKiE,eAAiB,KAHajE,qUARLqD,sDAqBX,IAAAhD,EAAA1X,KACbuZ,EAAO,IAAIuB,IACXC,EAAS,GAYf,OARAlO,EAAmB7M,KAAK8M,WAAY,SAACnR,GACjC,IAAMsX,EAAQyE,EAAKrG,aAAa9F,KAAK5P,GAChC4d,EAAKpG,IAAIF,KACVsG,EAAKyB,IAAI/H,GACT8H,EAAO1U,KAAK4M,MAIb8H,yDAWP,GAAI/a,KAAKsb,eACL,OAAOtb,KAAKsb,eAUhB,IAPA,IAAMC,EAAavb,KAAKuL,OAAOyK,OAAO,SAAAxL,GAAA,QAAUA,aAAgB6C,KAAoB8E,KAAK,SAACnP,EAAGO,GAAJ,OAAUP,EAAIO,IACjGiY,EAAQD,EAAW5Z,OACrB8Z,EAAU9S,OAAO+S,kBACjBC,SACAC,SACAC,EAAiB,EAEZlgB,EAAI,EAAGA,EAAI6f,EAAO7f,IACvBggB,EAAYJ,EAAW5f,EAAI,IAC3BigB,EAAYL,EAAW5f,MAELggB,IAIlBF,EAAUzX,KAAK2S,IAAI8E,EAA
SG,EAAYL,EAAW5f,EAAI,IACvDkgB,KAQJ,OALKA,IACDJ,EAAU,MAEdzb,KAAKsb,eAAiBG,EAEfzb,KAAKsb,gDAUZ,OAAOtb,KAAKqR,aAAalF,OAAOpM,+CAUnB,IAAA+b,EAAA9b,KACPuL,EAAO,GASb,OARAsB,EAAmB7M,KAAK8M,WAAY,SAACnR,GACjC,IAAMsX,EAAQ6I,EAAKzK,aAAa9F,KAAK5P,GACjCsX,aAAiB5F,EACjB9B,EAAKlF,KAAK4M,GAEV1H,EAAKlF,KAAKvG,EAAkByG,SAAS0M,EAAO6I,EAAK/b,aAGlDwL,qQC3GMwQ,irBAAerB,sDAS5B,IAAMsB,EAAUhc,KAAKqR,aAAalF,OAAO8P,KACzC,MAAO,CAACD,EAAQ,GAAIA,EAAQA,EAAQra,OAAS,mCAU7C,OAAO3B,KAAKqR,aAAalF,OAAO8P,wQClBnBC,irBAAgB7B,yCAY7B,OAHKra,KAAK2a,gBACN3a,KAAK2a,cAAgB3a,KAAK4a,uBAEvB5a,KAAK2a,6CAUZ,OAAO3a,KAAKqR,aAAalF,OAAOgQ,wCAUhC,OAAOnc,KAAKqR,aAAalF,OAAOwM,UAAYxB,0CAShC,IACJiF,EAAiBpc,KAAKqR,aAAalF,OAAnCiQ,aACR,OAAOA,aAAwBxT,SAAWwT,EAAe/Q,gDAUzD,MAAM,IAAIwF,MAAM,+DAWhB,OAAO7Q,KAAKuL,0QC/DC8Q,irBAAmBH,0CAShC,OAAO1d,EAAeC,yDAUH,IAAAiZ,EAAA1X,KACf2W,EAAMhO,OAAO+S,kBACb7E,EAAMlO,OAAO2T,kBAiBjB,OAdAzP,EAAmB7M,KAAK8M,WAAY,SAACnR,GACjC,IAAMsX,EAAQyE,EAAKrG,aAAa9F,KAAK5P,GACjCsX,aAAiB5F,IAIjB4F,EAAQ0D,IACRA,EAAM1D,GAENA,EAAQ4D,IACRA,EAAM5D,MAIP,CAAC0D,EAAKE,sQC5CA0F,4KAQb,MAAM,IAAI1L,MAAM,0RCJH2L,irBAA0BD,sCAQpCpb,GAQH,OALKkM,EAAkBoP,UAAUtb,GAGpBkM,EAAkBqP,eAAevb,GAFjCuF,OAAOvF,GAAKwb,0QCXZC,eAOjB,SAAAA,EAAazQ,gGAAQ0Q,CAAA7c,KAAA4c,GAAA,IAAAvF,mKAAAyF,CAAA9c,MAAA4c,EAAAxB,WAAA/e,OAAAgf,eAAAuB,IAAA9gB,KAAAkE,OAAA,OAEjBqX,EAAKlL,OAASA,EACdkL,EAAK0F,KAAO,IAAIjd,EAAkBuX,EAAKlL,OAAOpM,QAH7BsX,qUAPmBkF,sCAoBjCpb,GACH,IAAIyC,SAEJ,GAAKyJ,EAAkBoP,UAAUtb,GAI7ByC,EAASyJ,EAAkBqP,eAAevb,OAJP,CACnC,IAAIhB,EAAaH,KAAK+c,KAAKrU,cAAcvH,GACzCyC,EAASzD,EAAaA,EAAW4K,UAAYsC,EAAkBO,GAInE,OAAOhK,qQC9BMoZ,irBAAqBT,sCAQ/Bpb,GAEHA,EAAMuF,OAAOvF,GACb,IAAIyC,SAEJ,GAAKyJ,EAAkBoP,UAAUtb,GAK7ByC,EAASyJ,EAAkBqP,eAAevb,OALP,CACnC,IAAI8b,EAAU9b,EAAIqH,MALR,2DAMV5E,EAASqZ,EAAatU,OAAOuU,WAAWD,EAAQ,IAAvC,IAA8CtU,OAAOuU,WAAWD,EAAQ,IAC9D5P,EAAkBO,GAIzC,OAAOhK,qQCpBMuZ,irBAAyBZ,sCAQnCpb,GACH,IAAIyC,SAEJ,GAAKyJ,EAAkBoP,UAAUtb,GAI7ByC,EAASyJ,EAAkBqP,eAAevb,OAJP,CACnC,IAAIC,EAAY8b,WAAW/b,EAAK,IAChCyC,EAAS+E,OAAO2N,MAAMlV,GAAaiM,EAAkBO,GAAKxM,EAI9D,OAAOwC,qQCnBMwZ,cAUjB,SAAAA,EAAalhB,EAAMqP,EAAMY,EAAQ/J,gGAAQib,CAAArd,KAAAod,GACrCpd,KAAK9D,KAAOA,EACZ8D,KAAKmM,OAASA,EACdnM,KAAKoC,OAASA,EACdpC,KAAKuL,KAAOvL,KAAKsd,UAAU/R,gDAUpBA,GAAM,IAAA8L,EAAArX,KACb,OAAOuL,EAAKnD,IAAI,SAAA6K,GAAA,OAASoE,EAAKjV,OAAOwE,MAAMqM,cCiE5C,SAASsK,GAAaC,EAAYrR,EAAQsR,GAC7C,IAAMC,EAAa,GAUnB,OARMD,GAAWA,EAAQ9b,SACrB8b,EAAUtR,EAAO/D,IAAI,SAAAoC,GAAA,OAAQA,EAAKtO,QAGtCuhB,EAAQnU,QAAQ,SAACqU,EAAQhiB,GACrB+hB,EAAWC,GAAUhiB,IAGlBwQ,EAAO/D,IAAI,SAAAoC,GAAA,OAzFtB,SAAyBe,EAAMY,GAC3BZ,EAAOA,GAAQ,GACf,IAAI8F,SAEJ,OAAQlF,EAAOC,MACf,KAAK1N,EAAUC,QACX,OAAQwN,EAAOoO,SACf,KAAK/b,EAAeC,WAGpB,QAEI,OADA4S,EAAe,IAAI+L,GAAajR,EAAOjQ,KAAMqP,EAAMY,EAAQ,IAAIgR,IACxD,IAAId,GAAWhL,EAAf,MAAkC9F,EAAK5J,OAAS,IAE/D,KAAKjD,EAAUE,UACX,OAAQuN,EAAOoO,SACf,KAAKpc,EAAiBC,YAElB,OADAiT,EAAe,IAAI+L,GAAajR,EAAOjQ,KAAMqP,EAAMY,EAAQ,IAAIqQ,IACxD,IAAI3B,GAAYxJ,EAAhB,MAAmC9F,EAAK5J,OAAS,IAC5D,KAAKxD,EAAiBE,SAElB,OADAgT,EAAe,IAAI+L,GAAajR,EAAOjQ,KAAMqP,EAAMY,EAAQ,IAAIyQ,GAAezQ,IACvE,IAAI8O,GAAS5J,EAAb,MAAgC9F,EAAK5J,OAAS,IACzD,KAAKxD,EAAiBI,OAElB,OADA8S,EAAe,IAAI+L,GAAajR,EAAOjQ,KAAMqP,EAAMY,EAAQ,IAAI6Q,IACxD,IAAIjB,GAAO1K,EAAX,MAA8B9F,EAAK5J,OAAS,IACvD,QAEI,OADA0P,EAAe,IAAI+L,GAAajR,EAAOjQ,KAAMqP,EAAMY,EAAQ,IAAIqQ,IACxD,IAAI3B,GAAYxJ,EAAhB,MAAmC9F,EAAK5J,OAAS,IAEhE,QAEI,OADA0P,EAAe,IAAI+L,GAAajR,EAAOjQ,KAAMqP,EAAMY,EAAQ,IAAIqQ,IACxD,IAAI3B,GAAYxJ,EAAhB,MAAmC9F,EAAK5J,OAAS,KA0DlCic,CAAgBJ,EAAWE,EAAWlT,EAAKtO,OAAQsO,KC3GlE,IAAAqT,GAAA,CACXC,WAAYhgB,EAAWI,MCuCZ6f,OAvBf,SAAiB9L,EAAKnL,GAIlBA,EAAUzK,OAAOmR,OAAO,GAHF,CAClBwQ,gBAAgB,GAEuBlX,GAE3C,IAAI6W,SA
CEM,EAAU,GACV5X,EAAO6X,EAAYD,GAYzB,OAPIN,EAHA7W,EAAQkX,eAGC/L,EAAIxK,OAAO,EAAG,GAAG,GAEjB,GAGbwK,EAAI3I,QAAQ,SAAAyC,GAAA,OAAS1F,sIAAQ0F,MAEtB,CAAC4R,EAAQM,ICvChBE,GAAM,GACNC,GAAM,GACNC,GAAQ,GACRC,GAAU,GACVC,GAAS,GAEb,SAASC,GAAgBP,GACvB,OAAO,IAAIrV,SAAS,IAAK,WAAaqV,EAAQ7V,IAAI,SAASlM,EAAMP,GAC/D,OAAO8iB,KAAKC,UAAUxiB,GAAQ,OAASP,EAAI,MAC1CsH,KAAK,KAAO,KAWjB,SAAS0b,GAAaC,GACpB,IAAIC,EAAYxiB,OAAOY,OAAO,MAC1BghB,EAAU,GAUd,OARAW,EAAKtV,QAAQ,SAASqQ,GACpB,IAAK,IAAImF,KAAUnF,EACXmF,KAAUD,GACdZ,EAAQ5X,KAAKwY,EAAUC,GAAUA,KAKhCb,EAGT,SAASpe,GAAIjD,EAAOmiB,GAClB,IAAIrhB,EAAId,EAAQ,GAAI+E,EAASjE,EAAEiE,OAC/B,OAAOA,EAASod,EAAQ,IAAI3V,MAAM2V,EAAQpd,EAAS,GAAGsB,KAAK,GAAKvF,EAAIA,EAStE,SAASshB,GAAWrf,GAClB,IAPkBwE,EAOd3B,EAAQ7C,EAAKsf,cACbC,EAAUvf,EAAKwf,gBACfC,EAAUzf,EAAK0f,gBACfC,EAAe3f,EAAK4f,qBACxB,OAAOjJ,MAAM3W,GAAQ,iBAXHwE,EAYDxE,EAAK6f,kBAXR,EAAI,IAAM3f,IAAKsE,EAAM,GAC/BA,EAAO,KAAO,IAAMtE,GAAIsE,EAAM,GAC9BtE,GAAIsE,EAAM,IAS+B,IAAMtE,GAAIF,EAAK8f,cAAgB,EAAG,GAAK,IAAM5f,GAAIF,EAAK+f,aAAc,IAC1GJ,EAAe,IAAMzf,GAAI2C,EAAO,GAAK,IAAM3C,GAAIqf,EAAS,GAAK,IAAMrf,GAAIuf,EAAS,GAAK,IAAMvf,GAAIyf,EAAc,GAAK,IACnHF,EAAU,IAAMvf,GAAI2C,EAAO,GAAK,IAAM3C,GAAIqf,EAAS,GAAK,IAAMrf,GAAIuf,EAAS,GAAK,IAChFF,GAAW1c,EAAQ,IAAM3C,GAAI2C,EAAO,GAAK,IAAM3C,GAAIqf,EAAS,GAAK,IACjE,IAGO,IAAAS,GAAA,SAASC,GACtB,IAAIC,EAAW,IAAIzf,OAAO,KAAQwf,EAAY,SAC1CE,EAAYF,EAAUG,WAAW,GAWrC,SAASC,EAAU1f,EAAMyU,GACvB,IAIIlY,EAJA+hB,EAAO,GACPqB,EAAI3f,EAAKqB,OACTue,EAAI,EACJ9iB,EAAI,EAEJ+iB,EAAMF,GAAK,EACXG,GAAM,EAMV,SAAS9Z,IACP,GAAI6Z,EAAK,OAAO/B,GAChB,GAAIgC,EAAK,OAAOA,GAAM,EAAOjC,GAG7B,IAAIxiB,EAAUK,EAAPqkB,EAAIH,EACX,GAAI5f,EAAKyf,WAAWM,KAAOhC,GAAO,CAChC,KAAO6B,IAAMD,GAAK3f,EAAKyf,WAAWG,KAAO7B,IAAS/d,EAAKyf,aAAaG,KAAO7B,KAI3E,OAHK1iB,EAAIukB,IAAMD,EAAGE,GAAM,GACdnkB,EAAIsE,EAAKyf,WAAWG,QAAU5B,GAAS8B,GAAM,EAC9CpkB,IAAMuiB,KAAU6B,GAAM,EAAU9f,EAAKyf,WAAWG,KAAO5B,MAAW4B,GACpE5f,EAAKwU,MAAMuL,EAAI,EAAG1kB,EAAI,GAAG4E,QAAQ,MAAO,KAIjD,KAAO2f,EAAID,GAAG,CACZ,IAAKjkB,EAAIsE,EAAKyf,WAAWpkB,EAAIukB,QAAU5B,GAAS8B,GAAM,OACjD,GAAIpkB,IAAMuiB,GAAU6B,GAAM,EAAU9f,EAAKyf,WAAWG,KAAO5B,MAAW4B,OACtE,GAAIlkB,IAAM8jB,EAAW,SAC1B,OAAOxf,EAAKwU,MAAMuL,EAAG1kB,GAIvB,OAAOwkB,GAAM,EAAM7f,EAAKwU,MAAMuL,EAAGJ,GAGnC,IA7BI3f,EAAKyf,WAAWE,EAAI,KAAO3B,MAAW2B,EACtC3f,EAAKyf,WAAWE,EAAI,KAAO1B,MAAU0B,GA4BjCpjB,EAAIyJ,OAAa8X,IAAK,CAE5B,IADA,IAAIzE,EAAM,GACH9c,IAAMshB,IAAOthB,IAAMuhB,IAAKzE,EAAItT,KAAKxJ,GAAIA,EAAIyJ,IAC5CyO,GAA4B,OAAtB4E,EAAM5E,EAAE4E,EAAKvc,OACvBwhB,EAAKvY,KAAKsT,GAGZ,OAAOiF,EAGT,SAAS0B,EAAc1B,EAAMX,GAC3B,OAAOW,EAAKxW,IAAI,SAASuR,GACvB,OAAOsE,EAAQ7V,IAAI,SAAS0W,GAC1B,OAAOyB,EAAY5G,EAAImF,MACtB7b,KAAK2c,KAkBZ,SAASY,EAAU7G,GACjB,OAAOA,EAAIvR,IAAImY,GAAatd,KAAK2c,GAGnC,SAASW,EAAY3jB,GACnB,OAAgB,MAATA,EAAgB,GACjBA,aAAiBgD,KAAOof,GAAWpiB,GACnCijB,EAASY,KAAK7jB,GAAS,IAAM,IAAOA,EAAM2D,QAAQ,KAAM,MAAU,IAClE3D,EAGR,MAAO,CACLgK,MA5FF,SAAetG,EAAMyU,GACnB,IAAI2L,EAASzC,EAASW,EAAOoB,EAAU1f,EAAM,SAASqZ,EAAKhe,GACzD,GAAI+kB,EAAS,OAAOA,EAAQ/G,EAAKhe,EAAI,GACrCsiB,EAAUtE,EAAK+G,EAAU3L,EAtD/B,SAAyBkJ,EAASlJ,GAChC,IAAI1X,EAASmhB,GAAgBP,GAC7B,OAAO,SAAStE,EAAKhe,GACnB,OAAOoZ,EAAE1X,EAAOsc,GAAMhe,EAAGsiB,IAmDM0C,CAAgBhH,EAAK5E,GAAKyJ,GAAgB7E,KAGzE,OADAiF,EAAKX,QAAUA,GAAW,GACnBW,GAuFPoB,UAAWA,EACXjgB,OA5BF,SAAgB6e,EAAMX,GAEpB,OADe,MAAXA,IAAiBA,EAAUU,GAAaC,IACrC,CAACX,EAAQ7V,IAAImY,GAAatd,KAAK2c,IAAY/W,OAAOyX,EAAc1B,EAAMX,IAAUhb,KAAK,OA2B5F2d,WAxBF,SAAoBhC,EAAMX,GAExB,OADe,MAAXA,IAAiBA,EAAUU,GAAaC,IACrC0B,EAAc1B,EAAMX,GAAShb,KAAK,OAuBzC4d,WApBF,SAAoBjC,GAClB,OAAOA,EAAKxW,IAAIoY,GAAWvd,KAAK,SC1IhC6d,GAAMC,GAAI,KCAVC,IDEkBF,GAAIla,MACAka,GAAId,UACPc,GAAI/gB,OACA+gB,GAAIF,WACJE,GAAID,WCNrBE,GAAI,OAEQC
,GAAIpa,MACAoa,GAAIhB,UACPgB,GAAIjhB,OACAihB,GAAIJ,WACJI,GAAIH,WC2BhBI,OAXf,SAAiBtW,EAAK7D,GAKlBA,EAAUzK,OAAOmR,OAAO,GAJF,CAClBwQ,gBAAgB,EAChBkD,eAAgB,KAEuBpa,GAE3C,IAAMia,EAAMI,GAAMra,EAAQoa,gBAC1B,OAAOnD,GAAOgD,EAAIf,UAAUrV,GAAM7D,ICoBvBsa,OAxBf,SAAmBnP,GACf,IAAM0L,EAAS,GACXhiB,EAAI,EACJ0lB,SACEpD,EAAU,GACV5X,EAAO6X,EAAYD,GAgBzB,OAdAhM,EAAI3I,QAAQ,SAACkB,GACT,IAAMrB,EAAS,GACf,IAAK,IAAIjM,KAAOsN,EACRtN,KAAOygB,EACP0D,EAAiB1D,EAAOzgB,IAExBygB,EAAOzgB,GAAOvB,IACd0lB,EAAiB1lB,EAAI,GAEzBwN,EAAOkY,GAAkB7W,EAAKtN,GAElCmJ,eAAQ8C,KAGL,CAAC9M,OAAO4J,KAAK0X,GAASM,IC1BlBqD,OAXf,SAAe/V,EAAMzE,GACjB,IAAMya,EAAa,CAAEH,YAAUH,UAAQlD,WACjCD,EAAaxS,EAAiBC,GAEpC,IAAKuS,EACD,MAAM,IAAIjN,MAAM,mCAGpB,OAAO0Q,EAAWzD,GAAYvS,EAAMzE,iiBCGjC,SAASyK,GAAiBpI,GAC7B,IAAMqY,EAAO,GAEb,OADAnlB,OAAO4J,KAAKkD,GAAQG,QAAQ,SAACpM,GAAUskB,EAAKtkB,GAAO,IAAIsP,EAAMrD,EAAOjM,GAAMA,KACnEskB,EAGJ,IAAMC,GAAe,SAAArJ,EAA8BsJ,EAAmBC,GAAmB,IAAAC,EAAAC,GAAAzJ,EAAA,GAAlEtL,EAAkE8U,EAAA,GAAtD9N,EAAsD8N,EAAA,GACxFE,EAAShO,EAAcnS,OAASmS,EAAc9G,MAAM,KAAO,GAC3D+U,EAAkBL,EAAkB7V,YACpCmW,EAAYF,EAAO1Z,IAAI,SAAA6Z,GAAA,OT+BxB,SAAoC5Q,EAAcvE,GAAY,IACzDX,EAAWkF,EAAXlF,OAER,OAAQA,EAAOC,MACf,KAAK1N,EAAUC,QACX,OAAQwN,EAAOoO,SACf,KAAK/b,EAAeC,WAEpB,QACI,OAAO,IAAI4d,GAAWhL,EAAcvE,GAE5C,KAAKpO,EAAUE,UACX,OAAQuN,EAAOoO,SACf,KAAKpc,EAAiBC,YAClB,OAAO,IAAIyc,GAAYxJ,EAAcvE,GACzC,KAAK3O,EAAiBE,SAClB,OAAO,IAAI4c,GAAS5J,EAAcvE,GACtC,KAAK3O,EAAiBI,OAClB,OAAO,IAAIwd,GAAO1K,EAAcvE,GACpC,QACI,OAAO,IAAI+N,GAAYxJ,EAAcvE,GAE7C,QACI,OAAO,IAAI+N,GAAYxJ,EAAcvE,IStDNoV,CAA2BH,EAAgBE,GAAM5Q,aAAcvE,KAClG,OAAOrB,EAAWC,gBAAgBsW,EAAWL,IAoBpCQ,GAAqB,SAACC,EAAUC,EAAOC,IAjBZ,SAACD,EAAOC,GAAuC,IACzCC,EADahV,EAA4BhI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAnB,GAAIid,EAAejd,UAAA,GAC/E+c,IAAc3T,EAAeI,SAC7BsT,EAAMI,YAAY9gB,OAAS,GAC3B4gB,EAAAF,EAAMI,aAAYpc,KAAlBqB,MAAA6a,EAAAG,GAA0BF,KAE1BH,EAAMI,YAAYpc,KAAK,CACnBsc,GAAIL,EACJM,KAAMrV,EACNsV,SAAUL,IAUlBM,CAAyBT,EAAOC,EADuD/c,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAnB,GAAmBA,UAAA,IAJlD,SAAC6c,EAAUW,GAAU,IAAAC,GAC1DA,EAAAD,EAAME,qBAAoB5c,KAA1BqB,MAAAsb,EAAAN,GAAkCN,EAASa,qBAA3Cpa,OAAA6Z,GAAmEN,EAASK,eAK5ES,CAA0Bd,EAAUC,IAG3Bc,GAAe,SAACrW,EAAY3D,EAAQia,EAAU7V,EAAQ6U,GAC/D,IAAMiB,EAAgB,GAClBC,GAAqB,EACnBxmB,EAASyQ,EAATzQ,KACFymB,SACA9J,EAAc,GACdC,EAAgB,kBAAM0I,EAAS3Q,gBAC7B+R,EAAmB,SAAAthB,GAAA,OAASkhB,EAlDtC,SAA+Bja,EAAQxN,GACnC,IAAM6lB,EAAO,GADyBiC,GAAA,EAAAC,GAAA,EAAAC,OAAAzjB,EAAA,IAEtC,QAAA0jB,EAAAC,EAAkB1a,EAAlBzM,OAAAonB,cAAAL,GAAAG,EAAAC,EAAAzV,QAAA2V,MAAAN,GAAA,EAA0B,KAAjB1X,EAAiB6X,EAAAhnB,MACtB4kB,EAAKzV,EAAM7P,QAAU,IAAIsQ,EAAMT,EAAMsF,aAAa9F,KAAK5P,GAAIoQ,IAHzB,MAAAiY,GAAAN,GAAA,EAAAC,EAAAK,EAAA,aAAAP,GAAAI,EAAAI,QAAAJ,EAAAI,SAAA,WAAAP,EAAA,MAAAC,GAKtC,OAAOnC,EA8CH0C,CAAqB/a,EAAQjH,GAC7BA,EACAwX,EACAD,IAGA0K,SAkBJ,OAhBIA,EADArnB,IAAS+B,EAAcE,QACb,SAAAmD,GAAA,OAAUshB,EAAiBthB,IAE3B,SAAAA,GAAA,OAASshB,EAAiBthB,IAGxC2K,EAAmBC,EAAY,SAACnR,GACxBwoB,EAAQxoB,MACmB,IAAvB2nB,GAA4B3nB,IAAO2nB,EAAoB,GACvDC,EAAKF,EAAc1hB,OAAS,EAC5B0hB,EAAcE,GAASF,EAAcE,GAAIvW,MAAM,KAAK,GAApD,IAA0DrR,GAE1D0nB,EAAchd,KAAd,GAAsB1K,GAE1B2nB,EAAoB3nB,KAGrB0nB,EAAcpgB,KAAK,MAGjBmhB,GAAqB,SAAC/B,GAC/B,IAAMgC,EAAWhC,EAAMiC,OAAM,GACvB5C,EAAoBW,EAAMkC,uBAShC,OARAF,EAAS5O,eAAiBiM,EAAkBvY,OAAOf,IAAI,SAAA2M,GAAA,OAAKA,EAAE7Y,SAAQ+G,KAAK,KAG3Eye,EAAkB5V,iBAAmB,KACrC4V,EAAkBnV,iBAAmB,KACrCmV,EAAkBxV,eAAiB,KACnCmY,EAASzK,wBAAwB4K,wBAE1BH,GAGEI,GAAyB,SAACpC,EAAOqC,GAA4B,IAAhBnX,EAAgBhI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAP,GACzD+c,EAAY/U,EAAO+U,WAAa7S,EAChCkV,EAAkBpX,EAAOoX,kBAAmB,EAC9CC,EAAM,GAINA,EAHCF,EAAW/iB,OAGN+iB,EAAWtc,IAAI,SAAAyc,GAAA,OACXrQ,GAD0BuD,E
A6BjC8M,GA5B2BC,UACpB3Y,EAASqI,EAAQrI,OACjB4Y,EAAehN,EAAUiN,kBACzBC,EAAclN,EAAUvH,gBAAgB3E,YACxCN,EAAOiJ,EAAQjJ,KACfwP,EAAS1e,OAAO6oB,OAAOH,GAAcrR,OAAO,SAACC,EAAKwR,GAEpD,OADAxR,EAAIwR,EAAEC,IAAIlpB,MAAQ+oB,EAAYE,EAAEC,IAAIlpB,MAAM6e,SACnCpH,GACR,IAEI,SAACxK,GAgBJ,QAfiBoC,EAAK5J,QAAiB4J,EAAK8Z,KAAK,SAAA1L,GAAA,OAAOxN,EAAOmZ,MAAM,SAACC,GAClE,KAAMA,EAAUrpB,QAAQiN,GACpB,OAAO,EAEX,IAAMvM,EAAQuM,EAAOoc,EAAUrpB,MAAMspB,UACrC,GAAIb,GAAmBY,EAAUnZ,OAAS1N,EAAUC,QAChD,OAAO/B,GAASme,EAAOwK,EAAUrpB,MAAM,IAAMU,GAASme,EAAOwK,EAAUrpB,MAAM,GAGjF,GAAIqpB,EAAUnZ,OAAS1N,EAAUE,UAC7B,OAAO,EAEX,IAAMgV,EAAMmR,EAAaQ,EAAUrpB,MAAMgG,MACzC,OAAOyX,EAAI/F,KAASzK,EAAOoc,EAAUrpB,MAAMspB,eAzBpB,IAACzN,EAC1BvD,EACArI,EACA4Y,EACAE,EACA1Z,EACAwP,IARJ,CAAC,kBAAM,IA+CjB,OAZIuH,IAAc7S,EACE2U,GAAmB/B,GAAOoD,OAAO,SAAAtc,GAAA,OAAUyb,EAAIU,MAAM,SAAAI,GAAA,OAAMA,EAAGvc,MAAU,CACpFwc,WAAW,EACX7oB,KAAM+B,EAAcG,MAGRolB,GAAmB/B,GAAOoD,OAAO,SAAAtc,GAAA,OAAUyb,EAAIS,KAAK,SAAAK,GAAA,OAAMA,EAAGvc,MAAU,CACnFrM,KAAM+B,EAAcG,IACpB2mB,WAAW,KAOVC,GAAkB,SAACxD,EAAUgB,EAAUyC,EAAcC,GAC9D,IAAMC,EAAS3D,EAASkC,MAAMwB,EAAYH,WACpC7Y,EAAaqW,GACf4C,EAAOhV,YACPgV,EAAOxB,uBAAuBpb,OAC9Bia,EACAyC,EACAzD,GAaJ,OAXA2D,EAAOhV,YAAcjE,EACrBiZ,EAAOnM,wBAAwB4K,wBAE/BrC,GACIC,EACA2D,EACApX,EAAeC,OACd,CAAErB,OAAQsY,GACTzC,GAGC2C,GAGEC,GAAmB,SAAC5D,EAAU6D,EAAW1Y,EAAQ2Y,GAC1D,IAAMH,EAAS3D,EAASkC,MAAM/W,EAAOoY,WACjCQ,EAAgBF,EAiBpB,OAhBI1Y,EAAOzQ,OAAS+B,EAAcE,UAC9BonB,EAAgBD,EAAUlQ,OAAO,SAAAvB,GAAA,OAA+C,IAAlCwR,EAAU7f,QAAQqO,MAIpEsR,EAAOtQ,eAAiB0Q,EAAcljB,KAAK,KAC3C8iB,EAAOnM,wBAAwB4K,wBAE/BrC,GACIC,EACA2D,EACApX,EAAeE,QACf,CAAEoX,YAAW1Y,SAAQ6Y,gBAAiBD,GACtC,MAGGJ,GAGEM,GAAqB,SAACC,GAO/B,IALAA,EAAatc,EAAQ,GAAIsc,IACTla,OACZka,EAAWla,KAAO1N,EAAUE,YAG3B0nB,EAAW/L,QACZ,OAAQ+L,EAAWla,MACnB,KAAK1N,EAAUC,QACX2nB,EAAW/L,QAAU/b,EAAeC,WACpC,MACJ,QACA,KAAKC,EAAUE,UACX0nB,EAAW/L,QAAUpc,EAAiBC,YAK9C,OAAOkoB,GA6BEC,GAA4B,SAAApa,GAAA,OAAUA,EAAO/D,IAAI,SAACke,GAG3D,OA7B8B,SAACA,GAC/B,IAAME,EAA2B,CAAChoB,EAAeC,YAC3CgoB,EAAuB,CACzBtoB,EAAiBC,YACjBD,EAAiBI,OACjBJ,EAAiBE,SACjBF,EAAiBG,KAEb8N,EAAwBka,EAAxBla,KAAMmO,EAAkB+L,EAAlB/L,QAASre,EAASoqB,EAATpqB,KAEvB,OAAQkQ,GACR,KAAK1N,EAAUE,UACX,IAA+C,IAA3C6nB,EAAqBrgB,QAAQmU,GAC7B,MAAM,IAAI1J,MAAJ,qDAA+D0J,EAA/D,aAAmFre,EAAnF,UAEV,MACJ,KAAKwC,EAAUC,QACX,IAAmD,IAA/C6nB,EAAyBpgB,QAAQmU,GACjC,MAAM,IAAI1J,MAAJ,mDAA6D0J,EAA7D,aAAiFre,EAAjF,UAEV,MACJ,QACI,MAAM,IAAI2U,MAAJ,wCAAkDzE,EAAlD,aAAmElQ,EAAnE,WAMVwqB,CADAJ,EAAaD,GAAmBC,IAEzBA,KAeEK,GAAa,SAACC,EAAUrb,EAAMY,EAAQrF,GAC/CqF,EAASoa,GAA0Bpa,GACnCrF,EAAUzK,OAAOmR,OAAOnR,OAAOmR,OAAO,GAAIqZ,IAAgB/f,GAC1D,IAAMggB,EAAcC,EAAUjgB,EAAQgX,YAEtC,IAAMgJ,GAAsC,mBAAhBA,EACxB,MAAM,IAAIjW,MAAJ,mCAA6C/J,EAAQgX,WAArD,WANiD,IAAAkJ,EAS3BF,EAAYvb,EAAMzE,GATSmgB,EAAApF,GAAAmF,EAAA,GASpDrJ,EAToDsJ,EAAA,GAS5CC,EAT4CD,EAAA,IAZ/B,SAAC9a,EAAQgb,GACrChb,EAAO7C,QAAQ,SAACgd,GACZ,IAAMc,EAAcd,EAAWe,GAC/B,GAAKD,EAAL,CAEA,IAAMxT,EAAMuT,EAAW/gB,QAAQkgB,EAAWpqB,MAC1CirB,EAAWvT,GAAOwT,EAClBd,EAAWpqB,KAAOkrB,SACXd,EAAWe,MActBC,CAAiBnb,EAAQwR,GACzB,IAAMhS,EAAW4R,GAAa2J,EAAe/a,EAAQwR,GAG/C4J,EAAY9b,EAAWC,gBAAgBC,EAAU7E,EAAQ5K,MAM/D,OALA0qB,EAASY,mBAAqBD,EAE9BX,EAAS7V,YAAcmW,EAAcvlB,QAAUulB,EAAc,GAAGvlB,OAAzC,MAAuDulB,EAAc,GAAGvlB,OAAS,GAAM,GAC9GilB,EAASnR,eAAkBtJ,EAAO/D,IAAI,SAAAoR,GAAA,OAAKA,EAAEtd,OAAO+G,OACpD2jB,EAASa,YAAc3gB,EAAQgX,aAAehgB,EAAWI,KAAOoN,EAAiBC,GAAQzE,EAAQgX,WAC1F8I,GAGEhS,GAAgB,SAACzI,EAAQJ,GAGlC,IAFA,IAAIpQ,EAAI,EAEDA,EAAIwQ,EAAOxK,SAAUhG,EACxB,GAAIoQ,IAAUI,EAAOxQ,GAAGO,KACpB,MAAO,CACHkQ,KAAMD,EAAOxQ,GAAG4e,SAAWpO,EAAOxQ,GAAGyQ,KACrClK,MAAOvG,GAInB,OAAO,MA6BL+rB,GAAgC,SAAC7C,EAAW9M,GAC9C,IAAM4P,EAAc5P,EAAU6P,iBAC1BC,EAAi
BhD,EAAU,GAC3BiD,EAAiBjD,EAAU,GAkB/B,OAhBA8C,EAAYre,QAAQ,SAACye,GACjB,GAAKA,EAAL,CADgC,IAMjBC,EAAAC,EANiBC,EA9BF,SAACH,GACnC,IAAII,EAAS,GACT7F,SAEJ,OADAA,EAAYyF,EAAWpF,IAEvB,KAAKhU,EAAeC,OAChBuZ,EAAS,CAACJ,EAAWlF,UACrB,MACJ,KAAKlU,EAAeE,QAChBsZ,EAAS,CAACJ,EAAWnF,KAAKwD,iBAC1B,MACJ,KAAKzX,EAAeG,QAChBwT,EAAY,UACZ6F,EAAS,CAACJ,EAAWnF,KAAKwF,cAAcpb,MAAM,KAAM+a,EAAWlF,UAC/D,MACJ,QACIP,EAAY,KAGhB,MAAO,CACHA,YACA6F,UAc8BE,CAAuBN,GAA7CzF,EALwB4F,EAKxB5F,UAAW6F,EALaD,EAKbC,OACnB,GAAI7F,EACAuF,GAAiBG,EAAAH,GAAevF,GAAf5a,MAAAsgB,EAAAtF,GAA6ByF,GAA7Btf,OAAA,CAAqC,CAClD8c,WAAW,MAEfmC,GAAiBG,EAAAH,GAAexF,GAAf5a,MAAAugB,EAAAvF,GAA6ByF,GAA7Btf,OAAA,CAAqC,CAClD8c,WAAW,SAKhB,CAACkC,EAAgBC,IAWtBQ,GAAuB,SAAvBA,EAAwBvQ,EAAW8M,GAA8C,IAAnCtX,EAAmChI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAA1B,GAAIgjB,EAAsBhjB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAP,GACtEijB,EAAqBD,EAAaC,mBAClCC,EAAgBF,EAAaE,eAAiB,GAEhD1Q,IAAcyQ,MAIAC,EAAc9mB,SAA+C,IAAtC8mB,EAAcriB,QAAQ2R,KAElDA,EAAU2Q,kBAAkB7D,EAAWtX,GAEnCwK,EAAU4Q,UAClBrf,QAAQ,SAACsf,GAAU,IAAAC,EACenB,GAA8B7C,EAAW+D,GADxDE,EAAAjH,GAAAgH,EAAA,GACnBhB,EADmBiB,EAAA,GACHhB,EADGgB,EAAA,GAExBR,EAAqBM,EAAO,CAACf,EAAgBC,GAAiBva,EAAQgb,OAkBjEQ,GAAqB,SAAC1G,GAC/B,IADoD,IAAd2G,EAAczjB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAP,GACtC8c,EAAM4G,SACTD,EAAK3iB,KAAKgc,GACVA,EAAQA,EAAM4G,QAElB,OAAOD,GAGEE,GAA2B,SAACC,EAAaC,EAAYC,EAAgB9b,GAC9E,IAAIsV,SACAgC,SACIyE,EAA4CD,EAA5CC,qBAAsBC,EAAsBF,EAAtBE,kBACxBC,EAAsBH,EAAeI,SACrCC,EAA8Bnc,EAAOmc,4BAMvCC,EAAY,GAEhB,GAAoB,OAAhBR,IAA8C,IAAtB5b,EAAOqc,WAC/BD,EAAY,CAAC,CACT9G,SAAU,SAEX,KAAAzJ,EACCyQ,EAAkBxtB,OAAO6oB,OAAOoE,EAAqBQ,iBAC/B,IAAtBP,IACAM,EAAkBA,EAAgB7T,OAAO,SAAA/Z,GAAA,OAAKA,EAAEsR,OAAOkc,WAAaD,KAGxE,IAAMO,EAAmBF,EAAgB7T,OAjB5B,SAACgU,GAEd,OADezc,EAAO4C,UAAa,kBAAM,IAC3B6Z,EAAOzc,KAeqCnF,IAAI,SAAA6hB,GAAA,OAAUA,EAAO1c,OAAOsV,WAEhF4F,EAAgB,GAEtB,IAA0B,IAAtBc,EAA6B,CAC7B,IAAMW,EAAwB7tB,OAAO6oB,OAAOoE,EAAqBQ,gBAEjEI,EAAsB5gB,QAAQ,SAAC6gB,GAC3B,IAAMC,EAAaD,EAAU5c,QACI,IAA7B6c,EAAWC,eAA2BD,EAAWH,SAAW1c,EAAO0c,QAC/DG,EAAWX,WAAaD,IAC5Bf,EAAcpiB,KAAK8jB,EAAU9H,QAC7BQ,EAAWqH,EAAsBlU,OAAO,SAAA/Z,GAAA,OAAKA,IAAMkuB,IAAW/hB,IAAI,SAAAnM,GAAA,OAAKA,EAAEsR,OAAOsV,YACvElhB,QAAUgoB,EAAUtjB,KAAK,CAC9Bwc,WACAyH,OAAQH,EAAU9H,MAClB2G,KAAMD,GAAmBoB,EAAU9H,YAOnDQ,GAAWzJ,EAAA,IAAGvQ,OAAHnB,MAAA0R,EAAA,GAAAvQ,OAAA6Z,GAAiBqH,GAAjB,CAAmCZ,KAAcnT,OAAO,SAAA/Z,GAAA,OAAW,OAANA,IACxE0tB,EAAUtjB,KAAK,CACXwc,WACA4F,wBAAmBA,EAAnB/F,GAAqCnV,EAAOkb,eAAiB,OAIrE,IAAM8B,EAAYnB,EAAW/G,MAEvBmI,EAAanuB,OAAOmR,OAAO,CAC7Bid,kBAAmBtB,EACnBK,uBACDjc,GAEGmd,EAAmBtB,EAAWuB,aAChCjB,GAA+BgB,IAC/B7F,EAAYJ,GAAuBiG,EAAkB7H,EAAU,CAC3D8B,gBAAiB+E,IAErBpB,GAAqBoC,EAAkB7F,EAAW2F,IAGtDb,EAAUrgB,QAAQ,SAACshB,GACf,IAAMC,EAAmBpG,GAAuB8F,EAAWK,EAAI/H,UACzDmG,EAAO4B,EAAI5B,KAEjB,GAAIA,EAAM,CACN,IAAM8B,EA1HO,SAACjG,EAAWmE,GACjC,IAAK,IAAIrtB,EAAI,EAAGoN,EAAMigB,EAAKrnB,OAAQhG,EAAIoN,EAAKpN,IAAK,CAC7C,IAAM0mB,EAAQ2G,EAAKrtB,GACnBkpB,EAAY6C,GAA8B7C,EAAWxC,GAEzD,OAAOwC,EAqHuBkG,CAAiBF,EAAkB7B,EAAKgC,WAC9DJ,EAAIN,OAAO5B,kBAAkBoC,EAAeN,QAE5ClC,GAAqBiC,EAAWM,EAAkBL,EAAY,CAC1D/B,cAAemC,EAAInC,cACnBD,mBAAoBkB,GAA+BgB,iQCgHpDO,cA3jBX,SAAAA,iGAAwBC,CAAAlrB,KAAAirB,GACpB,IAAIE,SAEJnrB,KAAKipB,QAAU,KACfjpB,KAAKyiB,YAAc,GACnBziB,KAAKijB,oBAAsB,GAC3BjjB,KAAK2oB,UAAY,GANG,QAAAzf,EAAA3D,UAAA5D,OAARwmB,EAAQ/e,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAR8e,EAAQ9e,GAAA9D,UAAA8D,GAQE,IAAlB8e,EAAOxmB,SAAkBwpB,EAAShD,EAAO,cAAe8C,GAExDjrB,KAAKyV,eAAiB0V,EAAO1V,eAC7BzV,KAAK+Q,YAAcoa,EAAOpa,YAC1B/Q,KAAKynB,YAAc0D,EAAO1D,YAC1BznB,KAAKipB,QAAUkC,EACfnrB,KAAKwnB,mBAAqBxnB,KAAKipB,QAAQzB,mBACvCxnB,KAAKorB,gBAAkBtgB,IACvB9K,KAAK4
Z,wBAAwB4K,0BAE7BmC,GAAUA,cAAC3mB,MAAX6I,OAAoBsf,IACpBnoB,KAAKorB,gBAAkBprB,KAAKwnB,mBAAmBtrB,KAC/C8D,KAAK4Z,wBAAwB4K,wBAC7BxkB,KAAKqrB,sBAAwB,CACzBvB,eAAgB,GAChBwB,iBAAkB,oDA0B1B,OAAOtrB,KAAKwQ,gBAAgBrH,OAAOf,IAAI,SAAAnM,GAAA,OAAKA,EAAEkQ,6CAY9C,OAAOnM,KAAKorB,wDAIZ,OAAOprB,KAAKurB,4DAMZ,OAFAvrB,KAAKurB,YAAc9J,GAAa,CAACzhB,KAAK+Q,YAAa/Q,KAAKyV,gBACnDzV,KAAKukB,uBAAwBvkB,KAAKorB,iBAChCprB,oDAIP,OAAOA,KAAKwnB,gDAiCVgE,EAAUrb,GACZ,OAAOH,EAAahQ,KAAMwrB,EAAUrb,uCAuB3Bqb,GACT,OAAOxb,EAAahQ,KAAMwrB,EAAU3R,GAAkB7Z,KAAMwrB,IAAW,iCAqBpEC,GACH,OAAOzR,GAAMha,KAAMyrB,sCAoBXC,GACR,OAAOtW,EAAWpV,KAAM0rB,kCAkDpBtI,EAAU7V,GACd,IAAMoe,EAAY,CACd7uB,KAAM+B,EAAcC,OACpB6mB,WAAW,GAITG,EAAc,CAAEH,WAFtBpY,EAASlR,OAAOmR,OAAO,GAAIme,EAAWpe,IAEEoY,WACpCiG,SAEAre,EAAOzQ,OAAS+B,EAAcG,IAa9B4sB,EAAM,CAZWhG,GACb5lB,KACAojB,EACA,CAAEtmB,KAAM+B,EAAcC,QACtBgnB,GAEaF,GACb5lB,KACAojB,EACA,CAAEtmB,KAAM+B,EAAcE,SACtB+mB,IAIJ8F,EAAMhG,GACF5lB,KACAojB,EACA7V,EACAuY,GAIR,OAAO8F,oCAsBP,OAAQ5rB,KAAK+Q,YAAYpP,SAAW3B,KAAKyV,eAAe9T,uCAUnC,IAAlBgkB,IAAkBpgB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,KAAAA,UAAA,GACf8e,EAAW,IAAIrkB,KAAK6rB,YAAY7rB,MAMtC,OALI2lB,EACAtB,EAASyH,UAAU9rB,MAEnBqkB,EAASyH,UAAU,MAEhBzH,kCA8CF4B,EAAW1Y,GAChB,IAAMoe,EAAY,CACd7uB,KAAM+B,EAAcC,OACpB6mB,WAAW,GAEfpY,EAASlR,OAAOmR,OAAO,GAAIme,EAAWpe,GACtC,IAAMwe,EAAc/rB,KAAKglB,kBACnBkB,EAAY7pB,OAAO4J,KAAK8lB,GACtBjvB,EAASyQ,EAATzQ,KAEJkvB,EAAsB/F,EAAUvS,OAAO,SAACC,EAAK5H,GAM7C,MAL+B,WAA3BA,EAAM8f,YAAY3vB,KAClByX,EAAItN,KAAJqB,MAAAiM,wHAAAsY,CAAY/F,EAAUlQ,OAAO,SAAAvB,GAAA,OAA0C,IAA7BA,EAAUyX,OAAOngB,OACpDA,KAASggB,GAChBpY,EAAItN,KAAK0F,GAEN4H,GACR,IAEHqY,EAAsB5iB,MAAMI,KAAK,IAAIsR,IAAIkR,IAAsB5jB,IAAI,SAAA2D,GAAA,OAASA,EAAM4Q,SAClF,IAAI5E,SAEAjb,IAAS+B,EAAcG,IASvB+Y,EAAY,CARUiO,GAAiBhmB,KAAMgsB,EAAqB,CAC9DlvB,KAAM+B,EAAcC,OACpB6mB,UAAWpY,EAAOoY,WACnBO,GACkBF,GAAiBhmB,KAAMgsB,EAAqB,CAC7DlvB,KAAM+B,EAAcE,QACpB4mB,UAAWpY,EAAOoY,WACnBO,IAIHnO,EADsBiO,GAAiBhmB,KAAMgsB,EAAqBze,EAAQ2Y,GAI9E,OAAOnO,4CAIP,OAAO/X,KAAKmsB,6DAWZ,OAPAnsB,KAAKmsB,aAAensB,KAAKurB,YAAYpiB,OAAOuK,OAAO,SAACC,EAAKyY,EAAUzwB,GAK/D,OAJAgY,EAAIyY,EAASlwB,QAAU,CACnBgG,MAAOvG,EACPypB,IAAKgH,EAASjgB,UAEXwH,GACR,IACI3T,uCAWPA,KAAKipB,SAAWjpB,KAAKipB,QAAQoD,YAAYrsB,MACzCA,KAAKipB,QAAU,KACfjpB,KAAK2oB,UAAUrf,QAAQ,SAACsf,GACpBA,EAAMK,QAAU,OAEpBjpB,KAAK2oB,UAAY,uCA6BRC,GACT,IAAIhV,EAAM5T,KAAK2oB,UAAU2D,UAAU,SAAAC,GAAA,OAAWA,IAAY3D,KACjD,IAAThV,GAAa5T,KAAK2oB,UAAUlhB,OAAOmM,EAAK,qCAQjC4Y,GACPxsB,KAAKipB,SAAWjpB,KAAKipB,QAAQoD,YAAYrsB,MACzCA,KAAKipB,QAAUuD,EACfA,GAAUA,EAAO7D,UAAUtiB,KAAKrG,0CA4BhC,OAAOA,KAAKipB,8CA6BZ,OAAOjpB,KAAK2oB,mDA4BZ,OAAO3oB,KAAKyiB,6DA4BZ,OAAOziB,KAAKijB,2rBCwGLtlB,eAlnBX,SAAAA,IAAsB,IAAAya,+FAAAqU,CAAAzsB,KAAArC,GAAA,QAAAuL,EAAA3D,UAAA5D,OAANwF,EAAMiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAANlC,EAAMkC,GAAA9D,UAAA8D,GAAA,IAAAgO,mKAAAqV,CAAA1sB,MAAAoY,EAAAza,EAAAyd,WAAA/e,OAAAgf,eAAA1d,IAAA7B,KAAA4L,MAAA0Q,EAAA,CAAApY,MAAA6I,OACT1B,KADS,OAGlBkQ,EAAKsV,eAAiB,GAHJtV,qUArCF4T,wCAuGXnkB,GAQLA,EAAUzK,OAAOmR,OAAO,GAPL,CACfof,MAAO,MACPvqB,UAAW,KACXwqB,SAAS,EACTC,cAAc,EACd3a,KAAM,IAE8BrL,GACxC,IAAMqC,EAASnJ,KAAKukB,uBAAuBpb,OAErC4jB,EAAgBlZ,EAAY/X,KAC9BkE,KACAA,KAAKukB,uBAAuBpb,OAC5BnJ,KAAK+Q,YACLjK,EAAQgmB,aAAe3jB,EAAOf,IAAI,SAAAnM,GAAA,OAAKA,EAAEC,SAAQ+G,OAASjD,KAAKyV,eAC/D3O,EAAQqL,KACR,CACI8B,WAA8B,WAAlBnN,EAAQ8lB,MACpB5Y,SAAUlN,EAAQ+lB,UAI1B,IAAK/lB,EAAQzE,UACT,OAAO0qB,EAxBG,IA2BN1qB,EAAcyE,EAAdzE,UACAkJ,EAAuBwhB,EAAvBxhB,KAAMY,EAAiB4gB,EAAjB5gB,OAAQgI,EAAS4Y,EAAT5Y,KAChB6Y,EAAa7gB,EAAO/D,IAAK,SAAA/E,GAAA,OAAKA,EAAEnH,OAEhC+wB,EADgB5wB,OAAO4J,KAAK5D,GACAqR,OAAO,SAACC,EAAKvF,GAC3C,IAAMwF,EAAMoZ,EAAW5mB,QAAQgI,GAI
/B,OAHa,IAATwF,GACAD,EAAItN,KAAK,CAACuN,EAAKvR,EAAU+L,KAEtBuF,GACR,IAgCH,MA9BsB,WAAlB7M,EAAQ8lB,MACRK,EAAY3jB,QAAQ,SAAC4jB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnB3hB,EAAK4hB,GAAM7jB,QAAQ,SAAC2J,EAAOoa,GACvB9hB,EAAK4hB,GAAME,GAAYD,EAAMtxB,UACzBoE,EACA+S,EACAkB,EAAKkZ,GACLlhB,EAAOghB,QAKnB5hB,EAAKjC,QAAQ,SAAC2J,EAAOoa,GACjBJ,EAAY3jB,QAAQ,SAAC4jB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBja,EAAMka,GAAQC,EAAMtxB,UAChBoE,EACA+S,EAAMka,GACNhZ,EAAKkZ,GACLlhB,EAAOghB,QAMhBJ,kCA2BFO,GAAwD,IAA7CtV,EAA6CzS,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAlC,GAAIgI,EAA8BhI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAArB,CAAEogB,WAAW,GAC/CyC,KAAmBkF,EAAUrqB,OAC/BklB,EAAS,CAACnoB,KAAMstB,EAAWtV,GACzBmB,EAAerB,gBAAWqQ,GAgBhC,OAdAhG,GACIniB,KACAmZ,EACAxK,EAAeG,QACf,CAAEwe,YAAWlF,gBAAe3P,eAAgBZ,GAAaY,kBACzDT,GAGAzK,EAAOoY,UACPxM,EAAa2S,UAAU9rB,MAEvBmZ,EAAa2S,UAAU,MAGpB3S,+BAsDLpF,GAA+C,IAA/BxG,EAA+BhI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAtB,CAAEogB,WAAW,GAClC4H,EAAUvtB,KAAK8kB,QAAQ,CACzB8H,MAAO,MACPza,KAAM4B,IAGJyZ,EAAe,CADND,EAAQphB,OAAO/D,IAAI,SAAA2D,GAAA,OAASA,EAAM7P,QACnB2M,OAAO0kB,EAAQhiB,MAEvCkiB,EAAW,IAAIztB,KAAK6rB,YAAY2B,EAAcD,EAAQphB,OAAQ,CAAE2R,WAAY,WAgBlF,OAdAqE,GACIniB,KACAytB,EACA9e,EAAeO,KACf3B,EACAwG,GAGAxG,EAAOoY,UACP8H,EAAS3B,UAAU9rB,MAEnBytB,EAAS3B,UAAU,MAGhB2B,oCAwBArhB,EAAMtF,GACbsF,EAAOA,GAAQpM,KAAKynB,YACpB3gB,EAAUzK,OAAOmR,OAAO,GAAI,CAAE0T,eAAgB,KAAOpa,GAErD,IAAMqC,EAASnJ,KAAKwQ,gBAAgBrH,OAC9BukB,EAAUvkB,EAAOf,IAAI,SAAA2M,GAAA,OAAKA,EAAEmS,kBAC5ByG,EAAYD,EAAQ,GAAG/rB,OACzBisB,SACAC,SACAC,SAEJ,GAAI1hB,IAAStO,EAAWC,UAEpB,IADA6vB,EAAiB,GACZC,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMlU,EAAM,GACZ,IAAKmU,EAAS,EAAGA,EAAS3kB,EAAOxH,OAAQmsB,IACrCnU,EAAIxQ,EAAO2kB,GAAQ5xB,QAAUwxB,EAAQI,GAAQD,GAEjDD,EAAevnB,KAAKsT,QAErB,GAAIvN,IAAStO,EAAWE,QAAS,CAEpC,IADA4vB,EAAiB,CAACzkB,EAAOf,IAAI,SAAA2M,GAAA,OAAKA,EAAE7Y,SAAQ+G,KAAK6D,EAAQoa,iBACpD2M,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMlU,EAAM,GACZ,IAAKmU,EAAS,EAAGA,EAAS3kB,EAAOxH,OAAQmsB,IACrCnU,EAAItT,KAAKqnB,EAAQI,GAAQD,IAE7BD,EAAevnB,KAAKsT,EAAI1W,KAAK6D,EAAQoa,iBAEzC0M,EAAiBA,EAAe3qB,KAAK,UAClC,IAAImJ,IAAStO,EAAWG,QAU3B,MAAM,IAAI4S,MAAJ,aAAuBzE,EAAvB,qBARN,IADAwhB,EAAiB,CAACzkB,EAAOf,IAAI,SAAA2M,GAAA,OAAKA,EAAE7Y,UAC/B2xB,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMlU,EAAM,GACZ,IAAKmU,EAAS,EAAGA,EAAS3kB,EAAOxH,OAAQmsB,IACrCnU,EAAItT,KAAKqnB,EAAQI,GAAQD,IAE7BD,EAAevnB,KAAKsT,IAM5B,OAAOiU,mCAGD7hB,GACN,IAAM0I,EAAY1I,EAAM7P,OACxB8D,KAAKyV,gBAAL,IAA2BhB,EAC3B,IAAMiN,EAAoB1hB,KAAKwnB,mBAE/B,GAAK9F,EAAkB7V,YAAYE,EAAM7P,QAElC,CACH,IAAMqN,EAAamY,EAAkBvY,OAAOmjB,UAAU,SAAAyB,GAAA,OAAaA,EAAU7xB,SAAWuY,IACxFlL,GAAc,IAAMmY,EAAkBvY,OAAOI,GAAcwC,QAH3D2V,EAAkBvY,OAAO9C,KAAK0F,GAYlC,OALA2V,EAAkB5V,iBAAmB,KACrC4V,EAAkBnV,iBAAmB,KACrCmV,EAAkBxV,eAAiB,KAEnClM,KAAK4Z,wBAAwB4K,wBACtBxkB,+CAuCQmM,EAAQ6hB,EAAYzgB,GAAQ,IAAAmK,EAAA1X,KAC3CmM,EAASka,GAAmBla,GAC5BoB,EAASlR,OAAOmR,OAAO,GAAI,CAAEmY,WAAW,EAAMsI,YAAY,GAAS1gB,GAEnE,IAAMwX,EAAe/kB,KAAKglB,kBACpBkJ,EAAUF,EAAWlZ,MAAM,EAAGkZ,EAAWrsB,OAAS,GAClDwsB,EAAaH,EAAWA,EAAWrsB,OAAS,GAElD,GAAIojB,EAAa5Y,EAAOjQ,QAAUqR,EAAO0gB,WACrC,MAAM,IAAIpd,MAAS1E,EAAOjQ,KAApB,sCAGV,IAAMkyB,EAAkBF,EAAQ9lB,IAAI,SAAC2D,GACjC,IAAMsiB,EAAYtJ,EAAahZ,GAC/B,IAAKsiB,EAED,MAAM,IAAIxd,MAAS9E,EAAb,gCAEV,OAAOsiB,EAAUnsB,QAGfoiB,EAAQtkB,KAAKskB,MAAM/W,EAAOoY,WAE1B2I,EAAKhK,EAAM9T,gBAAgBrH,OAC3BolB,EAAiBH,EAAgBhmB,IAAI,SAAAwL,GAAA,OAAO0a,EAAG1a,KAEjD6F,EAAc,GACdC,EAAgB,kBAAMhC,EAAKjG,gBAEzB+c,EAAiB,GACvB3hB,EAAmByX,EAAMvT,YAAa,SAACpV,GACnC,IAAM8yB,EAAaF,EAAenmB,IAAI,SAAA2D,GAAA,OAASA,EAAMsF,aAAa9F,KAAK5P,KACvE6yB,EAAe7yB,GAAKwyB,sIAAcM,GAAd5lB,OAAA,CAA0BlN,
EAAG+d,EAAeD,OAhCzB,IAAAiV,EAkC3BnR,GAAa,CAACiR,GAAiB,CAACriB,GAAS,CAACA,EAAOjQ,OAA1D6P,EAlCoC4iB,GAAAD,EAAA,MA6C3C,OAVApK,EAAMsK,SAAS7iB,GAEfoW,GACIniB,KACAskB,EACA3V,EAAeK,QACf,CAAEzB,OAAQpB,EAAQhD,OAAQ+kB,GAC1BC,GAGG7J,oCAWA6E,GAA2D,IAA9C5b,EAA8ChI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAArC,GAAIspB,EAAiCtpB,UAAA,GAAjBilB,EAAiBjlB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAJ,GACxDupB,EAAkBvhB,EAAOuhB,gBACzBtF,EAAsBjc,EAAOkc,SAC7BsF,EAAUxhB,EAAOwhB,QACjBxE,EF7HkB,SAAClI,GAC7B,KAAOA,EAAM4G,SACT5G,EAAQA,EAAM4G,QAElB,OAAO5G,EEyHe2M,CAAiBhvB,MAC7BspB,EAAuBiB,EAAUc,sBAEjCjC,EAAa,CACfuB,aFxIuB,SAACtI,GAChC,KAAOA,EAAM4G,SAAW5G,EAAMI,YAAYwM,KAAK,SAAAhzB,GAAA,OAAKA,EAAE0mB,KAAOhU,EAAeG,WACxEuT,EAAQA,EAAM4G,QAElB,OAAO5G,EEkIsB6M,CAAoBlvB,MAGzCqiB,MAAOkI,GAgBX,OAbAsE,GFd0B,SAACvF,GAA6C,IAAvB/b,EAAuBhI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAd,GAAI8c,EAAU9c,UAAA,GACxE4pB,SACEL,EAAkBvhB,EAAOuhB,gBACzBjM,EAAWtV,EAAOsV,SAClB3lB,EAASqQ,EAAO0c,OAAhB,IAA0B1c,EAAOkc,SAGnC0F,EADAL,EACkBxF,EAAqBQ,eAErBR,EAAqBgC,iBAG1B,OAAbzI,SACOsM,EAAgBjyB,GAEvBiyB,EAAgBjyB,GAAO,CACnBmlB,QACA9U,UEHc6hB,CAAmB9F,EAAsB/b,EAAQvN,MACnEkpB,GAAyBC,EAAaC,EAAY,CAAEE,uBAAsBG,SAAUD,GAChFntB,OAAOmR,OAAO,CACVuhB,WACDxhB,IAEHuhB,GFxC6B,SAACxF,EAAsBF,EAAYC,GACxE,IAAMiC,EAAmBhC,EAAqBgC,iBAE9C,IAAK,IAAMrB,KAAUqB,EAAkB,CACnC,IACMlB,EADYkB,EAAiBrB,GACN1c,OACvBic,EAAsBH,EAAe9b,OAAOkc,SAC5C4F,GAAwBhG,EAAemB,WAAW6E,uBACpDhG,EAAemB,WAAW6E,sBAAsBjF,EAAYf,EAAe9b,QAC/E,GAAI6c,EAAWX,WAAaD,GAAuB6F,EAAuB,CACtE,IAAMC,EAAgBlF,EAAWvH,SACjCqG,GAAyBoG,EAAelG,EAAY,CAChDE,uBACAC,mBAAmB,EACnBE,SAAUD,GACXY,KE0BHmF,CAA0BjG,EAAsBF,EAAY,CACxD7b,SACAid,eAIDxqB,gCAUPwvB,EAAWziB,GACX,OAAQyiB,GACR,IrCpiBmB,cqCqiBfxvB,KAAK2sB,eAAetmB,KAAK0G,GAG7B,OAAO/M,yCASEwvB,GACT,OAAQA,GACR,IrCnjBmB,cqCojBfxvB,KAAK2sB,eAAiB,GAI1B,OAAO3sB,+CAUQ6kB,EAAWkK,GAAS,IAAAjT,EAAA9b,KACfA,KAAK2sB,eACXrjB,QAAQ,SAAAoc,GAAA,OAAMA,EAAG5pB,KAAKggB,EAAM+I,EAAWkK,iCA8CpDU,EAAkBliB,GACnB,IAAMwX,EAAe/kB,KAAKglB,kBAE1B,IAAKD,EAAa0K,GACd,MAAM,IAAI5e,MAAJ,SAAmB4e,EAAnB,kBAGV,IAAMC,EAAeniB,EAAOrR,MAAWuzB,EAAlB,UAErB,GAAI1K,EAAa2K,GACb,MAAM,IAAI7e,MAAJ,SAAmB6e,EAAnB,mBAGV,IAb2BC,EtCvkB5B,SAAgCC,EAAc9iB,EAAYS,GAAQ,IAC/DY,EAA4CZ,EAA5CY,QAAS0hB,EAAmCtiB,EAAnCsiB,UAAW3hB,EAAwBX,EAAxBW,QAASf,EAAeI,EAAfJ,MAAOC,EAAQG,EAARH,IAD2B0iB,EAEhDF,EAAa7U,SAFmCgV,EAAAC,EAAAF,EAAA,GAE9DG,EAF8DF,EAAA,GAExDG,EAFwDH,EAAA,GAIhE5hB,IACDhB,EAAmB,IAAVA,KAAiBA,GAASA,EAAQ8iB,GAASA,EAAO9iB,EAC3DC,EAAe,IAARA,KAAeA,GAAOA,EAAM8iB,GAAUA,EAAO,EAAK9iB,EAErDyiB,IACA3hB,EAAUlK,KAAKmsB,KAAKnsB,KAAKosB,IAAIhjB,EAAMD,GAAS0iB,IAGhD1hB,EAAUF,EAAgBC,EAASf,EAAOC,IAG1Ce,EAAQ,GAAK8hB,GACb9hB,EAAQvG,QAAQqoB,GAEhB9hB,EAAQA,EAAQxM,OAAS,IAAMuuB,GAC/B/hB,EAAQ9H,KAAK6pB,EAAO,GAIxB,IADA,IAAM5hB,EAAe,GACZ3S,EAAI,EAAGA,EAAIwS,EAAQxM,OAAS,EAAGhG,IACpC2S,EAAajI,KAAK,CACd8G,MAAOgB,EAAQxS,GACfyR,IAAKe,EAAQxS,EAAI,KAIzB,IAAM00B,EAAa,GAYnB,OAXAxjB,EAAmBC,EAAY,SAACnR,GAC5B,IAAMsX,EAAQ2c,EAAave,aAAa9F,KAAK5P,GAC7C,GAAIsX,aAAiB5F,EACjBgjB,EAAWhqB,KAAK4M,OADpB,CAKA,IAAMzR,EAAQ6M,EAAgBC,EAAc2E,GAC5Cod,EAAWhqB,KAAQ7E,EAAM2L,MAAzB,IAAkC3L,EAAM4L,QAGrC,CAAEijB,aAAYpU,KAAM9N,GsC2iBMmiB,CADRtwB,KAAKwQ,gBAAgB3E,YAAY4jB,GACWzvB,KAAK+Q,YAAaxD,GAA3E8iB,EAdmBV,EAcnBU,WAAYpU,EAdO0T,EAcP1T,KAEdsU,EAAWhT,GAAa,CAAC8S,GAAa,CACxC,CACIn0B,KAAMwzB,EACNtjB,KAAM1N,EAAUE,UAChB2b,QAASpc,EAAiBI,OAC1B0d,SACA,CAACyT,IAAe,GAElBpL,EAAQtkB,KAAKskB,MAAM/W,EAAOoY,WAWhC,OAVArB,EAAMsK,SAAS2B,GAEfpO,GACIniB,KACAskB,EACA3V,EAAeM,IACd,CAAEwgB,mBAAkBliB,SAAQmiB,gBAC5B,MAGEpL,yCA8BP,OAAO,IAAI3mB,EAHEqC,KAAKwwB,UAAU1yB,EAAWC,WACxBiC,KAAKywB,kEA9kBWljB,GAC/B,OAAOF,EAAkBK,iBAAiBH,oCAf1C,OAAOsK,YCvFA
5B,GAAoDM,GAApDN,IAAKG,GAA+CG,GAA/CH,IAAKO,GAA0CJ,GAA1CI,IAAKE,GAAqCN,GAArCM,IAAK6Z,GAAgCna,GAAhCma,MAAOC,GAAyBpa,GAAzBoa,KAAMC,GAAmBra,GAAnBqa,MAAYC,GAAOta,GAAZua,ICsBjDC,GAAY,CACdC,QtC8LmB,mBAAAC,EAAA1rB,UAAA5D,OAAIuvB,EAAJ9nB,MAAA6nB,GAAAE,EAAA,EAAAA,EAAAF,EAAAE,IAAID,EAAJC,GAAA5rB,UAAA4rB,GAAA,OACnB,SAACxb,GAAqC,IAAjCpI,EAAiChI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAxB,CAAEogB,WAAW,GACnByL,EAAYzb,EACZ0b,SACE1J,EAAc,GA8BpB,OA5BAuJ,EAAW5nB,QAAQ,SAACgZ,GAChB8O,EAAY9O,EAAU8O,GACtBzJ,EAAYthB,KAAZqB,MAAAigB,wHAAA2J,CAAoBF,EAAU3O,cACzB4O,IACDA,EAAaD,KAIjBC,GAAcA,IAAeD,GAC7BC,EAAWE,UAIfH,EAAUnO,oBAAsB,GAChCd,GACIxM,EACAyb,EACAziB,EAAeI,QACf,KACA4Y,GAGApa,EAAOoY,UACPyL,EAAUtF,UAAUnW,GAEpByb,EAAUtF,UAAU,MAGjBsF,IsC/NXI,ItC4He,mBAAAC,EAAAlsB,UAAA5D,OAAIwF,EAAJiC,MAAAqoB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAIvqB,EAAJuqB,GAAAnsB,UAAAmsB,GAAA,OAAa,SAAA/b,GAAA,OAAMA,EAAG6b,IAAH9pB,MAAAiO,EAAUxO,KsC3H5Cse,OtCgCkB,mBAAAvc,EAAA3D,UAAA5D,OAAIwF,EAAJiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAIlC,EAAJkC,GAAA9D,UAAA8D,GAAA,OAAa,SAAAsM,GAAA,OAAMA,EAAG8P,OAAH/d,MAAAiO,EAAaxO,KsC/BlDwqB,QtC+DmB,mBAAAC,EAAArsB,UAAA5D,OAAIwF,EAAJiC,MAAAwoB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI1qB,EAAJ0qB,GAAAtsB,UAAAssB,GAAA,OAAa,SAAAlc,GAAA,OAAMA,EAAGgc,QAAHjqB,MAAAiO,EAAcxO,KsC9DpD2Q,QtCsJmB,mBAAAga,EAAAvsB,UAAA5D,OAAIwF,EAAJiC,MAAA0oB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI5qB,EAAJ4qB,GAAAxsB,UAAAwsB,GAAA,OAAa,SAAApc,GAAA,OAAMA,EAAGmC,QAAHpQ,MAAAiO,EAAcxO,KsCrJpD6qB,kBCvB6B,mBAAA9oB,EAAA3D,UAAA5D,OAAIwF,EAAJiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAIlC,EAAJkC,GAAA9D,UAAA8D,GAAA,OAAa,SAAAsM,GAAA,OAAMA,EAAGqc,kBAAHtqB,MAAAiO,EAAwBxO,KDwBxEgL,KCfgB,mBAAAyf,EAAArsB,UAAA5D,OAAIwF,EAAJiC,MAAAwoB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI1qB,EAAJ0qB,GAAAtsB,UAAAssB,GAAA,OAAa,SAAAlc,GAAA,OAAMA,EAAGxD,KAAHzK,MAAAiO,EAAWxO,KDgB9C6I,eACAoF,aACA6c,YE/BG,SAAsB/X,EAAYC,GACrC,OAAOnK,EAAakK,EAAYC,EAAYN,GAAkBK,EAAYC,IAAa,IF+BvFF,iBACAG,kBACA8X,c3BxBG,SAAwBhY,EAAYC,EAAYhK,GACnD,OAAO6J,GAAMC,GAAcC,EAAYC,EAAYhK,GAAWiK,GAAeF,EAAYC,EAAYhK,K2BwBrG6J,UAGEmY,QAAcA,QACpB91B,OAAOmR,OAAO7P,GAAW,CACrBozB,aACAqB,QACAzjB,iBACA7O,oBACAhC,aACAe,gBACAwO,oBACA8kB,YACDE,GAEY10B","file":"datamodel.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = 
function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = 1);\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? 
DataModel.default : DataModel;\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n GEO: 'geo',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Group by function names\n *\n * @readonly\n * @enum {string}\n */\nconst GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n\nexport default GROUP_BY_FUNCTIONS;\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a number if its less than 1o. This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. 
For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return 
convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = 
Object.keys(definitions);\n const occurrence = [];\n let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 
=== regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === 
undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? [] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = 
this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (val, field) {\n Object.defineProperty(this, '_value', {\n enumerable: false,\n configurable: false,\n writable: false,\n value: val\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. '0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = (start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? 
(dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. 
Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The 
filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = field.partialField.data[i];\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = field.partialField.data[ii];\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 
1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray, } from '../utils';\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @param {integer} index - The index of the data which will be sorted.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType, index) {\n let retFunc;\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'desc') {\n retFunc = (a, b) => b[index] - a[index];\n } else {\n retFunc = (a, b) => a[index] - b[index];\n }\n break;\n default:\n retFunc = (a, b) => {\n const a1 = `${a[index]}`;\n const b1 = `${b[index]}`;\n if (a1 < b1) {\n return sortType === 'desc' ? 1 : -1;\n }\n if (a1 > b1) {\n return sortType === 'desc' ? 
-1 : 1;\n }\n return 0;\n };\n }\n return retFunc;\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData(data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data before return in dataBuilder.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction sortData(dataObj, sortingDetails) {\n const { data, schema } = dataObj;\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n sortMeta = String(sortMeta).toLowerCase() === 'desc' ? 'desc' : 'asc';\n mergeSort(data, getSortFn(fDetails.type, sortMeta, fDetails.index));\n }\n }\n\n dataObj.uids = [];\n data.forEach((value) => {\n dataObj.uids.push(value.pop());\n });\n}\n\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n 
(dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. 
If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * @param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = 
dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = 
dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return (dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].value ===\n dm2Fields[fieldName].value && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple 
attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field subtype.\n *\n * 
@public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n data.push(datum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, this.format()));\n }\n });\n return data;\n 
}\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? 
numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate 
= this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n 
return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr (arr, options) {\n const defaultOption = {\n firstRowHeader: true,\n };\n options = Object.assign({}, defaultOption, options);\n\n let header;\n const columns = [];\n const push = columnMajor(columns);\n\n if (options.firstRowHeader) {\n // If header present then mutate the array.\n // Do in-place mutation to save space.\n header = arr.splice(0, 1)[0];\n } else {\n header = [];\n }\n\n arr.forEach(field => push(...field));\n\n return [header, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? 
\"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? 
\"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr) {\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n\n arr.forEach((item) => {\n const fields = [];\n for (let key in item) {\n if (key in header) {\n insertionIndex = header[key];\n } else {\n header[key] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[key];\n }\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, options);\n}\n\nexport default Auto;\n","import { 
FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, i) {\n const resp = {};\n for (let field of fields) {\n resp[field.name()] = new Value(field.partialField.data[i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n Object.keys(fields).forEach((key) => { resp[key] = new Value(fields[key], key); });\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\n\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nexport const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => {\n const newRowDiffSet = [];\n let lastInsertedValue = -1;\n let { mode } = config;\n let li;\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n let checker;\n if (mode === FilteringMode.INVERSE) {\n checker = index => !selectorHelperFn(index);\n } else {\n checker = index => selectorHelperFn(index);\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n li = newRowDiffSet.length - 1;\n newRowDiffSet[li] = `${newRowDiffSet[li].split('-')[0]}-${i}`;\n } else {\n newRowDiffSet.push(`${i}`);\n }\n lastInsertedValue = i;\n }\n });\n return newRowDiffSet.join(',');\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n const operation = config.operation || 
LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n let fns = [];\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n const dataObj = dataModel.getData();\n const schema = dataObj.schema;\n const fieldsConfig = dataModel.getFieldsConfig();\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = Object.values(fieldsConfig).reduce((acc, v) => {\n acc[v.def.name] = fieldsSpace[v.def.name].domain();\n return acc;\n }, {});\n\n return (fields) => {\n const include = !data.length ? false : data.some(row => schema.every((propField) => {\n if (!(propField.name in fields)) {\n return true;\n }\n const value = fields[propField.name].valueOf();\n if (filterByMeasure && propField.type === FieldType.MEASURE) {\n return value >= domain[propField.name][0] && value <= domain[propField.name][1];\n }\n\n if (propField.type !== FieldType.DIMENSION) {\n return true;\n }\n const idx = fieldsConfig[propField.name].index;\n return row[idx] === fields[propField.name].valueOf();\n }));\n return include;\n };\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = cloneWithAllFields(model).select(fields => fns.every(fn => fn(fields)), {\n saveChild: false,\n mode: FilteringMode.ALL\n });\n } else {\n filteredModel = cloneWithAllFields(model).select(fields => fns.some(fn => fn(fields)), {\n mode: FilteringMode.ALL,\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const rowDiffset = selectHelper(\n cloned._rowDiffset,\n cloned.getPartialFieldspace().fields,\n selectFn,\n selectConfig,\n sourceDm\n );\n cloned._rowDiffset = rowDiffset;\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS];\n const supportedDimSubTypes = [\n DimensionSubtype.CATEGORICAL,\n DimensionSubtype.BINNED,\n DimensionSubtype.TEMPORAL,\n 
DimensionSubtype.GEO\n ];\n const { type, subtype, name } = unitSchema;\n\n switch (type) {\n case FieldType.DIMENSION:\n if (supportedDimSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`);\n }\n break;\n case FieldType.MEASURE:\n if (supportedMeasureSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n break;\n default:\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? 
detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n type: schema[i].subtype || schema[i].type,\n index: i\n };\n }\n }\n return null;\n};\n\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel[0];\n let rejectionModel = propModel[1];\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n rejectionModel = rejectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return [selectionModel, rejectionModel];\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? 
excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n let [selectionModel, rejectionModel] = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, [selectionModel, rejectionModel], config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const 
propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport { updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 
'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. `join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... }\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n\n const cloneConfig = { saveChild: config.saveChild };\n let oDm;\n\n if (config.mode === FilteringMode.ALL) {\n const selectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.NORMAL },\n cloneConfig\n );\n const rejectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.INVERSE },\n cloneConfig\n );\n oDm = [selectDm, rejectDm];\n } else {\n oDm = cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n return oDm;\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. 
All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n\n let normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n\n normalizedProjField = Array.from(new Set(normalizedProjField)).map(field => field.trim());\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { 
name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new 
DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. 
DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... 
}`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. 
When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) {\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n 
groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n /**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. 
It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return new DataModel(data, schema);\n }\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version\n}, enums);\n\nexport default DataModel;\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The 
argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file diff --git a/example/data/cars.json b/example/data/cars.json index 187bd6a..76c4519 100644 --- a/example/data/cars.json +++ b/example/data/cars.json @@ -1,2858 +1,4886 @@ [ - { - "Ticket": "N/A", - "Organisation": "Seaspan", - "Name": "Chase Huber", - "Email ID": "chuber@washcorp.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion ​Data Grids for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-04-13", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-04-13", - "Payment Mode": "Avangate", - "Source/Ref No.": 69818083, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": "", - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Ball Aerospace", - "Name": "Linda Haugse", - "Email ID": "lhaugse@ball.com", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Cipher Technology Services", - "Partner Name": "Jose Tineo", - "Partner Email ID": "jose.tineo@cipherts.com", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 909.3, - "PO Number": "", - "Date of Order": "2018-04-16", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-04-20", - "Payment Mode": "ShareIt", - "Source/Ref No.": 560988613, - "Payment Due Date": "afdbgd", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": "", - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "USPS (United States Postal Services)", - "Name": "Russ Weimer", - "Email ID": "rustin.v.weimer@usps.gov", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Insight", - "Partner Name": "Spencer Stephenson", - "Partner Email ID": "insightusps@insight.com", - "Product": "Collabion DataParts for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 5, - "Price": 3249, - "Gross Value": 16245, - "Net Value": 11371.5, - "PO Number": "", - "Date of Order": "2018-04-25", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-04-25", - "Payment Mode": "ShareIt", - "Source/Ref No.": 561252753, - "Payment Due 
Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "HELIBRAS - Helicopteros do Brasil S/A", - "Name": "Breno Souza", - "Email ID": "breno.souza.external@helibras.com.br", - "Country": "Brazil", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Charts for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 15, - "Reseller Discount": 0, - "Qty": 1, - "Price": 2499, - "Gross Value": 2124.15, - "Net Value": 2124.15, - "PO Number": 49948621, - "Date of Order": "2018-04-25", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-12", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "EWE Aktiengesellschaft", - "Name": "Martin Tapken", - "Email ID": "Martin.Tapken@ewe.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SWNetwork GmbH", - "Partner Name": "Mine Akbas", - "Partner Email ID": "mine.akbas@swnetwork.de", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 418.6, - "PO Number": "", - "Date of Order": "2018-04-26", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-24", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "EWE Aktiengesellschaft", - "Name": "Martin Tapken", - "Email ID": "Martin.Tapken@ewe.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SWNetwork GmbH", - "Partner Name": "Mine Akbas", - "Partner Email ID": "mine.akbas@swnetwork.de", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 99, - "Gross Value": 99, - "Net Value": 69.3, - "PO Number": "", - "Date of Order": "2018-04-26", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-24", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Omya (Schweiz) AG", - "Name": "Andrea Krebs", - "Email ID": "andrea.krebs@omya.com", - "Country": "Switzerland", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": 4502815417, - "Date of Order": "2018-04-30", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-30", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": 
"Portland Water District", - "Name": "Charles Davis", - "Email ID": "cdavis@pwd.org", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-04-30", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-04-30", - "Payment Mode": "ShareIt", - "Source/Ref No.": 561758293, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Royal Cosun", - "Name": "Bea Eenink", - "Email ID": "servicedesk@cosun.com", - "Country": "Netherlands", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-05-03", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-03", - "Payment Mode": "ShareIt", - "Source/Ref No.": 562001673, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "ClinLogix", - "Name": "Mr. Bill Fry", - "Email ID": "bfry@clinlogix.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-05-04", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-04", - "Payment Mode": "Avangate", - "Source/Ref No.": 70188838, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Top Tier Software Solutions LLC", - "Name": "Trudy Gallahan", - "Email ID": "tgallahan@ttssllc.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-05-05", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-05", - "Payment Mode": "Avangate", - "Source/Ref No.": 70349922, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Tractors Singapore Limited", - "Name": "Ma keng Ho", - "Email ID": "ma.keng.ho@tractors.simedarby.com.sg", - "Country": "Singapore", - "Medium": "Direct", - "Member": "Arjun", - "Shared 
with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 10, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 899.1, - "Net Value": 899.1, - "PO Number": 3030080482, - "Date of Order": "2018-05-08", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-25", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Lutheran Social Services of Illinois", - "Name": "Tom Garite", - "Email ID": "Tom.Garite@lssi.org", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "Insight", - "Partner Name": "Roland Guilbault", - "Partner Email ID": "roland.guilbault@insight.com", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 10, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 269.1, - "PO Number": "", - "Date of Order": "2018-05-09", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-09", - "Payment Mode": "ShareIt", - "Source/Ref No.": 562459643, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Golden Idea Technology Co., Ltd.", - "Name": "Kemp Zhou", - "Email ID": "zhjc@giit.cn", - "Country": "China", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-05-10", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-10", - "Payment Mode": "ShareIt", - "Source/Ref No.": 562500113, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Afton Chemicals", - "Name": "David Osborne", - "Email ID": "David.Osborne@AftonChemical.com", - "Country": "United Kingdom", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Coretek", - "Partner Name": "Garry Miller", - "Partner Email ID": "garry.miller@coretek.co.uk", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 2, - "Price": 1299, - "Gross Value": 2598, - "Net Value": 1818.6, - "PO Number": "", - "Date of Order": "2018-05-16", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-16", - "Payment Mode": "ShareIt", - "Source/Ref No.": 562942283, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Matanuska Susitna Borough", - "Name": "Davey Griffith", - "Email ID": "Davey.Griffith@matsugov.us", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - 
"Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-05-17", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-30", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Mori Associates, Inc", - "Name": "Julia Medellin", - "Email ID": "julia.medellin@nasa.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Charts for SharePoint license - Annual", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-05-23", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-23", - "Payment Mode": "Avangate", - "Source/Ref No.": 71475140, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Boston Scientific", - "Name": "Jorge Arias", - "Email ID": "JorgeMario.AriasChaves@bsci.com", - "Country": "Costa Rica", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 999, - "PO Number": "", - "Date of Order": "2018-05-28", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-28", - "Payment Mode": "ShareIt", - "Source/Ref No.": 563714263, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "International Atomic Energy Agency (IAEA)", - "Name": "Aniko Makai", - "Email ID": "a.makai@iaea.org", - "Country": "Austria", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Filter for SharePoint: Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 597, - "Gross Value": 597, - "Net Value": 597, - "PO Number": "", - "Date of Order": "2018-06-01", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-01", - "Payment Mode": "Avangate", - "Source/Ref No.": 71392761, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "General Dynamics European Land Systems", - "Name": "JULIO ALVAREZ", - "Email ID": "julio.alvarez@gdels.com", - "Country": "Spain", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "Danysoft Internacional", - "Partner Name": "María Pastrana", - "Partner Email ID": "orders@danysoft.com", - 
"Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 478.4, - "PO Number": "", - "Date of Order": "2018-06-13", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-07-17", - "Payment Mode": "ShareIt", - "Source/Ref No.": 567750043, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "STANFORD LINEAR ACCELERATOR", - "Name": "Pamela Wright-Brunache", - "Email ID": "pdwb@slac.stanford.edu", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "Liquid PC, Inc", - "Partner Name": "Courtney O’Connell", - "Partner Email ID": "coconnell@liquidpc.com", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 3, - "Price": 299, - "Gross Value": 897, - "Net Value": 717.6, - "PO Number": "", - "Date of Order": "2018-06-15", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-15", - "Payment Mode": "ShareIt", - "Source/Ref No.": 565307183, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "STANFORD LINEAR ACCELERATOR", - "Name": "Pamela Wright-Brunache", - "Email ID": "pdwb@slac.stanford.edu", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "Liquid PC, Inc", - "Partner Name": "Courtney O’Connell", - "Partner Email ID": "coconnell@liquidpc.com", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 2, - "Price": 99, - "Gross Value": 198, - "Net Value": 158.4, - "PO Number": "", - "Date of Order": "2018-06-15", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-15", - "Payment Mode": "ShareIt", - "Source/Ref No.": 565307183, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Moneytree Inc", - "Name": "Grady Patterson", - "Email ID": "grady.patterson@moneytreeinc.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 999, - "PO Number": "", - "Date of Order": "2018-06-20", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-20", - "Payment Mode": "ShareIt", - "Source/Ref No.": 565663083, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Centers for Disease Control and Prevention (CDC)", - "Name": "Stephen Campanelli", - "Email ID": "wng1@cdc.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": 
"Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 598, - "PO Number": "", - "Date of Order": "2018-06-21", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-21", - "Payment Mode": "ShareIt", - "Source/Ref No.": 565813083, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Centers for Disease Control and Prevention (CDC)", - "Name": "Stephen Campanelli", - "Email ID": "wng1@cdc.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 6, - "Price": 99, - "Gross Value": 594, - "Net Value": 594, - "PO Number": "", - "Date of Order": "2018-06-21", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-21", - "Payment Mode": "ShareIt", - "Source/Ref No.": 565813083, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "USDA – Food & Nutrition Service", - "Name": "Allen Austin", - "Email ID": "allen.austin@fns.usda.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 15, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1698.3, - "Net Value": 1698.3, - "PO Number": "", - "Date of Order": "2018-06-29", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-29", - "Payment Mode": "ShareIt", - "Source/Ref No.": 566366323, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Martin’s Famous Pastry Shoppe Inc", - "Name": "Karl Hansen", - "Email ID": "khansen@potatorolls.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-07-03", - "Month": "2018-07-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-07-03", - "Payment Mode": "ShareIt", - "Source/Ref No.": 566752293, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Toll Holdings Limited", - "Name": "Stuart Warke", - "Email ID": "stuart.warke@tollgroup.com", - "Country": "Australia", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "MicroWay Pty Ltd", - "Partner Name": "Melissa Bourke", - "Partner Email ID": "melissab@microway.com.au", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, 
- "Reseller Discount": 30, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 418.6, - "PO Number": 50880, - "Date of Order": "2018-07-18", - "Month": "2018-07-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-01", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Toll Holdings Limited", - "Name": "Stuart Warke", - "Email ID": "stuart.warke@tollgroup.com", - "Country": "Australia", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "MicroWay Pty Ltd", - "Partner Name": "Melissa Bourke", - "Partner Email ID": "melissab@microway.com.au", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 99, - "Gross Value": 99, - "Net Value": 69.3, - "PO Number": 50880, - "Date of Order": "2018-07-18", - "Month": "2018-07-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-01", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "CROWN Gabelstapler GmbH & Co KG", - "Name": "Dominique Michalak", - "Email ID": "dominique.michalak@crown.com", - "Country": "Germany", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": 1201800214, - "Date of Order": "2018-07-20", - "Month": "2018-07-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-23", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "City of Mississauga", - "Name": "Jennifer MacDonald", - "Email ID": "Jennifer.macdonald@mississauga.ca", - "Country": "Canada", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for Training", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 10, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1000, - "Gross Value": 900, - "Net Value": 900, - "PO Number": "", - "Date of Order": "2018-08-03", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-03", - "Payment Mode": "ShareIt", - "Source/Ref No.": 569127193, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "CE3 Inc", - "Name": "Robin Marquis", - "Email ID": "rmarquis@ce3inc.com", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Simple SharePoint", - "Partner Name": "Jeanne Conde", - "Partner Email ID": "jeannec@simplesharepoint.com", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 
909.3, - "PO Number": "", - "Date of Order": "2018-08-08", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-08", - "Payment Mode": "ShareIt", - "Source/Ref No.": 569504033, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Simple SharePoint", - "Name": "Jeanne Conde", - "Email ID": "jeannec@simpleportals.com", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 909.3, - "PO Number": "", - "Date of Order": "2018-08-08", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-08", - "Payment Mode": "ShareIt", - "Source/Ref No.": 569504733, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "KfW Bankengruppe", - "Name": "Sebastian Mattar", - "Email ID": "sebastian.mattar@kfw.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Prianto Gmbh", - "Partner Name": "Tim Joosten", - "Partner Email ID": "tim.joosten@prianto.com", - "Product": "Collabion DataParts for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 1, - "Price": 3249, - "Gross Value": 3249, - "Net Value": 2599.2, - "PO Number": "", - "Date of Order": "2018-08-09", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-09", - "Payment Mode": "ShareIt", - "Source/Ref No.": 568867623, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Ball Aerospace", - "Name": "Ellen Teal", - "Email ID": "steal@ball.com", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Cipher Technology Services", - "Partner Name": "Jose Tineo", - "Partner Email ID": "Jose.Tineo@cipherts.com", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 909.3, - "PO Number": "", - "Date of Order": "2018-08-11", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-11", - "Payment Mode": "ShareIt", - "Source/Ref No.": 569722273, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Singapore Telecommunications Limited", - "Name": "Kelvin Soh", - "Email ID": "kelvinsoh@singtel.com", - "Country": "Singapore", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SoftwareONE Pte Ltd", - "Partner Name": "Jason Tan", - "Partner Email ID": "Jason.tan@softwareone.com", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 4, - "Price": 
999, - "Gross Value": 3996, - "Net Value": 2797.2, - "PO Number": "", - "Date of Order": "2018-08-16", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-18", - "Payment Mode": "ShareIt", - "Source/Ref No.": 573155113, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Martin’s Famous Pastry Shoppe Inc", - "Name": "Nicole Wahl", - "Email ID": "nwahl@potatorolls.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion ​Data Grids for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 30, - "Reseller Discount": 0, - "Qty": 1, - "Price": 2997, - "Gross Value": 2097.9, - "Net Value": 2097.9, - "PO Number": "", - "Date of Order": "2018-08-21", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-21", - "Payment Mode": "ShareIt", - "Source/Ref No.": 570435233, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Martin’s Famous Pastry Shoppe Inc", - "Name": "Nicole Wahl", - "Email ID": "nwahl@potatorolls.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Filter for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 30, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1497, - "Gross Value": 1047.9, - "Net Value": 1047.9, - "PO Number": "", - "Date of Order": "2018-08-21", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-21", - "Payment Mode": "ShareIt", - "Source/Ref No.": 570435233, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Acto Informatisering B.V.", - "Name": "Ines Scholten", - "Email ID": "i.scholten@acto.nl", - "Country": "Netherlands", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-08-23", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-23", - "Payment Mode": "ShareIt", - "Source/Ref No.": 570670483, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Adtollo AB", - "Name": "Stefan Andersson", - "Email ID": "stefan.andersson@adtollo.se", - "Country": "Sweden", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 99, - "Gross Value": 99, - "Net Value": 99, - "PO Number": "", - "Date of Order": "2018-08-28", - "Month": "2018-08-01", - "Quarter": "Q2 - 
2018/19", - "Date of Payment": "2018-08-28", - "Payment Mode": "Avangate", - "Source/Ref No.": 79977692, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "HOCHTIEF VICON GMBH", - "Name": "Karsten Kneip", - "Email ID": "karsten.kneip@hochtief.de", - "Country": "Germany", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 999, - "PO Number": "", - "Date of Order": "2018-08-28", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-28", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "ASCO Valve, Inc.", - "Name": "Michele Testa", - "Email ID": "Michele.Testa@emerson.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Charts for SharePoint - Perpetual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 499, - "Gross Value": 499, - "Net Value": 499, - "PO Number": "", - "Date of Order": "2018-08-28", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-28", - "Payment Mode": "ShareIt", - "Source/Ref No.": 571018303, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "ACSS/L3-Comm", - "Name": "Jon Mahenski", - "Email ID": "jon.mahenski@l3t.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 999, - "PO Number": "", - "Date of Order": "2018-08-30", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-30", - "Payment Mode": "ShareIt", - "Source/Ref No.": 571270823, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Attorney Generals Department", - "Name": "Tina Conroy", - "Email ID": "tina.conroy@sa.gov.au", - "Country": "Australia", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "MicroWay Pty Ltd", - "Partner Name": "Matthew Looke", - "Partner Email ID": "invoices@microway.com.au", - "Product": "Collabion DataParts for SharePoint - Perpetual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 649, - "Gross Value": 649, - "Net Value": 454.3, - "PO Number": 51341, - "Date of Order": "2018-09-03", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-10-15", - "Payment Mode": "Bank Wire", 
- "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Abbey Gate College", - "Name": "Duncan Stewart", - "Email ID": "duncan.stewart@abbeygatecollge.co.uk", - "Country": "United Kingdom", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-09-03", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-03", - "Payment Mode": "ShareIt", - "Source/Ref No.": 571607053, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "MilDef AB", - "Name": "Tiberiju Beldja", - "Email ID": "Tiberiju.Beldja@mildef.com", - "Country": "Sweden", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-09-05", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-05", - "Payment Mode": "ShareIt", - "Source/Ref No.": 571787983, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Mentor Graphics", - "Name": "Echo Schmidt", - "Email ID": "Echo_Schmidt@mentor.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-09-06", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-06", - "Payment Mode": "Avangate", - "Source/Ref No.": 79967907, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "NIST", - "Name": "Silvia Rodriguez", - "Email ID": "silvia.rodriguez@nist.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion ​Data Grids for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 399, - "Gross Value": 798, - "Net Value": 798, - "PO Number": "", - "Date of Order": "2018-09-07", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-07", - "Payment Mode": "ShareIt", - "Source/Ref No.": 572056513, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": 
"NIST", - "Name": "Silvia Rodriguez", - "Email ID": "silvia.rodriguez@nist.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Charts for SharePoint license", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-09-07", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-07", - "Payment Mode": "ShareIt", - "Source/Ref No.": 572056513, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Thomson Reuters-TR Applications Inc.", - "Name": "Gary Forbes", - "Email ID": "garyt.forbes@thomsonreuters.com", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "ComponentSource", - "Partner Name": "ComponentSource Sales", - "Partner Email ID": "Sales@componentsource.com", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 4, - "Price": 299, - "Gross Value": 1196, - "Net Value": 837.2, - "PO Number": "", - "Date of Order": "2018-09-11", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-11", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Washington Corp", - "Name": "Chase Huber", - "Email ID": "chuber@washcorp.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Filters for Collabion Dashboard License (Perpetual)", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 499, - "Gross Value": 998, - "Net Value": 998, - "PO Number": "", - "Date of Order": "2018-09-17", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-17", - "Payment Mode": "Avangate", - "Source/Ref No.": 80912449, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Jacobs Technology", - "Name": "Robert Bunker", - "Email ID": "Robert.Bunker.CTR@MDA.mil", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SHI International Corp.", - "Partner Name": "Rita Katransky", - "Partner Email ID": "Rita_Katransky@SHI.com", - "Product": "Collabion DataParts for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 3, - "Price": 3249, - "Gross Value": 9747, - "Net Value": 6822.9, - "PO Number": "", - "Date of Order": "2018-09-18", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-10-03", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "CB Richard Ellis, Inc.", 
- "Name": "Rachel Knepper\nShayla Anthony", - "Email ID": "rachel.knepper@cbre.com\nshayla.anthony@cbre.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "AUC for the period from 27th September 2018 to 26th September 2019", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1459, - "Gross Value": 1459, - "Net Value": 1459, - "PO Number": "", - "Date of Order": "2018-09-18", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-18", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Portland Water District", - "Name": "Charles Davis", - "Email ID": "cdavis@pwd.org", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-09-26", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-26", - "Payment Mode": "ShareIt", - "Source/Ref No.": 573921893, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Ellis Medicine", - "Name": "Jerry Adach", - "Email ID": "adachj@ellismedicine.org", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 1299, - "Gross Value": 2598, - "Net Value": 2598, - "PO Number": "", - "Date of Order": "2018-09-28", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-28", - "Payment Mode": "Avangate", - "Source/Ref No.": 81586932, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "SAPORITI S.A.", - "Name": "Eduardo Sternlieb", - "Email ID": "eduardo.sternlieb@gruposaporiti.com", - "Country": "Argentina", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-10-02", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-02", - "Payment Mode": "ShareIt", - "Source/Ref No.": 574484723, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Vermont Cider Company", - "Name": "Robert Waite", - "Email ID": "rwaite@vtciderco.com", - "Country": 
"United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-10-03", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-03", - "Payment Mode": "Avangate", - "Source/Ref No.": 81397410, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "City of Mississauga", - "Name": "Larry Tyndall", - "Email ID": "Larry.Tyndall@mississauga.ca", - "Country": "Canada", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Charts for SharePoint - Perpetual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 30, - "Reseller Discount": 0, - "Qty": 4, - "Price": 1497, - "Gross Value": 4191.6, - "Net Value": 4191.6, - "PO Number": "", - "Date of Order": "2018-10-04", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-04", - "Payment Mode": "ShareIt", - "Source/Ref No.": 574752593, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "City of Mississauga", - "Name": "Larry Tyndall", - "Email ID": "Larry.Tyndall@mississauga.ca", - "Country": "Canada", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Data Grids for SharePoint - Perpetual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 30, - "Reseller Discount": 0, - "Qty": 4, - "Price": 597, - "Gross Value": 1671.6, - "Net Value": 1671.6, - "PO Number": "", - "Date of Order": "2018-10-04", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-04", - "Payment Mode": "ShareIt", - "Source/Ref No.": 574752593, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "BNP Paribas", - "Name": "Thierry Fournier", - "Email ID": "thierry.fournier@bnpparibas.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License (Upgrade)", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1200, - "Gross Value": 1200, - "Net Value": 1200, - "PO Number": "", - "Date of Order": "2018-10-05", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-06", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "BNP Paribas", - "Name": "Thierry Fournier", - "Email ID": "thierry.fournier@bnpparibas.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", 
- "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-10-05", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-06", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Metropolitan Water Reclamation District", - "Name": "Mike O’Mara", - "Email ID": "momara@mwrd.dst.co.us", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-10-09", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-09", - "Payment Mode": "ShareIt", - "Source/Ref No.": 575219743, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Texas Instruments Incorporated", - "Name": "GAM Admins", - "Email ID": "GAMADMINS@TI.COM", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SHI International Corp.", - "Partner Name": "Kristyn Lubertowicz", - "Partner Email ID": "Kristyn_Lubertowicz@SHI.com", - "Product": "Data Grids for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 4, - "Price": 399, - "Gross Value": 1596, - "Net Value": 1596, - "PO Number": "", - "Date of Order": "2018-10-09", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-08", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Dpt for Children and Families", - "Name": "Phillip Curtis", - "Email ID": "Phillip.Curtis@ks.gov", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SHI International Corp.", - "Partner Name": "Collin Coslett", - "Partner Email ID": "Collin_Coslett@SHI.com", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 478.4, - "PO Number": "", - "Date of Order": "2018-10-09", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-08", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Dpt for Children and Families", - "Name": "Phillip Curtis", - "Email ID": "Phillip.Curtis@ks.gov", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - 
"Shared with Member": "None", - "Partner": "SHI International Corp.", - "Partner Name": "Collin Coslett", - "Partner Email ID": "Collin_Coslett@SHI.com", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 2, - "Price": 99, - "Gross Value": 198, - "Net Value": 158.4, - "PO Number": "", - "Date of Order": "2018-10-09", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-08", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Giesecke and Devrient Mobile Security Australia Pty Ltd", - "Name": "Minh Nguyen", - "Email ID": "minh.nguyen@gi-de.com", - "Country": "Australia", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 999, - "PO Number": 51398, - "Date of Order": "2018-10-10", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-26", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "City of Brownsville", - "Name": "Aida Torres", - "Email ID": "aidat@cob.us", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SHI International Corp.", - "Partner Name": "Rita S Katransky", - "Partner Email ID": "Rita_Katransky@SHI.com", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 909.3, - "PO Number": "", - "Date of Order": "2018-10-22", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-21", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Schmalenberger GmbH + Co. 
KG", - "Name": "Oliver Laun", - "Email ID": "oliver.laun@schmalenberger.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Siller Portal Integrators GmBH", - "Partner Name": "Ralf Michi", - "Partner Email ID": "michi@s-pi.de", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 699.3, - "PO Number": "", - "Date of Order": "2018-10-31", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-31", - "Payment Mode": "ShareIt", - "Source/Ref No.": 577212103, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Norgine Ltd", - "Name": "Matthew Hallam", - "Email ID": "mhallam@norgine.com", - "Country": "United Kingdom", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 4, - "Price": 299, - "Gross Value": 1196, - "Net Value": 1196, - "PO Number": "", - "Date of Order": "2018-11-07", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-10", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Norgine Ltd", - "Name": "Matthew Hallam", - "Email ID": "mhallam@norgine.com", - "Country": "United Kingdom", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 99, - "Gross Value": 198, - "Net Value": 198, - "PO Number": "", - "Date of Order": "2018-11-07", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-10", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Big Rivers Electric Corporation", - "Name": "Steve Duncan", - "Email ID": "steve.duncan@bigrivers.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-11-09", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-09", - "Payment Mode": "Avangate", - "Source/Ref No.": 85597214, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Horry Telephone Cooperative, Inc", - "Name": "Dave Bosky", - "Email ID": "dave.bosky@htcinc.net", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared 
with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 3249, - "Gross Value": 6498, - "Net Value": 6498, - "PO Number": "", - "Date of Order": "2018-11-09", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "", - "Payment Mode": "-", - "Source/Ref No.": "", - "Payment Due Date": "2018-12-09", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Payment Pending" - }, - { - "Ticket": "N/A", - "Organisation": "Springtech", - "Name": "Alex Goulios", - "Email ID": "Alex.Goulios@springtech.io", - "Country": "Australia", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-11-19", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-19", - "Payment Mode": "ShareIt", - "Source/Ref No.": 578926033, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Guardian Electrical Compliance Limited", - "Name": "Steve Greenhalgh", - "Email ID": "sgreenhalgh@guardianelectrical.co.uk", - "Country": "United Kingdom", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-11-19", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-19", - "Payment Mode": "ShareIt", - "Source/Ref No.": 578928663, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Stiebel Eltron GmbH & Co. KG", - "Name": "Nicole Stapel", - "Email ID": "nicole.stapel@stiebel-eltron.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "CCP Software GmbH", - "Partner Name": "Dirk Haller", - "Partner Email ID": "dhaller@ccpsoft.de", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 10, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 269.1, - "PO Number": "", - "Date of Order": "2018-11-20", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-20", - "Payment Mode": "ShareIt", - "Source/Ref No.": 579038123, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Stiebel Eltron GmbH & Co. 
KG", - "Name": "Nicole Stapel", - "Email ID": "nicole.stapel@stiebel-eltron.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "CCP Software GmbH", - "Partner Name": "Dirk Haller", - "Partner Email ID": "dhaller@ccpsoft.de", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 10, - "Qty": 1, - "Price": 99, - "Gross Value": 99, - "Net Value": 89.1, - "PO Number": "", - "Date of Order": "2018-11-20", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-20", - "Payment Mode": "ShareIt", - "Source/Ref No.": 579038123, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "J. Friedrich Storz Verkehrswegebau GmbH & Co. KG", - "Name": "Fabian Schwager", - "Email ID": "fabian.schwager@storz-tuttlingen.de", - "Country": "Germany", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-11-23", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-23", - "Payment Mode": "ShareIt", - "Source/Ref No.": 578922403, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Metrohm AG", - "Name": "-", - "Email ID": "itorders@metrohm.com", - "Country": "Switzerland", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-11-27", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-27", - "Payment Mode": "Avangate", - "Source/Ref No.": 86681144, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Leidos", - "Name": "Juan Moreno Gongora", - "Email ID": "juan.moreno-gongora@leidos.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-11-30", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-30", - "Payment Mode": "ShareIt", - "Source/Ref No.": 580469703, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Bonneville Power Administration", - "Name": "Heather Johnston", - "Email ID": "hdjohnston@bpa.gov", - "Country": "United States of 
America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 4, - "Price": 1299, - "Gross Value": 5196, - "Net Value": 5196, - "PO Number": "", - "Date of Order": "2018-12-04", - "Month": "2018-12-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-04", - "Payment Mode": "ShareIt", - "Source/Ref No.": 580895163, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Atos-LeeCounty Gov", - "Name": "Armando Negron", - "Email ID": "anegron@leegov.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 3, - "Price": 999, - "Gross Value": 2997, - "Net Value": 2997, - "PO Number": "", - "Date of Order": "2018-12-07", - "Month": "2018-12-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "", - "Payment Mode": "-", - "Source/Ref No.": "", - "Payment Due Date": "2018-12-17", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Payment Pending" - }, - { - "Ticket": "N/A", - "Organisation": "GKN Driveline Bruneck AG", - "Name": "Martin Maurberger", - "Email ID": "Martin.Maurberger@gkn.com", - "Country": "Italy", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 598, - "PO Number": "", - "Date of Order": "2018-12-13", - "Month": "2018-12-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "", - "Payment Mode": "-", - "Source/Ref No.": "", - "Payment Due Date": "2018-12-20", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Payment Pending" - }, - { - "Ticket": "N/A", - "Organisation": "EA Health", - "Name": "Kevin Derrick", - "Email ID": "kevin.derrick@eahealthsolutions.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-12-17", - "Month": "2018-12-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-17", - "Payment Mode": "ShareIt", - "Source/Ref No.": 582156073, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Xenotech LLC", - "Name": "Christina Cox", - "Email ID": "ccox@xenotechllc.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - 
"Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-12-21", - "Month": "2018-12-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-21", - "Payment Mode": "ShareIt", - "Source/Ref No.": 582545893, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - } - ] \ No newline at end of file + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle malibu", + "Miles_per_Gallon": 18, + "Cylinders": 8, + "Displacement": 307, + "Horsepower": 130, + "Weight_in_lbs": 3504, + "Acceleration": 12, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick skylark 320", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": -350, + "Horsepower": 165, + "Weight_in_lbs": 3693, + "Acceleration": 11.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth satellite", + "Miles_per_Gallon": 18, + "Cylinders": 8, + "Displacement": -318, + "Horsepower": 150, + "Weight_in_lbs": 3436, + "Acceleration": 11, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc rebel sst", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": -304, + "Horsepower": 150, + "Weight_in_lbs": 3433, + "Acceleration": 12, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford torino", + "Miles_per_Gallon": 17, + "Cylinders": 8, + "Displacement": -302, + "Horsepower": 140, + "Weight_in_lbs": 3449, + "Acceleration": 10.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford galaxie 500", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": -429, + "Horsepower": 198, + "Weight_in_lbs": 4341, + "Acceleration": 10, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet impala", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -454, + "Horsepower": 220, + "Weight_in_lbs": 4354, + "Acceleration": 9, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth fury iii", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -440, + "Horsepower": 215, + "Weight_in_lbs": 4312, + "Acceleration": 8.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac catalina", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -455, + "Horsepower": 225, + "Weight_in_lbs": 4425, + "Acceleration": 10, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc ambassador dpl", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": -390, + "Horsepower": 190, + "Weight_in_lbs": 3850, + "Acceleration": 8.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "citroen", + "Name": "citroen ds-21 pallas", + "Miles_per_Gallon": null, + "Cylinders": 4, + "Displacement": -133, + "Horsepower": 115, + "Weight_in_lbs": 3090, + "Acceleration": 17.5, + "Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle concours (sw)", + "Miles_per_Gallon": null, + "Cylinders": 8, + "Displacement": -350, + "Horsepower": 165, + "Weight_in_lbs": 4142, + "Acceleration": 11.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford torino (sw)", + 
"Miles_per_Gallon": null, + "Cylinders": 8, + "Displacement": -351, + "Horsepower": 153, + "Weight_in_lbs": 4034, + "Acceleration": 11, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth satellite (sw)", + "Miles_per_Gallon": null, + "Cylinders": 8, + "Displacement": -383, + "Horsepower": 175, + "Weight_in_lbs": 4166, + "Acceleration": 10.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc rebel sst (sw)", + "Miles_per_Gallon": null, + "Cylinders": 8, + "Displacement": -360, + "Horsepower": 175, + "Weight_in_lbs": 3850, + "Acceleration": 11, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge challenger se", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": -383, + "Horsepower": 170, + "Weight_in_lbs": 3563, + "Acceleration": 10, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth 'cuda 340", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -340, + "Horsepower": 160, + "Weight_in_lbs": 3609, + "Acceleration": 8, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford mustang boss 302", + "Miles_per_Gallon": null, + "Cylinders": 8, + "Displacement": -302, + "Horsepower": 140, + "Weight_in_lbs": 3353, + "Acceleration": 8, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet monte carlo", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": -400, + "Horsepower": 150, + "Weight_in_lbs": 3761, + "Acceleration": 9.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick estate wagon (sw)", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -455, + "Horsepower": 225, + "Weight_in_lbs": 3086, + "Acceleration": 10, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corona mark ii", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": -113, + "Horsepower": 95, + "Weight_in_lbs": 2372, + "Acceleration": 15, + "Year": "1970-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth duster", + "Miles_per_Gallon": 22, + "Cylinders": 6, + "Displacement": -198, + "Horsepower": 95, + "Weight_in_lbs": 2833, + "Acceleration": 15.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc hornet", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": -199, + "Horsepower": 97, + "Weight_in_lbs": 2774, + "Acceleration": 15.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford maverick", + "Miles_per_Gallon": 21, + "Cylinders": 6, + "Displacement": -200, + "Horsepower": 85, + "Weight_in_lbs": 2587, + "Acceleration": 16, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun pl510", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": -97, + "Horsepower": 88, + "Weight_in_lbs": 2130, + "Acceleration": 14.5, + "Year": "1970-01-01", + "Origin": "Japan" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen 1131 deluxe sedan", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": -97, + "Horsepower": 46, + "Weight_in_lbs": 1835, + "Acceleration": 20.5, + "Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "peugeot", + "Name": "peugeot 504", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": -110, + "Horsepower": 87, + "Weight_in_lbs": 2672, + "Acceleration": 17.5, + "Year": "1970-01-01", + 
"Origin": "European Union" + }, + { + "Maker": "audi", + "Name": "audi 100 ls", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": -107, + "Horsepower": 90, + "Weight_in_lbs": 2430, + "Acceleration": 14.5, + "Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "saab", + "Name": "saab 99e", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": -104, + "Horsepower": 95, + "Weight_in_lbs": 2375, + "Acceleration": 17.5, + "Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "bmw", + "Name": "bmw 2002", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": -121, + "Horsepower": 113, + "Weight_in_lbs": 2234, + "Acceleration": 12.5, + "Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "amc", + "Name": "amc gremlin", + "Miles_per_Gallon": 21, + "Cylinders": 6, + "Displacement": -199, + "Horsepower": 90, + "Weight_in_lbs": 2648, + "Acceleration": 15, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford f250", + "Miles_per_Gallon": 10, + "Cylinders": 8, + "Displacement": -360, + "Horsepower": 215, + "Weight_in_lbs": 4615, + "Acceleration": 14, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "chevy", + "Name": "chevy c20", + "Miles_per_Gallon": 10, + "Cylinders": 8, + "Displacement": -307, + "Horsepower": 200, + "Weight_in_lbs": 4376, + "Acceleration": 15, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge d200", + "Miles_per_Gallon": 11, + "Cylinders": 8, + "Displacement": -318, + "Horsepower": 210, + "Weight_in_lbs": 4382, + "Acceleration": 13.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "hi", + "Name": "hi 1200d", + "Miles_per_Gallon": 9, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 193, + "Weight_in_lbs": 4732, + "Acceleration": 18.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun pl510", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 88, + "Weight_in_lbs": 2130, + "Acceleration": 14.5, + "Year": "1971-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet vega 2300", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 90, + "Weight_in_lbs": 2264, + "Acceleration": 15.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corona", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": -113, + "Horsepower": 95, + "Weight_in_lbs": 2228, + "Acceleration": 14, + "Year": "1971-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": -98, + "Horsepower": null, + "Weight_in_lbs": 2046, + "Acceleration": 19, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen super beetle 117", + "Miles_per_Gallon": null, + "Cylinders": 4, + "Displacement": -97, + "Horsepower": 48, + "Weight_in_lbs": 1978, + "Acceleration": 20, + "Year": "1971-01-01", + "Origin": "European Union" + }, + { + "Maker": "amc", + "Name": "amc gremlin", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": -232, + "Horsepower": 100, + "Weight_in_lbs": 2634, + "Acceleration": 13, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth satellite custom", + "Miles_per_Gallon": 16, + "Cylinders": 6, + "Displacement": -225, + "Horsepower": 105, + "Weight_in_lbs": 3439, + "Acceleration": 15.5, + "Year": 
"1971-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle malibu", + "Miles_per_Gallon": 17, + "Cylinders": 6, + "Displacement": -250, + "Horsepower": 100, + "Weight_in_lbs": 3329, + "Acceleration": 15.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford torino 500", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": -250, + "Horsepower": 88, + "Weight_in_lbs": 3302, + "Acceleration": 15.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": -232, + "Horsepower": 100, + "Weight_in_lbs": 3288, + "Acceleration": 15.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet impala", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -350, + "Horsepower": 165, + "Weight_in_lbs": 4209, + "Acceleration": 12, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac catalina brougham", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -400, + "Horsepower": 175, + "Weight_in_lbs": 4464, + "Acceleration": 11.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford galaxie 500", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -351, + "Horsepower": 153, + "Weight_in_lbs": 4154, + "Acceleration": 13.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth fury iii", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -318, + "Horsepower": 150, + "Weight_in_lbs": 4096, + "Acceleration": 13, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge monaco (sw)", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": -383, + "Horsepower": 180, + "Weight_in_lbs": 4955, + "Acceleration": 11.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford country squire (sw)", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": -400, + "Horsepower": 170, + "Weight_in_lbs": 4746, + "Acceleration": 12, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac safari (sw)", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": -400, + "Horsepower": 175, + "Weight_in_lbs": 5140, + "Acceleration": 12, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc hornet sportabout (sw)", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": -258, + "Horsepower": 110, + "Weight_in_lbs": 2962, + "Acceleration": 13.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet vega (sw)", + "Miles_per_Gallon": 22, + "Cylinders": 4, + "Displacement": -140, + "Horsepower": 72, + "Weight_in_lbs": 2408, + "Acceleration": 19, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac firebird", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": -250, + "Horsepower": 100, + "Weight_in_lbs": 3282, + "Acceleration": 15, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford mustang", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": -250, + "Horsepower": 88, + "Weight_in_lbs": 3139, + "Acceleration": 14.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury capri 2000", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": -122, + 
"Horsepower": 86, + "Weight_in_lbs": 2220, + "Acceleration": 14, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "opel", + "Name": "opel 1900", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": -116, + "Horsepower": 90, + "Weight_in_lbs": 2123, + "Acceleration": 14, + "Year": "1971-01-01", + "Origin": "European Union" + }, + { + "Maker": "peugeot", + "Name": "peugeot 304", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": -79, + "Horsepower": 70, + "Weight_in_lbs": 2074, + "Acceleration": 19.5, + "Year": "1971-01-01", + "Origin": "European Union" + }, + { + "Maker": "fiat", + "Name": "fiat 124b", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": -88, + "Horsepower": 76, + "Weight_in_lbs": 2065, + "Acceleration": 14.5, + "Year": "1971-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota corolla 1200", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": -71, + "Horsepower": 65, + "Weight_in_lbs": 1773, + "Acceleration": 19, + "Year": "1971-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 1200", + "Miles_per_Gallon": 35, + "Cylinders": 4, + "Displacement": -72, + "Horsepower": 69, + "Weight_in_lbs": 1613, + "Acceleration": 18, + "Year": "1971-01-01", + "Origin": "Japan" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen model 111", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": -97, + "Horsepower": 60, + "Weight_in_lbs": 1834, + "Acceleration": 19, + "Year": "1971-01-01", + "Origin": "European Union" + }, + { + "Maker": "plymouth", + "Name": "plymouth cricket", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": -91, + "Horsepower": 70, + "Weight_in_lbs": 1955, + "Acceleration": 20.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corona hardtop", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": -113, + "Horsepower": 95, + "Weight_in_lbs": 2278, + "Acceleration": 15.5, + "Year": "1972-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge colt hardtop", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": -97.5, + "Horsepower": 80, + "Weight_in_lbs": 2126, + "Acceleration": 17, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen type 3", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": -97, + "Horsepower": 54, + "Weight_in_lbs": 2254, + "Acceleration": 23.5, + "Year": "1972-01-01", + "Origin": "European Union" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet vega", + "Miles_per_Gallon": 20, + "Cylinders": 4, + "Displacement": -140, + "Horsepower": 90, + "Weight_in_lbs": 2408, + "Acceleration": 19.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford pinto runabout", + "Miles_per_Gallon": 21, + "Cylinders": 4, + "Displacement": -122, + "Horsepower": 86, + "Weight_in_lbs": 2226, + "Acceleration": 16.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet impala", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": -350, + "Horsepower": 165, + "Weight_in_lbs": 4274, + "Acceleration": 12, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac catalina", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -400, + "Horsepower": 175, + "Weight_in_lbs": 4385, + "Acceleration": 12, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth 
fury iii", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4135, + "Acceleration": 13.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford galaxie 500", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 153, + "Weight_in_lbs": 4129, + "Acceleration": 13, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc ambassador sst", + "Miles_per_Gallon": 17, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 150, + "Weight_in_lbs": 3672, + "Acceleration": 11.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury marquis", + "Miles_per_Gallon": 11, + "Cylinders": 8, + "Displacement": 429, + "Horsepower": 208, + "Weight_in_lbs": 4633, + "Acceleration": 11, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick lesabre custom", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 155, + "Weight_in_lbs": 4502, + "Acceleration": 13.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile delta 88 royale", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 160, + "Weight_in_lbs": 4456, + "Acceleration": 13.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler newport royal", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 190, + "Weight_in_lbs": 4422, + "Acceleration": 12.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "mazda", + "Name": "mazda rx2 coupe", + "Miles_per_Gallon": 19, + "Cylinders": 3, + "Displacement": 70, + "Horsepower": 97, + "Weight_in_lbs": 2330, + "Acceleration": 13.5, + "Year": "1972-01-01", + "Origin": "Japan" + }, + { + "Maker": "amc", + "Name": "amc matador (sw)", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 150, + "Weight_in_lbs": 3892, + "Acceleration": 12.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle concours (sw)", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 307, + "Horsepower": 130, + "Weight_in_lbs": 4098, + "Acceleration": 14, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford gran torino (sw)", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 140, + "Weight_in_lbs": 4294, + "Acceleration": 16, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth satellite custom (sw)", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4077, + "Acceleration": 14, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "volvo", + "Name": "volvo 145e (sw)", + "Miles_per_Gallon": 18, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 112, + "Weight_in_lbs": 2933, + "Acceleration": 14.5, + "Year": "1972-01-01", + "Origin": "European Union" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen 411 (sw)", + "Miles_per_Gallon": 22, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 76, + "Weight_in_lbs": 2511, + "Acceleration": 18, + "Year": "1972-01-01", + "Origin": "European Union" + }, + { + "Maker": "peugeot", + "Name": "peugeot 504 (sw)", + "Miles_per_Gallon": 21, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 87, + "Weight_in_lbs": 2979, + 
"Acceleration": 19.5, + "Year": "1972-01-01", + "Origin": "European Union" + }, + { + "Maker": "renault", + "Name": "renault 12 (sw)", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 96, + "Horsepower": 69, + "Weight_in_lbs": 2189, + "Acceleration": 18, + "Year": "1972-01-01", + "Origin": "European Union" + }, + { + "Maker": "ford", + "Name": "ford pinto (sw)", + "Miles_per_Gallon": 22, + "Cylinders": 4, + "Displacement": 122, + "Horsepower": 86, + "Weight_in_lbs": 2395, + "Acceleration": 16, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 510 (sw)", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 92, + "Weight_in_lbs": 2288, + "Acceleration": 17, + "Year": "1972-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyouta", + "Name": "toyouta corona mark ii (sw)", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 97, + "Weight_in_lbs": 2506, + "Acceleration": 14.5, + "Year": "1972-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge colt (sw)", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 80, + "Weight_in_lbs": 2164, + "Acceleration": 15, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corolla 1600 (sw)", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 88, + "Weight_in_lbs": 2100, + "Acceleration": 16.5, + "Year": "1972-01-01", + "Origin": "Japan" + }, + { + "Maker": "buick", + "Name": "buick century 350", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 175, + "Weight_in_lbs": 4100, + "Acceleration": 13, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 150, + "Weight_in_lbs": 3672, + "Acceleration": 11.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet malibu", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 145, + "Weight_in_lbs": 3988, + "Acceleration": 13, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford gran torino", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 137, + "Weight_in_lbs": 4042, + "Acceleration": 14.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge coronet custom", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 3777, + "Acceleration": 12.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury marquis brougham", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": 429, + "Horsepower": 198, + "Weight_in_lbs": 4952, + "Acceleration": 11.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet caprice classic", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 150, + "Weight_in_lbs": 4464, + "Acceleration": 12, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford ltd", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 158, + "Weight_in_lbs": 4363, + "Acceleration": 13, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth fury gran sedan", + "Miles_per_Gallon": 14, + "Cylinders": 8, + 
"Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4237, + "Acceleration": 14.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler new yorker brougham", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 440, + "Horsepower": 215, + "Weight_in_lbs": 4735, + "Acceleration": 11, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick electra 225 custom", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": 455, + "Horsepower": 225, + "Weight_in_lbs": 4951, + "Acceleration": 11, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc ambassador brougham", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 360, + "Horsepower": 175, + "Weight_in_lbs": 3821, + "Acceleration": 11, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth valiant", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 105, + "Weight_in_lbs": 3121, + "Acceleration": 16.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet nova custom", + "Miles_per_Gallon": 16, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 100, + "Weight_in_lbs": 3278, + "Acceleration": 18, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc hornet", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 100, + "Weight_in_lbs": 2945, + "Acceleration": 16, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford maverick", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 88, + "Weight_in_lbs": 3021, + "Acceleration": 16.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth duster", + "Miles_per_Gallon": 23, + "Cylinders": 6, + "Displacement": 198, + "Horsepower": 95, + "Weight_in_lbs": 2904, + "Acceleration": 16, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen super beetle", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 46, + "Weight_in_lbs": 1950, + "Acceleration": 21, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet impala", + "Miles_per_Gallon": 11, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 150, + "Weight_in_lbs": 4997, + "Acceleration": 14, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford country", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 167, + "Weight_in_lbs": 4906, + "Acceleration": 12.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth custom suburb", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 360, + "Horsepower": 170, + "Weight_in_lbs": 4654, + "Acceleration": 13, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile vista cruiser", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 180, + "Weight_in_lbs": 4499, + "Acceleration": 12.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc gremlin", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 100, + "Weight_in_lbs": 2789, + "Acceleration": 15, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": 
"toyota carina", + "Miles_per_Gallon": 20, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 88, + "Weight_in_lbs": 2279, + "Acceleration": 19, + "Year": "1973-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet vega", + "Miles_per_Gallon": 21, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 72, + "Weight_in_lbs": 2401, + "Acceleration": 19.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 610", + "Miles_per_Gallon": 22, + "Cylinders": 4, + "Displacement": 108, + "Horsepower": 94, + "Weight_in_lbs": 2379, + "Acceleration": 16.5, + "Year": "1973-01-01", + "Origin": "Japan" + }, + { + "Maker": "maxda", + "Name": "maxda rx3", + "Miles_per_Gallon": 18, + "Cylinders": 3, + "Displacement": 70, + "Horsepower": 90, + "Weight_in_lbs": 2124, + "Acceleration": 13.5, + "Year": "1973-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 19, + "Cylinders": 4, + "Displacement": 122, + "Horsepower": 85, + "Weight_in_lbs": 2310, + "Acceleration": 18.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury capri v6", + "Miles_per_Gallon": 21, + "Cylinders": 6, + "Displacement": 155, + "Horsepower": 107, + "Weight_in_lbs": 2472, + "Acceleration": 14, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "fiat", + "Name": "fiat 124 sport coupe", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 90, + "Weight_in_lbs": 2265, + "Acceleration": 15.5, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet monte carlo s", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 145, + "Weight_in_lbs": 4082, + "Acceleration": 13, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac grand prix", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 230, + "Weight_in_lbs": 4278, + "Acceleration": 9.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "fiat", + "Name": "fiat 128", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 68, + "Horsepower": 49, + "Weight_in_lbs": 1867, + "Acceleration": 19.5, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "opel", + "Name": "opel manta", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 116, + "Horsepower": 75, + "Weight_in_lbs": 2158, + "Acceleration": 15.5, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "audi", + "Name": "audi 100ls", + "Miles_per_Gallon": 20, + "Cylinders": 4, + "Displacement": 114, + "Horsepower": 91, + "Weight_in_lbs": 2582, + "Acceleration": 14, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "volvo", + "Name": "volvo 144ea", + "Miles_per_Gallon": 19, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 112, + "Weight_in_lbs": 2868, + "Acceleration": 15.5, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "dodge", + "Name": "dodge dart custom", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 3399, + "Acceleration": 11, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "saab", + "Name": "saab 99le", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 110, + "Weight_in_lbs": 2660, + "Acceleration": 14, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + 
"Maker": "toyota", + "Name": "toyota mark ii", + "Miles_per_Gallon": 20, + "Cylinders": 6, + "Displacement": 156, + "Horsepower": 122, + "Weight_in_lbs": 2807, + "Acceleration": 13.5, + "Year": "1973-01-01", + "Origin": "Japan" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile omega", + "Miles_per_Gallon": 11, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 180, + "Weight_in_lbs": 3664, + "Acceleration": 11, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth duster", + "Miles_per_Gallon": 20, + "Cylinders": 6, + "Displacement": 198, + "Horsepower": 95, + "Weight_in_lbs": 3102, + "Acceleration": 16.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford maverick", + "Miles_per_Gallon": 21, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": null, + "Weight_in_lbs": 2875, + "Acceleration": 17, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc hornet", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 100, + "Weight_in_lbs": 2901, + "Acceleration": 16, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet nova", + "Miles_per_Gallon": 15, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 100, + "Weight_in_lbs": 3336, + "Acceleration": 17, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun b210", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 79, + "Horsepower": 67, + "Weight_in_lbs": 1950, + "Acceleration": 19, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 122, + "Horsepower": 80, + "Weight_in_lbs": 2451, + "Acceleration": 16.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corolla 1200", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 71, + "Horsepower": 65, + "Weight_in_lbs": 1836, + "Acceleration": 21, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet vega", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 75, + "Weight_in_lbs": 2542, + "Acceleration": 17, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle malibu classic", + "Miles_per_Gallon": 16, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 100, + "Weight_in_lbs": 3781, + "Acceleration": 17, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador", + "Miles_per_Gallon": 16, + "Cylinders": 6, + "Displacement": 258, + "Horsepower": 110, + "Weight_in_lbs": 3632, + "Acceleration": 18, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth satellite sebring", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 105, + "Weight_in_lbs": 3613, + "Acceleration": 16.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford gran torino", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 140, + "Weight_in_lbs": 4141, + "Acceleration": 14, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick century luxus (sw)", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 150, + "Weight_in_lbs": 4699, + "Acceleration": 14.5, + "Year": "1974-01-01", + 
"Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge coronet custom (sw)", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4457, + "Acceleration": 13.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford gran torino (sw)", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 140, + "Weight_in_lbs": 4638, + "Acceleration": 16, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador (sw)", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 150, + "Weight_in_lbs": 4257, + "Acceleration": 15.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "audi", + "Name": "audi fox", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 83, + "Weight_in_lbs": 2219, + "Acceleration": 16.5, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen dasher", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 79, + "Horsepower": 67, + "Weight_in_lbs": 1963, + "Acceleration": 15.5, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "opel", + "Name": "opel manta", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 78, + "Weight_in_lbs": 2300, + "Acceleration": 14.5, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota corona", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 76, + "Horsepower": 52, + "Weight_in_lbs": 1649, + "Acceleration": 16.5, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 710", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 83, + "Horsepower": 61, + "Weight_in_lbs": 2003, + "Acceleration": 19, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge colt", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 75, + "Weight_in_lbs": 2125, + "Acceleration": 14.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "fiat", + "Name": "fiat 128", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 75, + "Weight_in_lbs": 2108, + "Acceleration": 15.5, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "fiat", + "Name": "fiat 124 tc", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 116, + "Horsepower": 75, + "Weight_in_lbs": 2246, + "Acceleration": 14, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda civic", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 97, + "Weight_in_lbs": 2489, + "Acceleration": 15, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Name": "subaru", + "Maker": "subaru", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 108, + "Horsepower": 93, + "Weight_in_lbs": 2391, + "Acceleration": 15.5, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Maker": "fiat", + "Name": "fiat x1.9", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 79, + "Horsepower": 67, + "Weight_in_lbs": 2000, + "Acceleration": 16, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "plymouth", + "Name": "plymouth valiant custom", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 95, + "Weight_in_lbs": 3264, + "Acceleration": 16, + 
"Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet nova", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 105, + "Weight_in_lbs": 3459, + "Acceleration": 16, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury monarch", + "Miles_per_Gallon": 15, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 72, + "Weight_in_lbs": 3432, + "Acceleration": 21, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford maverick", + "Miles_per_Gallon": 15, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 72, + "Weight_in_lbs": 3158, + "Acceleration": 19.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac catalina", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 170, + "Weight_in_lbs": 4668, + "Acceleration": 11.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet bel air", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 145, + "Weight_in_lbs": 4440, + "Acceleration": 14, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth grand fury", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4498, + "Acceleration": 14.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford ltd", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 148, + "Weight_in_lbs": 4657, + "Acceleration": 13.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick century", + "Miles_per_Gallon": 17, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 110, + "Weight_in_lbs": 3907, + "Acceleration": 21, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "chevroelt", + "Name": "chevroelt chevelle malibu", + "Miles_per_Gallon": 16, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 105, + "Weight_in_lbs": 3897, + "Acceleration": 18.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador", + "Miles_per_Gallon": 15, + "Cylinders": 6, + "Displacement": 258, + "Horsepower": 110, + "Weight_in_lbs": 3730, + "Acceleration": 19, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth fury", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 95, + "Weight_in_lbs": 3785, + "Acceleration": 19, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick skyhawk", + "Miles_per_Gallon": 21, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 110, + "Weight_in_lbs": 3039, + "Acceleration": 15, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet monza 2+2", + "Miles_per_Gallon": 20, + "Cylinders": 8, + "Displacement": 262, + "Horsepower": 110, + "Weight_in_lbs": 3221, + "Acceleration": 13.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford mustang ii", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 129, + "Weight_in_lbs": 3169, + "Acceleration": 12, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corolla", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 75, + "Weight_in_lbs": 2171, + "Acceleration": 16, + 
"Year": "1975-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 83, + "Weight_in_lbs": 2639, + "Acceleration": 17, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc gremlin", + "Miles_per_Gallon": 20, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 100, + "Weight_in_lbs": 2914, + "Acceleration": 16, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac astro", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 78, + "Weight_in_lbs": 2592, + "Acceleration": 18.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corona", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 134, + "Horsepower": 96, + "Weight_in_lbs": 2702, + "Acceleration": 13.5, + "Year": "1975-01-01", + "Origin": "Japan" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen dasher", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 71, + "Weight_in_lbs": 2223, + "Acceleration": 16.5, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "datsun", + "Name": "datsun 710", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 97, + "Weight_in_lbs": 2545, + "Acceleration": 17, + "Year": "1975-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 171, + "Horsepower": 97, + "Weight_in_lbs": 2984, + "Acceleration": 14.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen rabbit", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 70, + "Weight_in_lbs": 1937, + "Acceleration": 14, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "amc", + "Name": "amc pacer", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 90, + "Weight_in_lbs": 3211, + "Acceleration": 17, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "audi", + "Name": "audi 100ls", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 115, + "Horsepower": 95, + "Weight_in_lbs": 2694, + "Acceleration": 15, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "peugeot", + "Name": "peugeot 504", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 88, + "Weight_in_lbs": 2957, + "Acceleration": 17, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "volvo", + "Name": "volvo 244dl", + "Miles_per_Gallon": 22, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 98, + "Weight_in_lbs": 2945, + "Acceleration": 14.5, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "saab", + "Name": "saab 99le", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 115, + "Weight_in_lbs": 2671, + "Acceleration": 13.5, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda civic cvcc", + "Miles_per_Gallon": 33, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 53, + "Weight_in_lbs": 1795, + "Acceleration": 17.5, + "Year": "1975-01-01", + "Origin": "Japan" + }, + { + "Maker": "fiat", + "Name": "fiat 131", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 107, + "Horsepower": 86, + "Weight_in_lbs": 2464, + "Acceleration": 15.5, 
+ "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "opel", + "Name": "opel 1900", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": 116, + "Horsepower": 81, + "Weight_in_lbs": 2220, + "Acceleration": 16.9, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "capri", + "Name": "capri ii", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 92, + "Weight_in_lbs": 2572, + "Acceleration": 14.9, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge colt", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 79, + "Weight_in_lbs": 2255, + "Acceleration": 17.7, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "renault", + "Name": "renault 12tl", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 101, + "Horsepower": 83, + "Weight_in_lbs": 2202, + "Acceleration": 15.3, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle malibu classic", + "Miles_per_Gallon": 17.5, + "Cylinders": 8, + "Displacement": 305, + "Horsepower": 140, + "Weight_in_lbs": 4215, + "Acceleration": 13, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge coronet brougham", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4190, + "Acceleration": 13, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador", + "Miles_per_Gallon": 15.5, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 120, + "Weight_in_lbs": 3962, + "Acceleration": 13.9, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford gran torino", + "Miles_per_Gallon": 14.5, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 152, + "Weight_in_lbs": 4215, + "Acceleration": 12.8, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth valiant", + "Miles_per_Gallon": 22, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 100, + "Weight_in_lbs": 3233, + "Acceleration": 15.4, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet nova", + "Miles_per_Gallon": 22, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 105, + "Weight_in_lbs": 3353, + "Acceleration": 14.5, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford maverick", + "Miles_per_Gallon": 24, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 81, + "Weight_in_lbs": 3012, + "Acceleration": 17.6, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc hornet", + "Miles_per_Gallon": 22.5, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 90, + "Weight_in_lbs": 3085, + "Acceleration": 17.6, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevette", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 52, + "Weight_in_lbs": 2035, + "Acceleration": 22.2, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet woody", + "Miles_per_Gallon": 24.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 60, + "Weight_in_lbs": 2164, + "Acceleration": 22.1, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "vw", + "Name": "vw rabbit", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 70, + "Weight_in_lbs": 
1937, + "Acceleration": 14.2, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda civic", + "Miles_per_Gallon": 33, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 53, + "Weight_in_lbs": 1795, + "Acceleration": 17.4, + "Year": "1976-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge aspen se", + "Miles_per_Gallon": 20, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 100, + "Weight_in_lbs": 3651, + "Acceleration": 17.7, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford granada ghia", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 78, + "Weight_in_lbs": 3574, + "Acceleration": 21, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac ventura sj", + "Miles_per_Gallon": 18.5, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 110, + "Weight_in_lbs": 3645, + "Acceleration": 16.2, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc pacer d/l", + "Miles_per_Gallon": 17.5, + "Cylinders": 6, + "Displacement": 258, + "Horsepower": 95, + "Weight_in_lbs": 3193, + "Acceleration": 17.8, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen rabbit", + "Miles_per_Gallon": 29.5, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 71, + "Weight_in_lbs": 1825, + "Acceleration": 12.2, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "datsun", + "Name": "datsun b-210", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 70, + "Weight_in_lbs": 1990, + "Acceleration": 17, + "Year": "1976-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyota", + "Name": "toyota corolla", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 75, + "Weight_in_lbs": 2155, + "Acceleration": 16.4, + "Year": "1976-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 26.5, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 72, + "Weight_in_lbs": 2565, + "Acceleration": 13.6, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "volvo", + "Name": "volvo 245", + "Miles_per_Gallon": 20, + "Cylinders": 4, + "Displacement": 130, + "Horsepower": 102, + "Weight_in_lbs": 3150, + "Acceleration": 15.7, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "plymouth", + "Name": "plymouth volare premier v8", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 3940, + "Acceleration": 13.2, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "peugeot", + "Name": "peugeot 504", + "Miles_per_Gallon": 19, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 88, + "Weight_in_lbs": 3270, + "Acceleration": 21.9, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota mark ii", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": 156, + "Horsepower": 108, + "Weight_in_lbs": 2930, + "Acceleration": 15.5, + "Year": "1976-01-01", + "Origin": "Japan" + }, + { + "Name": "mercedes-benz 280s", + "Maker": "mercedes", + "Miles_per_Gallon": 16.5, + "Cylinders": 6, + "Displacement": 168, + "Horsepower": 120, + "Weight_in_lbs": 3820, + "Acceleration": 16.7, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "cadillac", + "Name": "cadillac seville", + "Miles_per_Gallon": 16.5, + 
"Cylinders": 8, + "Displacement": 350, + "Horsepower": 180, + "Weight_in_lbs": 4380, + "Acceleration": 12.1, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "chevy", + "Name": "chevy c10", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 145, + "Weight_in_lbs": 4055, + "Acceleration": 12, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford f108", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 130, + "Weight_in_lbs": 3870, + "Acceleration": 15, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge d100", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 3755, + "Acceleration": 14, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "honda", + "Name": "honda Accelerationord cvcc", + "Miles_per_Gallon": 31.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 68, + "Weight_in_lbs": 2045, + "Acceleration": 18.5, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "buick", + "Name": "buick opel isuzu deluxe", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": 111, + "Horsepower": 80, + "Weight_in_lbs": 2155, + "Acceleration": 14.8, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "renault", + "Name": "renault 5 gtl", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 79, + "Horsepower": 58, + "Weight_in_lbs": 1825, + "Acceleration": 18.6, + "Year": "1977-01-01", + "Origin": "European Union" + }, + { + "Maker": "plymouth", + "Name": "plymouth arrow gs", + "Miles_per_Gallon": 25.5, + "Cylinders": 4, + "Displacement": 122, + "Horsepower": 96, + "Weight_in_lbs": 2300, + "Acceleration": 15.5, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun f-10 hatchback", + "Miles_per_Gallon": 33.5, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 70, + "Weight_in_lbs": 1945, + "Acceleration": 16.8, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet caprice classic", + "Miles_per_Gallon": 17.5, + "Cylinders": 8, + "Displacement": 305, + "Horsepower": 145, + "Weight_in_lbs": 3880, + "Acceleration": 12.5, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile cutlass supreme", + "Miles_per_Gallon": 17, + "Cylinders": 8, + "Displacement": 260, + "Horsepower": 110, + "Weight_in_lbs": 4060, + "Acceleration": 19, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge monaco brougham", + "Miles_per_Gallon": 15.5, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 145, + "Weight_in_lbs": 4140, + "Acceleration": 13.7, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury cougar brougham", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 130, + "Weight_in_lbs": 4295, + "Acceleration": 14.9, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet concours", + "Miles_per_Gallon": 17.5, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 110, + "Weight_in_lbs": 3520, + "Acceleration": 16.4, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick skylark", + "Miles_per_Gallon": 20.5, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 105, + "Weight_in_lbs": 3425, + "Acceleration": 16.9, + "Year": "1977-01-01", + "Origin": "USA" + }, + { 
+ "Maker": "plymouth", + "Name": "plymouth volare custom", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 100, + "Weight_in_lbs": 3630, + "Acceleration": 17.7, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford granada", + "Miles_per_Gallon": 18.5, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 98, + "Weight_in_lbs": 3525, + "Acceleration": 19, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac grand prix lj", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 180, + "Weight_in_lbs": 4220, + "Acceleration": 11.1, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet monte carlo landau", + "Miles_per_Gallon": 15.5, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 170, + "Weight_in_lbs": 4165, + "Acceleration": 11.4, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler cordoba", + "Miles_per_Gallon": 15.5, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 190, + "Weight_in_lbs": 4325, + "Acceleration": 12.2, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford thunderbird", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 149, + "Weight_in_lbs": 4335, + "Acceleration": 14.5, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen rabbit custom", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 78, + "Weight_in_lbs": 1940, + "Acceleration": 14.5, + "Year": "1977-01-01", + "Origin": "European Union" + }, + { + "Maker": "pontiac", + "Name": "pontiac sunbird coupe", + "Miles_per_Gallon": 24.5, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 88, + "Weight_in_lbs": 2740, + "Acceleration": 16, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corolla liftback", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 75, + "Weight_in_lbs": 2265, + "Acceleration": 18.2, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford mustang ii 2+2", + "Miles_per_Gallon": 25.5, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 89, + "Weight_in_lbs": 2755, + "Acceleration": 15.8, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevette", + "Miles_per_Gallon": 30.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 63, + "Weight_in_lbs": 2051, + "Acceleration": 17, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge colt m/m", + "Miles_per_Gallon": 33.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 83, + "Weight_in_lbs": 2075, + "Acceleration": 15.9, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "subaru", + "Name": "subaru dl", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 67, + "Weight_in_lbs": 1985, + "Acceleration": 16.4, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen dasher", + "Miles_per_Gallon": 30.5, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 78, + "Weight_in_lbs": 2190, + "Acceleration": 14.1, + "Year": "1977-01-01", + "Origin": "European Union" + }, + { + "Maker": "datsun", + "Name": "datsun 810", + "Miles_per_Gallon": 22, + "Cylinders": 6, + "Displacement": 146, + "Horsepower": 97, + 
"Weight_in_lbs": 2815, + "Acceleration": 14.5, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "bmw", + "Name": "bmw 320i", + "Miles_per_Gallon": 21.5, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 110, + "Weight_in_lbs": 2600, + "Acceleration": 12.8, + "Year": "1977-01-01", + "Origin": "European Union" + }, + { + "Maker": "mazda", + "Name": "mazda rx-4", + "Miles_per_Gallon": 21.5, + "Cylinders": 3, + "Displacement": 80, + "Horsepower": 110, + "Weight_in_lbs": 2720, + "Acceleration": 13.5, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen rabbit custom diesel", + "Miles_per_Gallon": 43.1, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 48, + "Weight_in_lbs": 1985, + "Acceleration": 21.5, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "ford", + "Name": "ford fiesta", + "Miles_per_Gallon": 36.1, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 66, + "Weight_in_lbs": 1800, + "Acceleration": 14.4, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "mazda", + "Name": "mazda glc deluxe", + "Miles_per_Gallon": 32.8, + "Cylinders": 4, + "Displacement": 78, + "Horsepower": 52, + "Weight_in_lbs": 1985, + "Acceleration": 19.4, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun b210 gx", + "Miles_per_Gallon": 39.4, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 70, + "Weight_in_lbs": 2070, + "Acceleration": 18.6, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "honda", + "Name": "honda civic cvcc", + "Miles_per_Gallon": 36.1, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 60, + "Weight_in_lbs": 1800, + "Acceleration": 16.4, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile cutlass salon brougham", + "Miles_per_Gallon": 19.9, + "Cylinders": 8, + "Displacement": 260, + "Horsepower": 110, + "Weight_in_lbs": 3365, + "Acceleration": 15.5, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge diplomat", + "Miles_per_Gallon": 19.4, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 140, + "Weight_in_lbs": 3735, + "Acceleration": 13.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury monarch ghia", + "Miles_per_Gallon": 20.2, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 139, + "Weight_in_lbs": 3570, + "Acceleration": 12.8, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac phoenix lj", + "Miles_per_Gallon": 19.2, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 105, + "Weight_in_lbs": 3535, + "Acceleration": 19.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet malibu", + "Miles_per_Gallon": 20.5, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 95, + "Weight_in_lbs": 3155, + "Acceleration": 18.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford fairmont (auto)", + "Miles_per_Gallon": 20.2, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 85, + "Weight_in_lbs": 2965, + "Acceleration": 15.8, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford fairmont (man)", + "Miles_per_Gallon": 25.1, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 88, + "Weight_in_lbs": 2720, + "Acceleration": 15.4, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth 
volare", + "Miles_per_Gallon": 20.5, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 100, + "Weight_in_lbs": 3430, + "Acceleration": 17.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc concord", + "Miles_per_Gallon": 19.4, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 90, + "Weight_in_lbs": 3210, + "Acceleration": 17.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick century special", + "Miles_per_Gallon": 20.6, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 105, + "Weight_in_lbs": 3380, + "Acceleration": 15.8, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury zephyr", + "Miles_per_Gallon": 20.8, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 85, + "Weight_in_lbs": 3070, + "Acceleration": 16.7, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge aspen", + "Miles_per_Gallon": 18.6, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 110, + "Weight_in_lbs": 3620, + "Acceleration": 18.7, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc concord d/l", + "Miles_per_Gallon": 18.1, + "Cylinders": 6, + "Displacement": 258, + "Horsepower": 120, + "Weight_in_lbs": 3410, + "Acceleration": 15.1, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet monte carlo landau", + "Miles_per_Gallon": 19.2, + "Cylinders": 8, + "Displacement": 305, + "Horsepower": 145, + "Weight_in_lbs": 3425, + "Acceleration": 13.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick regal sport coupe (turbo)", + "Miles_per_Gallon": 17.7, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 165, + "Weight_in_lbs": 3445, + "Acceleration": 13.4, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford futura", + "Miles_per_Gallon": 18.1, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 139, + "Weight_in_lbs": 3205, + "Acceleration": 11.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge magnum xe", + "Miles_per_Gallon": 17.5, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 140, + "Weight_in_lbs": 4080, + "Acceleration": 13.7, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevette", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 68, + "Weight_in_lbs": 2155, + "Acceleration": 16.5, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corona", + "Miles_per_Gallon": 27.5, + "Cylinders": 4, + "Displacement": 134, + "Horsepower": 95, + "Weight_in_lbs": 2560, + "Acceleration": 14.2, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 510", + "Miles_per_Gallon": 27.2, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 97, + "Weight_in_lbs": 2300, + "Acceleration": 14.7, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge omni", + "Miles_per_Gallon": 30.9, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 75, + "Weight_in_lbs": 2230, + "Acceleration": 14.5, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota celica gt liftback", + "Miles_per_Gallon": 21.1, + "Cylinders": 4, + "Displacement": 134, + "Horsepower": 95, + "Weight_in_lbs": 2515, + "Acceleration": 14.8, + "Year": "1978-01-01", + "Origin": 
"Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth sapporo", + "Miles_per_Gallon": 23.2, + "Cylinders": 4, + "Displacement": 156, + "Horsepower": 105, + "Weight_in_lbs": 2745, + "Acceleration": 16.7, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile starfire sx", + "Miles_per_Gallon": 23.8, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 85, + "Weight_in_lbs": 2855, + "Acceleration": 17.6, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 200-sx", + "Miles_per_Gallon": 23.9, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 97, + "Weight_in_lbs": 2405, + "Acceleration": 14.9, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "audi", + "Name": "audi 5000", + "Miles_per_Gallon": 20.3, + "Cylinders": 5, + "Displacement": 131, + "Horsepower": 103, + "Weight_in_lbs": 2830, + "Acceleration": 15.9, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "volvo", + "Name": "volvo 264gl", + "Miles_per_Gallon": 17, + "Cylinders": 6, + "Displacement": 163, + "Horsepower": 125, + "Weight_in_lbs": 3140, + "Acceleration": 13.6, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "saab", + "Name": "saab 99gle", + "Miles_per_Gallon": 21.6, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 115, + "Weight_in_lbs": 2795, + "Acceleration": 15.7, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "peugeot", + "Name": "peugeot 604sl", + "Miles_per_Gallon": 16.2, + "Cylinders": 6, + "Displacement": 163, + "Horsepower": 133, + "Weight_in_lbs": 3410, + "Acceleration": 15.8, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen scirocco", + "Miles_per_Gallon": 31.5, + "Cylinders": 4, + "Displacement": 89, + "Horsepower": 71, + "Weight_in_lbs": 1990, + "Acceleration": 14.9, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda Accelerationord lx", + "Miles_per_Gallon": 29.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 68, + "Weight_in_lbs": 2135, + "Acceleration": 16.6, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "pontiac", + "Name": "pontiac lemans v6", + "Miles_per_Gallon": 21.5, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 115, + "Weight_in_lbs": 3245, + "Acceleration": 15.4, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury zephyr 6", + "Miles_per_Gallon": 19.8, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 85, + "Weight_in_lbs": 2990, + "Acceleration": 18.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford fairmont 4", + "Miles_per_Gallon": 22.3, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 88, + "Weight_in_lbs": 2890, + "Acceleration": 17.3, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc concord dl 6", + "Miles_per_Gallon": 20.2, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 90, + "Weight_in_lbs": 3265, + "Acceleration": 18.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge aspen 6", + "Miles_per_Gallon": 20.6, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 110, + "Weight_in_lbs": 3360, + "Acceleration": 16.6, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet caprice classic", + "Miles_per_Gallon": 17, + "Cylinders": 8, + "Displacement": 
305, + "Horsepower": 130, + "Weight_in_lbs": 3840, + "Acceleration": 15.4, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford ltd landau", + "Miles_per_Gallon": 17.6, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 129, + "Weight_in_lbs": 3725, + "Acceleration": 13.4, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury grand marquis", + "Miles_per_Gallon": 16.5, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 138, + "Weight_in_lbs": 3955, + "Acceleration": 13.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge st. regis", + "Miles_per_Gallon": 18.2, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 135, + "Weight_in_lbs": 3830, + "Acceleration": 15.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick estate wagon (sw)", + "Miles_per_Gallon": 16.9, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 155, + "Weight_in_lbs": 4360, + "Acceleration": 14.9, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford country squire (sw)", + "Miles_per_Gallon": 15.5, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 142, + "Weight_in_lbs": 4054, + "Acceleration": 14.3, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet malibu classic (sw)", + "Miles_per_Gallon": 19.2, + "Cylinders": 8, + "Displacement": 267, + "Horsepower": 125, + "Weight_in_lbs": 3605, + "Acceleration": 15, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler lebaron town @ country (sw)", + "Miles_per_Gallon": 18.5, + "Cylinders": 8, + "Displacement": 360, + "Horsepower": 150, + "Weight_in_lbs": 3940, + "Acceleration": 13, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "vw", + "Name": "vw rabbit custom", + "Miles_per_Gallon": 31.9, + "Cylinders": 4, + "Displacement": 89, + "Horsepower": 71, + "Weight_in_lbs": 1925, + "Acceleration": 14, + "Year": "1979-01-01", + "Origin": "European Union" + }, + { + "Maker": "maxda", + "Name": "maxda glc deluxe", + "Miles_per_Gallon": 34.1, + "Cylinders": 4, + "Displacement": 86, + "Horsepower": 65, + "Weight_in_lbs": 1975, + "Acceleration": 15.2, + "Year": "1979-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge colt hatchback custom", + "Miles_per_Gallon": 35.7, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 80, + "Weight_in_lbs": 1915, + "Acceleration": 14.4, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc spirit dl", + "Miles_per_Gallon": 27.4, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 80, + "Weight_in_lbs": 2670, + "Acceleration": 15, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "mercedes", + "Name": "mercedes benz 300d", + "Miles_per_Gallon": 25.4, + "Cylinders": 5, + "Displacement": 183, + "Horsepower": 77, + "Weight_in_lbs": 3530, + "Acceleration": 20.1, + "Year": "1979-01-01", + "Origin": "European Union" + }, + { + "Maker": "cadillac", + "Name": "cadillac eldorado", + "Miles_per_Gallon": 23, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 125, + "Weight_in_lbs": 3900, + "Acceleration": 17.4, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "peugeot", + "Name": "peugeot 504", + "Miles_per_Gallon": 27.2, + "Cylinders": 4, + "Displacement": 141, + "Horsepower": 71, + "Weight_in_lbs": 3190, + "Acceleration": 24.8, + "Year": "1979-01-01", + "Origin": "European 
Union" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile cutlass salon brougham", + "Miles_per_Gallon": 23.9, + "Cylinders": 8, + "Displacement": 260, + "Horsepower": 90, + "Weight_in_lbs": 3420, + "Acceleration": 22.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth horizon", + "Miles_per_Gallon": 34.2, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 70, + "Weight_in_lbs": 2200, + "Acceleration": 13.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth horizon tc3", + "Miles_per_Gallon": 34.5, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 70, + "Weight_in_lbs": 2150, + "Acceleration": 14.9, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 210", + "Miles_per_Gallon": 31.8, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 65, + "Weight_in_lbs": 2020, + "Acceleration": 19.2, + "Year": "1979-01-01", + "Origin": "Japan" + }, + { + "Maker": "fiat", + "Name": "fiat strada custom", + "Miles_per_Gallon": 37.3, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 69, + "Weight_in_lbs": 2130, + "Acceleration": 14.7, + "Year": "1979-01-01", + "Origin": "European Union" + }, + { + "Maker": "buick", + "Name": "buick skylark limited", + "Miles_per_Gallon": 28.4, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 2670, + "Acceleration": 16, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet citation", + "Miles_per_Gallon": 28.8, + "Cylinders": 6, + "Displacement": 173, + "Horsepower": 115, + "Weight_in_lbs": 2595, + "Acceleration": 11.3, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile omega brougham", + "Miles_per_Gallon": 26.8, + "Cylinders": 6, + "Displacement": 173, + "Horsepower": 115, + "Weight_in_lbs": 2700, + "Acceleration": 12.9, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac phoenix", + "Miles_per_Gallon": 33.5, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 2556, + "Acceleration": 13.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "vw", + "Name": "vw rabbit", + "Miles_per_Gallon": 41.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 76, + "Weight_in_lbs": 2144, + "Acceleration": 14.7, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota corolla tercel", + "Miles_per_Gallon": 38.1, + "Cylinders": 4, + "Displacement": 89, + "Horsepower": 60, + "Weight_in_lbs": 1968, + "Acceleration": 18.8, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevette", + "Miles_per_Gallon": 32.1, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 70, + "Weight_in_lbs": 2120, + "Acceleration": 15.5, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 310", + "Miles_per_Gallon": 37.2, + "Cylinders": 4, + "Displacement": 86, + "Horsepower": 65, + "Weight_in_lbs": 2019, + "Acceleration": 16.4, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet citation", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 2678, + "Acceleration": 16.5, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford fairmont", + "Miles_per_Gallon": 26.4, + "Cylinders": 4, + "Displacement": 140, 
+ "Horsepower": 88, + "Weight_in_lbs": 2870, + "Acceleration": 18.1, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc concord", + "Miles_per_Gallon": 24.3, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 3003, + "Acceleration": 20.1, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge aspen", + "Miles_per_Gallon": 19.1, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 90, + "Weight_in_lbs": 3381, + "Acceleration": 18.7, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "audi", + "Name": "audi 4000", + "Miles_per_Gallon": 34.3, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 78, + "Weight_in_lbs": 2188, + "Acceleration": 15.8, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota corona liftback", + "Miles_per_Gallon": 29.8, + "Cylinders": 4, + "Displacement": 134, + "Horsepower": 90, + "Weight_in_lbs": 2711, + "Acceleration": 15.5, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda 626", + "Miles_per_Gallon": 31.3, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 75, + "Weight_in_lbs": 2542, + "Acceleration": 17.5, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 510 hatchback", + "Miles_per_Gallon": 37, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 92, + "Weight_in_lbs": 2434, + "Acceleration": 15, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyota", + "Name": "toyota corolla", + "Miles_per_Gallon": 32.2, + "Cylinders": 4, + "Displacement": 108, + "Horsepower": 75, + "Weight_in_lbs": 2265, + "Acceleration": 15.2, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda glc", + "Miles_per_Gallon": 46.6, + "Cylinders": 4, + "Displacement": 86, + "Horsepower": 65, + "Weight_in_lbs": 2110, + "Acceleration": 17.9, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge colt", + "Miles_per_Gallon": 27.9, + "Cylinders": 4, + "Displacement": 156, + "Horsepower": 105, + "Weight_in_lbs": 2800, + "Acceleration": 14.4, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 210", + "Miles_per_Gallon": 40.8, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 65, + "Weight_in_lbs": 2110, + "Acceleration": 19.2, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "vw", + "Name": "vw rabbit c (diesel)", + "Miles_per_Gallon": 44.3, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 48, + "Weight_in_lbs": 2085, + "Acceleration": 21.7, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "vw", + "Name": "vw dasher (diesel)", + "Miles_per_Gallon": 43.4, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 48, + "Weight_in_lbs": 2335, + "Acceleration": 23.7, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "audi", + "Name": "audi 5000s (diesel)", + "Miles_per_Gallon": 36.4, + "Cylinders": 5, + "Displacement": 121, + "Horsepower": 67, + "Weight_in_lbs": 2950, + "Acceleration": 19.9, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Name": "mercedes-benz 240d", + "Maker": "mercedes", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": 146, + "Horsepower": 67, + "Weight_in_lbs": 3250, + "Acceleration": 21.8, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda civic 1500 gl", + 
"Miles_per_Gallon": 44.6, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 67, + "Weight_in_lbs": 1850, + "Acceleration": 13.8, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "renault", + "Name": "renault lecar deluxe", + "Miles_per_Gallon": 40.9, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": null, + "Weight_in_lbs": 1835, + "Acceleration": 17.3, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "subaru", + "Name": "subaru dl", + "Miles_per_Gallon": 33.8, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 67, + "Weight_in_lbs": 2145, + "Acceleration": 18, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "vokswagen", + "Name": "vokswagen rabbit", + "Miles_per_Gallon": 29.8, + "Cylinders": 4, + "Displacement": 89, + "Horsepower": 62, + "Weight_in_lbs": 1845, + "Acceleration": 15.3, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "datsun", + "Name": "datsun 280-zx", + "Miles_per_Gallon": 32.7, + "Cylinders": 6, + "Displacement": 168, + "Horsepower": 132, + "Weight_in_lbs": 2910, + "Acceleration": 11.4, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda rx-7 gs", + "Miles_per_Gallon": 23.7, + "Cylinders": 3, + "Displacement": 70, + "Horsepower": 100, + "Weight_in_lbs": 2420, + "Acceleration": 12.5, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "triumph", + "Name": "triumph tr7 coupe", + "Miles_per_Gallon": 35, + "Cylinders": 4, + "Displacement": 122, + "Horsepower": 88, + "Weight_in_lbs": 2500, + "Acceleration": 15.1, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "ford", + "Name": "ford mustang cobra", + "Miles_per_Gallon": 23.6, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": null, + "Weight_in_lbs": 2905, + "Acceleration": 14.3, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "honda", + "Name": "honda Accelerationord", + "Miles_per_Gallon": 32.4, + "Cylinders": 4, + "Displacement": 107, + "Horsepower": 72, + "Weight_in_lbs": 2290, + "Acceleration": 17, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "honda", + "Name": "honda Accelerationord", + "Miles_per_Gallon": 32.4, + "Cylinders": 4, + "Displacement": 107, + "Horsepower": 72, + "Weight_in_lbs": 2290, + "Acceleration": 17, + "Year": "1981-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth reliant", + "Miles_per_Gallon": 27.2, + "Cylinders": 4, + "Displacement": 135, + "Horsepower": 84, + "Weight_in_lbs": 2490, + "Acceleration": 15.7, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick skylark", + "Miles_per_Gallon": 26.6, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 84, + "Weight_in_lbs": 2635, + "Acceleration": 16.4, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge aries wagon (sw)", + "Miles_per_Gallon": 25.8, + "Cylinders": 4, + "Displacement": 156, + "Horsepower": 92, + "Weight_in_lbs": 2620, + "Acceleration": 14.4, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet citation", + "Miles_per_Gallon": 23.5, + "Cylinders": 6, + "Displacement": 173, + "Horsepower": 110, + "Weight_in_lbs": 2725, + "Acceleration": 12.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth reliant", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": 135, + "Horsepower": 84, + "Weight_in_lbs": 2385, + "Acceleration": 12.9, + 
"Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota starlet", + "Miles_per_Gallon": 39.1, + "Cylinders": 4, + "Displacement": 79, + "Horsepower": 58, + "Weight_in_lbs": 1755, + "Acceleration": 16.9, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth champ", + "Miles_per_Gallon": 39, + "Cylinders": 4, + "Displacement": 86, + "Horsepower": 64, + "Weight_in_lbs": 1875, + "Acceleration": 16.4, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "honda", + "Name": "honda civic 1300", + "Miles_per_Gallon": 35.1, + "Cylinders": 4, + "Displacement": 81, + "Horsepower": 60, + "Weight_in_lbs": 1760, + "Acceleration": 16.1, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Name": "subaru", + "Maker": "subaru", + "Miles_per_Gallon": 32.3, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 67, + "Weight_in_lbs": 2065, + "Acceleration": 17.8, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 210", + "Miles_per_Gallon": 37, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 65, + "Weight_in_lbs": 1975, + "Acceleration": 19.4, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyota", + "Name": "toyota tercel", + "Miles_per_Gallon": 37.7, + "Cylinders": 4, + "Displacement": 89, + "Horsepower": 62, + "Weight_in_lbs": 2050, + "Acceleration": 17.3, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda glc 4", + "Miles_per_Gallon": 34.1, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 68, + "Weight_in_lbs": 1985, + "Acceleration": 16, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth horizon 4", + "Miles_per_Gallon": 34.7, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 63, + "Weight_in_lbs": 2215, + "Acceleration": 14.9, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford escort 4w", + "Miles_per_Gallon": 34.4, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 65, + "Weight_in_lbs": 2045, + "Acceleration": 16.2, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford escort 2h", + "Miles_per_Gallon": 29.9, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 65, + "Weight_in_lbs": 2380, + "Acceleration": 20.7, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen jetta", + "Miles_per_Gallon": 33, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 74, + "Weight_in_lbs": 2190, + "Acceleration": 14.2, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "renault", + "Name": "renault 18i", + "Miles_per_Gallon": 34.5, + "Cylinders": 4, + "Displacement": 100, + "Horsepower": null, + "Weight_in_lbs": 2320, + "Acceleration": 15.8, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda prelude", + "Miles_per_Gallon": 33.7, + "Cylinders": 4, + "Displacement": 107, + "Horsepower": 75, + "Weight_in_lbs": 2210, + "Acceleration": 14.4, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyota", + "Name": "toyota corolla", + "Miles_per_Gallon": 32.4, + "Cylinders": 4, + "Displacement": 108, + "Horsepower": 75, + "Weight_in_lbs": 2350, + "Acceleration": 16.8, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 200sx", + "Miles_per_Gallon": 32.9, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 100, + "Weight_in_lbs": 
2615, + "Acceleration": 14.8, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda 626", + "Miles_per_Gallon": 31.6, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 74, + "Weight_in_lbs": 2635, + "Acceleration": 18.3, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "peugeot", + "Name": "peugeot 505s turbo diesel", + "Miles_per_Gallon": 28.1, + "Cylinders": 4, + "Displacement": 141, + "Horsepower": 80, + "Weight_in_lbs": 3230, + "Acceleration": 20.4, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "saab", + "Name": "saab 900s", + "Miles_per_Gallon": null, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 110, + "Weight_in_lbs": 2800, + "Acceleration": 15.4, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "volvo", + "Name": "volvo diesel", + "Miles_per_Gallon": 30.7, + "Cylinders": 6, + "Displacement": 145, + "Horsepower": 76, + "Weight_in_lbs": 3160, + "Acceleration": 19.6, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota cressida", + "Miles_per_Gallon": 25.4, + "Cylinders": 6, + "Displacement": 168, + "Horsepower": 116, + "Weight_in_lbs": 2900, + "Acceleration": 12.6, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 810 maxima", + "Miles_per_Gallon": 24.2, + "Cylinders": 6, + "Displacement": 146, + "Horsepower": 120, + "Weight_in_lbs": 2930, + "Acceleration": 13.8, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "buick", + "Name": "buick century", + "Miles_per_Gallon": 22.4, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 110, + "Weight_in_lbs": 3415, + "Acceleration": 15.8, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile cutlass ls", + "Miles_per_Gallon": 26.6, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 105, + "Weight_in_lbs": 3725, + "Acceleration": 19, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford granada gl", + "Miles_per_Gallon": 20.2, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 88, + "Weight_in_lbs": 3060, + "Acceleration": 17.1, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler lebaron salon", + "Miles_per_Gallon": 17.6, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 85, + "Weight_in_lbs": 3465, + "Acceleration": 16.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet cavalier", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 112, + "Horsepower": 88, + "Weight_in_lbs": 2605, + "Acceleration": 19.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet cavalier wagon", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 112, + "Horsepower": 88, + "Weight_in_lbs": 2640, + "Acceleration": 18.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet cavalier 2-door", + "Miles_per_Gallon": 34, + "Cylinders": 4, + "Displacement": 112, + "Horsepower": 88, + "Weight_in_lbs": 2395, + "Acceleration": 18, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac j2000 se hatchback", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 112, + "Horsepower": 85, + "Weight_in_lbs": 2575, + "Acceleration": 16.2, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge aries 
se", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 135, + "Horsepower": 84, + "Weight_in_lbs": 2525, + "Acceleration": 16, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac phoenix", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 2735, + "Acceleration": 18, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford fairmont futura", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 92, + "Weight_in_lbs": 2865, + "Acceleration": 16.4, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc concord dl", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": null, + "Weight_in_lbs": 3035, + "Acceleration": 20.5, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen rabbit l", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 74, + "Weight_in_lbs": 1980, + "Acceleration": 15.3, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "mazda", + "Name": "mazda glc custom l", + "Miles_per_Gallon": 37, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 68, + "Weight_in_lbs": 2025, + "Acceleration": 18.2, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda glc custom", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 68, + "Weight_in_lbs": 1970, + "Acceleration": 17.6, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth horizon miser", + "Miles_per_Gallon": 38, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 63, + "Weight_in_lbs": 2125, + "Acceleration": 14.7, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury lynx l", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 70, + "Weight_in_lbs": 2125, + "Acceleration": 17.3, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "nissan", + "Name": "nissan stanza xe", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 88, + "Weight_in_lbs": 2160, + "Acceleration": 14.5, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "honda", + "Name": "honda Accelerationord", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 107, + "Horsepower": 75, + "Weight_in_lbs": 2205, + "Acceleration": 14.5, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyota", + "Name": "toyota corolla", + "Miles_per_Gallon": 34, + "Cylinders": 4, + "Displacement": 108, + "Horsepower": 70, + "Weight_in_lbs": 2245, + "Acceleration": 16.9, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "honda", + "Name": "honda civic", + "Miles_per_Gallon": 38, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 67, + "Weight_in_lbs": 1965, + "Acceleration": 15, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "honda", + "Name": "honda civic (auto)", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 67, + "Weight_in_lbs": 1965, + "Acceleration": 15.7, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 310 gx", + "Miles_per_Gallon": 38, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 67, + "Weight_in_lbs": 1995, + "Acceleration": 16.2, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + 
"Maker": "buick", + "Name": "buick century limited", + "Miles_per_Gallon": 25, + "Cylinders": 6, + "Displacement": 181, + "Horsepower": 110, + "Weight_in_lbs": 2945, + "Acceleration": 16.4, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile cutlass ciera (diesel)", + "Miles_per_Gallon": 38, + "Cylinders": 6, + "Displacement": 262, + "Horsepower": 85, + "Weight_in_lbs": 3015, + "Acceleration": 17, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler lebaron medallion", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 156, + "Horsepower": 92, + "Weight_in_lbs": 2585, + "Acceleration": 14.5, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford granada l", + "Miles_per_Gallon": 22, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 112, + "Weight_in_lbs": 2835, + "Acceleration": 14.7, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota celica gt", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 144, + "Horsepower": 96, + "Weight_in_lbs": 2665, + "Acceleration": 13.9, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge charger 2.2", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 135, + "Horsepower": 84, + "Weight_in_lbs": 2370, + "Acceleration": 13, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet camaro", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 2950, + "Acceleration": -17.3, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford mustang gl", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 86, + "Weight_in_lbs": 2790, + "Acceleration": 15.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "vw", + "Name": "vw pickup", + "Miles_per_Gallon": 44, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 52, + "Weight_in_lbs": 2130, + "Acceleration": 24.6, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "dodge", + "Name": "dodge rampage", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 135, + "Horsepower": 84, + "Weight_in_lbs": 2295, + "Acceleration": 11.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford ranger", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 79, + "Weight_in_lbs": 2625, + "Acceleration": -18.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevy", + "Name": "chevy s-10", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 82, + "Weight_in_lbs": 2720, + "Acceleration": -19.4, + "Year": "1982-01-01", + "Origin": "USA" + } +] \ No newline at end of file diff --git a/example/index.html b/example/index.html index 36d7eb7..912a004 100644 --- a/example/index.html +++ b/example/index.html @@ -12,8 +12,8 @@ - - + + diff --git a/example/js/datamodel.js b/example/js/datamodel.js index 29f4631..1ad9c86 100644 --- a/example/js/datamodel.js +++ b/example/js/datamodel.js @@ -1,2 +1,2 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var 
a=t[r]={i:r,l:!1,exports:{}};return e[r].call(a.exports,a,a.exports,n),a.l=!0,a.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)n.d(r,a,function(t){return e[t]}.bind(null,a));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports={name:"datamodel",description:"Relational algebra compliant in-memory tabular data store",homepage:"https://github.com/chartshq/datamodel",version:"2.0.2",license:"MIT",main:"dist/datamodel.js",author:"Charts.com ",keywords:["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],repository:{type:"git",url:"https://github.com/chartshq/datamodel.git"},contributors:[{name:"Akash Goswami",email:"akash@charts.com"},{name:"Subhash Haldar",email:"subhash@charts.com"},{name:"Rousan Ali",email:"rousan@charts.com",url:"https://rousan.io"},{name:"Ujjal Kumar Dutta",email:"ujjal@charts.com"}],dependencies:{"d3-dsv":"^1.0.8"},devDependencies:{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0",chai:"3.5.0","cross-env":"^5.0.5",eslint:"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0",jsdoc:"3.5.5",json2yaml:"^1.1.0",karma:"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3",marked:"^0.5.0",mocha:"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0",webpack:"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},scripts:{test:"npm run lint && npm run ut",ut:"karma start karma.conf.js",utd:"karma start --single-run false --browsers Chrome karma.conf.js ",build:"webpack --mode production",start:"webpack-dev-server --config webpack.config.dev.js --mode development --open",lint:"eslint ./src","lint-errors":"eslint --quiet ./src",docs:"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l});var a={};n.r(a),n.d(a,"DSVArr",function(){return Le}),n.d(a,"DSVStr",function(){return ze}),n.d(a,"FlatJSON",function(){return Xe}),n.d(a,"Auto",function(){return $e});var i={};n.r(i),n.d(i,"sum",function(){return yt}),n.d(i,"avg",function(){return gt}),n.d(i,"min",function(){return bt}),n.d(i,"max",function(){return wt}),n.d(i,"first",function(){return Ot}),n.d(i,"last",function(){return _t}),n.d(i,"count",function(){return Et}),n.d(i,"sd",function(){return 
At});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"};function s(e){return e instanceof Date?e:new Date(e)}function p(e){return e<10?"0"+e:e}function d(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},d.TOKEN_PREFIX="%",d.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},d.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},d.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;a=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},d.formatAs=function(e,t){var n,r=s(e),a=d.findTokens(t),i=d.getTokenDefinitions(),o=String(t),u=d.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;p--)(f=i[p].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(p=0;p0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var T=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}();function P(e,t,n){var r=n.buckets,a=n.binCount,i=n.binSize,o=n.start,u=[],c=[],f=e.domain(),l=T(f,2),s=l[0],p=l[1],d=p,h=[],v=void 0,m=void 0,y=void 0,g=void 0;if(D(t,function(t){u.push({data:e.partialField.data[t],index:t})}),!r){var b=((p+=1)-s)%(i=i||(p-s)/a);for(a||0===b||(p=p+i-b),v=s+i;v<=p;)h.push(v),v+=i;r={start:o=o||s,stops:h}}m=0===r.start?0:r.start||s,r.stops.forEach(function(e){u.filter(function(t){return t.data>=m&&t.data=r.stops[r.stops.length-1]}).forEach(function(e){c[e.index]=r.stops[r.stops.length-1]+"-"+d}),r.stops.unshift(r.start),g=new Set(r.stops),sr.stops[r.stops.length-1]&&g.add(d),g=[].concat(function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:H.CROSS,i=[],o=[],u=n||Y,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,p=c.name+"."+f.name,d=x(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=O({},e.schema());-1===d.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=O({},e.schema());-1!==d.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(e){var n=!1,p=void 0;D(t._rowDiffset,function(t){var h=[],v={};v[l]={},v[s]={},c.fields.forEach(function(t){h.push(t.partialField.data[e]),v[l][t.name()]=t.partialField.data[e]}),f.fields.forEach(function(e){-1!==d.indexOf(e.schema().name)&&r||h.push(e.partialField.data[t]),v[s][e.name()]=e.partialField.data[t]});var m=tt(v[l]),y=tt(v[s]);if(u(m,y)){var g={};h.forEach(function(e,t){g[i[t].name]=e}),n&&H.CROSS!==a?o[p]=g:(o.push(g),n=!0,p=e)}else if((a===H.LEFTOUTER||a===H.RIGHTOUTER)&&!n){var 
b={},w=c.fields.length-1;h.forEach(function(e,t){b[i[t].name]=t<=w?e:null}),n=!0,p=e,o.push(b)}})}),new mt(o,i,{name:p})}function J(e,t){var n=""+e,r=""+t;return nr?1:0}function G(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:J;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function K(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function z(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function X(e,t,n,r,a){var i={schema:[],data:[],uids:[]},o=(a=Object.assign({},{addUid:!1,columnWise:!1},a)).addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=ct(r,a))&&(E(i)?G(n,function(e,t){return i(e[o.index],t[o.index])}):_(i)?function(){var e=q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return ct(r,e)});e.forEach(function(e){e.push(z(e,a,u))}),G(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,K(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",G(n,W(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,K(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function $(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(O({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new mt(i,r,{name:l})}function Q(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=t,a=e.getPartialFieldspace().getMeasure(),i=ae.defaultReducer();return"function"==typeof t&&(i=t),Object.entries(a).forEach(function(e){var o=ie(e,1)[0];"string"==typeof t[o]&&(r[o]=ae.resolve(r[o])?ae.resolve(r[o]):i),"function"!=typeof t[o]&&(r[o]=void 0),n[o]=r[o]||ae.resolve(a[o].defAggFn())||i}),n}(e,n),o=e.getPartialFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],p=[],d={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=ie(e,2),n=t[0],r=t[1];(-1!==a.indexOf(n)||i[n])&&(p.push(O({},r.schema())),r.schema().type===f.MEASURE?s.push(n):r.schema().type===f.DIMENSION&&l.push(n))});var m=0;return D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===d[t]?(d[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[d[t]][n].push(u[n].partialField.data[e])})}),h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n])})}),r?(r.__calculateFieldspace(),v=r):v=new 
St(h,p,{name:c}),v}function ue(e,t){var n=x(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function ce(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(O({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new St(i,r,{name:l})}function fe(e,t,n){return B(e,t,n,!1,H.LEFTOUTER)}function le(e,t,n){return B(t,e,n,!1,H.RIGHTOUTER)}var se=function(){function e(e,t){for(var n=0;nn&&(n=a)}),[t,n]}}]),t}(),je=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===Be?f=!0:r===Je&&(f=!0,e.charCodeAt(o)===Be&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3],i=void 0;t!==I?(i={op:t,meta:r,criteria:a},e._derivation.push(i)):(i=[].concat(Ze(a)),e._derivation.length=0,(n=e._derivation).push.apply(n,Ze(i)))},at=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||V,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return function(e){var t=e.getData(),n=t.schema,r=e.getFieldsConfig(),i=e.getFieldspace().fieldsObj(),o=t.data,u=Object.values(r).reduce(function(e,t){return e[t.def.name]=i[t.def.name].domain(),e},{});return function(e){return!!o.length&&o.some(function(t){return n.every(function(n){if(!(n.name in e))return!0;var i=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return i>=u[n.name][0]&&i<=u[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=r[n.name].index;return t[o]===e[n.name].valueOf()})})}}(e)}):[function(){return!1}];var o=void 0;r===V?o=e.clone(!1,!1).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):o=e.clone(!1,!1).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1});return o},it=function(e,t,n,r){var a=e.clone(r.saveChild),i=function(e,t,n,r){var a=[],i=-1,o=void 0,u=function(e){return n(et(t,e),e)};return r.mode===l.INVERSE&&(u=function(e){return!n(et(t,e))}),D(e,function(e){u(e)&&(-1!==i&&e===i+1?(o=a.length-1,a[o]=a[o].split("-")[0]+"-"+e):a.push(""+e),i=e)}),a.join(",")}(a._rowDiffset,a.getPartialFieldspace().fields,t,n);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),r.saveChild&&rt(a,R,{config:n},t),a},ot=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),n.saveChild&&rt(a,C,{projField:t,config:n,actualProjField:i},null),a},ut=function(e,t,n,r){r=Object.assign(Object.assign({},Ue),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var o=i(t,r),u=Qe(o,2),c=u[0],f=u[1],l=Ie(f,n,c),s=k.createNamespace(l,r.name);return e._partialFieldspace=s,e._rowDiffset=f.length&&f[0].length?"0-"+(f[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e},ct=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 
0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=ft(n,t),o=Qe(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},st=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,p=Object.values(o.mutableActions);!1!==u&&(p=p.filter(function(e){return e.config.sourceId!==c}));var d=p.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:function e(t){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];return null!==t._parent&&(n.push(t),e(t._parent,n)),n}(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(Ze(d),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,Ze(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=at(g,a,{filterByMeasure:f}),lt(g,i,y)),l.forEach(function(e){var t=at(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=void 0;if(!1===(!(arguments.length>1&&void 0!==arguments[1])||arguments[1])){var n=this.getData({getAllFields:!0}),r=n.data,a=n.schema,i=r.map(function(e){var t={};return a.forEach(function(n,r){t[n.name]=e[r]}),t});t=new this.constructor(i,a)}else t=new this.constructor(this);return e&&this._children.push(t),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:[];rt(this,I,null,t),this._parent=e,e._children.push(this)}}]),e}(),ht=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}(),vt=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=oe.apply(void 0,a);return n.saveChild&&(this._children.push(i),rt(i,M,{fieldsArr:e,groupByString:r,defaultReducer:ae.defaultReducer()},t)),i._parent=this,i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"addField",value:function(e){var t=e.name();this._colIdentifier+=","+t;var n=this._partialFieldspace;if(n.fieldsObj()[e.name()]){var r=n.fields.findIndex(function(e){return e.name()===t});r>=0&&(n.fields[r]=e)}else n.fields.push(e);return 
this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0,replaceVar:!1},r=this.getFieldsConfig(),a=t.slice(0,t.length-1),i=t[t.length-1];if(r[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in model.");var o=a.map(function(e){var t=r[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),u=this.clone(),c=u.getFieldspace().fields,f=o.map(function(e){return c[e]}),l=[];D(u._rowDiffset,function(e){var t=f.map(function(t){return t.partialField.data[e]});l[e]=i.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function e(t){return t._parent?e(t._parent):t}(this),c=u._propagationNameSpace,f={groupByModel:function e(t){return t._parent&&t._derivation.find(function(e){return"group"!==e.op})?e(t._parent):t}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),st(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;st(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=this.clone(),r=t.name||e+"_binned";if(this.getFieldsConfig()[r]||!this.getFieldsConfig()[e])throw new Error("Field "+e+" already exists.");var a=P(this._partialFieldspace.fields.find(function(t){return t.name()===e}),this._rowDiffset,t),i=Ie([a.data],[{name:r,type:f.DIMENSION,subtype:u.BINNED,bins:{range:a.range,mid:a.mid}}],[r])[0];return n.addField(i),rt(n,L,{dimensionName:e,config:t,binFieldName:r},null),n}}],[{key:"Reducers",get:function(){return ae}}]),t}(),yt=te.sum,gt=te.avg,bt=te.min,wt=te.max,Ot=te.first,_t=te.last,Et=te.count,At=te.std,jt=n(0);mt.Operators={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0}).saveChild;return t.forEach(function(e){n=e(n),a.push.apply(a,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&r.dispose(),n}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;na.getFullYear()&&(t=""+(i-1)+r),s(t).getFullYear()},formatter:function(e){var t=s(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:p.defaultNumberParser(),formatter:function(e){return s(e).getFullYear().toString()}}}},p.getTokenFormalNames=function(){var 
e=p.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},p.tokenResolver=function(){var e=p.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},p.formatAs=function(e,t){var n,r=s(e),a=p.findTokens(t),i=p.getTokenDefinitions(),o=String(t),u=p.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var T=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:J.CROSS,i=[],o=[],u=n||K,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=C(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(n){var d=!1,h=void 0;D(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var g=ot(y[l]),b=ot(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var w={};m.forEach(function(e,t){w[i[t].name]=e}),d&&J.CROSS!==a?o[h]=w:(o.push(w),d=!0,h=n)}else if((a===J.LEFTOUTER||a===J.RIGHTOUTER)&&!d){var _={},O=c.fields.length-1;m.forEach(function(e,t){_[i[t].name]=t<=O?e:null}),d=!0,h=n,o.push(_)}})}),new At(o,i,{name:d})}function z(e,t){var n=""+e,r=""+t;return nr?1:0}function q(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:z;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function X(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function Q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function Z(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function ee(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=vt(r,a))&&("function"==typeof i?q(n,function(e,t){return i(e[o.index],t[o.index])}):O(i)?function(){var e=Q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return 
vt(r,e)});e.forEach(function(e){e.push(Z(e,a,u))}),q(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,X(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",q(n,$(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,X(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function te(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new At(i,r,{name:l})}function ne(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=le.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=le.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=ue)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=se(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new Mt(h,d,{name:c}),v}function pe(e,t){var n=C(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function he(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new Mt(i,r,{name:l})}function ve(e,t,n){return W(e,t,n,!1,J.LEFTOUTER)}function me(e,t,n){return W(t,e,n,!1,J.RIGHTOUTER)}var ye=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),Te=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===qe?f=!0:r===Xe&&(f=!0,e.charCodeAt(o)===qe&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3],i=void 0;t!==H?(i={op:t,meta:r,criteria:a},e._derivation.push(i)):(i=[].concat(it(a)),e._derivation.length=0,(n=e._derivation).push.apply(n,it(i)))},ft=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var 
n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new F(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},D(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},lt=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||G,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return n=(t=e).getData(),r=n.schema,i=t.getFieldsConfig(),o=t.getFieldspace().fieldsObj(),u=n.data,c=Object.values(i).reduce(function(e,t){return e[t.def.name]=o[t.def.name].domain(),e},{}),function(e){return!!u.length&&u.some(function(t){return r.every(function(n){if(!(n.name in e))return!0;var r=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return r>=c[n.name][0]&&r<=c[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=i[n.name].index;return t[o]===e[n.name].valueOf()})})};var t,n,r,i,o,u,c}):[function(){return!1}];var o=void 0;r===G?o=e.clone(!1,!1).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):o=e.clone(!1,!1).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1});return o},st=function(e,t,n,r){var a=e.clone(r.saveChild),i=ft(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),ct(a,L,{config:n},t),a},dt=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),ct(a,U,{projField:t,config:n,actualProjField:i},null),a},pt=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},ht=function(e,t,n,r){n=function(e){return e.map(function(e){return pt(e)})}(n),r=Object.assign(Object.assign({},Je),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=at(u,2),f=c[0],l=c[1],s=Be(l,n,f),d=k.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?S(t):r.dataFormat,e},vt=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=mt(n,t),o=at(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},gt=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},bt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var 
t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:gt(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(it(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,it(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=lt(g,a,{filterByMeasure:f}),yt(g,i,y)),l.forEach(function(e){var t=lt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=void 0;if(!1===(!(arguments.length>1&&void 0!==arguments[1])||arguments[1])){var n=this.getData({getAllFields:!0}),r=n.data,a=n.schema,i=r.map(function(e){var t={};return a.forEach(function(n,r){t[n.name]=e[r]}),t});t=new this.constructor(i,a)}else t=new this.constructor(this);return e&&this._children.push(t),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:[];ct(this,H,null,t),this._parent=e,e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}}]),e}(),Ot=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}(),Et=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=de.apply(void 0,a);return ct(i,V,{fieldsArr:e,groupByString:r,defaultReducer:le.defaultReducer()},t),n.saveChild&&this._children.push(i),i._parent=this,i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=pt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];D(c._rowDiffset,function(e){var t=l.map(function(t){return 
t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return e}(this),c=u._propagationNameSpace,f={groupByModel:function e(t){return t._parent&&t._derivation.find(function(e){return"group"!==e.op})?e(t._parent):t}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),bt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;bt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=M(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[],o=n.saveChild;return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&a.dispose(),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;n { + let jsonData = data; + const schema = [{ + name: 'Name', type: 'dimension' }, { @@ -14,7 +15,8 @@ const schema = [ { name: 'roll', type: 'measure', - defAggFn: "avg" + defAggFn: "avg", + as: "roll2" } ]; @@ -25,53 +27,45 @@ const data = [ roll: 2 }, { - name: 'Sumant', - birthday: '1996-08-04', - roll: 89 + name: 'Miles_per_Gallon', + type: 'measure' }, + { - name: 'Ajay', - birthday: '1994-01-03', - roll: 31 + name: 'Displacement', + type: 'measure' }, { - name: 'Sushant', - birthday: '1994-01-03', - roll: 99 + name: 'Horsepower', + type: 'measure' }, { - name: 'Samim', - birthday: '1994-01-03', - roll: 12 + name: 'Weight_in_lbs', + type: 'measure' }, { - name: 'Akash', - birthday: '1994-01-03', - roll: 20 + name: 'Acceleration', + type: 'measure' }, { - name: 'Rousan', - birthday: '1995-07-06', - roll: 10 + name: 'Origin', + type: 'dimension' }, { name: 'Akash', birthday: '1994-01-03', - roll: -10 + roll: 120 }, { name: 'Rousan', birthday: '1995-07-06', - roll: -23 + roll: 93 } ]; -const dm = new DataModel(data, schema); - -// const groupedDm = dm.groupBy(['name']); - -const groupedDm2 = dm.select(fields => fields.name.value === "Rousan"); +const dm = new DataModel(data, schema); +const dm2 = dm.project(["name", "roll"]); // const schema = [ // { name: 'Name', type: 'dimension' }, // { name: 'HorsePower', type: 'measure' }, @@ -141,16 +135,11 @@ const 
groupedDm2 = dm.select(fields => fields.name.value === "Rousan"); // const dataModel1 = new DataModel(data1, schema1, { name: 'ModelA' }); // const dataModel2 = new DataModel(data2, schema2, { name: 'ModelB' }); -// const joinedDm = dataModel1.join(dataModel2, (f1, f2, cloneProvider1, cloneProvider2, store) => { -// if (!store.clonedDm1) { -// store.clonedDm1 = cloneProvider1(); -// } -// if (!store.clonedDm2) { -// store.clonedDm2 = cloneProvider2(); -// } -// if (!store.avgPopulation) { -// store.avgPopulation = store.clonedDm2.groupBy([""], { population: "avg" }).getData().data[0][0]; -// } + let rootData = new DataModel(jsonData, schema); + let dm = rootData.project(["Origin", "Acceleration"]); + let dm5 = DataModel.Operators.compose( + DataModel.Operators.groupBy(["Origin"]), + DataModel.Operators.select(f => f.Acceleration.value > 1000) + )(dm); +}); -// return (f1.profit.value * f1.sales.value) > store.avgPopulation; -// }); diff --git a/example/samples/example4.js b/example/samples/example4.js index 74b0c63..471f457 100644 --- a/example/samples/example4.js +++ b/example/samples/example4.js @@ -1,52 +1,57 @@ /* eslint-disable */ + d3.json('./data/cars.json', (data) => { const jsonData = data, - schema = [{ - name: 'Name', - type: 'dimension' - }, { - name: 'Miles_per_Gallon', - type: 'measure', - unit : 'cm', - scale: '1000', - numberformat: '12-3-3' - }, { - name: 'Cylinders', - type: 'dimension' - }, { - name: 'Displacement', - type: 'measure' - }, { - name: 'Horsepower', - type: 'measure' - }, { - name: 'Weight_in_lbs', - type: 'measure', - }, { - name: 'Acceleration', - type: 'measure' - }, { - name: 'Year', - type: 'dimension', - }, { - name: 'Origin', - type: 'dimension' - }]; - - const rootData = new window.DataModel(jsonData, schema); - - const groupedDm = rootData.groupBy(['Origin', 'Cylinders']) - const binnedDm = groupedDm.bin('Miles_per_Gallon', { binsCount: 10}) - }); - - dm.calculateVariable ({ - name: "fieldName", - type: "measure|dimension" - }, ["existingField1", "existingField2", (existingField1, existingField2) => { - return "operation_value" - }]) + schema = [ + { + "name": "Name", + "type": "dimension" + }, + { + "name": "Maker", + "type": "dimension" + }, + { + "name": "Miles_per_Gallon", + "type": "measure", + "defAggFn": "avg" + }, + { + "name": "Displacement", + "type": "measure", + "subtype": "continuous", + "defAggFn": "max" + }, + { + "name": "Horsepower", + "type": "measure", + "defAggFn": "avg" + }, + { + "name": "Weight_in_lbs", + "type": "measure", + "defAggFn": "min" + }, + { + "name": "Acceleration", + "type": "measure", + "defAggFn": "avg" + }, + { + "name": "Origin", + "type": "dimension" + }, + { + "name": "Cylinders", + "type": "dimension" + }, + { + "name": "Year", + "type": "dimension", + "subtype": "temporal", + "format": "%Y-%m-%d" + } + ] -// load('../../js/cars.csv') -// .then((res) => { -// dm = new DataModel(res.split('\n').map(line => line.split(',')), {}, { name: "myDataModel", dataFormat: 'DSVArr' }); -// }); + dm = new DataModel(jsonData, schema); +}); \ No newline at end of file diff --git a/example/samples/example5.js b/example/samples/example5.js index c4a977f..4277d78 100644 --- a/example/samples/example5.js +++ b/example/samples/example5.js @@ -2,152 +2,170 @@ fetch("/data/cars.json") .then(resp => resp.json()) .then(data => { - const schema = [ - // { - // "name": "Ticket", - // "type": "dimension" - // }, - // { - // "name": "Organisation", - // "type": "dimension" - // }, - // { - // "name": "Name", - // "type": "dimension" - 
// }, - // { - // "name": "Email ID", - // "type": "dimension" - // }, - // { - // "name": "Country", - // "type": "dimension" - // }, - // { - // "name": "Medium", - // "type": "dimension" - // }, - // { - // "name": "Member", - // "type": "dimension" - // }, - // { - // "name": "Shared with Member", - // "type": "dimension" - // }, - // { - // "name": "Partner", - // "type": "dimension" - // }, - // { - // "name": "Partner Name", - // "type": "dimension" - // }, - // { - // "name": "Partner Email ID", - // "type": "dimension" - // }, - // { - // "name": "Product", - // "type": "dimension" - // }, - // { - // "name": "New / Renewal", - // "type": "dimension" - // }, - // { - // "name": "Industry", - // "type": "dimension" - // }, - // { - // "name": "Trade Discount", - // "type": "measure" - // }, - // { - // "name": "Reseller Discount", - // "type": "measure" - // }, - { - name: 'Qty', - type: 'measure' - }, - // { - // "name": "Price", - // "type": "measure" - // }, - // { - // "name": "Gross Value", - // "type": "measure" - // }, - // { - // "name": "Net Value", - // "type": "measure" - // }, - // { - // "name": "PO Number", - // "type": "measure" - // }, - { - name: 'Date of Order', - type: 'dimension', - subtype: 'temporal', - format: '%Y-%m-%d' - }, - // { - // "name": "Month", - // "type": "dimension", - // "subtype": "temporal", - // "format": "%Y-%m-%d" - // }, - // { - // "name": "Quarter", - // "type": "dimension" - // }, - { - "name": "Date of Payment", - "type": "dimension", - "subtype": "temporal", - "format": "%Y-%m-%d" - }, - // { - // "name": "Payment Mode", - // "type": "dimension" - // }, - // { - // "name": "Source/Ref No.", - // "type": "measure" - // }, - // { - // "name": "Payment Due Date", - // "type": "dimension", - // "subtype": "temporal", - // "format": "%Y-%m-%d" - // }, - // { - // "name": "Lead in Date", - // "type": "dimension" - // }, - // { - // "name": "Lead out Date", - // "type": "dimension" - // }, - // { - // name: 'Days Taken', - // type: 'measure' - // }, - // { - // name: 'Status', - // type: 'dimension' - // } - ]; + // const schema = [ + // // { + // // "name": "Ticket", + // // "type": "dimension" + // // }, + // // { + // // "name": "Organisation", + // // "type": "dimension" + // // }, + // // { + // // "name": "Name", + // // "type": "dimension" + // // }, + // // { + // // "name": "Email ID", + // // "type": "dimension" + // // }, + // // { + // // "name": "Country", + // // "type": "dimension" + // // }, + // // { + // // "name": "Medium", + // // "type": "dimension" + // // }, + // // { + // // "name": "Member", + // // "type": "dimension" + // // }, + // // { + // // "name": "Shared with Member", + // // "type": "dimension" + // // }, + // // { + // // "name": "Partner", + // // "type": "dimension" + // // }, + // // { + // // "name": "Partner Name", + // // "type": "dimension" + // // }, + // // { + // // "name": "Partner Email ID", + // // "type": "dimension" + // // }, + // // { + // // "name": "Product", + // // "type": "dimension" + // // }, + // // { + // // "name": "New / Renewal", + // // "type": "dimension" + // // }, + // // { + // // "name": "Industry", + // // "type": "dimension" + // // }, + // // { + // // "name": "Trade Discount", + // // "type": "measure" + // // }, + // // { + // // "name": "Reseller Discount", + // // "type": "measure" + // // }, + // { + // name: 'Qty', + // type: 'measure' + // }, + // // { + // // "name": "Price", + // // "type": "measure" + // // }, + // // { + // // "name": "Gross Value", + // // 
"type": "measure" + // // }, + // // { + // // "name": "Net Value", + // // "type": "measure" + // // }, + // // { + // // "name": "PO Number", + // // "type": "measure" + // // }, + // { + // name: 'Date of Order', + // type: 'dimension', + // subtype: 'temporal', + // format: '%Y-%m-%d' + // }, + // // { + // // "name": "Month", + // // "type": "dimension", + // // "subtype": "temporal", + // // "format": "%Y-%m-%d" + // // }, + // // { + // // "name": "Quarter", + // // "type": "dimension" + // // }, + // { + // "name": "Date of Payment", + // "type": "dimension", + // "subtype": "temporal", + // "format": "%Y-%m-%d" + // }, + // // { + // // "name": "Payment Mode", + // // "type": "dimension" + // // }, + // // { + // // "name": "Source/Ref No.", + // // "type": "measure" + // // }, + // // { + // // "name": "Payment Due Date", + // // "type": "dimension", + // // "subtype": "temporal", + // // "format": "%Y-%m-%d" + // // }, + // // { + // // "name": "Lead in Date", + // // "type": "dimension" + // // }, + // // { + // // "name": "Lead out Date", + // // "type": "dimension" + // // }, + // // { + // // name: 'Days Taken', + // // type: 'measure' + // // }, + // // { + // // name: 'Status', + // // type: 'dimension' + // // } + // ]; - // DataModel.configureInvalidAwareTypes({ - // "": DataModel.InvalidAwareTypes.NULL, - // }); - const dm = new DataModel(data, schema); - const dmData = dm.getData().data; - const selected = dm.select(fields => fields['Date of Payment'].value === DataModel.InvalidAwareTypes.NULL); + // // DataModel.configureInvalidAwareTypes({ + // // "": DataModel.InvalidAwareTypes.NULL, + // // }); + // const dm = new DataModel(data, schema); + // const dmData = dm.getData().data; + // const selected = dm.select(fields => fields['Date of Payment'].value === DataModel.InvalidAwareTypes.NULL); - const compData = dm.groupBy(['name']).getData(); + // const compData = dm.groupBy(['name']).getData(); + + const data1 = [ + { profit: 10, sales: 25, city: 'a', state: 'aa' }, + { profit: 15, sales: 20, city: 'b', state: 'bb' }, + { profit: 10, sales: 25, city: 'a', state: 'ab' }, + { profit: 15, sales: 20, city: 'b', state: 'ba' }, + ]; + const schema1 = [ + { name: 'profit', type: 'measure' }, + { name: 'sales', type: 'measure' }, + { name: 'city', type: 'dimension' }, + { name: 'state', type: 'dimension' }, + ]; + const dataModel = new DataModel(data1, schema1, { name: 'Yo' }); + + kk = dataModel.project(['profit','sales']) + + mm = kk.sort(['sales'],{saveChild: true}) }) diff --git a/package.json b/package.json index 23d604e..f783694 100644 --- a/package.json +++ b/package.json @@ -2,10 +2,9 @@ "name": "datamodel", "description": "Relational algebra compliant in-memory tabular data store", "homepage": "https://github.com/chartshq/datamodel", - "version": "2.1.0", + "version": "2.2.0", "license": "MIT", "main": "dist/datamodel.js", - "author": "Charts.com ", "keywords": [ "datamodel", "data", @@ -18,6 +17,7 @@ "tabular", "operation" ], + "author": "Muzejs.org (https://muzejs.org/)", "repository": { "type": "git", "url": "https://github.com/chartshq/datamodel.git" @@ -25,20 +25,19 @@ "contributors": [ { "name": "Akash Goswami", - "email": "akash@charts.com" + "email": "akashgoswami90s@gmail.com" }, { - "name": "Subhash Haldar", - "email": "subhash@charts.com" + "name": "Subhash Haldar" }, { "name": "Rousan Ali", - "email": "rousan@charts.com", + "email": "rousanali786@gmail.com", "url": "https://rousan.io" }, { "name": "Ujjal Kumar Dutta", - "email": "ujjal@charts.com" + 
"email": "duttaujjalkumar@live.com" } ], "dependencies": { diff --git a/src/constants/index.js b/src/constants/index.js index 86c5bb0..512970f 100644 --- a/src/constants/index.js +++ b/src/constants/index.js @@ -18,7 +18,8 @@ export const DM_DERIVATIVES = { GROUPBY: 'group', COMPOSE: 'compose', CAL_VAR: 'calculatedVariable', - BIN: 'bin' + BIN: 'bin', + SORT: 'sort' }; export const JOINS = { diff --git a/src/datamodel.js b/src/datamodel.js index cba7b8f..809eecb 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ -2,7 +2,7 @@ import { FieldType, DimensionSubtype, DataFormat } from './enums'; import { - persistDerivation, + persistDerivations, getRootGroupByModel, propagateToAllDataModels, getRootDataModel, @@ -75,7 +75,6 @@ class DataModel extends Relation { super(...args); this._onPropagation = []; - this._sortingDetails = []; } /** @@ -239,7 +238,8 @@ class DataModel extends Relation { let params = [this, fieldsArr, reducers]; const newDataModel = groupBy(...params); - persistDerivation( + persistDerivations( + this, newDataModel, DM_DERIVATIVES.GROUPBY, { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() }, @@ -247,9 +247,10 @@ class DataModel extends Relation { ); if (config.saveChild) { - this._children.push(newDataModel); + newDataModel.setParent(this); + } else { + newDataModel.setParent(null); } - newDataModel._parent = this; return newDataModel; } @@ -305,7 +306,7 @@ class DataModel extends Relation { * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed. * @return {DataModel} Returns a new instance of DataModel with sorted data. */ - sort (sortingDetails) { + sort (sortingDetails, config = { saveChild: false }) { const rawData = this.getData({ order: 'row', sort: sortingDetails @@ -314,7 +315,21 @@ class DataModel extends Relation { const dataInCSVArr = [header].concat(rawData.data); const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' }); - sortedDm._sortingDetails = sortingDetails; + + persistDerivations( + this, + sortedDm, + DM_DERIVATIVES.SORT, + config, + sortingDetails + ); + + if (config.saveChild) { + sortedDm.setParent(this); + } else { + sortedDm.setParent(null); + } + return sortedDm; } @@ -463,7 +478,7 @@ class DataModel extends Relation { return fieldSpec.index; }); - const clone = this.clone(); + const clone = this.clone(config.saveChild); const fs = clone.getFieldspace().fields; const suppliedFields = depFieldIndices.map(idx => fs[idx]); @@ -479,7 +494,13 @@ class DataModel extends Relation { const [field] = createFields([computedValues], [schema], [schema.name]); clone.addField(field); - persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn); + persistDerivations( + this, + clone, + DM_DERIVATIVES.CAL_VAR, + { config: schema, fields: depVars }, + retrieveFn + ); return clone; } @@ -631,10 +652,16 @@ class DataModel extends Relation { bins }], [binFieldName])[0]; - const clone = this.clone(); + const clone = this.clone(config.saveChild); clone.addField(binField); - persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null); + persistDerivations( + this, + clone, + DM_DERIVATIVES.BIN, + { measureFieldName, config, binFieldName }, + null + ); return clone; } diff --git a/src/enums/group-by-functions.js b/src/enums/group-by-functions.js new file mode 100644 index 0000000..672b828 --- /dev/null +++ b/src/enums/group-by-functions.js @@ -0,0 +1,18 @@ +/** + * Group by function names + * + 
* @readonly + * @enum {string} + */ +const GROUP_BY_FUNCTIONS = { + SUM: 'sum', + AVG: 'avg', + MIN: 'min', + MAX: 'max', + FIRST: 'first', + LAST: 'last', + COUNT: 'count', + STD: 'std' +}; + +export default GROUP_BY_FUNCTIONS; diff --git a/src/enums/index.js b/src/enums/index.js index 39bf314..1e76f6b 100644 --- a/src/enums/index.js +++ b/src/enums/index.js @@ -12,3 +12,4 @@ export { default as DimensionSubtype } from './dimension-subtype'; export { default as MeasureSubtype } from './measure-subtype'; export { default as FieldType } from './field-type'; export { default as FilteringMode } from './filtering-mode'; +export { default as GROUP_BY_FUNCTIONS } from './group-by-functions'; diff --git a/src/export.js b/src/export.js index 34135e9..ed7d7c6 100644 --- a/src/export.js +++ b/src/export.js @@ -18,11 +18,11 @@ import { import * as Stats from './stats'; import * as enums from './enums'; import { DateTimeFormatter } from './utils'; -import { DataFormat, FilteringMode } from './constants'; +import { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants'; import InvalidAwareTypes from './invalid-aware-types'; import pkg from '../package.json'; -DataModel.Operators = { +const Operators = { compose, bin, select, @@ -38,12 +38,17 @@ DataModel.Operators = { fullOuterJoin, union }; -DataModel.Stats = Stats; -Object.assign(DataModel, enums); -DataModel.DateTimeFormatter = DateTimeFormatter; -DataModel.DataFormat = DataFormat; -DataModel.FilteringMode = FilteringMode; -DataModel.InvalidAwareTypes = InvalidAwareTypes; -DataModel.version = pkg.version; + +const version = pkg.version; +Object.assign(DataModel, { + Operators, + Stats, + DM_DERIVATIVES, + DateTimeFormatter, + DataFormat, + FilteringMode, + InvalidAwareTypes, + version +}, enums); export default DataModel; diff --git a/src/helper.js b/src/helper.js index 476576b..cd985e6 100644 --- a/src/helper.js +++ b/src/helper.js @@ -34,23 +34,28 @@ export const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fie return fieldStore.createNamespace(newFields, fieldStoreName); }; -export const persistDerivation = (model, operation, config = {}, criteriaFn) => { - let derivative; - if (operation !== DM_DERIVATIVES.COMPOSE) { - derivative = { +export const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => { + if (operation === DM_DERIVATIVES.COMPOSE) { + model._derivation.length = 0; + model._derivation.push(...criteriaFn); + } else { + model._derivation.push({ op: operation, meta: config, criteria: criteriaFn - }; - model._derivation.push(derivative); - } - else { - derivative = [...criteriaFn]; - model._derivation.length = 0; - model._derivation.push(...derivative); + }); } }; +export const persistAncestorDerivation = (sourceDm, newDm) => { + newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation); +}; + +export const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => { + persistCurrentDerivation(model, operation, config, criteriaFn); + persistAncestorDerivation(sourceDm, model); +}; + export const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => { const newRowDiffSet = []; let lastInsertedValue = -1; @@ -86,6 +91,20 @@ export const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => return newRowDiffSet.join(','); }; +export const cloneWithAllFields = (model) => { + const clonedDm = model.clone(false); + const partialFieldspace = model.getPartialFieldspace(); + clonedDm._colIdentifier = 
partialFieldspace.fields.map(f => f.name()).join(','); + + // flush out cached namespace values on addition of new fields + partialFieldspace._cachedFieldsObj = null; + partialFieldspace._cachedDimension = null; + partialFieldspace._cachedMeasure = null; + clonedDm.__calculateFieldspace().calculateFieldsConfig(); + + return clonedDm; +}; + export const filterPropagationModel = (model, propModels, config = {}) => { const operation = config.operation || LOGICAL_OPERATORS.AND; const filterByMeasure = config.filterByMeasure || false; @@ -127,13 +146,12 @@ export const filterPropagationModel = (model, propModels, config = {}) => { let filteredModel; if (operation === LOGICAL_OPERATORS.AND) { - const clonedModel = model.clone(false, false); - filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), { + filteredModel = cloneWithAllFields(model).select(fields => fns.every(fn => fn(fields)), { saveChild: false, mode: FilteringMode.ALL }); } else { - filteredModel = model.clone(false, false).select(fields => fns.some(fn => fn(fields)), { + filteredModel = cloneWithAllFields(model).select(fields => fns.some(fn => fn(fields)), { mode: FilteringMode.ALL, saveChild: false }); @@ -154,7 +172,13 @@ export const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) = cloned._rowDiffset = rowDiffset; cloned.__calculateFieldspace().calculateFieldsConfig(); - persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn); + persistDerivations( + sourceDm, + cloned, + DM_DERIVATIVES.SELECT, + { config: selectConfig }, + selectFn + ); return cloned; }; @@ -170,7 +194,8 @@ export const cloneWithProject = (sourceDm, projField, config, allFields) => { cloned._colIdentifier = projectionSet.join(','); cloned.__calculateFieldspace().calculateFieldsConfig(); - persistDerivation( + persistDerivations( + sourceDm, cloned, DM_DERIVATIVES.PROJECT, { projField, config, actualProjField: projectionSet }, @@ -202,10 +227,52 @@ export const sanitizeUnitSchema = (unitSchema) => { return unitSchema; }; -export const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema)); +export const validateUnitSchema = (unitSchema) => { + const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS]; + const supportedDimSubTypes = [ + DimensionSubtype.CATEGORICAL, + DimensionSubtype.BINNED, + DimensionSubtype.TEMPORAL, + DimensionSubtype.GEO + ]; + const { type, subtype, name } = unitSchema; + + switch (type) { + case FieldType.DIMENSION: + if (supportedDimSubTypes.indexOf(subtype) === -1) { + throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`); + } + break; + case FieldType.MEASURE: + if (supportedMeasureSubTypes.indexOf(subtype) === -1) { + throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`); + } + break; + default: + throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`); + } +}; + +export const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => { + unitSchema = sanitizeUnitSchema(unitSchema); + validateUnitSchema(unitSchema); + return unitSchema; +}); + +export const resolveFieldName = (schema, dataHeader) => { + schema.forEach((unitSchema) => { + const fieldNameAs = unitSchema.as; + if (!fieldNameAs) { return; } + + const idx = dataHeader.indexOf(unitSchema.name); + dataHeader[idx] = fieldNameAs; + unitSchema.name = fieldNameAs; + delete unitSchema.as; + }); +}; export const updateData = (relation, data, schema, 
options) => { - schema = sanitizeSchema(schema); + schema = sanitizeAndValidateSchema(schema); options = Object.assign(Object.assign({}, defaultConfig), options); const converterFn = converter[options.dataFormat]; @@ -214,6 +281,7 @@ export const updateData = (relation, data, schema, options) => { } const [header, formattedData] = converterFn(data, options); + resolveFieldName(schema, header); const fieldArr = createFields(formattedData, schema, header); // This will create a new fieldStore with the fields @@ -241,26 +309,23 @@ export const fieldInSchema = (schema, field) => { }; -export const getOperationArguments = (child) => { - const derivation = child._derivation; +export const getDerivationArguments = (derivation) => { let params = []; let operation; - if (derivation && derivation.length === 1) { - operation = derivation[0].op; - switch (operation) { - case DM_DERIVATIVES.SELECT: - params = [derivation[0].criteria]; - break; - case DM_DERIVATIVES.PROJECT: - params = [derivation[0].meta.actualProjField]; - break; - case DM_DERIVATIVES.GROUPBY: - operation = 'groupBy'; - params = [derivation[0].meta.groupByString.split(','), derivation[0].criteria]; - break; - default: - break; - } + operation = derivation.op; + switch (operation) { + case DM_DERIVATIVES.SELECT: + params = [derivation.criteria]; + break; + case DM_DERIVATIVES.PROJECT: + params = [derivation.meta.actualProjField]; + break; + case DM_DERIVATIVES.GROUPBY: + operation = 'groupBy'; + params = [derivation.meta.groupByString.split(','), derivation.criteria]; + break; + default: + operation = null; } return { @@ -270,17 +335,26 @@ export const getOperationArguments = (child) => { }; const applyExistingOperationOnModel = (propModel, dataModel) => { - const { operation, params } = getOperationArguments(dataModel); + const derivations = dataModel.getDerivations(); let selectionModel = propModel[0]; let rejectionModel = propModel[1]; - if (operation && params.length) { - selectionModel = propModel[0][operation](...params, { - saveChild: false - }); - rejectionModel = propModel[1][operation](...params, { - saveChild: false - }); - } + + derivations.forEach((derivation) => { + if (!derivation) { + return; + } + + const { operation, params } = getDerivationArguments(derivation); + if (operation) { + selectionModel = selectionModel[operation](...params, { + saveChild: false + }); + rejectionModel = rejectionModel[operation](...params, { + saveChild: false + }); + } + }); + return [selectionModel, rejectionModel]; }; @@ -312,8 +386,8 @@ const propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = }; export const getRootGroupByModel = (model) => { - if (model._parent && model._derivation.find(d => d.op !== 'group')) { - return getRootGroupByModel(model._parent); + while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) { + model = model._parent; } return model; }; diff --git a/src/helper.spec.js b/src/helper.spec.js new file mode 100644 index 0000000..121235c --- /dev/null +++ b/src/helper.spec.js @@ -0,0 +1,76 @@ +/* global describe, it */ + +import { expect } from 'chai'; +import DataModel from './index'; +import { getRootGroupByModel, getRootDataModel, getPathToRootModel } from './helper'; + +describe('getRootGroupByModel', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 10, job: 'services', marital: 'married' }, + { age: 22, job: 'self-employed', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', 
type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + + it('should return nearest groupBy DataModel', () => { + const dm = new DataModel(data, schema); + const dm1 = dm.groupBy(['job', 'marital']); + const dm2 = dm1.select(fields => fields.age.value > 15); + const dm3 = dm2.project(['age', 'job', 'marital']); + + expect(getRootGroupByModel(dm3)).to.be.equal(dm1); + }); +}); + +describe('getRootDataModel', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 10, job: 'services', marital: 'married' }, + { age: 22, job: 'self-employed', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + + it('should return root DataModel', () => { + const dm = new DataModel(data, schema); + const dm1 = dm.groupBy(['job', 'marital']); + const dm2 = dm1.select(fields => fields.age.value > 15); + const dm3 = dm2.project(['age', 'job', 'marital']); + + expect(getRootDataModel(dm3)).to.be.equal(dm); + }); +}); + + +describe('getPathToRootModel', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 10, job: 'services', marital: 'married' }, + { age: 22, job: 'self-employed', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + + it('should return root DataModel', () => { + const dm = new DataModel(data, schema); + const dm1 = dm.groupBy(['job', 'marital']); + const dm2 = dm1.select(fields => fields.age.value > 15); + const dm3 = dm2.project(['age', 'job', 'marital']); + const paths = getPathToRootModel(dm3); + + expect(paths.length).to.be.equal(3); + expect(paths[0]).to.be.equal(dm3); + expect(paths[1]).to.be.equal(dm2); + expect(paths[2]).to.be.equal(dm1); + }); +}); diff --git a/src/index.spec.js b/src/index.spec.js index 3ead16e..c959417 100644 --- a/src/index.spec.js +++ b/src/index.spec.js @@ -1,5 +1,5 @@ /* global beforeEach, describe, it, context */ -/* eslint-disable no-unused-expressions */ +/* eslint-disable no-unused-expressions, no-new */ import { expect } from 'chai'; import { FilteringMode, DataFormat } from './enums'; @@ -13,6 +13,39 @@ function avg(...nums) { } describe('DataModel', () => { + describe('#Constructor', () => { + it('should validate schema before use', () => { + const data = [ + { age: 30, job: 'unemployed', marital: null }, + { age: 'Age', job: 'services', marital: 'married' }, + { age: 22, job: undefined, marital: 'single' } + ]; + let schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'un-supported-type' }, + ]; + const mockedFn = () => { + new DataModel(data, schema); + }; + expect(mockedFn).to.throw(); + + schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension', subtype: 'invalid-subtype' }, + ]; + expect(mockedFn).to.throw(); + + schema = [ + { name: 'age', type: 'measure', subtype: 'invalid-subtype' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + expect(mockedFn).to.throw(); + }); + }); + describe('#version', () => { it('should be same to the version value specified in package.json file', () => { expect(DataModel.version).to.equal(pkg.version); @@ -45,6 +78,35 @@ describe('DataModel', () => { }); }); + describe('#getFieldsConfig', () => { + it('should return all field meta info', () => { + const 
schema = [ + { name: 'name', type: 'dimension' }, + { name: 'birthday', type: 'dimension', subtype: 'temporal', format: '%Y-%m-%d' } + ]; + + const data = [ + { name: 'Rousan', birthday: '1995-07-05', roll: 12 }, + { name: 'Sumant', birthday: '1996-08-04', roll: 89 }, + { name: 'Akash', birthday: '1994-01-03', roll: 33 } + ]; + const dataModel = new DataModel(data, schema); + const expected = { + name: { + index: 0, + def: { name: 'name', type: 'dimension', subtype: 'categorical' }, + }, + birthday: { + index: 1, + def: { name: 'birthday', type: 'dimension', subtype: 'temporal', format: '%Y-%m-%d' } + } + }; + + expect(dataModel.getFieldsConfig()).to.be.deep.equal(expected); + }); + }); + + describe('#clone', () => { it('should make a new copy of the current DataModel instance', () => { const data = [ @@ -66,6 +128,42 @@ describe('DataModel', () => { expect(cloneRelation._colIdentifier).to.equal(dataModel._colIdentifier); expect(cloneRelation._rowDiffset).to.equal(dataModel._rowDiffset); }); + + it('should set parent-child relationship when saveChild is true', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 33, job: 'services', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + const dataModel = new DataModel(data, schema); + + const cloneDm = dataModel.clone(true); + expect(cloneDm.getParent()).to.be.equal(dataModel); + expect(dataModel.getChildren()[0]).to.be.equal(cloneDm); + }); + + it('should remove parent-child relationship when saveChild is false', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 33, job: 'services', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + const dataModel = new DataModel(data, schema); + + const cloneDm = dataModel.clone(false); + expect(cloneDm.getParent()).to.be.null; + expect(dataModel.getChildren().length).to.be.equal(0); + }); }); context('Test for empty DataModel', () => { @@ -86,6 +184,26 @@ describe('DataModel', () => { }); }); + context('Test for resolving schema', () => { + it('should take field alternative name in schema', () => { + const data = [ + { age: 30, job: 'unemployed', marital_status: 'married' }, + { age: 33, job: 'services', marital_status: 'married' }, + { age: 35, job: 'management', marital_status: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital_status', type: 'dimension', as: 'marital' }, + ]; + const dm = new DataModel(data, schema); + + expect(dm.getFieldspace().fieldsObj().marital_status).to.be.undefined; + expect(!!dm.getFieldspace().fieldsObj().marital).to.be.true; + }); + }); + + context('Test for a failing data format type', () => { let mockedDm = () => new DataModel([], [], { dataFormat: 'erroneous-data-type' }); @@ -295,6 +413,63 @@ describe('DataModel', () => { expect(dataModel.getData({ withUid: true })).to.deep.equal(expected); }); + + it('should return all field data when getAllFields is true', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 33, job: 'services', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 
'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + const dataModel = new DataModel(data, schema); + const dm = dataModel.project(['age', 'job']); + const expected = { + schema: [ + { + name: 'age', + type: 'measure', + subtype: 'continuous' + }, + { + name: 'job', + type: 'dimension', + subtype: 'categorical' + }, + { + name: 'marital', + type: 'dimension', + subtype: 'categorical' + } + ], + data: [ + [ + 30, + 'unemployed', + 'married' + ], + [ + 33, + 'services', + 'married' + ], + [ + 35, + 'management', + 'single' + ] + ], + uids: [ + 0, + 1, + 2 + ] + }; + expect(dm.getData({ getAllFields: true })).to.deep.equal(expected); + }); }); describe('#project', () => { @@ -425,11 +600,22 @@ describe('DataModel', () => { it('should store derivation criteria info', () => { const dataModel = new DataModel(data, schema); - let projectedDataModel = dataModel.project(['age', 'job'], { saveChild: true }); + const dm = dataModel.select(fields => fields.age.value < 40); + const projectedDataModel = dm.project(['age', 'job']); expect(projectedDataModel.getDerivations()[0].op).to.be.equal(DM_DERIVATIVES.PROJECT); + expect(projectedDataModel.getAncestorDerivations()[0].op).to.be.equal(DM_DERIVATIVES.SELECT); + }); - projectedDataModel = dataModel.project(['age', 'job'], { saveChild: false }); - expect(projectedDataModel.getDerivations()[0].op).to.be.equal(DM_DERIVATIVES.PROJECT); + it('should control parent-child relationships on saveChild config', () => { + let rootDm = new DataModel(data, schema); + let dm = rootDm.project(['age', 'job'], { saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data, schema); + dm = rootDm.project(['age', 'job'], { saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); }); }); @@ -671,11 +857,22 @@ describe('DataModel', () => { it('should store derivation criteria info', () => { const dataModel = new DataModel(data, schema); - let selectedDm = dataModel.select(fields => fields.age.value < 40, { saveChild: true }); + const dm = dataModel.project(['age', 'job']); + const selectedDm = dm.select(fields => fields.age.value < 40); expect(selectedDm.getDerivations()[0].op).to.be.equal(DM_DERIVATIVES.SELECT); + expect(selectedDm.getAncestorDerivations()[0].op).to.be.equal(DM_DERIVATIVES.PROJECT); + }); - selectedDm = dataModel.select(fields => fields.age.value < 40, { saveChild: false }); - expect(selectedDm.getDerivations()[0].op).to.be.equal(DM_DERIVATIVES.SELECT); + it('should control parent-child relationships on saveChild config', () => { + let rootDm = new DataModel(data, schema); + let dm = rootDm.select(fields => fields.age.value < 40, { saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data, schema); + dm = rootDm.select(fields => fields.age.value > 40, { saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); }); }); @@ -713,9 +910,6 @@ describe('DataModel', () => { }; expect(sortedDm).not.to.equal(dataModel); - expect(sortedDm._sortingDetails).to.deep.equal([ - ['age', 'desc'] - ]); expect(sortedDm.getData()).to.deep.equal(expData); }); @@ -755,10 +949,6 @@ describe('DataModel', () => { ], uids: [0, 1, 2, 3, 4, 5] }; - expect(sortedDm._sortingDetails).to.deep.equal([ - ['age', 'desc'], - ['job'], - ]); 
expect(sortedDm.getData()).to.deep.equal(expData); }); @@ -906,6 +1096,54 @@ describe('DataModel', () => { }; expect(sortedDm.getData()).to.deep.equal(expected); }); + + it('should store derivation criteria info', () => { + const data = [ + { age: 30, job: 'management', marital: 'married' }, + { age: 59, job: 'blue-collar', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' }, + { age: 57, job: 'self-employed', marital: 'married' }, + { age: 28, job: 'blue-collar', marital: 'married' }, + { age: 30, job: 'blue-collar', marital: 'single' }, + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' } + ]; + const rootDm = new DataModel(data, schema); + + const dm = rootDm.select(fields => fields.age.value > 30); + const sortedDm = dm.sort([['age', 'ASC']]); + expect(sortedDm.getDerivations()[0].op).to.eql(DM_DERIVATIVES.SORT); + expect(sortedDm.getAncestorDerivations()[0].op).to.eql(DM_DERIVATIVES.SELECT); + }); + + it('should control parent-child relationships on saveChild config', () => { + const data = [ + { age: 30, job: 'management', marital: 'married' }, + { age: 59, job: 'blue-collar', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' }, + { age: 57, job: 'self-employed', marital: 'married' }, + { age: 28, job: 'blue-collar', marital: 'married' }, + { age: 30, job: 'blue-collar', marital: 'single' }, + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' } + ]; + + let rootDm = new DataModel(data, schema); + let dm = rootDm.sort([['age', 'ASC']], { saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data, schema); + dm = rootDm.sort([['age', 'ASC']], { saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); + }); }); describe('#join', () => { @@ -1286,21 +1524,15 @@ describe('DataModel', () => { ]; const dataModel = new DataModel(data1, schema1); - let calDm = dataModel.calculateVariable({ + const dm = dataModel.project(['first', 'second']); + let calDm = dm.calculateVariable({ name: 'NewField', type: 'dimension' }, ['first', 'second', (first, second) => `${first} ${second}` - ], { saveChild: true }); - expect(calDm.getDerivations()[0].op).to.equal(DM_DERIVATIVES.CAL_VAR); - - calDm = dataModel.calculateVariable({ - name: 'NewField2', - type: 'dimension' - }, ['first', 'second', (first, second) => - `${first} ${second}` - ], { saveChild: false }); + ]); expect(calDm.getDerivations()[0].op).to.equal(DM_DERIVATIVES.CAL_VAR); + expect(calDm.getAncestorDerivations()[0].op).to.equal(DM_DERIVATIVES.PROJECT); }); it('should return correct value from the callback function', () => { @@ -1440,6 +1672,37 @@ describe('DataModel', () => { expect(mockedFn).to.throw('country is not a valid column name'); }); + + it('should control parent-child relationships on saveChild config', () => { + const data1 = [ + { profit: 10, sales: 20, city: 'a', state: 'aa' }, + { profit: 15, sales: 25, city: 'b', state: 'bb' }, + { profit: 10, sales: 20, city: 'a', state: 'ab' }, + { profit: 15, sales: 25, city: 'b', state: 'ba' }, + ]; + const schema1 = [ + { name: 'profit', type: 'measure' }, + { name: 'sales', type: 'measure' }, + { name: 'city', type: 'dimension' }, + { name: 'state', type: 'dimension' }, + ]; + + let rootDm = new DataModel(data1, schema1); + let dm = 
rootDm.calculateVariable({ + name: 'profitIndex', + type: 'measure' + }, ['profit', (profit, i) => profit * i], { saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data1, schema1); + dm = rootDm.calculateVariable({ + name: 'profitIndex2', + type: 'measure' + }, ['profit', (profit, i) => profit * i], { saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); + }); }); describe('#propagate', () => { @@ -1753,11 +2016,22 @@ describe('DataModel', () => { it('should store derivation criteria info', () => { const rootDm = new DataModel(data1, schema1); - let groupedDm = rootDm.groupBy(['first'], { profit: 'avg' }, { saveChild: true }); + const dm = rootDm.select(fields => fields.profit.value > 15); + const groupedDm = dm.groupBy(['first'], { profit: 'avg' }); expect(groupedDm.getDerivations()[0].op).to.eql(DM_DERIVATIVES.GROUPBY); + expect(groupedDm.getAncestorDerivations()[0].op).to.eql(DM_DERIVATIVES.SELECT); + }); - groupedDm = rootDm.groupBy(['first'], { profit: 'avg' }, { saveChild: false }); - expect(groupedDm.getDerivations()[0].op).to.eql(DM_DERIVATIVES.GROUPBY); + it('should control parent-child relationships on saveChild config', () => { + let rootDm = new DataModel(data1, schema1); + let dm = rootDm.groupBy(['first'], { profit: 'avg' }, { saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data1, schema1); + dm = rootDm.groupBy(['first'], { profit: 'avg' }, { saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); }); }); }); @@ -1790,42 +2064,31 @@ describe('DataModel', () => { ]; const dataModel = new DataModel(data1, schema1); describe('#dispose', () => { - it('Should remove child on calling dispose', () => { - let dm2 = dataModel.select(fields => fields.profit.value < 150); - expect(dataModel._children.length).to.equal(1); + it('Should remove all references as gc can detect it as free object', () => { + const rootDm = new DataModel(data1, schema1); + const dm2 = rootDm.select(fields => fields.profit.value < 150); + const dm3 = dm2.project(['profit', 'sales']); dm2.dispose(); - expect(dataModel._children.length).to.equal(0); + expect(rootDm.getChildren().length).to.equal(0); + expect(dm3.getParent()).to.be.null; }); }); - describe('#addParent', () => { - it('Adding parent should save criteria in parent', () => { - let dm2 = dataModel.select(fields => fields.profit.value < 150); - let dm3 = dm2.groupBy(['sales'], { - profit: null - }); - let dm4 = dm3.project(['sales']); - let data = dm4.getData(); - let projFields = ['first']; - let projectConfig = {}; - let normalizedprojFields = []; - let criteriaQueue = [ - { - op: 'select', - meta: '', - criteria: fields => fields.profit.value < 150 - }, - { - op: 'project', - meta: { projFields, projectConfig, normalizedprojFields }, - criteria: null - } - ]; - dm3.dispose(); - dm4.addParent(dm2, criteriaQueue); - expect(dm2._children.length).to.equal(1); - expect(dm2._children[0].getData()).to.deep.equal(data); - expect(dm4._parent).to.equal(dm2); + describe('#setParent', () => { + it('should change parent and child relationships', () => { + const dm2 = dataModel.select(fields => fields.profit.value < 150); + const dm3 = dm2.groupBy(['sales'], { profit: 'avg' }, { saveChild: false }); + dm3.setParent(dm2); + expect(dm3._parent).to.be.equal(dm2); + 
expect(dm2._children[0]).to.be.equal(dm3); + }); + + it('should reset parent-child relationships when passing null as parent', () => { + const dm2 = dataModel.select(fields => fields.profit.value < 150); + const dm3 = dm2.groupBy(['sales'], { profit: 'avg' }, { saveChild: true }); + dm3.setParent(null); + expect(dm3._parent).to.be.null; + expect(dm2._children.length).to.be.equal(0); }); }); }); @@ -1910,6 +2173,29 @@ describe('DataModel', () => { }); }); + describe('#getAncestorDerivations', () => { + it('should return in-between ancestor derivative operations', () => { + const schema = [ + { name: 'Name', type: 'dimension' }, + { name: 'HorsePower', type: 'measure' }, + { name: 'Origin', type: 'dimension' } + ]; + const data = [ + { Name: 'chevrolet chevelle malibu', Horsepower: 130, Origin: 'USA' }, + { Name: 'citroen ds-21 pallas', Horsepower: 115, Origin: 'Europe' }, + { Name: 'datsun pl510', Horsepower: 88, Origin: 'Japan' }, + { Name: 'amc rebel sst', Horsepower: 150, Origin: 'USA' }, + ]; + const dt = new DataModel(data, schema); + const dt2 = dt.select(fields => fields.Origin.value === 'USA'); + const dt3 = dt2.groupBy(['Origin'], { HorsePower: 'avg' }); + const ancDerivations = dt3.getAncestorDerivations(); + expect(Array.isArray(ancDerivations)).to.be.true; + expect(ancDerivations.length).to.be.equal(1); + expect(ancDerivations[0].op).to.be.equal(DM_DERIVATIVES.SELECT); + }); + }); + describe('#detachedRoot', () => { const schema = [ { @@ -2457,8 +2743,35 @@ describe('DataModel', () => { ]; const dataModel = new DataModel(data1, schema1); - const binnedDm = dataModel.bin('profit', { binSize: 10, name: 'BinnedField' }); + const dm = dataModel.project(['profit', 'sales']); + const binnedDm = dm.bin('profit', { binSize: 10, name: 'BinnedField' }); expect(binnedDm.getDerivations()[0].op).to.be.equal(DM_DERIVATIVES.BIN); + expect(binnedDm.getAncestorDerivations()[0].op).to.be.equal(DM_DERIVATIVES.PROJECT); + }); + + it('should control parent-child relationships on saveChild config', () => { + const data1 = [ + { profit: 10, sales: 20, first: 'Hey', second: 'Jude' }, + { profit: 15, sales: 25, first: 'Norwegian', second: 'Wood' }, + { profit: 15, sales: 25, first: 'Norwegian', second: 'Wood' }, + { profit: 15, sales: 25, first: 'Norwegian', second: 'Wood' } + ]; + const schema1 = [ + { name: 'profit', type: 'measure' }, + { name: 'sales', type: 'measure' }, + { name: 'first', type: 'dimension' }, + { name: 'second', type: 'dimension' }, + ]; + + let rootDm = new DataModel(data1, schema1); + let dm = rootDm.bin('profit', { binSize: 10, name: 'binnedProfit', saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data1, schema1); + dm = rootDm.bin('sales', { binSize: 12, name: 'binnedSales', saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); }); }); }); diff --git a/src/operator/compose.js b/src/operator/compose.js index 06cf7be..3930ecb 100644 --- a/src/operator/compose.js +++ b/src/operator/compose.js @@ -1,3 +1,5 @@ +import { persistDerivations } from '../helper'; +import { DM_DERIVATIVES } from '../constants'; /** * DataModel's opearators are exposed as composable functional operators as well as chainable operators. 
Chainable @@ -214,21 +216,35 @@ export const groupBy = (...args) => dm => dm.groupBy(...args); export const compose = (...operations) => (dm, config = { saveChild: true }) => { let currentDM = dm; - let frstChild; + let firstChild; const derivations = []; - const saveChild = config.saveChild; operations.forEach((operation) => { currentDM = operation(currentDM); derivations.push(...currentDM._derivation); - if (!frstChild) { - frstChild = currentDM; + if (!firstChild) { + firstChild = currentDM; } }); - saveChild && currentDM.addParent(dm, derivations); - if (derivations.length > 1) { - frstChild.dispose(); + if (firstChild && firstChild !== currentDM) { + firstChild.dispose(); + } + + // reset all ancestorDerivation saved in-between compose + currentDM._ancestorDerivation = []; + persistDerivations( + dm, + currentDM, + DM_DERIVATIVES.COMPOSE, + null, + derivations + ); + + if (config.saveChild) { + currentDM.setParent(dm); + } else { + currentDM.setParent(null); } return currentDM; diff --git a/src/operator/compose.spec.js b/src/operator/compose.spec.js index aca7f32..a5d20fb 100644 --- a/src/operator/compose.spec.js +++ b/src/operator/compose.spec.js @@ -194,5 +194,29 @@ describe('Testing compose functionality', () => { composedDm = nestedComposedFn2(dataModel2); expect(normalDm.getData()).to.deep.equal(composedDm.getData()); }); + + it('should keep child-parent relationship when saveChild is true', () => { + const dataModel = new DataModel(data1, schema1); + const composedFn = compose( + select(fields => fields.profit.value <= 15), + project(['id', 'profit', 'sales']), + ); + + const dm = composedFn(dataModel, { saveChild: true }); + expect(dm.getParent()).to.be.equal(dataModel); + expect(dataModel.getChildren()[0]).to.be.equal(dm); + }); + + it('should remove child-parent relationship when saveChild is false', () => { + const dataModel = new DataModel(data1, schema1); + const composedFn = compose( + select(fields => fields.profit.value <= 15), + project(['id', 'profit', 'sales']), + ); + + const dm = composedFn(dataModel, { saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(dataModel.getChildren().length).to.be.equal(0); + }); }); }); diff --git a/src/operator/group-by-function.js b/src/operator/group-by-function.js index 79498d2..5d5f1cf 100644 --- a/src/operator/group-by-function.js +++ b/src/operator/group-by-function.js @@ -1,6 +1,8 @@ import { isArray } from '../utils'; import InvalidAwareTypes from '../invalid-aware-types'; +import { GROUP_BY_FUNCTIONS } from '../enums'; +const { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS; function getFilteredValues(arr) { return arr.filter(item => !(item instanceof InvalidAwareTypes)); @@ -134,17 +136,17 @@ function std (arr) { const fnList = { - sum, - avg, - min, - max, - first, - last, - count, - std + [SUM]: sum, + [AVG]: avg, + [MIN]: min, + [MAX]: max, + [FIRST]: first, + [LAST]: last, + [COUNT]: count, + [STD]: std }; -const defaultReducerName = 'sum'; +const defaultReducerName = SUM; export { defaultReducerName, diff --git a/src/relation.js b/src/relation.js index c6f7866..3b62c3e 100644 --- a/src/relation.js +++ b/src/relation.js @@ -1,8 +1,7 @@ import { FilteringMode } from './enums'; import { getUniqueId } from './utils'; -import { persistDerivation, updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper'; +import { updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper'; import { crossProduct, difference, naturalJoinFilter, union } from './operator'; 
-import { DM_DERIVATIVES } from './constants'; /** * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*, @@ -32,6 +31,7 @@ class Relation { this._parent = null; this._derivation = []; + this._ancestorDerivation = []; this._children = []; if (params.length === 1 && ((source = params[0]) instanceof Relation)) { @@ -317,31 +317,14 @@ class Relation { * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance. * @return {DataModel} - Returns the newly cloned DataModel instance. */ - clone (saveChild = true, linkParent = true) { - let retDataModel; - if (linkParent === false) { - const dataObj = this.getData({ - getAllFields: true - }); - const data = dataObj.data; - const schema = dataObj.schema; - const jsonData = data.map((row) => { - const rowObj = {}; - schema.forEach((field, i) => { - rowObj[field.name] = row[i]; - }); - return rowObj; - }); - retDataModel = new this.constructor(jsonData, schema); - } - else { - retDataModel = new this.constructor(this); - } - + clone (saveChild = true) { + const clonedDm = new this.constructor(this); if (saveChild) { - this._children.push(retDataModel); + clonedDm.setParent(this); + } else { + clonedDm.setParent(null); } - return retDataModel; + return clonedDm; } /** @@ -432,10 +415,10 @@ class Relation { } calculateFieldsConfig () { - this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldDef, i) => { - acc[fieldDef.name()] = { + this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => { + acc[fieldObj.name()] = { index: i, - def: { name: fieldDef.name(), type: fieldDef.type(), subtype: fieldDef.subtype() } + def: fieldObj.schema(), }; return acc; }, {}); @@ -450,8 +433,12 @@ class Relation { * @public */ dispose () { - this._parent.removeChild(this); + this._parent && this._parent.removeChild(this); this._parent = null; + this._children.forEach((child) => { + child._parent = null; + }); + this._children = []; } /** @@ -486,18 +473,14 @@ class Relation { } /** - * Adds the specified {@link DataModel} as a parent for the current {@link DataModel} instance. - * - * The optional criteriaQueue is an array containing the history of transaction performed on parent - * {@link DataModel} to get the current one. + * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance. * * @param {DataModel} parent - The datamodel instance which will act as parent. - * @param {Array} criteriaQueue - Queue contains in-between operation meta-data. */ - addParent (parent, criteriaQueue = []) { - persistDerivation(this, DM_DERIVATIVES.COMPOSE, null, criteriaQueue); + setParent (parent) { + this._parent && this._parent.removeChild(this); this._parent = parent; - parent._children.push(this); + parent && parent._children.push(this); } /** @@ -553,7 +536,7 @@ class Relation { * * @return {DataModel[]} Returns the immediate child DataModel instances. */ - getChildren() { + getChildren () { return this._children; } @@ -581,9 +564,37 @@ class Relation { * * @return {Any[]} Returns the derivation meta data. */ - getDerivations() { + getDerivations () { return this._derivation; } + + /** + * Returns the in-between operation meta data happened from root {@link DataModel} to current instance. 
+ * + * @example + * const schema = [ + * { name: 'Name', type: 'dimension' }, + * { name: 'HorsePower', type: 'measure' }, + * { name: "Origin", type: 'dimension' } + * ]; + * + * const data = [ + * { Name: "chevrolet chevelle malibu", Horsepower: 130, Origin: "USA" }, + * { Name: "citroen ds-21 pallas", Horsepower: 115, Origin: "Europe" }, + * { Name: "datsun pl510", Horsepower: 88, Origin: "Japan" }, + * { Name: "amc rebel sst", Horsepower: 150, Origin: "USA"}, + * ] + * + * const dt = new DataModel(data, schema); + * const dt2 = dt.select(fields => fields.Origin.value === "USA"); + * const dt3 = dt2.groupBy(["Origin"]); + * const ancDerivations = dt3.getAncestorDerivations(); + * + * @return {Any[]} Returns the previous derivation meta data. + */ + getAncestorDerivations () { + return this._ancestorDerivation; + } } export default Relation;
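A minimal usage sketch of the lineage APIs touched in this changeset (saveChild-aware operators, setParent, getDerivations and getAncestorDerivations), assuming a DataModel build produced from the sources above; the schema, data and variable names below are illustrative only, not taken from the repository.

const schema = [
    { name: 'age', type: 'measure' },
    { name: 'job', type: 'dimension' }
];
const data = [
    { age: 30, job: 'unemployed' },
    { age: 22, job: 'services' },
    { age: 40, job: 'management' }
];
const rootDm = new DataModel(data, schema);

// select()/project() keep the parent-child link by default (saveChild: true)
const selectedDm = rootDm.select(fields => fields.age.value > 25);
const projectedDm = selectedDm.project(['age']);

projectedDm.getParent() === selectedDm;            // true
projectedDm.getDerivations()[0].op;                // 'project' (DM_DERIVATIVES.PROJECT)
projectedDm.getAncestorDerivations()[0].op;        // 'select' (DM_DERIVATIVES.SELECT)

// sort() now records a SORT derivation and defaults to saveChild: false
const sortedDm = projectedDm.sort([['age', 'desc']], { saveChild: true });
sortedDm.getParent() === projectedDm;              // true

// setParent(null) detaches an instance from its parent's children list
sortedDm.setParent(null);
projectedDm.getChildren().length;                  // 0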
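A second sketch, equally illustrative, of the schema-level additions in this changeset: the `as` alias handled by resolveFieldName, the stricter validateUnitSchema checks, and the GROUP_BY_FUNCTIONS names used as groupBy reducers. Field names and data here are hypothetical.

const surveySchema = [
    { name: 'marital_status', type: 'dimension', as: 'marital' },
    { name: 'age', type: 'measure', defAggFn: 'avg' }
];
const surveyData = [
    { marital_status: 'married', age: 30 },
    { marital_status: 'single', age: 22 },
    { marital_status: 'married', age: 40 }
];
const surveyDm = new DataModel(surveyData, surveySchema);

// the alias replaces the original field name for all downstream access
!!surveyDm.getFieldspace().fieldsObj().marital;         // true
surveyDm.getFieldspace().fieldsObj().marital_status;    // undefined

// reducer names resolve against GROUP_BY_FUNCTIONS:
// 'sum', 'avg', 'min', 'max', 'first', 'last', 'count', 'std'
const groupedDm = surveyDm.groupBy(['marital'], { age: 'avg' });
groupedDm.getData();    // one row per 'marital' value, 'age' aggregated with avg

// unsupported field types or subtypes now throw during construction
// new DataModel(surveyData, [{ name: 'age', type: 'un-supported-type' }]);   // throws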