From 3cc264c8338546fdbf4e0ac658bd082c2705ff08 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Thu, 7 Feb 2019 14:50:19 +0530 Subject: [PATCH 01/21] Controll child-parent relationships based on saveChild config --- example/data/cars.json | 7742 ++++++++++++++++++++++------------- example/index.html | 4 +- example/samples/example3.js | 167 +- src/datamodel.js | 13 +- src/helper.js | 31 +- src/index.spec.js | 237 +- src/operator/compose.js | 25 +- src/relation.js | 85 +- 8 files changed, 5198 insertions(+), 3106 deletions(-) diff --git a/example/data/cars.json b/example/data/cars.json index 187bd6a..76c4519 100644 --- a/example/data/cars.json +++ b/example/data/cars.json @@ -1,2858 +1,4886 @@ [ - { - "Ticket": "N/A", - "Organisation": "Seaspan", - "Name": "Chase Huber", - "Email ID": "chuber@washcorp.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion ​Data Grids for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-04-13", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-04-13", - "Payment Mode": "Avangate", - "Source/Ref No.": 69818083, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": "", - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Ball Aerospace", - "Name": "Linda Haugse", - "Email ID": "lhaugse@ball.com", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Cipher Technology Services", - "Partner Name": "Jose Tineo", - "Partner Email ID": "jose.tineo@cipherts.com", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / 
Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 909.3, - "PO Number": "", - "Date of Order": "2018-04-16", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-04-20", - "Payment Mode": "ShareIt", - "Source/Ref No.": 560988613, - "Payment Due Date": "afdbgd", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": "", - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "USPS (United States Postal Services)", - "Name": "Russ Weimer", - "Email ID": "rustin.v.weimer@usps.gov", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Insight", - "Partner Name": "Spencer Stephenson", - "Partner Email ID": "insightusps@insight.com", - "Product": "Collabion DataParts for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 5, - "Price": 3249, - "Gross Value": 16245, - "Net Value": 11371.5, - "PO Number": "", - "Date of Order": "2018-04-25", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-04-25", - "Payment Mode": "ShareIt", - "Source/Ref No.": 561252753, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "HELIBRAS - Helicopteros do Brasil S/A", - "Name": "Breno Souza", - "Email ID": "breno.souza.external@helibras.com.br", - "Country": "Brazil", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Charts for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 15, - "Reseller Discount": 0, - "Qty": 1, - "Price": 2499, - "Gross Value": 2124.15, - "Net Value": 2124.15, - "PO Number": 
49948621, - "Date of Order": "2018-04-25", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-12", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "EWE Aktiengesellschaft", - "Name": "Martin Tapken", - "Email ID": "Martin.Tapken@ewe.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SWNetwork GmbH", - "Partner Name": "Mine Akbas", - "Partner Email ID": "mine.akbas@swnetwork.de", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 418.6, - "PO Number": "", - "Date of Order": "2018-04-26", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-24", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "EWE Aktiengesellschaft", - "Name": "Martin Tapken", - "Email ID": "Martin.Tapken@ewe.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SWNetwork GmbH", - "Partner Name": "Mine Akbas", - "Partner Email ID": "mine.akbas@swnetwork.de", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 99, - "Gross Value": 99, - "Net Value": 69.3, - "PO Number": "", - "Date of Order": "2018-04-26", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-24", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - 
"Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Omya (Schweiz) AG", - "Name": "Andrea Krebs", - "Email ID": "andrea.krebs@omya.com", - "Country": "Switzerland", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": 4502815417, - "Date of Order": "2018-04-30", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-30", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Portland Water District", - "Name": "Charles Davis", - "Email ID": "cdavis@pwd.org", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-04-30", - "Month": "2018-04-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-04-30", - "Payment Mode": "ShareIt", - "Source/Ref No.": 561758293, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Royal Cosun", - "Name": "Bea Eenink", - "Email ID": "servicedesk@cosun.com", - "Country": "Netherlands", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for 
SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-05-03", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-03", - "Payment Mode": "ShareIt", - "Source/Ref No.": 562001673, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "ClinLogix", - "Name": "Mr. Bill Fry", - "Email ID": "bfry@clinlogix.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-05-04", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-04", - "Payment Mode": "Avangate", - "Source/Ref No.": 70188838, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Top Tier Software Solutions LLC", - "Name": "Trudy Gallahan", - "Email ID": "tgallahan@ttssllc.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-05-05", - "Month": "2018-05-01", - "Quarter": "Q1 - 
2018/19", - "Date of Payment": "2018-05-05", - "Payment Mode": "Avangate", - "Source/Ref No.": 70349922, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Tractors Singapore Limited", - "Name": "Ma keng Ho", - "Email ID": "ma.keng.ho@tractors.simedarby.com.sg", - "Country": "Singapore", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 10, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 899.1, - "Net Value": 899.1, - "PO Number": 3030080482, - "Date of Order": "2018-05-08", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-25", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Lutheran Social Services of Illinois", - "Name": "Tom Garite", - "Email ID": "Tom.Garite@lssi.org", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "Insight", - "Partner Name": "Roland Guilbault", - "Partner Email ID": "roland.guilbault@insight.com", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 10, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 269.1, - "PO Number": "", - "Date of Order": "2018-05-09", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-09", - "Payment Mode": "ShareIt", - "Source/Ref No.": 562459643, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - 
"Ticket": "N/A", - "Organisation": "Golden Idea Technology Co., Ltd.", - "Name": "Kemp Zhou", - "Email ID": "zhjc@giit.cn", - "Country": "China", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-05-10", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-10", - "Payment Mode": "ShareIt", - "Source/Ref No.": 562500113, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Afton Chemicals", - "Name": "David Osborne", - "Email ID": "David.Osborne@AftonChemical.com", - "Country": "United Kingdom", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Coretek", - "Partner Name": "Garry Miller", - "Partner Email ID": "garry.miller@coretek.co.uk", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 2, - "Price": 1299, - "Gross Value": 2598, - "Net Value": 1818.6, - "PO Number": "", - "Date of Order": "2018-05-16", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-16", - "Payment Mode": "ShareIt", - "Source/Ref No.": 562942283, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Matanuska Susitna Borough", - "Name": "Davey Griffith", - "Email ID": "Davey.Griffith@matsugov.us", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": 
"None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-05-17", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-30", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Mori Associates, Inc", - "Name": "Julia Medellin", - "Email ID": "julia.medellin@nasa.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Charts for SharePoint license - Annual", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-05-23", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-23", - "Payment Mode": "Avangate", - "Source/Ref No.": 71475140, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Boston Scientific", - "Name": "Jorge Arias", - "Email ID": "JorgeMario.AriasChaves@bsci.com", - "Country": "Costa Rica", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 999, 
- "Net Value": 999, - "PO Number": "", - "Date of Order": "2018-05-28", - "Month": "2018-05-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-05-28", - "Payment Mode": "ShareIt", - "Source/Ref No.": 563714263, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "International Atomic Energy Agency (IAEA)", - "Name": "Aniko Makai", - "Email ID": "a.makai@iaea.org", - "Country": "Austria", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Filter for SharePoint: Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 597, - "Gross Value": 597, - "Net Value": 597, - "PO Number": "", - "Date of Order": "2018-06-01", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-01", - "Payment Mode": "Avangate", - "Source/Ref No.": 71392761, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "General Dynamics European Land Systems", - "Name": "JULIO ALVAREZ", - "Email ID": "julio.alvarez@gdels.com", - "Country": "Spain", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "Danysoft Internacional", - "Partner Name": "María Pastrana", - "Partner Email ID": "orders@danysoft.com", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 478.4, - "PO Number": "", - "Date of Order": "2018-06-13", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-07-17", - "Payment Mode": "ShareIt", - "Source/Ref No.": 567750043, - "Payment Due Date": "", - "Lead 
in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "STANFORD LINEAR ACCELERATOR", - "Name": "Pamela Wright-Brunache", - "Email ID": "pdwb@slac.stanford.edu", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "Liquid PC, Inc", - "Partner Name": "Courtney O’Connell", - "Partner Email ID": "coconnell@liquidpc.com", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 3, - "Price": 299, - "Gross Value": 897, - "Net Value": 717.6, - "PO Number": "", - "Date of Order": "2018-06-15", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-15", - "Payment Mode": "ShareIt", - "Source/Ref No.": 565307183, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "STANFORD LINEAR ACCELERATOR", - "Name": "Pamela Wright-Brunache", - "Email ID": "pdwb@slac.stanford.edu", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "Liquid PC, Inc", - "Partner Name": "Courtney O’Connell", - "Partner Email ID": "coconnell@liquidpc.com", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 2, - "Price": 99, - "Gross Value": 198, - "Net Value": 158.4, - "PO Number": "", - "Date of Order": "2018-06-15", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-15", - "Payment Mode": "ShareIt", - "Source/Ref No.": 565307183, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Moneytree Inc", - "Name": "Grady Patterson", - "Email ID": 
"grady.patterson@moneytreeinc.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 999, - "PO Number": "", - "Date of Order": "2018-06-20", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-20", - "Payment Mode": "ShareIt", - "Source/Ref No.": 565663083, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Centers for Disease Control and Prevention (CDC)", - "Name": "Stephen Campanelli", - "Email ID": "wng1@cdc.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 598, - "PO Number": "", - "Date of Order": "2018-06-21", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-21", - "Payment Mode": "ShareIt", - "Source/Ref No.": 565813083, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Centers for Disease Control and Prevention (CDC)", - "Name": "Stephen Campanelli", - "Email ID": "wng1@cdc.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Development 
Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 6, - "Price": 99, - "Gross Value": 594, - "Net Value": 594, - "PO Number": "", - "Date of Order": "2018-06-21", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-21", - "Payment Mode": "ShareIt", - "Source/Ref No.": 565813083, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "USDA – Food & Nutrition Service", - "Name": "Allen Austin", - "Email ID": "allen.austin@fns.usda.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 15, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1698.3, - "Net Value": 1698.3, - "PO Number": "", - "Date of Order": "2018-06-29", - "Month": "2018-06-01", - "Quarter": "Q1 - 2018/19", - "Date of Payment": "2018-06-29", - "Payment Mode": "ShareIt", - "Source/Ref No.": 566366323, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Martin’s Famous Pastry Shoppe Inc", - "Name": "Karl Hansen", - "Email ID": "khansen@potatorolls.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-07-03", - "Month": "2018-07-01", - "Quarter": 
"Q2 - 2018/19", - "Date of Payment": "2018-07-03", - "Payment Mode": "ShareIt", - "Source/Ref No.": 566752293, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Toll Holdings Limited", - "Name": "Stuart Warke", - "Email ID": "stuart.warke@tollgroup.com", - "Country": "Australia", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "MicroWay Pty Ltd", - "Partner Name": "Melissa Bourke", - "Partner Email ID": "melissab@microway.com.au", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 418.6, - "PO Number": 50880, - "Date of Order": "2018-07-18", - "Month": "2018-07-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-01", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Toll Holdings Limited", - "Name": "Stuart Warke", - "Email ID": "stuart.warke@tollgroup.com", - "Country": "Australia", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "MicroWay Pty Ltd", - "Partner Name": "Melissa Bourke", - "Partner Email ID": "melissab@microway.com.au", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 99, - "Gross Value": 99, - "Net Value": 69.3, - "PO Number": 50880, - "Date of Order": "2018-07-18", - "Month": "2018-07-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-01", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - 
"Organisation": "CROWN Gabelstapler GmbH & Co KG", - "Name": "Dominique Michalak", - "Email ID": "dominique.michalak@crown.com", - "Country": "Germany", - "Medium": "Direct", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": 1201800214, - "Date of Order": "2018-07-20", - "Month": "2018-07-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-23", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "City of Mississauga", - "Name": "Jennifer MacDonald", - "Email ID": "Jennifer.macdonald@mississauga.ca", - "Country": "Canada", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for Training", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 10, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1000, - "Gross Value": 900, - "Net Value": 900, - "PO Number": "", - "Date of Order": "2018-08-03", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-03", - "Payment Mode": "ShareIt", - "Source/Ref No.": 569127193, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "CE3 Inc", - "Name": "Robin Marquis", - "Email ID": "rmarquis@ce3inc.com", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Simple SharePoint", - "Partner Name": "Jeanne Conde", - "Partner Email ID": 
"jeannec@simplesharepoint.com", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 909.3, - "PO Number": "", - "Date of Order": "2018-08-08", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-08", - "Payment Mode": "ShareIt", - "Source/Ref No.": 569504033, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Simple SharePoint", - "Name": "Jeanne Conde", - "Email ID": "jeannec@simpleportals.com", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Arjun", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 909.3, - "PO Number": "", - "Date of Order": "2018-08-08", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-08", - "Payment Mode": "ShareIt", - "Source/Ref No.": 569504733, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "KfW Bankengruppe", - "Name": "Sebastian Mattar", - "Email ID": "sebastian.mattar@kfw.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Prianto Gmbh", - "Partner Name": "Tim Joosten", - "Partner Email ID": "tim.joosten@prianto.com", - "Product": "Collabion DataParts for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 1, - "Price": 3249, - 
"Gross Value": 3249, - "Net Value": 2599.2, - "PO Number": "", - "Date of Order": "2018-08-09", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-09", - "Payment Mode": "ShareIt", - "Source/Ref No.": 568867623, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Ball Aerospace", - "Name": "Ellen Teal", - "Email ID": "steal@ball.com", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Cipher Technology Services", - "Partner Name": "Jose Tineo", - "Partner Email ID": "Jose.Tineo@cipherts.com", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 909.3, - "PO Number": "", - "Date of Order": "2018-08-11", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-11", - "Payment Mode": "ShareIt", - "Source/Ref No.": 569722273, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Singapore Telecommunications Limited", - "Name": "Kelvin Soh", - "Email ID": "kelvinsoh@singtel.com", - "Country": "Singapore", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SoftwareONE Pte Ltd", - "Partner Name": "Jason Tan", - "Partner Email ID": "Jason.tan@softwareone.com", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 4, - "Price": 999, - "Gross Value": 3996, - "Net Value": 2797.2, - "PO Number": "", - "Date of Order": "2018-08-16", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": 
"2018-09-18", - "Payment Mode": "ShareIt", - "Source/Ref No.": 573155113, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Martin’s Famous Pastry Shoppe Inc", - "Name": "Nicole Wahl", - "Email ID": "nwahl@potatorolls.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion ​Data Grids for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 30, - "Reseller Discount": 0, - "Qty": 1, - "Price": 2997, - "Gross Value": 2097.9, - "Net Value": 2097.9, - "PO Number": "", - "Date of Order": "2018-08-21", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-21", - "Payment Mode": "ShareIt", - "Source/Ref No.": 570435233, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Martin’s Famous Pastry Shoppe Inc", - "Name": "Nicole Wahl", - "Email ID": "nwahl@potatorolls.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Filter for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 30, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1497, - "Gross Value": 1047.9, - "Net Value": 1047.9, - "PO Number": "", - "Date of Order": "2018-08-21", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-21", - "Payment Mode": "ShareIt", - "Source/Ref No.": 570435233, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": 
"Acto Informatisering B.V.", - "Name": "Ines Scholten", - "Email ID": "i.scholten@acto.nl", - "Country": "Netherlands", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-08-23", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-23", - "Payment Mode": "ShareIt", - "Source/Ref No.": 570670483, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Adtollo AB", - "Name": "Stefan Andersson", - "Email ID": "stefan.andersson@adtollo.se", - "Country": "Sweden", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 99, - "Gross Value": 99, - "Net Value": 99, - "PO Number": "", - "Date of Order": "2018-08-28", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-28", - "Payment Mode": "Avangate", - "Source/Ref No.": 79977692, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "HOCHTIEF VICON GMBH", - "Name": "Karsten Kneip", - "Email ID": "karsten.kneip@hochtief.de", - "Country": "Germany", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - 
"Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 999, - "PO Number": "", - "Date of Order": "2018-08-28", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-28", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "ASCO Valve, Inc.", - "Name": "Michele Testa", - "Email ID": "Michele.Testa@emerson.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Charts for SharePoint - Perpetual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 499, - "Gross Value": 499, - "Net Value": 499, - "PO Number": "", - "Date of Order": "2018-08-28", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-28", - "Payment Mode": "ShareIt", - "Source/Ref No.": 571018303, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "ACSS/L3-Comm", - "Name": "Jon Mahenski", - "Email ID": "jon.mahenski@l3t.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 999, - "PO Number": "", - "Date of Order": "2018-08-30", - "Month": "2018-08-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-08-30", - "Payment 
Mode": "ShareIt", - "Source/Ref No.": 571270823, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Attorney Generals Department", - "Name": "Tina Conroy", - "Email ID": "tina.conroy@sa.gov.au", - "Country": "Australia", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "MicroWay Pty Ltd", - "Partner Name": "Matthew Looke", - "Partner Email ID": "invoices@microway.com.au", - "Product": "Collabion DataParts for SharePoint - Perpetual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 649, - "Gross Value": 649, - "Net Value": 454.3, - "PO Number": 51341, - "Date of Order": "2018-09-03", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-10-15", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Abbey Gate College", - "Name": "Duncan Stewart", - "Email ID": "duncan.stewart@abbeygatecollge.co.uk", - "Country": "United Kingdom", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-09-03", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-03", - "Payment Mode": "ShareIt", - "Source/Ref No.": 571607053, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "MilDef AB", - "Name": "Tiberiju Beldja", - 
"Email ID": "Tiberiju.Beldja@mildef.com", - "Country": "Sweden", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-09-05", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-05", - "Payment Mode": "ShareIt", - "Source/Ref No.": 571787983, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Mentor Graphics", - "Name": "Echo Schmidt", - "Email ID": "Echo_Schmidt@mentor.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-09-06", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-06", - "Payment Mode": "Avangate", - "Source/Ref No.": 79967907, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "NIST", - "Name": "Silvia Rodriguez", - "Email ID": "silvia.rodriguez@nist.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion ​Data Grids for SharePoint - Annual License", - "New / Renewal": "Renewal", - 
"Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 399, - "Gross Value": 798, - "Net Value": 798, - "PO Number": "", - "Date of Order": "2018-09-07", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-07", - "Payment Mode": "ShareIt", - "Source/Ref No.": 572056513, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "NIST", - "Name": "Silvia Rodriguez", - "Email ID": "silvia.rodriguez@nist.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Charts for SharePoint license", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-09-07", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-07", - "Payment Mode": "ShareIt", - "Source/Ref No.": 572056513, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Thomson Reuters-TR Applications Inc.", - "Name": "Gary Forbes", - "Email ID": "garyt.forbes@thomsonreuters.com", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "ComponentSource", - "Partner Name": "ComponentSource Sales", - "Partner Email ID": "Sales@componentsource.com", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 4, - "Price": 299, - "Gross Value": 1196, - "Net Value": 837.2, - "PO Number": "", - "Date of Order": "2018-09-11", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - 
"Date of Payment": "2018-09-11", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Washington Corp", - "Name": "Chase Huber", - "Email ID": "chuber@washcorp.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Filters for Collabion Dashboard License (Perpetual)", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 499, - "Gross Value": 998, - "Net Value": 998, - "PO Number": "", - "Date of Order": "2018-09-17", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-17", - "Payment Mode": "Avangate", - "Source/Ref No.": 80912449, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Jacobs Technology", - "Name": "Robert Bunker", - "Email ID": "Robert.Bunker.CTR@MDA.mil", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SHI International Corp.", - "Partner Name": "Rita Katransky", - "Partner Email ID": "Rita_Katransky@SHI.com", - "Product": "Collabion DataParts for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 3, - "Price": 3249, - "Gross Value": 9747, - "Net Value": 6822.9, - "PO Number": "", - "Date of Order": "2018-09-18", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-10-03", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": 
"N/A", - "Organisation": "CB Richard Ellis, Inc.", - "Name": "Rachel Knepper\nShayla Anthony", - "Email ID": "rachel.knepper@cbre.com\nshayla.anthony@cbre.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "AUC for the period from 27th September 2018 to 26th September 2019", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1459, - "Gross Value": 1459, - "Net Value": 1459, - "PO Number": "", - "Date of Order": "2018-09-18", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-18", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Portland Water District", - "Name": "Charles Davis", - "Email ID": "cdavis@pwd.org", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-09-26", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-26", - "Payment Mode": "ShareIt", - "Source/Ref No.": 573921893, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Ellis Medicine", - "Name": "Jerry Adach", - "Email ID": "adachj@ellismedicine.org", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with 
Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 1299, - "Gross Value": 2598, - "Net Value": 2598, - "PO Number": "", - "Date of Order": "2018-09-28", - "Month": "2018-09-01", - "Quarter": "Q2 - 2018/19", - "Date of Payment": "2018-09-28", - "Payment Mode": "Avangate", - "Source/Ref No.": 81586932, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "SAPORITI S.A.", - "Name": "Eduardo Sternlieb", - "Email ID": "eduardo.sternlieb@gruposaporiti.com", - "Country": "Argentina", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-10-02", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-02", - "Payment Mode": "ShareIt", - "Source/Ref No.": 574484723, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Vermont Cider Company", - "Name": "Robert Waite", - "Email ID": "rwaite@vtciderco.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - 
"Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-10-03", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-03", - "Payment Mode": "Avangate", - "Source/Ref No.": 81397410, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "City of Mississauga", - "Name": "Larry Tyndall", - "Email ID": "Larry.Tyndall@mississauga.ca", - "Country": "Canada", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Charts for SharePoint - Perpetual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 30, - "Reseller Discount": 0, - "Qty": 4, - "Price": 1497, - "Gross Value": 4191.6, - "Net Value": 4191.6, - "PO Number": "", - "Date of Order": "2018-10-04", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-04", - "Payment Mode": "ShareIt", - "Source/Ref No.": 574752593, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "City of Mississauga", - "Name": "Larry Tyndall", - "Email ID": "Larry.Tyndall@mississauga.ca", - "Country": "Canada", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Data Grids for SharePoint - Perpetual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 30, - "Reseller Discount": 0, - "Qty": 4, - "Price": 597, - "Gross Value": 1671.6, - "Net Value": 1671.6, - "PO Number": "", - "Date of Order": "2018-10-04", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-04", - "Payment Mode": "ShareIt", - "Source/Ref No.": 574752593, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - 
"Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "BNP Paribas", - "Name": "Thierry Fournier", - "Email ID": "thierry.fournier@bnpparibas.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License (Upgrade)", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1200, - "Gross Value": 1200, - "Net Value": 1200, - "PO Number": "", - "Date of Order": "2018-10-05", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-06", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "BNP Paribas", - "Name": "Thierry Fournier", - "Email ID": "thierry.fournier@bnpparibas.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-10-05", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-06", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Metropolitan Water Reclamation District", - "Name": "Mike O’Mara", - "Email ID": "momara@mwrd.dst.co.us", - "Country": "United States of America", - "Medium": "Direct", - 
"Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-10-09", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-09", - "Payment Mode": "ShareIt", - "Source/Ref No.": 575219743, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Texas Instruments Incorporated", - "Name": "GAM Admins", - "Email ID": "GAMADMINS@TI.COM", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SHI International Corp.", - "Partner Name": "Kristyn Lubertowicz", - "Partner Email ID": "Kristyn_Lubertowicz@SHI.com", - "Product": "Data Grids for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 4, - "Price": 399, - "Gross Value": 1596, - "Net Value": 1596, - "PO Number": "", - "Date of Order": "2018-10-09", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-08", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Dpt for Children and Families", - "Name": "Phillip Curtis", - "Email ID": "Phillip.Curtis@ks.gov", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SHI International Corp.", - "Partner Name": "Collin Coslett", - "Partner Email ID": "Collin_Coslett@SHI.com", - "Product": 
"Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 478.4, - "PO Number": "", - "Date of Order": "2018-10-09", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-08", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Dpt for Children and Families", - "Name": "Phillip Curtis", - "Email ID": "Phillip.Curtis@ks.gov", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SHI International Corp.", - "Partner Name": "Collin Coslett", - "Partner Email ID": "Collin_Coslett@SHI.com", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 20, - "Qty": 2, - "Price": 99, - "Gross Value": 198, - "Net Value": 158.4, - "PO Number": "", - "Date of Order": "2018-10-09", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-08", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Giesecke and Devrient Mobile Security Australia Pty Ltd", - "Name": "Minh Nguyen", - "Email ID": "minh.nguyen@gi-de.com", - "Country": "Australia", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 999, - "PO Number": 51398, - "Date 
of Order": "2018-10-10", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-26", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "City of Brownsville", - "Name": "Aida Torres", - "Email ID": "aidat@cob.us", - "Country": "United States of America", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "SHI International Corp.", - "Partner Name": "Rita S Katransky", - "Partner Email ID": "Rita_Katransky@SHI.com", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 909.3, - "PO Number": "", - "Date of Order": "2018-10-22", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-21", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Schmalenberger GmbH + Co. 
KG", - "Name": "Oliver Laun", - "Email ID": "oliver.laun@schmalenberger.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "Siller Portal Integrators GmBH", - "Partner Name": "Ralf Michi", - "Partner Email ID": "michi@s-pi.de", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 30, - "Qty": 1, - "Price": 999, - "Gross Value": 999, - "Net Value": 699.3, - "PO Number": "", - "Date of Order": "2018-10-31", - "Month": "2018-10-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-10-31", - "Payment Mode": "ShareIt", - "Source/Ref No.": 577212103, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Norgine Ltd", - "Name": "Matthew Hallam", - "Email ID": "mhallam@norgine.com", - "Country": "United Kingdom", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 4, - "Price": 299, - "Gross Value": 1196, - "Net Value": 1196, - "PO Number": "", - "Date of Order": "2018-11-07", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-10", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Norgine Ltd", - "Name": "Matthew Hallam", - "Email ID": "mhallam@norgine.com", - "Country": "United Kingdom", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Development Support", - "New 
/ Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 99, - "Gross Value": 198, - "Net Value": 198, - "PO Number": "", - "Date of Order": "2018-11-07", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-10", - "Payment Mode": "Bank Wire", - "Source/Ref No.": "", - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Big Rivers Electric Corporation", - "Name": "Steve Duncan", - "Email ID": "steve.duncan@bigrivers.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-11-09", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-09", - "Payment Mode": "Avangate", - "Source/Ref No.": 85597214, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Horry Telephone Cooperative, Inc", - "Name": "Dave Bosky", - "Email ID": "dave.bosky@htcinc.net", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Perpetual License", - "New / Renewal": "New", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 3249, - "Gross Value": 6498, - "Net Value": 6498, - "PO Number": "", - "Date of Order": "2018-11-09", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of 
Payment": "", - "Payment Mode": "-", - "Source/Ref No.": "", - "Payment Due Date": "2018-12-09", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Payment Pending" - }, - { - "Ticket": "N/A", - "Organisation": "Springtech", - "Name": "Alex Goulios", - "Email ID": "Alex.Goulios@springtech.io", - "Country": "Australia", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-11-19", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-19", - "Payment Mode": "ShareIt", - "Source/Ref No.": 578926033, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Guardian Electrical Compliance Limited", - "Name": "Steve Greenhalgh", - "Email ID": "sgreenhalgh@guardianelectrical.co.uk", - "Country": "United Kingdom", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-11-19", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-19", - "Payment Mode": "ShareIt", - "Source/Ref No.": 578928663, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Stiebel Eltron GmbH & Co. 
KG", - "Name": "Nicole Stapel", - "Email ID": "nicole.stapel@stiebel-eltron.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "CCP Software GmbH", - "Partner Name": "Dirk Haller", - "Partner Email ID": "dhaller@ccpsoft.de", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 10, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 269.1, - "PO Number": "", - "Date of Order": "2018-11-20", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-20", - "Payment Mode": "ShareIt", - "Source/Ref No.": 579038123, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Stiebel Eltron GmbH & Co. KG", - "Name": "Nicole Stapel", - "Email ID": "nicole.stapel@stiebel-eltron.de", - "Country": "Germany", - "Medium": "Partner", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "CCP Software GmbH", - "Partner Name": "Dirk Haller", - "Partner Email ID": "dhaller@ccpsoft.de", - "Product": "Development Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 10, - "Qty": 1, - "Price": 99, - "Gross Value": 99, - "Net Value": 89.1, - "PO Number": "", - "Date of Order": "2018-11-20", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-20", - "Payment Mode": "ShareIt", - "Source/Ref No.": 579038123, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "J. Friedrich Storz Verkehrswegebau GmbH & Co. 
KG", - "Name": "Fabian Schwager", - "Email ID": "fabian.schwager@storz-tuttlingen.de", - "Country": "Germany", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-11-23", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-23", - "Payment Mode": "ShareIt", - "Source/Ref No.": 578922403, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Metrohm AG", - "Name": "-", - "Email ID": "itorders@metrohm.com", - "Country": "Switzerland", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 999, - "Gross Value": 1998, - "Net Value": 1998, - "PO Number": "", - "Date of Order": "2018-11-27", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-27", - "Payment Mode": "Avangate", - "Source/Ref No.": 86681144, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Leidos", - "Name": "Juan Moreno Gongora", - "Email ID": "juan.moreno-gongora@leidos.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / 
Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-11-30", - "Month": "2018-11-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-11-30", - "Payment Mode": "ShareIt", - "Source/Ref No.": 580469703, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Bonneville Power Administration", - "Name": "Heather Johnston", - "Email ID": "hdjohnston@bpa.gov", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 4, - "Price": 1299, - "Gross Value": 5196, - "Net Value": 5196, - "PO Number": "", - "Date of Order": "2018-12-04", - "Month": "2018-12-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-04", - "Payment Mode": "ShareIt", - "Source/Ref No.": 580895163, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Atos-LeeCounty Gov", - "Name": "Armando Negron", - "Email ID": "anegron@leegov.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion Charts for SharePoint PRO - Annual Renewal", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 3, - "Price": 999, - "Gross Value": 2997, - "Net Value": 2997, - "PO Number": "", - "Date of Order": "2018-12-07", - "Month": "2018-12-01", - "Quarter": "Q3 
- 2018/19", - "Date of Payment": "", - "Payment Mode": "-", - "Source/Ref No.": "", - "Payment Due Date": "2018-12-17", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Payment Pending" - }, - { - "Ticket": "N/A", - "Organisation": "GKN Driveline Bruneck AG", - "Name": "Martin Maurberger", - "Email ID": "Martin.Maurberger@gkn.com", - "Country": "Italy", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 2, - "Price": 299, - "Gross Value": 598, - "Net Value": 598, - "PO Number": "", - "Date of Order": "2018-12-13", - "Month": "2018-12-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "", - "Payment Mode": "-", - "Source/Ref No.": "", - "Payment Due Date": "2018-12-20", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Payment Pending" - }, - { - "Ticket": "N/A", - "Organisation": "EA Health", - "Name": "Kevin Derrick", - "Email ID": "kevin.derrick@eahealthsolutions.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Collabion DataParts for SharePoint - Annual License", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 1299, - "Gross Value": 1299, - "Net Value": 1299, - "PO Number": "", - "Date of Order": "2018-12-17", - "Month": "2018-12-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-17", - "Payment Mode": "ShareIt", - "Source/Ref No.": 582156073, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - }, - { - "Ticket": "N/A", - "Organisation": "Xenotech LLC", - "Name": "Christina Cox", 
- "Email ID": "ccox@xenotechllc.com", - "Country": "United States of America", - "Medium": "Direct", - "Member": "Debraj", - "Shared with Member": "None", - "Partner": "-", - "Partner Name": "-", - "Partner Email ID": "-", - "Product": "Production Support", - "New / Renewal": "Renewal", - "Industry": "", - "Trade Discount": 0, - "Reseller Discount": 0, - "Qty": 1, - "Price": 299, - "Gross Value": 299, - "Net Value": 299, - "PO Number": "", - "Date of Order": "2018-12-21", - "Month": "2018-12-01", - "Quarter": "Q3 - 2018/19", - "Date of Payment": "2018-12-21", - "Payment Mode": "ShareIt", - "Source/Ref No.": 582545893, - "Payment Due Date": "", - "Lead in Date": "", - "Lead out Date": "", - "Days Taken": 0, - "Status": "Sale" - } - ] \ No newline at end of file + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle malibu", + "Miles_per_Gallon": 18, + "Cylinders": 8, + "Displacement": 307, + "Horsepower": 130, + "Weight_in_lbs": 3504, + "Acceleration": 12, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick skylark 320", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": -350, + "Horsepower": 165, + "Weight_in_lbs": 3693, + "Acceleration": 11.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth satellite", + "Miles_per_Gallon": 18, + "Cylinders": 8, + "Displacement": -318, + "Horsepower": 150, + "Weight_in_lbs": 3436, + "Acceleration": 11, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc rebel sst", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": -304, + "Horsepower": 150, + "Weight_in_lbs": 3433, + "Acceleration": 12, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford torino", + "Miles_per_Gallon": 17, + "Cylinders": 8, + "Displacement": -302, + "Horsepower": 140, + "Weight_in_lbs": 3449, + "Acceleration": 10.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": 
"ford galaxie 500", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": -429, + "Horsepower": 198, + "Weight_in_lbs": 4341, + "Acceleration": 10, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet impala", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -454, + "Horsepower": 220, + "Weight_in_lbs": 4354, + "Acceleration": 9, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth fury iii", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -440, + "Horsepower": 215, + "Weight_in_lbs": 4312, + "Acceleration": 8.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac catalina", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -455, + "Horsepower": 225, + "Weight_in_lbs": 4425, + "Acceleration": 10, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc ambassador dpl", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": -390, + "Horsepower": 190, + "Weight_in_lbs": 3850, + "Acceleration": 8.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "citroen", + "Name": "citroen ds-21 pallas", + "Miles_per_Gallon": null, + "Cylinders": 4, + "Displacement": -133, + "Horsepower": 115, + "Weight_in_lbs": 3090, + "Acceleration": 17.5, + "Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle concours (sw)", + "Miles_per_Gallon": null, + "Cylinders": 8, + "Displacement": -350, + "Horsepower": 165, + "Weight_in_lbs": 4142, + "Acceleration": 11.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford torino (sw)", + "Miles_per_Gallon": null, + "Cylinders": 8, + "Displacement": -351, + "Horsepower": 153, + "Weight_in_lbs": 4034, + "Acceleration": 11, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth satellite (sw)", + 
"Miles_per_Gallon": null, + "Cylinders": 8, + "Displacement": -383, + "Horsepower": 175, + "Weight_in_lbs": 4166, + "Acceleration": 10.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc rebel sst (sw)", + "Miles_per_Gallon": null, + "Cylinders": 8, + "Displacement": -360, + "Horsepower": 175, + "Weight_in_lbs": 3850, + "Acceleration": 11, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge challenger se", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": -383, + "Horsepower": 170, + "Weight_in_lbs": 3563, + "Acceleration": 10, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth 'cuda 340", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -340, + "Horsepower": 160, + "Weight_in_lbs": 3609, + "Acceleration": 8, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford mustang boss 302", + "Miles_per_Gallon": null, + "Cylinders": 8, + "Displacement": -302, + "Horsepower": 140, + "Weight_in_lbs": 3353, + "Acceleration": 8, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet monte carlo", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": -400, + "Horsepower": 150, + "Weight_in_lbs": 3761, + "Acceleration": 9.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick estate wagon (sw)", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -455, + "Horsepower": 225, + "Weight_in_lbs": 3086, + "Acceleration": 10, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corona mark ii", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": -113, + "Horsepower": 95, + "Weight_in_lbs": 2372, + "Acceleration": 15, + "Year": "1970-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth duster", + "Miles_per_Gallon": 22, + "Cylinders": 6, + "Displacement": 
-198, + "Horsepower": 95, + "Weight_in_lbs": 2833, + "Acceleration": 15.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc hornet", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": -199, + "Horsepower": 97, + "Weight_in_lbs": 2774, + "Acceleration": 15.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford maverick", + "Miles_per_Gallon": 21, + "Cylinders": 6, + "Displacement": -200, + "Horsepower": 85, + "Weight_in_lbs": 2587, + "Acceleration": 16, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun pl510", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": -97, + "Horsepower": 88, + "Weight_in_lbs": 2130, + "Acceleration": 14.5, + "Year": "1970-01-01", + "Origin": "Japan" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen 1131 deluxe sedan", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": -97, + "Horsepower": 46, + "Weight_in_lbs": 1835, + "Acceleration": 20.5, + "Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "peugeot", + "Name": "peugeot 504", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": -110, + "Horsepower": 87, + "Weight_in_lbs": 2672, + "Acceleration": 17.5, + "Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "audi", + "Name": "audi 100 ls", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": -107, + "Horsepower": 90, + "Weight_in_lbs": 2430, + "Acceleration": 14.5, + "Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "saab", + "Name": "saab 99e", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": -104, + "Horsepower": 95, + "Weight_in_lbs": 2375, + "Acceleration": 17.5, + "Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "bmw", + "Name": "bmw 2002", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": -121, + "Horsepower": 113, + "Weight_in_lbs": 2234, + "Acceleration": 12.5, + 
"Year": "1970-01-01", + "Origin": "European Union" + }, + { + "Maker": "amc", + "Name": "amc gremlin", + "Miles_per_Gallon": 21, + "Cylinders": 6, + "Displacement": -199, + "Horsepower": 90, + "Weight_in_lbs": 2648, + "Acceleration": 15, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford f250", + "Miles_per_Gallon": 10, + "Cylinders": 8, + "Displacement": -360, + "Horsepower": 215, + "Weight_in_lbs": 4615, + "Acceleration": 14, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "chevy", + "Name": "chevy c20", + "Miles_per_Gallon": 10, + "Cylinders": 8, + "Displacement": -307, + "Horsepower": 200, + "Weight_in_lbs": 4376, + "Acceleration": 15, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge d200", + "Miles_per_Gallon": 11, + "Cylinders": 8, + "Displacement": -318, + "Horsepower": 210, + "Weight_in_lbs": 4382, + "Acceleration": 13.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "hi", + "Name": "hi 1200d", + "Miles_per_Gallon": 9, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 193, + "Weight_in_lbs": 4732, + "Acceleration": 18.5, + "Year": "1970-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun pl510", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 88, + "Weight_in_lbs": 2130, + "Acceleration": 14.5, + "Year": "1971-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet vega 2300", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 90, + "Weight_in_lbs": 2264, + "Acceleration": 15.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corona", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": -113, + "Horsepower": 95, + "Weight_in_lbs": 2228, + "Acceleration": 14, + "Year": "1971-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 25, + 
"Cylinders": 4, + "Displacement": -98, + "Horsepower": null, + "Weight_in_lbs": 2046, + "Acceleration": 19, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen super beetle 117", + "Miles_per_Gallon": null, + "Cylinders": 4, + "Displacement": -97, + "Horsepower": 48, + "Weight_in_lbs": 1978, + "Acceleration": 20, + "Year": "1971-01-01", + "Origin": "European Union" + }, + { + "Maker": "amc", + "Name": "amc gremlin", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": -232, + "Horsepower": 100, + "Weight_in_lbs": 2634, + "Acceleration": 13, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth satellite custom", + "Miles_per_Gallon": 16, + "Cylinders": 6, + "Displacement": -225, + "Horsepower": 105, + "Weight_in_lbs": 3439, + "Acceleration": 15.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle malibu", + "Miles_per_Gallon": 17, + "Cylinders": 6, + "Displacement": -250, + "Horsepower": 100, + "Weight_in_lbs": 3329, + "Acceleration": 15.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford torino 500", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": -250, + "Horsepower": 88, + "Weight_in_lbs": 3302, + "Acceleration": 15.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": -232, + "Horsepower": 100, + "Weight_in_lbs": 3288, + "Acceleration": 15.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet impala", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -350, + "Horsepower": 165, + "Weight_in_lbs": 4209, + "Acceleration": 12, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac catalina brougham", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -400, + 
"Horsepower": 175, + "Weight_in_lbs": 4464, + "Acceleration": 11.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford galaxie 500", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -351, + "Horsepower": 153, + "Weight_in_lbs": 4154, + "Acceleration": 13.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth fury iii", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -318, + "Horsepower": 150, + "Weight_in_lbs": 4096, + "Acceleration": 13, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge monaco (sw)", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": -383, + "Horsepower": 180, + "Weight_in_lbs": 4955, + "Acceleration": 11.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford country squire (sw)", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": -400, + "Horsepower": 170, + "Weight_in_lbs": 4746, + "Acceleration": 12, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac safari (sw)", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": -400, + "Horsepower": 175, + "Weight_in_lbs": 5140, + "Acceleration": 12, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc hornet sportabout (sw)", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": -258, + "Horsepower": 110, + "Weight_in_lbs": 2962, + "Acceleration": 13.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet vega (sw)", + "Miles_per_Gallon": 22, + "Cylinders": 4, + "Displacement": -140, + "Horsepower": 72, + "Weight_in_lbs": 2408, + "Acceleration": 19, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac firebird", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": -250, + "Horsepower": 100, + "Weight_in_lbs": 3282, + "Acceleration": 
15, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford mustang", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": -250, + "Horsepower": 88, + "Weight_in_lbs": 3139, + "Acceleration": 14.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury capri 2000", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": -122, + "Horsepower": 86, + "Weight_in_lbs": 2220, + "Acceleration": 14, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "opel", + "Name": "opel 1900", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": -116, + "Horsepower": 90, + "Weight_in_lbs": 2123, + "Acceleration": 14, + "Year": "1971-01-01", + "Origin": "European Union" + }, + { + "Maker": "peugeot", + "Name": "peugeot 304", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": -79, + "Horsepower": 70, + "Weight_in_lbs": 2074, + "Acceleration": 19.5, + "Year": "1971-01-01", + "Origin": "European Union" + }, + { + "Maker": "fiat", + "Name": "fiat 124b", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": -88, + "Horsepower": 76, + "Weight_in_lbs": 2065, + "Acceleration": 14.5, + "Year": "1971-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota corolla 1200", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": -71, + "Horsepower": 65, + "Weight_in_lbs": 1773, + "Acceleration": 19, + "Year": "1971-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 1200", + "Miles_per_Gallon": 35, + "Cylinders": 4, + "Displacement": -72, + "Horsepower": 69, + "Weight_in_lbs": 1613, + "Acceleration": 18, + "Year": "1971-01-01", + "Origin": "Japan" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen model 111", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": -97, + "Horsepower": 60, + "Weight_in_lbs": 1834, + "Acceleration": 19, + "Year": "1971-01-01", + "Origin": "European Union" + }, + { + "Maker": 
"plymouth", + "Name": "plymouth cricket", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": -91, + "Horsepower": 70, + "Weight_in_lbs": 1955, + "Acceleration": 20.5, + "Year": "1971-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corona hardtop", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": -113, + "Horsepower": 95, + "Weight_in_lbs": 2278, + "Acceleration": 15.5, + "Year": "1972-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge colt hardtop", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": -97.5, + "Horsepower": 80, + "Weight_in_lbs": 2126, + "Acceleration": 17, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen type 3", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": -97, + "Horsepower": 54, + "Weight_in_lbs": 2254, + "Acceleration": 23.5, + "Year": "1972-01-01", + "Origin": "European Union" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet vega", + "Miles_per_Gallon": 20, + "Cylinders": 4, + "Displacement": -140, + "Horsepower": 90, + "Weight_in_lbs": 2408, + "Acceleration": 19.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford pinto runabout", + "Miles_per_Gallon": 21, + "Cylinders": 4, + "Displacement": -122, + "Horsepower": 86, + "Weight_in_lbs": 2226, + "Acceleration": 16.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet impala", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": -350, + "Horsepower": 165, + "Weight_in_lbs": 4274, + "Acceleration": 12, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac catalina", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": -400, + "Horsepower": 175, + "Weight_in_lbs": 4385, + "Acceleration": 12, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth fury iii", + 
"Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4135, + "Acceleration": 13.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford galaxie 500", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 153, + "Weight_in_lbs": 4129, + "Acceleration": 13, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc ambassador sst", + "Miles_per_Gallon": 17, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 150, + "Weight_in_lbs": 3672, + "Acceleration": 11.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury marquis", + "Miles_per_Gallon": 11, + "Cylinders": 8, + "Displacement": 429, + "Horsepower": 208, + "Weight_in_lbs": 4633, + "Acceleration": 11, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick lesabre custom", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 155, + "Weight_in_lbs": 4502, + "Acceleration": 13.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile delta 88 royale", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 160, + "Weight_in_lbs": 4456, + "Acceleration": 13.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler newport royal", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 190, + "Weight_in_lbs": 4422, + "Acceleration": 12.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "mazda", + "Name": "mazda rx2 coupe", + "Miles_per_Gallon": 19, + "Cylinders": 3, + "Displacement": 70, + "Horsepower": 97, + "Weight_in_lbs": 2330, + "Acceleration": 13.5, + "Year": "1972-01-01", + "Origin": "Japan" + }, + { + "Maker": "amc", + "Name": "amc matador (sw)", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 304, + 
"Horsepower": 150, + "Weight_in_lbs": 3892, + "Acceleration": 12.5, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle concours (sw)", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 307, + "Horsepower": 130, + "Weight_in_lbs": 4098, + "Acceleration": 14, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford gran torino (sw)", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 140, + "Weight_in_lbs": 4294, + "Acceleration": 16, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth satellite custom (sw)", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4077, + "Acceleration": 14, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "volvo", + "Name": "volvo 145e (sw)", + "Miles_per_Gallon": 18, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 112, + "Weight_in_lbs": 2933, + "Acceleration": 14.5, + "Year": "1972-01-01", + "Origin": "European Union" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen 411 (sw)", + "Miles_per_Gallon": 22, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 76, + "Weight_in_lbs": 2511, + "Acceleration": 18, + "Year": "1972-01-01", + "Origin": "European Union" + }, + { + "Maker": "peugeot", + "Name": "peugeot 504 (sw)", + "Miles_per_Gallon": 21, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 87, + "Weight_in_lbs": 2979, + "Acceleration": 19.5, + "Year": "1972-01-01", + "Origin": "European Union" + }, + { + "Maker": "renault", + "Name": "renault 12 (sw)", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 96, + "Horsepower": 69, + "Weight_in_lbs": 2189, + "Acceleration": 18, + "Year": "1972-01-01", + "Origin": "European Union" + }, + { + "Maker": "ford", + "Name": "ford pinto (sw)", + "Miles_per_Gallon": 22, + "Cylinders": 4, + "Displacement": 122, + "Horsepower": 
86, + "Weight_in_lbs": 2395, + "Acceleration": 16, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 510 (sw)", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 92, + "Weight_in_lbs": 2288, + "Acceleration": 17, + "Year": "1972-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyouta", + "Name": "toyouta corona mark ii (sw)", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 97, + "Weight_in_lbs": 2506, + "Acceleration": 14.5, + "Year": "1972-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge colt (sw)", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 80, + "Weight_in_lbs": 2164, + "Acceleration": 15, + "Year": "1972-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corolla 1600 (sw)", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 88, + "Weight_in_lbs": 2100, + "Acceleration": 16.5, + "Year": "1972-01-01", + "Origin": "Japan" + }, + { + "Maker": "buick", + "Name": "buick century 350", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 175, + "Weight_in_lbs": 4100, + "Acceleration": 13, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 150, + "Weight_in_lbs": 3672, + "Acceleration": 11.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet malibu", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 145, + "Weight_in_lbs": 3988, + "Acceleration": 13, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford gran torino", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 137, + "Weight_in_lbs": 4042, + "Acceleration": 14.5, + "Year": "1973-01-01", + "Origin": 
"USA" + }, + { + "Maker": "dodge", + "Name": "dodge coronet custom", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 3777, + "Acceleration": 12.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury marquis brougham", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": 429, + "Horsepower": 198, + "Weight_in_lbs": 4952, + "Acceleration": 11.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet caprice classic", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 150, + "Weight_in_lbs": 4464, + "Acceleration": 12, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford ltd", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 158, + "Weight_in_lbs": 4363, + "Acceleration": 13, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth fury gran sedan", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4237, + "Acceleration": 14.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler new yorker brougham", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 440, + "Horsepower": 215, + "Weight_in_lbs": 4735, + "Acceleration": 11, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick electra 225 custom", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": 455, + "Horsepower": 225, + "Weight_in_lbs": 4951, + "Acceleration": 11, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc ambassador brougham", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 360, + "Horsepower": 175, + "Weight_in_lbs": 3821, + "Acceleration": 11, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": 
"plymouth valiant", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 105, + "Weight_in_lbs": 3121, + "Acceleration": 16.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet nova custom", + "Miles_per_Gallon": 16, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 100, + "Weight_in_lbs": 3278, + "Acceleration": 18, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc hornet", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 100, + "Weight_in_lbs": 2945, + "Acceleration": 16, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford maverick", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 88, + "Weight_in_lbs": 3021, + "Acceleration": 16.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth duster", + "Miles_per_Gallon": 23, + "Cylinders": 6, + "Displacement": 198, + "Horsepower": 95, + "Weight_in_lbs": 2904, + "Acceleration": 16, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen super beetle", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 46, + "Weight_in_lbs": 1950, + "Acceleration": 21, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet impala", + "Miles_per_Gallon": 11, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 150, + "Weight_in_lbs": 4997, + "Acceleration": 14, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford country", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 167, + "Weight_in_lbs": 4906, + "Acceleration": 12.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth custom suburb", + "Miles_per_Gallon": 13, + "Cylinders": 8, + 
"Displacement": 360, + "Horsepower": 170, + "Weight_in_lbs": 4654, + "Acceleration": 13, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile vista cruiser", + "Miles_per_Gallon": 12, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 180, + "Weight_in_lbs": 4499, + "Acceleration": 12.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc gremlin", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 100, + "Weight_in_lbs": 2789, + "Acceleration": 15, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota carina", + "Miles_per_Gallon": 20, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 88, + "Weight_in_lbs": 2279, + "Acceleration": 19, + "Year": "1973-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet vega", + "Miles_per_Gallon": 21, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 72, + "Weight_in_lbs": 2401, + "Acceleration": 19.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 610", + "Miles_per_Gallon": 22, + "Cylinders": 4, + "Displacement": 108, + "Horsepower": 94, + "Weight_in_lbs": 2379, + "Acceleration": 16.5, + "Year": "1973-01-01", + "Origin": "Japan" + }, + { + "Maker": "maxda", + "Name": "maxda rx3", + "Miles_per_Gallon": 18, + "Cylinders": 3, + "Displacement": 70, + "Horsepower": 90, + "Weight_in_lbs": 2124, + "Acceleration": 13.5, + "Year": "1973-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 19, + "Cylinders": 4, + "Displacement": 122, + "Horsepower": 85, + "Weight_in_lbs": 2310, + "Acceleration": 18.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury capri v6", + "Miles_per_Gallon": 21, + "Cylinders": 6, + "Displacement": 155, + "Horsepower": 107, + "Weight_in_lbs": 2472, + "Acceleration": 14, + "Year": "1973-01-01", 
+ "Origin": "USA" + }, + { + "Maker": "fiat", + "Name": "fiat 124 sport coupe", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 90, + "Weight_in_lbs": 2265, + "Acceleration": 15.5, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet monte carlo s", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 145, + "Weight_in_lbs": 4082, + "Acceleration": 13, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac grand prix", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 230, + "Weight_in_lbs": 4278, + "Acceleration": 9.5, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "fiat", + "Name": "fiat 128", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 68, + "Horsepower": 49, + "Weight_in_lbs": 1867, + "Acceleration": 19.5, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "opel", + "Name": "opel manta", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 116, + "Horsepower": 75, + "Weight_in_lbs": 2158, + "Acceleration": 15.5, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "audi", + "Name": "audi 100ls", + "Miles_per_Gallon": 20, + "Cylinders": 4, + "Displacement": 114, + "Horsepower": 91, + "Weight_in_lbs": 2582, + "Acceleration": 14, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "volvo", + "Name": "volvo 144ea", + "Miles_per_Gallon": 19, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 112, + "Weight_in_lbs": 2868, + "Acceleration": 15.5, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "dodge", + "Name": "dodge dart custom", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 3399, + "Acceleration": 11, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "saab", + "Name": "saab 
99le", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 110, + "Weight_in_lbs": 2660, + "Acceleration": 14, + "Year": "1973-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota mark ii", + "Miles_per_Gallon": 20, + "Cylinders": 6, + "Displacement": 156, + "Horsepower": 122, + "Weight_in_lbs": 2807, + "Acceleration": 13.5, + "Year": "1973-01-01", + "Origin": "Japan" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile omega", + "Miles_per_Gallon": 11, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 180, + "Weight_in_lbs": 3664, + "Acceleration": 11, + "Year": "1973-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth duster", + "Miles_per_Gallon": 20, + "Cylinders": 6, + "Displacement": 198, + "Horsepower": 95, + "Weight_in_lbs": 3102, + "Acceleration": 16.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford maverick", + "Miles_per_Gallon": 21, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": null, + "Weight_in_lbs": 2875, + "Acceleration": 17, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc hornet", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 100, + "Weight_in_lbs": 2901, + "Acceleration": 16, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet nova", + "Miles_per_Gallon": 15, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 100, + "Weight_in_lbs": 3336, + "Acceleration": 17, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun b210", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 79, + "Horsepower": 67, + "Weight_in_lbs": 1950, + "Acceleration": 19, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 122, + "Horsepower": 80, + 
"Weight_in_lbs": 2451, + "Acceleration": 16.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corolla 1200", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 71, + "Horsepower": 65, + "Weight_in_lbs": 1836, + "Acceleration": 21, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet vega", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 75, + "Weight_in_lbs": 2542, + "Acceleration": 17, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle malibu classic", + "Miles_per_Gallon": 16, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 100, + "Weight_in_lbs": 3781, + "Acceleration": 17, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador", + "Miles_per_Gallon": 16, + "Cylinders": 6, + "Displacement": 258, + "Horsepower": 110, + "Weight_in_lbs": 3632, + "Acceleration": 18, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth satellite sebring", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 105, + "Weight_in_lbs": 3613, + "Acceleration": 16.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford gran torino", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 140, + "Weight_in_lbs": 4141, + "Acceleration": 14, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick century luxus (sw)", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 150, + "Weight_in_lbs": 4699, + "Acceleration": 14.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge coronet custom (sw)", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4457, + "Acceleration": 13.5, + "Year": 
"1974-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford gran torino (sw)", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 140, + "Weight_in_lbs": 4638, + "Acceleration": 16, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador (sw)", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 150, + "Weight_in_lbs": 4257, + "Acceleration": 15.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "audi", + "Name": "audi fox", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 83, + "Weight_in_lbs": 2219, + "Acceleration": 16.5, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen dasher", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 79, + "Horsepower": 67, + "Weight_in_lbs": 1963, + "Acceleration": 15.5, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "opel", + "Name": "opel manta", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 78, + "Weight_in_lbs": 2300, + "Acceleration": 14.5, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota corona", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 76, + "Horsepower": 52, + "Weight_in_lbs": 1649, + "Acceleration": 16.5, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 710", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 83, + "Horsepower": 61, + "Weight_in_lbs": 2003, + "Acceleration": 19, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge colt", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 75, + "Weight_in_lbs": 2125, + "Acceleration": 14.5, + "Year": "1974-01-01", + "Origin": "USA" + }, + { + "Maker": "fiat", + "Name": "fiat 128", + 
"Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 75, + "Weight_in_lbs": 2108, + "Acceleration": 15.5, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "fiat", + "Name": "fiat 124 tc", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 116, + "Horsepower": 75, + "Weight_in_lbs": 2246, + "Acceleration": 14, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda civic", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 97, + "Weight_in_lbs": 2489, + "Acceleration": 15, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Name": "subaru", + "Maker": "subaru", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 108, + "Horsepower": 93, + "Weight_in_lbs": 2391, + "Acceleration": 15.5, + "Year": "1974-01-01", + "Origin": "Japan" + }, + { + "Maker": "fiat", + "Name": "fiat x1.9", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 79, + "Horsepower": 67, + "Weight_in_lbs": 2000, + "Acceleration": 16, + "Year": "1974-01-01", + "Origin": "European Union" + }, + { + "Maker": "plymouth", + "Name": "plymouth valiant custom", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 95, + "Weight_in_lbs": 3264, + "Acceleration": 16, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet nova", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 105, + "Weight_in_lbs": 3459, + "Acceleration": 16, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury monarch", + "Miles_per_Gallon": 15, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 72, + "Weight_in_lbs": 3432, + "Acceleration": 21, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford maverick", + "Miles_per_Gallon": 15, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 72, + 
"Weight_in_lbs": 3158, + "Acceleration": 19.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac catalina", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 170, + "Weight_in_lbs": 4668, + "Acceleration": 11.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet bel air", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 145, + "Weight_in_lbs": 4440, + "Acceleration": 14, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth grand fury", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4498, + "Acceleration": 14.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford ltd", + "Miles_per_Gallon": 14, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 148, + "Weight_in_lbs": 4657, + "Acceleration": 13.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick century", + "Miles_per_Gallon": 17, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 110, + "Weight_in_lbs": 3907, + "Acceleration": 21, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "chevroelt", + "Name": "chevroelt chevelle malibu", + "Miles_per_Gallon": 16, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 105, + "Weight_in_lbs": 3897, + "Acceleration": 18.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc matador", + "Miles_per_Gallon": 15, + "Cylinders": 6, + "Displacement": 258, + "Horsepower": 110, + "Weight_in_lbs": 3730, + "Acceleration": 19, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth fury", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 95, + "Weight_in_lbs": 3785, + "Acceleration": 19, + "Year": "1975-01-01", + "Origin": "USA" + }, + { 
+ "Maker": "buick", + "Name": "buick skyhawk", + "Miles_per_Gallon": 21, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 110, + "Weight_in_lbs": 3039, + "Acceleration": 15, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet monza 2+2", + "Miles_per_Gallon": 20, + "Cylinders": 8, + "Displacement": 262, + "Horsepower": 110, + "Weight_in_lbs": 3221, + "Acceleration": 13.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford mustang ii", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 129, + "Weight_in_lbs": 3169, + "Acceleration": 12, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corolla", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 75, + "Weight_in_lbs": 2171, + "Acceleration": 16, + "Year": "1975-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 83, + "Weight_in_lbs": 2639, + "Acceleration": 17, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc gremlin", + "Miles_per_Gallon": 20, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 100, + "Weight_in_lbs": 2914, + "Acceleration": 16, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac astro", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 78, + "Weight_in_lbs": 2592, + "Acceleration": 18.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corona", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 134, + "Horsepower": 96, + "Weight_in_lbs": 2702, + "Acceleration": 13.5, + "Year": "1975-01-01", + "Origin": "Japan" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen dasher", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": 90, 
+ "Horsepower": 71, + "Weight_in_lbs": 2223, + "Acceleration": 16.5, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "datsun", + "Name": "datsun 710", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 97, + "Weight_in_lbs": 2545, + "Acceleration": 17, + "Year": "1975-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 171, + "Horsepower": 97, + "Weight_in_lbs": 2984, + "Acceleration": 14.5, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen rabbit", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 70, + "Weight_in_lbs": 1937, + "Acceleration": 14, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "amc", + "Name": "amc pacer", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 90, + "Weight_in_lbs": 3211, + "Acceleration": 17, + "Year": "1975-01-01", + "Origin": "USA" + }, + { + "Maker": "audi", + "Name": "audi 100ls", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 115, + "Horsepower": 95, + "Weight_in_lbs": 2694, + "Acceleration": 15, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "peugeot", + "Name": "peugeot 504", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 88, + "Weight_in_lbs": 2957, + "Acceleration": 17, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "volvo", + "Name": "volvo 244dl", + "Miles_per_Gallon": 22, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 98, + "Weight_in_lbs": 2945, + "Acceleration": 14.5, + "Year": "1975-01-01", + "Origin": "European Union" + }, + { + "Maker": "saab", + "Name": "saab 99le", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 115, + "Weight_in_lbs": 2671, + "Acceleration": 13.5, + "Year": "1975-01-01", + 
"Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda civic cvcc", + "Miles_per_Gallon": 33, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 53, + "Weight_in_lbs": 1795, + "Acceleration": 17.5, + "Year": "1975-01-01", + "Origin": "Japan" + }, + { + "Maker": "fiat", + "Name": "fiat 131", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 107, + "Horsepower": 86, + "Weight_in_lbs": 2464, + "Acceleration": 15.5, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "opel", + "Name": "opel 1900", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": 116, + "Horsepower": 81, + "Weight_in_lbs": 2220, + "Acceleration": 16.9, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "capri", + "Name": "capri ii", + "Miles_per_Gallon": 25, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 92, + "Weight_in_lbs": 2572, + "Acceleration": 14.9, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge colt", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 79, + "Weight_in_lbs": 2255, + "Acceleration": 17.7, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "renault", + "Name": "renault 12tl", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 101, + "Horsepower": 83, + "Weight_in_lbs": 2202, + "Acceleration": 15.3, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevelle malibu classic", + "Miles_per_Gallon": 17.5, + "Cylinders": 8, + "Displacement": 305, + "Horsepower": 140, + "Weight_in_lbs": 4215, + "Acceleration": 13, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge coronet brougham", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 4190, + "Acceleration": 13, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc 
matador", + "Miles_per_Gallon": 15.5, + "Cylinders": 8, + "Displacement": 304, + "Horsepower": 120, + "Weight_in_lbs": 3962, + "Acceleration": 13.9, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford gran torino", + "Miles_per_Gallon": 14.5, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 152, + "Weight_in_lbs": 4215, + "Acceleration": 12.8, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth valiant", + "Miles_per_Gallon": 22, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 100, + "Weight_in_lbs": 3233, + "Acceleration": 15.4, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet nova", + "Miles_per_Gallon": 22, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 105, + "Weight_in_lbs": 3353, + "Acceleration": 14.5, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford maverick", + "Miles_per_Gallon": 24, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 81, + "Weight_in_lbs": 3012, + "Acceleration": 17.6, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc hornet", + "Miles_per_Gallon": 22.5, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 90, + "Weight_in_lbs": 3085, + "Acceleration": 17.6, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevette", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 52, + "Weight_in_lbs": 2035, + "Acceleration": 22.2, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet woody", + "Miles_per_Gallon": 24.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 60, + "Weight_in_lbs": 2164, + "Acceleration": 22.1, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "vw", + "Name": "vw rabbit", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 70, + 
"Weight_in_lbs": 1937, + "Acceleration": 14.2, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda civic", + "Miles_per_Gallon": 33, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 53, + "Weight_in_lbs": 1795, + "Acceleration": 17.4, + "Year": "1976-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge aspen se", + "Miles_per_Gallon": 20, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 100, + "Weight_in_lbs": 3651, + "Acceleration": 17.7, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford granada ghia", + "Miles_per_Gallon": 18, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 78, + "Weight_in_lbs": 3574, + "Acceleration": 21, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac ventura sj", + "Miles_per_Gallon": 18.5, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 110, + "Weight_in_lbs": 3645, + "Acceleration": 16.2, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc pacer d/l", + "Miles_per_Gallon": 17.5, + "Cylinders": 6, + "Displacement": 258, + "Horsepower": 95, + "Weight_in_lbs": 3193, + "Acceleration": 17.8, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen rabbit", + "Miles_per_Gallon": 29.5, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 71, + "Weight_in_lbs": 1825, + "Acceleration": 12.2, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "datsun", + "Name": "datsun b-210", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 70, + "Weight_in_lbs": 1990, + "Acceleration": 17, + "Year": "1976-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyota", + "Name": "toyota corolla", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 75, + "Weight_in_lbs": 2155, + "Acceleration": 16.4, + "Year": "1976-01-01", + "Origin": 
"Japan" + }, + { + "Maker": "ford", + "Name": "ford pinto", + "Miles_per_Gallon": 26.5, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 72, + "Weight_in_lbs": 2565, + "Acceleration": 13.6, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "volvo", + "Name": "volvo 245", + "Miles_per_Gallon": 20, + "Cylinders": 4, + "Displacement": 130, + "Horsepower": 102, + "Weight_in_lbs": 3150, + "Acceleration": 15.7, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "plymouth", + "Name": "plymouth volare premier v8", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 3940, + "Acceleration": 13.2, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "peugeot", + "Name": "peugeot 504", + "Miles_per_Gallon": 19, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 88, + "Weight_in_lbs": 3270, + "Acceleration": 21.9, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota mark ii", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": 156, + "Horsepower": 108, + "Weight_in_lbs": 2930, + "Acceleration": 15.5, + "Year": "1976-01-01", + "Origin": "Japan" + }, + { + "Name": "mercedes-benz 280s", + "Maker": "mercedes", + "Miles_per_Gallon": 16.5, + "Cylinders": 6, + "Displacement": 168, + "Horsepower": 120, + "Weight_in_lbs": 3820, + "Acceleration": 16.7, + "Year": "1976-01-01", + "Origin": "European Union" + }, + { + "Maker": "cadillac", + "Name": "cadillac seville", + "Miles_per_Gallon": 16.5, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 180, + "Weight_in_lbs": 4380, + "Acceleration": 12.1, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "chevy", + "Name": "chevy c10", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 145, + "Weight_in_lbs": 4055, + "Acceleration": 12, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford f108", + 
"Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 130, + "Weight_in_lbs": 3870, + "Acceleration": 15, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge d100", + "Miles_per_Gallon": 13, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 150, + "Weight_in_lbs": 3755, + "Acceleration": 14, + "Year": "1976-01-01", + "Origin": "USA" + }, + { + "Maker": "honda", + "Name": "honda Accelerationord cvcc", + "Miles_per_Gallon": 31.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 68, + "Weight_in_lbs": 2045, + "Acceleration": 18.5, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "buick", + "Name": "buick opel isuzu deluxe", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": 111, + "Horsepower": 80, + "Weight_in_lbs": 2155, + "Acceleration": 14.8, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "renault", + "Name": "renault 5 gtl", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 79, + "Horsepower": 58, + "Weight_in_lbs": 1825, + "Acceleration": 18.6, + "Year": "1977-01-01", + "Origin": "European Union" + }, + { + "Maker": "plymouth", + "Name": "plymouth arrow gs", + "Miles_per_Gallon": 25.5, + "Cylinders": 4, + "Displacement": 122, + "Horsepower": 96, + "Weight_in_lbs": 2300, + "Acceleration": 15.5, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun f-10 hatchback", + "Miles_per_Gallon": 33.5, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 70, + "Weight_in_lbs": 1945, + "Acceleration": 16.8, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet caprice classic", + "Miles_per_Gallon": 17.5, + "Cylinders": 8, + "Displacement": 305, + "Horsepower": 145, + "Weight_in_lbs": 3880, + "Acceleration": 12.5, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile cutlass supreme", + "Miles_per_Gallon": 17, + 
"Cylinders": 8, + "Displacement": 260, + "Horsepower": 110, + "Weight_in_lbs": 4060, + "Acceleration": 19, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge monaco brougham", + "Miles_per_Gallon": 15.5, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 145, + "Weight_in_lbs": 4140, + "Acceleration": 13.7, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury cougar brougham", + "Miles_per_Gallon": 15, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 130, + "Weight_in_lbs": 4295, + "Acceleration": 14.9, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet concours", + "Miles_per_Gallon": 17.5, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 110, + "Weight_in_lbs": 3520, + "Acceleration": 16.4, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick skylark", + "Miles_per_Gallon": 20.5, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 105, + "Weight_in_lbs": 3425, + "Acceleration": 16.9, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth volare custom", + "Miles_per_Gallon": 19, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 100, + "Weight_in_lbs": 3630, + "Acceleration": 17.7, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford granada", + "Miles_per_Gallon": 18.5, + "Cylinders": 6, + "Displacement": 250, + "Horsepower": 98, + "Weight_in_lbs": 3525, + "Acceleration": 19, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac grand prix lj", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 180, + "Weight_in_lbs": 4220, + "Acceleration": 11.1, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet monte carlo landau", + "Miles_per_Gallon": 15.5, + "Cylinders": 8, + "Displacement": 350, + 
"Horsepower": 170, + "Weight_in_lbs": 4165, + "Acceleration": 11.4, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler cordoba", + "Miles_per_Gallon": 15.5, + "Cylinders": 8, + "Displacement": 400, + "Horsepower": 190, + "Weight_in_lbs": 4325, + "Acceleration": 12.2, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford thunderbird", + "Miles_per_Gallon": 16, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 149, + "Weight_in_lbs": 4335, + "Acceleration": 14.5, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen rabbit custom", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 78, + "Weight_in_lbs": 1940, + "Acceleration": 14.5, + "Year": "1977-01-01", + "Origin": "European Union" + }, + { + "Maker": "pontiac", + "Name": "pontiac sunbird coupe", + "Miles_per_Gallon": 24.5, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 88, + "Weight_in_lbs": 2740, + "Acceleration": 16, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corolla liftback", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 75, + "Weight_in_lbs": 2265, + "Acceleration": 18.2, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "ford", + "Name": "ford mustang ii 2+2", + "Miles_per_Gallon": 25.5, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 89, + "Weight_in_lbs": 2755, + "Acceleration": 15.8, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevette", + "Miles_per_Gallon": 30.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 63, + "Weight_in_lbs": 2051, + "Acceleration": 17, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge colt m/m", + "Miles_per_Gallon": 33.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 83, + "Weight_in_lbs": 2075, + 
"Acceleration": 15.9, + "Year": "1977-01-01", + "Origin": "USA" + }, + { + "Maker": "subaru", + "Name": "subaru dl", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 67, + "Weight_in_lbs": 1985, + "Acceleration": 16.4, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen dasher", + "Miles_per_Gallon": 30.5, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 78, + "Weight_in_lbs": 2190, + "Acceleration": 14.1, + "Year": "1977-01-01", + "Origin": "European Union" + }, + { + "Maker": "datsun", + "Name": "datsun 810", + "Miles_per_Gallon": 22, + "Cylinders": 6, + "Displacement": 146, + "Horsepower": 97, + "Weight_in_lbs": 2815, + "Acceleration": 14.5, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "bmw", + "Name": "bmw 320i", + "Miles_per_Gallon": 21.5, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 110, + "Weight_in_lbs": 2600, + "Acceleration": 12.8, + "Year": "1977-01-01", + "Origin": "European Union" + }, + { + "Maker": "mazda", + "Name": "mazda rx-4", + "Miles_per_Gallon": 21.5, + "Cylinders": 3, + "Displacement": 80, + "Horsepower": 110, + "Weight_in_lbs": 2720, + "Acceleration": 13.5, + "Year": "1977-01-01", + "Origin": "Japan" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen rabbit custom diesel", + "Miles_per_Gallon": 43.1, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 48, + "Weight_in_lbs": 1985, + "Acceleration": 21.5, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "ford", + "Name": "ford fiesta", + "Miles_per_Gallon": 36.1, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 66, + "Weight_in_lbs": 1800, + "Acceleration": 14.4, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "mazda", + "Name": "mazda glc deluxe", + "Miles_per_Gallon": 32.8, + "Cylinders": 4, + "Displacement": 78, + "Horsepower": 52, + "Weight_in_lbs": 1985, + "Acceleration": 19.4, + "Year": "1978-01-01", + "Origin": 
"Japan" + }, + { + "Maker": "datsun", + "Name": "datsun b210 gx", + "Miles_per_Gallon": 39.4, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 70, + "Weight_in_lbs": 2070, + "Acceleration": 18.6, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "honda", + "Name": "honda civic cvcc", + "Miles_per_Gallon": 36.1, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 60, + "Weight_in_lbs": 1800, + "Acceleration": 16.4, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile cutlass salon brougham", + "Miles_per_Gallon": 19.9, + "Cylinders": 8, + "Displacement": 260, + "Horsepower": 110, + "Weight_in_lbs": 3365, + "Acceleration": 15.5, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge diplomat", + "Miles_per_Gallon": 19.4, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 140, + "Weight_in_lbs": 3735, + "Acceleration": 13.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury monarch ghia", + "Miles_per_Gallon": 20.2, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 139, + "Weight_in_lbs": 3570, + "Acceleration": 12.8, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac phoenix lj", + "Miles_per_Gallon": 19.2, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 105, + "Weight_in_lbs": 3535, + "Acceleration": 19.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet malibu", + "Miles_per_Gallon": 20.5, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 95, + "Weight_in_lbs": 3155, + "Acceleration": 18.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford fairmont (auto)", + "Miles_per_Gallon": 20.2, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 85, + "Weight_in_lbs": 2965, + "Acceleration": 15.8, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": 
"ford fairmont (man)", + "Miles_per_Gallon": 25.1, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 88, + "Weight_in_lbs": 2720, + "Acceleration": 15.4, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth volare", + "Miles_per_Gallon": 20.5, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 100, + "Weight_in_lbs": 3430, + "Acceleration": 17.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc concord", + "Miles_per_Gallon": 19.4, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 90, + "Weight_in_lbs": 3210, + "Acceleration": 17.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick century special", + "Miles_per_Gallon": 20.6, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 105, + "Weight_in_lbs": 3380, + "Acceleration": 15.8, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury zephyr", + "Miles_per_Gallon": 20.8, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 85, + "Weight_in_lbs": 3070, + "Acceleration": 16.7, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge aspen", + "Miles_per_Gallon": 18.6, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 110, + "Weight_in_lbs": 3620, + "Acceleration": 18.7, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc concord d/l", + "Miles_per_Gallon": 18.1, + "Cylinders": 6, + "Displacement": 258, + "Horsepower": 120, + "Weight_in_lbs": 3410, + "Acceleration": 15.1, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet monte carlo landau", + "Miles_per_Gallon": 19.2, + "Cylinders": 8, + "Displacement": 305, + "Horsepower": 145, + "Weight_in_lbs": 3425, + "Acceleration": 13.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick regal sport coupe (turbo)", + "Miles_per_Gallon": 17.7, + 
"Cylinders": 6, + "Displacement": 231, + "Horsepower": 165, + "Weight_in_lbs": 3445, + "Acceleration": 13.4, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford futura", + "Miles_per_Gallon": 18.1, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 139, + "Weight_in_lbs": 3205, + "Acceleration": 11.2, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge magnum xe", + "Miles_per_Gallon": 17.5, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 140, + "Weight_in_lbs": 4080, + "Acceleration": 13.7, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevette", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 68, + "Weight_in_lbs": 2155, + "Acceleration": 16.5, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota corona", + "Miles_per_Gallon": 27.5, + "Cylinders": 4, + "Displacement": 134, + "Horsepower": 95, + "Weight_in_lbs": 2560, + "Acceleration": 14.2, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 510", + "Miles_per_Gallon": 27.2, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 97, + "Weight_in_lbs": 2300, + "Acceleration": 14.7, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge omni", + "Miles_per_Gallon": 30.9, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 75, + "Weight_in_lbs": 2230, + "Acceleration": 14.5, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota celica gt liftback", + "Miles_per_Gallon": 21.1, + "Cylinders": 4, + "Displacement": 134, + "Horsepower": 95, + "Weight_in_lbs": 2515, + "Acceleration": 14.8, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth sapporo", + "Miles_per_Gallon": 23.2, + "Cylinders": 4, + "Displacement": 156, + "Horsepower": 105, + "Weight_in_lbs": 
2745, + "Acceleration": 16.7, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile starfire sx", + "Miles_per_Gallon": 23.8, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 85, + "Weight_in_lbs": 2855, + "Acceleration": 17.6, + "Year": "1978-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 200-sx", + "Miles_per_Gallon": 23.9, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 97, + "Weight_in_lbs": 2405, + "Acceleration": 14.9, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "audi", + "Name": "audi 5000", + "Miles_per_Gallon": 20.3, + "Cylinders": 5, + "Displacement": 131, + "Horsepower": 103, + "Weight_in_lbs": 2830, + "Acceleration": 15.9, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "volvo", + "Name": "volvo 264gl", + "Miles_per_Gallon": 17, + "Cylinders": 6, + "Displacement": 163, + "Horsepower": 125, + "Weight_in_lbs": 3140, + "Acceleration": 13.6, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "saab", + "Name": "saab 99gle", + "Miles_per_Gallon": 21.6, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 115, + "Weight_in_lbs": 2795, + "Acceleration": 15.7, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "peugeot", + "Name": "peugeot 604sl", + "Miles_per_Gallon": 16.2, + "Cylinders": 6, + "Displacement": 163, + "Horsepower": 133, + "Weight_in_lbs": 3410, + "Acceleration": 15.8, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen scirocco", + "Miles_per_Gallon": 31.5, + "Cylinders": 4, + "Displacement": 89, + "Horsepower": 71, + "Weight_in_lbs": 1990, + "Acceleration": 14.9, + "Year": "1978-01-01", + "Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda Accelerationord lx", + "Miles_per_Gallon": 29.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 68, + "Weight_in_lbs": 2135, + "Acceleration": 
16.6, + "Year": "1978-01-01", + "Origin": "Japan" + }, + { + "Maker": "pontiac", + "Name": "pontiac lemans v6", + "Miles_per_Gallon": 21.5, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 115, + "Weight_in_lbs": 3245, + "Acceleration": 15.4, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury zephyr 6", + "Miles_per_Gallon": 19.8, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 85, + "Weight_in_lbs": 2990, + "Acceleration": 18.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford fairmont 4", + "Miles_per_Gallon": 22.3, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 88, + "Weight_in_lbs": 2890, + "Acceleration": 17.3, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc concord dl 6", + "Miles_per_Gallon": 20.2, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 90, + "Weight_in_lbs": 3265, + "Acceleration": 18.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge aspen 6", + "Miles_per_Gallon": 20.6, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 110, + "Weight_in_lbs": 3360, + "Acceleration": 16.6, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet caprice classic", + "Miles_per_Gallon": 17, + "Cylinders": 8, + "Displacement": 305, + "Horsepower": 130, + "Weight_in_lbs": 3840, + "Acceleration": 15.4, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford ltd landau", + "Miles_per_Gallon": 17.6, + "Cylinders": 8, + "Displacement": 302, + "Horsepower": 129, + "Weight_in_lbs": 3725, + "Acceleration": 13.4, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury grand marquis", + "Miles_per_Gallon": 16.5, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 138, + "Weight_in_lbs": 3955, + "Acceleration": 13.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + 
"Maker": "dodge", + "Name": "dodge st. regis", + "Miles_per_Gallon": 18.2, + "Cylinders": 8, + "Displacement": 318, + "Horsepower": 135, + "Weight_in_lbs": 3830, + "Acceleration": 15.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick estate wagon (sw)", + "Miles_per_Gallon": 16.9, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 155, + "Weight_in_lbs": 4360, + "Acceleration": 14.9, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford country squire (sw)", + "Miles_per_Gallon": 15.5, + "Cylinders": 8, + "Displacement": 351, + "Horsepower": 142, + "Weight_in_lbs": 4054, + "Acceleration": 14.3, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet malibu classic (sw)", + "Miles_per_Gallon": 19.2, + "Cylinders": 8, + "Displacement": 267, + "Horsepower": 125, + "Weight_in_lbs": 3605, + "Acceleration": 15, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler lebaron town @ country (sw)", + "Miles_per_Gallon": 18.5, + "Cylinders": 8, + "Displacement": 360, + "Horsepower": 150, + "Weight_in_lbs": 3940, + "Acceleration": 13, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "vw", + "Name": "vw rabbit custom", + "Miles_per_Gallon": 31.9, + "Cylinders": 4, + "Displacement": 89, + "Horsepower": 71, + "Weight_in_lbs": 1925, + "Acceleration": 14, + "Year": "1979-01-01", + "Origin": "European Union" + }, + { + "Maker": "maxda", + "Name": "maxda glc deluxe", + "Miles_per_Gallon": 34.1, + "Cylinders": 4, + "Displacement": 86, + "Horsepower": 65, + "Weight_in_lbs": 1975, + "Acceleration": 15.2, + "Year": "1979-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge colt hatchback custom", + "Miles_per_Gallon": 35.7, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 80, + "Weight_in_lbs": 1915, + "Acceleration": 14.4, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + 
"Name": "amc spirit dl", + "Miles_per_Gallon": 27.4, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 80, + "Weight_in_lbs": 2670, + "Acceleration": 15, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "mercedes", + "Name": "mercedes benz 300d", + "Miles_per_Gallon": 25.4, + "Cylinders": 5, + "Displacement": 183, + "Horsepower": 77, + "Weight_in_lbs": 3530, + "Acceleration": 20.1, + "Year": "1979-01-01", + "Origin": "European Union" + }, + { + "Maker": "cadillac", + "Name": "cadillac eldorado", + "Miles_per_Gallon": 23, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 125, + "Weight_in_lbs": 3900, + "Acceleration": 17.4, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "peugeot", + "Name": "peugeot 504", + "Miles_per_Gallon": 27.2, + "Cylinders": 4, + "Displacement": 141, + "Horsepower": 71, + "Weight_in_lbs": 3190, + "Acceleration": 24.8, + "Year": "1979-01-01", + "Origin": "European Union" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile cutlass salon brougham", + "Miles_per_Gallon": 23.9, + "Cylinders": 8, + "Displacement": 260, + "Horsepower": 90, + "Weight_in_lbs": 3420, + "Acceleration": 22.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth horizon", + "Miles_per_Gallon": 34.2, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 70, + "Weight_in_lbs": 2200, + "Acceleration": 13.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth horizon tc3", + "Miles_per_Gallon": 34.5, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 70, + "Weight_in_lbs": 2150, + "Acceleration": 14.9, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 210", + "Miles_per_Gallon": 31.8, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 65, + "Weight_in_lbs": 2020, + "Acceleration": 19.2, + "Year": "1979-01-01", + "Origin": "Japan" + }, + { + "Maker": "fiat", + "Name": "fiat strada custom", + 
"Miles_per_Gallon": 37.3, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 69, + "Weight_in_lbs": 2130, + "Acceleration": 14.7, + "Year": "1979-01-01", + "Origin": "European Union" + }, + { + "Maker": "buick", + "Name": "buick skylark limited", + "Miles_per_Gallon": 28.4, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 2670, + "Acceleration": 16, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet citation", + "Miles_per_Gallon": 28.8, + "Cylinders": 6, + "Displacement": 173, + "Horsepower": 115, + "Weight_in_lbs": 2595, + "Acceleration": 11.3, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile omega brougham", + "Miles_per_Gallon": 26.8, + "Cylinders": 6, + "Displacement": 173, + "Horsepower": 115, + "Weight_in_lbs": 2700, + "Acceleration": 12.9, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac phoenix", + "Miles_per_Gallon": 33.5, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 2556, + "Acceleration": 13.2, + "Year": "1979-01-01", + "Origin": "USA" + }, + { + "Maker": "vw", + "Name": "vw rabbit", + "Miles_per_Gallon": 41.5, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 76, + "Weight_in_lbs": 2144, + "Acceleration": 14.7, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota corolla tercel", + "Miles_per_Gallon": 38.1, + "Cylinders": 4, + "Displacement": 89, + "Horsepower": 60, + "Weight_in_lbs": 1968, + "Acceleration": 18.8, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet chevette", + "Miles_per_Gallon": 32.1, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 70, + "Weight_in_lbs": 2120, + "Acceleration": 15.5, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 310", + "Miles_per_Gallon": 37.2, + "Cylinders": 4, 
+ "Displacement": 86, + "Horsepower": 65, + "Weight_in_lbs": 2019, + "Acceleration": 16.4, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet citation", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 2678, + "Acceleration": 16.5, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford fairmont", + "Miles_per_Gallon": 26.4, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 88, + "Weight_in_lbs": 2870, + "Acceleration": 18.1, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc concord", + "Miles_per_Gallon": 24.3, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 3003, + "Acceleration": 20.1, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge aspen", + "Miles_per_Gallon": 19.1, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 90, + "Weight_in_lbs": 3381, + "Acceleration": 18.7, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "audi", + "Name": "audi 4000", + "Miles_per_Gallon": 34.3, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 78, + "Weight_in_lbs": 2188, + "Acceleration": 15.8, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota corona liftback", + "Miles_per_Gallon": 29.8, + "Cylinders": 4, + "Displacement": 134, + "Horsepower": 90, + "Weight_in_lbs": 2711, + "Acceleration": 15.5, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda 626", + "Miles_per_Gallon": 31.3, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 75, + "Weight_in_lbs": 2542, + "Acceleration": 17.5, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 510 hatchback", + "Miles_per_Gallon": 37, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 92, + "Weight_in_lbs": 2434, + "Acceleration": 15, 
+ "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyota", + "Name": "toyota corolla", + "Miles_per_Gallon": 32.2, + "Cylinders": 4, + "Displacement": 108, + "Horsepower": 75, + "Weight_in_lbs": 2265, + "Acceleration": 15.2, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda glc", + "Miles_per_Gallon": 46.6, + "Cylinders": 4, + "Displacement": 86, + "Horsepower": 65, + "Weight_in_lbs": 2110, + "Acceleration": 17.9, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge colt", + "Miles_per_Gallon": 27.9, + "Cylinders": 4, + "Displacement": 156, + "Horsepower": 105, + "Weight_in_lbs": 2800, + "Acceleration": 14.4, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "datsun", + "Name": "datsun 210", + "Miles_per_Gallon": 40.8, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 65, + "Weight_in_lbs": 2110, + "Acceleration": 19.2, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "vw", + "Name": "vw rabbit c (diesel)", + "Miles_per_Gallon": 44.3, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 48, + "Weight_in_lbs": 2085, + "Acceleration": 21.7, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "vw", + "Name": "vw dasher (diesel)", + "Miles_per_Gallon": 43.4, + "Cylinders": 4, + "Displacement": 90, + "Horsepower": 48, + "Weight_in_lbs": 2335, + "Acceleration": 23.7, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "audi", + "Name": "audi 5000s (diesel)", + "Miles_per_Gallon": 36.4, + "Cylinders": 5, + "Displacement": 121, + "Horsepower": 67, + "Weight_in_lbs": 2950, + "Acceleration": 19.9, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Name": "mercedes-benz 240d", + "Maker": "mercedes", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": 146, + "Horsepower": 67, + "Weight_in_lbs": 3250, + "Acceleration": 21.8, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + 
"Maker": "honda", + "Name": "honda civic 1500 gl", + "Miles_per_Gallon": 44.6, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 67, + "Weight_in_lbs": 1850, + "Acceleration": 13.8, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "renault", + "Name": "renault lecar deluxe", + "Miles_per_Gallon": 40.9, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": null, + "Weight_in_lbs": 1835, + "Acceleration": 17.3, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "subaru", + "Name": "subaru dl", + "Miles_per_Gallon": 33.8, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 67, + "Weight_in_lbs": 2145, + "Acceleration": 18, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "vokswagen", + "Name": "vokswagen rabbit", + "Miles_per_Gallon": 29.8, + "Cylinders": 4, + "Displacement": 89, + "Horsepower": 62, + "Weight_in_lbs": 1845, + "Acceleration": 15.3, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "datsun", + "Name": "datsun 280-zx", + "Miles_per_Gallon": 32.7, + "Cylinders": 6, + "Displacement": 168, + "Horsepower": 132, + "Weight_in_lbs": 2910, + "Acceleration": 11.4, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda rx-7 gs", + "Miles_per_Gallon": 23.7, + "Cylinders": 3, + "Displacement": 70, + "Horsepower": 100, + "Weight_in_lbs": 2420, + "Acceleration": 12.5, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "triumph", + "Name": "triumph tr7 coupe", + "Miles_per_Gallon": 35, + "Cylinders": 4, + "Displacement": 122, + "Horsepower": 88, + "Weight_in_lbs": 2500, + "Acceleration": 15.1, + "Year": "1980-01-01", + "Origin": "European Union" + }, + { + "Maker": "ford", + "Name": "ford mustang cobra", + "Miles_per_Gallon": 23.6, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": null, + "Weight_in_lbs": 2905, + "Acceleration": 14.3, + "Year": "1980-01-01", + "Origin": "USA" + }, + { + "Maker": "honda", + "Name": "honda 
Accelerationord", + "Miles_per_Gallon": 32.4, + "Cylinders": 4, + "Displacement": 107, + "Horsepower": 72, + "Weight_in_lbs": 2290, + "Acceleration": 17, + "Year": "1980-01-01", + "Origin": "Japan" + }, + { + "Maker": "honda", + "Name": "honda Accelerationord", + "Miles_per_Gallon": 32.4, + "Cylinders": 4, + "Displacement": 107, + "Horsepower": 72, + "Weight_in_lbs": 2290, + "Acceleration": 17, + "Year": "1981-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth reliant", + "Miles_per_Gallon": 27.2, + "Cylinders": 4, + "Displacement": 135, + "Horsepower": 84, + "Weight_in_lbs": 2490, + "Acceleration": 15.7, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "buick", + "Name": "buick skylark", + "Miles_per_Gallon": 26.6, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 84, + "Weight_in_lbs": 2635, + "Acceleration": 16.4, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge aries wagon (sw)", + "Miles_per_Gallon": 25.8, + "Cylinders": 4, + "Displacement": 156, + "Horsepower": 92, + "Weight_in_lbs": 2620, + "Acceleration": 14.4, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet citation", + "Miles_per_Gallon": 23.5, + "Cylinders": 6, + "Displacement": 173, + "Horsepower": 110, + "Weight_in_lbs": 2725, + "Acceleration": 12.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "plymouth", + "Name": "plymouth reliant", + "Miles_per_Gallon": 30, + "Cylinders": 4, + "Displacement": 135, + "Horsepower": 84, + "Weight_in_lbs": 2385, + "Acceleration": 12.9, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota starlet", + "Miles_per_Gallon": 39.1, + "Cylinders": 4, + "Displacement": 79, + "Horsepower": 58, + "Weight_in_lbs": 1755, + "Acceleration": 16.9, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth champ", + "Miles_per_Gallon": 39, + "Cylinders": 4, + 
"Displacement": 86, + "Horsepower": 64, + "Weight_in_lbs": 1875, + "Acceleration": 16.4, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "honda", + "Name": "honda civic 1300", + "Miles_per_Gallon": 35.1, + "Cylinders": 4, + "Displacement": 81, + "Horsepower": 60, + "Weight_in_lbs": 1760, + "Acceleration": 16.1, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Name": "subaru", + "Maker": "subaru", + "Miles_per_Gallon": 32.3, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 67, + "Weight_in_lbs": 2065, + "Acceleration": 17.8, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 210", + "Miles_per_Gallon": 37, + "Cylinders": 4, + "Displacement": 85, + "Horsepower": 65, + "Weight_in_lbs": 1975, + "Acceleration": 19.4, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyota", + "Name": "toyota tercel", + "Miles_per_Gallon": 37.7, + "Cylinders": 4, + "Displacement": 89, + "Horsepower": 62, + "Weight_in_lbs": 2050, + "Acceleration": 17.3, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda glc 4", + "Miles_per_Gallon": 34.1, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 68, + "Weight_in_lbs": 1985, + "Acceleration": 16, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth horizon 4", + "Miles_per_Gallon": 34.7, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 63, + "Weight_in_lbs": 2215, + "Acceleration": 14.9, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford escort 4w", + "Miles_per_Gallon": 34.4, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 65, + "Weight_in_lbs": 2045, + "Acceleration": 16.2, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford escort 2h", + "Miles_per_Gallon": 29.9, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 65, + "Weight_in_lbs": 2380, + "Acceleration": 20.7, + "Year": 
"1982-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen jetta", + "Miles_per_Gallon": 33, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 74, + "Weight_in_lbs": 2190, + "Acceleration": 14.2, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "renault", + "Name": "renault 18i", + "Miles_per_Gallon": 34.5, + "Cylinders": 4, + "Displacement": 100, + "Horsepower": null, + "Weight_in_lbs": 2320, + "Acceleration": 15.8, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "honda", + "Name": "honda prelude", + "Miles_per_Gallon": 33.7, + "Cylinders": 4, + "Displacement": 107, + "Horsepower": 75, + "Weight_in_lbs": 2210, + "Acceleration": 14.4, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyota", + "Name": "toyota corolla", + "Miles_per_Gallon": 32.4, + "Cylinders": 4, + "Displacement": 108, + "Horsepower": 75, + "Weight_in_lbs": 2350, + "Acceleration": 16.8, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 200sx", + "Miles_per_Gallon": 32.9, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 100, + "Weight_in_lbs": 2615, + "Acceleration": 14.8, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda 626", + "Miles_per_Gallon": 31.6, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 74, + "Weight_in_lbs": 2635, + "Acceleration": 18.3, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "peugeot", + "Name": "peugeot 505s turbo diesel", + "Miles_per_Gallon": 28.1, + "Cylinders": 4, + "Displacement": 141, + "Horsepower": 80, + "Weight_in_lbs": 3230, + "Acceleration": 20.4, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "saab", + "Name": "saab 900s", + "Miles_per_Gallon": null, + "Cylinders": 4, + "Displacement": 121, + "Horsepower": 110, + "Weight_in_lbs": 2800, + "Acceleration": 15.4, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + 
"Maker": "volvo", + "Name": "volvo diesel", + "Miles_per_Gallon": 30.7, + "Cylinders": 6, + "Displacement": 145, + "Horsepower": 76, + "Weight_in_lbs": 3160, + "Acceleration": 19.6, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "toyota", + "Name": "toyota cressida", + "Miles_per_Gallon": 25.4, + "Cylinders": 6, + "Displacement": 168, + "Horsepower": 116, + "Weight_in_lbs": 2900, + "Acceleration": 12.6, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 810 maxima", + "Miles_per_Gallon": 24.2, + "Cylinders": 6, + "Displacement": 146, + "Horsepower": 120, + "Weight_in_lbs": 2930, + "Acceleration": 13.8, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "buick", + "Name": "buick century", + "Miles_per_Gallon": 22.4, + "Cylinders": 6, + "Displacement": 231, + "Horsepower": 110, + "Weight_in_lbs": 3415, + "Acceleration": 15.8, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile cutlass ls", + "Miles_per_Gallon": 26.6, + "Cylinders": 8, + "Displacement": 350, + "Horsepower": 105, + "Weight_in_lbs": 3725, + "Acceleration": 19, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford granada gl", + "Miles_per_Gallon": 20.2, + "Cylinders": 6, + "Displacement": 200, + "Horsepower": 88, + "Weight_in_lbs": 3060, + "Acceleration": 17.1, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler lebaron salon", + "Miles_per_Gallon": 17.6, + "Cylinders": 6, + "Displacement": 225, + "Horsepower": 85, + "Weight_in_lbs": 3465, + "Acceleration": 16.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet cavalier", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 112, + "Horsepower": 88, + "Weight_in_lbs": 2605, + "Acceleration": 19.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet cavalier 
wagon", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 112, + "Horsepower": 88, + "Weight_in_lbs": 2640, + "Acceleration": 18.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet cavalier 2-door", + "Miles_per_Gallon": 34, + "Cylinders": 4, + "Displacement": 112, + "Horsepower": 88, + "Weight_in_lbs": 2395, + "Acceleration": 18, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac j2000 se hatchback", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 112, + "Horsepower": 85, + "Weight_in_lbs": 2575, + "Acceleration": 16.2, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "dodge", + "Name": "dodge aries se", + "Miles_per_Gallon": 29, + "Cylinders": 4, + "Displacement": 135, + "Horsepower": 84, + "Weight_in_lbs": 2525, + "Acceleration": 16, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "pontiac", + "Name": "pontiac phoenix", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 2735, + "Acceleration": 18, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford fairmont futura", + "Miles_per_Gallon": 24, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 92, + "Weight_in_lbs": 2865, + "Acceleration": 16.4, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "amc", + "Name": "amc concord dl", + "Miles_per_Gallon": 23, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": null, + "Weight_in_lbs": 3035, + "Acceleration": 20.5, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "volkswagen", + "Name": "volkswagen rabbit l", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 74, + "Weight_in_lbs": 1980, + "Acceleration": 15.3, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "mazda", + "Name": "mazda glc custom l", + "Miles_per_Gallon": 37, + "Cylinders": 4, + 
"Displacement": 91, + "Horsepower": 68, + "Weight_in_lbs": 2025, + "Acceleration": 18.2, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "mazda", + "Name": "mazda glc custom", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 68, + "Weight_in_lbs": 1970, + "Acceleration": 17.6, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "plymouth", + "Name": "plymouth horizon miser", + "Miles_per_Gallon": 38, + "Cylinders": 4, + "Displacement": 105, + "Horsepower": 63, + "Weight_in_lbs": 2125, + "Acceleration": 14.7, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "mercury", + "Name": "mercury lynx l", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 98, + "Horsepower": 70, + "Weight_in_lbs": 2125, + "Acceleration": 17.3, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "nissan", + "Name": "nissan stanza xe", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 88, + "Weight_in_lbs": 2160, + "Acceleration": 14.5, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "honda", + "Name": "honda Accelerationord", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 107, + "Horsepower": 75, + "Weight_in_lbs": 2205, + "Acceleration": 14.5, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "toyota", + "Name": "toyota corolla", + "Miles_per_Gallon": 34, + "Cylinders": 4, + "Displacement": 108, + "Horsepower": 70, + "Weight_in_lbs": 2245, + "Acceleration": 16.9, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "honda", + "Name": "honda civic", + "Miles_per_Gallon": 38, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 67, + "Weight_in_lbs": 1965, + "Acceleration": 15, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "honda", + "Name": "honda civic (auto)", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 67, + "Weight_in_lbs": 1965, + "Acceleration": 
15.7, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "datsun", + "Name": "datsun 310 gx", + "Miles_per_Gallon": 38, + "Cylinders": 4, + "Displacement": 91, + "Horsepower": 67, + "Weight_in_lbs": 1995, + "Acceleration": 16.2, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "buick", + "Name": "buick century limited", + "Miles_per_Gallon": 25, + "Cylinders": 6, + "Displacement": 181, + "Horsepower": 110, + "Weight_in_lbs": 2945, + "Acceleration": 16.4, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "oldsmobile", + "Name": "oldsmobile cutlass ciera (diesel)", + "Miles_per_Gallon": 38, + "Cylinders": 6, + "Displacement": 262, + "Horsepower": 85, + "Weight_in_lbs": 3015, + "Acceleration": 17, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chrysler", + "Name": "chrysler lebaron medallion", + "Miles_per_Gallon": 26, + "Cylinders": 4, + "Displacement": 156, + "Horsepower": 92, + "Weight_in_lbs": 2585, + "Acceleration": 14.5, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford granada l", + "Miles_per_Gallon": 22, + "Cylinders": 6, + "Displacement": 232, + "Horsepower": 112, + "Weight_in_lbs": 2835, + "Acceleration": 14.7, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "toyota", + "Name": "toyota celica gt", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 144, + "Horsepower": 96, + "Weight_in_lbs": 2665, + "Acceleration": 13.9, + "Year": "1982-01-01", + "Origin": "Japan" + }, + { + "Maker": "dodge", + "Name": "dodge charger 2.2", + "Miles_per_Gallon": 36, + "Cylinders": 4, + "Displacement": 135, + "Horsepower": 84, + "Weight_in_lbs": 2370, + "Acceleration": 13, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevrolet", + "Name": "chevrolet camaro", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 151, + "Horsepower": 90, + "Weight_in_lbs": 2950, + "Acceleration": -17.3, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + 
"Maker": "ford", + "Name": "ford mustang gl", + "Miles_per_Gallon": 27, + "Cylinders": 4, + "Displacement": 140, + "Horsepower": 86, + "Weight_in_lbs": 2790, + "Acceleration": 15.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "vw", + "Name": "vw pickup", + "Miles_per_Gallon": 44, + "Cylinders": 4, + "Displacement": 97, + "Horsepower": 52, + "Weight_in_lbs": 2130, + "Acceleration": 24.6, + "Year": "1982-01-01", + "Origin": "European Union" + }, + { + "Maker": "dodge", + "Name": "dodge rampage", + "Miles_per_Gallon": 32, + "Cylinders": 4, + "Displacement": 135, + "Horsepower": 84, + "Weight_in_lbs": 2295, + "Acceleration": 11.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "ford", + "Name": "ford ranger", + "Miles_per_Gallon": 28, + "Cylinders": 4, + "Displacement": 120, + "Horsepower": 79, + "Weight_in_lbs": 2625, + "Acceleration": -18.6, + "Year": "1982-01-01", + "Origin": "USA" + }, + { + "Maker": "chevy", + "Name": "chevy s-10", + "Miles_per_Gallon": 31, + "Cylinders": 4, + "Displacement": 119, + "Horsepower": 82, + "Weight_in_lbs": 2720, + "Acceleration": -19.4, + "Year": "1982-01-01", + "Origin": "USA" + } +] \ No newline at end of file diff --git a/example/index.html b/example/index.html index 36d7eb7..de4b91c 100644 --- a/example/index.html +++ b/example/index.html @@ -12,8 +12,8 @@ - - + + diff --git a/example/samples/example3.js b/example/samples/example3.js index b8d5fbd..966126b 100644 --- a/example/samples/example3.js +++ b/example/samples/example3.js @@ -1,156 +1,57 @@ -const DataModel = window.DataModel; +/* eslint-disable */ -const schema = [ - { - name: 'name', +d3.json('../data/cars.json', (data) => { + let jsonData = data; + const schema = [{ + name: 'Name', type: 'dimension' }, { - name: 'birthday', - type: 'dimension', - subtype: 'temporal', - format: '%Y-%m-%d' - }, - { - name: 'roll', - type: 'measure', - defAggFn: "avg" - } -]; - -const data = [ - { - name: 'Rousan', - birthday: '1995-07-05', - roll: 2 + 
name: 'Maker', + type: 'dimension' }, { - name: 'Sumant', - birthday: '1996-08-04', - roll: 89 + name: 'Miles_per_Gallon', + type: 'measure' }, + { - name: 'Ajay', - birthday: '1994-01-03', - roll: 31 + name: 'Displacement', + type: 'measure' }, { - name: 'Sushant', - birthday: '1994-01-03', - roll: 99 + name: 'Horsepower', + type: 'measure' }, { - name: 'Samim', - birthday: '1994-01-03', - roll: 12 + name: 'Weight_in_lbs', + type: 'measure' }, { - name: 'Akash', - birthday: '1994-01-03', - roll: 20 + name: 'Acceleration', + type: 'measure' }, { - name: 'Rousan', - birthday: '1995-07-06', - roll: 10 + name: 'Origin', + type: 'dimension' }, { - name: 'Akash', - birthday: '1994-01-03', - roll: -10 + name: 'Cylinders', + type: 'dimension' }, { - name: 'Rousan', - birthday: '1995-07-06', - roll: -23 + name: 'Year', + type: 'dimension', + subtype: 'temporal', + format: '%Y-%m-%d' } -]; - -const dm = new DataModel(data, schema); - -// const groupedDm = dm.groupBy(['name']); - -const groupedDm2 = dm.select(fields => fields.name.value === "Rousan"); - -// const schema = [ -// { name: 'Name', type: 'dimension' }, -// { name: 'HorsePower', type: 'measure' }, -// { name: 'Origin', type: 'dimension' } -// ]; -// const data = [ -// { Name: 'chevrolet chevelle malibu', Horsepower: 130, Origin: 'USA' }, -// { Name: 'citroen ds-21 pallas', Horsepower: 115, Origin: 'Europe' }, -// { Name: 'datsun pl510', Horsepower: 88, Origin: 'Japan' }, -// { Name: 'amc rebel sst', Horsepower: 150, Origin: 'USA' }, -// ]; -// const dt = new DataModel(schema, data); - -// const dt2 = dt.select(fields => fields.Origin.value === 'USA'); - -// const selectedDm = dm.select(fields => fields.roll.value > 10 || fields.roll.value < 0); - - - -// debugger; - -// const groupedDm = dm.groupBy(["name"], { -// roll: (vals, cloneProvider, store) => { -// if (!store.clonedDm) { -// store.clonedDm = cloneProvider(); -// } -// if (!store.avgRoll) { -// store.avgRoll = store.clonedDm.groupBy([""], { roll: "avg" 
}).getData().data[0][0]; -// } - -// return DataModel.Stats.avg(vals) - store.avgRoll; -// } -// }); -// const calDm = dm.calculateVariable({ -// name: "abc", -// type: "measure" -// }, ["roll", (roll, i, cloneProvider, store) => { -// if (!store.clonedDm) { -// store.clonedDm = cloneProvider(); -// } -// if (!store.avgRoll) { -// store.avgRoll = store.clonedDm.groupBy([""], {roll: "avg"}).getData().data[0][0]; -// } - -// return store.avgRoll - roll; -// }]); - -// const DataModel = window.DataModel; - -// const data1 = [ -// { profit: 10, sales: 20, city: 'a' }, -// { profit: 15, sales: 25, city: 'b' }, -// ]; -// const schema1 = [ -// { name: 'profit', type: 'measure' }, -// { name: 'sales', type: 'measure' }, -// { name: 'city', type: 'dimension' }, -// ]; -// const data2 = [ -// { population: 200, city: 'a' }, -// { population: 250, city: 'b' }, -// ]; -// const schema2 = [ -// { name: 'population', type: 'measure' }, -// { name: 'city', type: 'dimension' }, -// ]; -// const dataModel1 = new DataModel(data1, schema1, { name: 'ModelA' }); -// const dataModel2 = new DataModel(data2, schema2, { name: 'ModelB' }); + ]; -// const joinedDm = dataModel1.join(dataModel2, (f1, f2, cloneProvider1, cloneProvider2, store) => { -// if (!store.clonedDm1) { -// store.clonedDm1 = cloneProvider1(); -// } -// if (!store.clonedDm2) { -// store.clonedDm2 = cloneProvider2(); -// } -// if (!store.avgPopulation) { -// store.avgPopulation = store.clonedDm2.groupBy([""], { population: "avg" }).getData().data[0][0]; -// } + let rootData = new DataModel(jsonData, schema); + let dm = rootData.project(["Origin", "Acceleration"]); + let dm5 = DataModel.Operators.compose( + DataModel.Operators.groupBy(["Origin"]), + DataModel.Operators.select(f => f.Acceleration.value > 1000) + )(dm); +}); -// return (f1.profit.value * f1.sales.value) > store.avgPopulation; -// }); diff --git a/src/datamodel.js b/src/datamodel.js index cba7b8f..a77de5a 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ 
-3,6 +3,7 @@ import { FieldType, DimensionSubtype, DataFormat } from './enums'; import { persistDerivation, + persistAncestorDerivation, getRootGroupByModel, propagateToAllDataModels, getRootDataModel, @@ -245,11 +246,13 @@ class DataModel extends Relation { { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() }, reducers ); + persistAncestorDerivation(this, newDataModel); if (config.saveChild) { - this._children.push(newDataModel); + newDataModel.setParent(this); + } else { + newDataModel.setParent(null); } - newDataModel._parent = this; return newDataModel; } @@ -463,7 +466,7 @@ class DataModel extends Relation { return fieldSpec.index; }); - const clone = this.clone(); + const clone = this.clone(config.saveChild); const fs = clone.getFieldspace().fields; const suppliedFields = depFieldIndices.map(idx => fs[idx]); @@ -480,6 +483,7 @@ class DataModel extends Relation { clone.addField(field); persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn); + persistAncestorDerivation(this, clone); return clone; } @@ -631,10 +635,11 @@ class DataModel extends Relation { bins }], [binFieldName])[0]; - const clone = this.clone(); + const clone = this.clone(config.saveChild); clone.addField(binField); persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null); + persistAncestorDerivation(this, clone); return clone; } diff --git a/src/helper.js b/src/helper.js index 476576b..04991d5 100644 --- a/src/helper.js +++ b/src/helper.js @@ -35,22 +35,22 @@ export const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fie }; export const persistDerivation = (model, operation, config = {}, criteriaFn) => { - let derivative; - if (operation !== DM_DERIVATIVES.COMPOSE) { - derivative = { + if (operation === DM_DERIVATIVES.COMPOSE) { + model._derivation.length = 0; + model._derivation.push(...criteriaFn); + } else { + model._derivation.push({ op: operation, meta: config, 
criteria: criteriaFn - }; - model._derivation.push(derivative); - } - else { - derivative = [...criteriaFn]; - model._derivation.length = 0; - model._derivation.push(...derivative); + }); } }; +export const persistAncestorDerivation = (sourceDm, newDm) => { + newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation); +}; + export const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => { const newRowDiffSet = []; let lastInsertedValue = -1; @@ -127,13 +127,12 @@ export const filterPropagationModel = (model, propModels, config = {}) => { let filteredModel; if (operation === LOGICAL_OPERATORS.AND) { - const clonedModel = model.clone(false, false); - filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), { + filteredModel = model.select(fields => fns.every(fn => fn(fields)), { saveChild: false, mode: FilteringMode.ALL }); } else { - filteredModel = model.clone(false, false).select(fields => fns.some(fn => fn(fields)), { + filteredModel = model.select(fields => fns.some(fn => fn(fields)), { mode: FilteringMode.ALL, saveChild: false }); @@ -155,6 +154,7 @@ export const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) = cloned.__calculateFieldspace().calculateFieldsConfig(); persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn); + persistAncestorDerivation(sourceDm, cloned); return cloned; }; @@ -176,6 +176,7 @@ export const cloneWithProject = (sourceDm, projField, config, allFields) => { { projField, config, actualProjField: projectionSet }, null ); + persistAncestorDerivation(sourceDm, cloned); return cloned; }; @@ -312,8 +313,8 @@ const propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = }; export const getRootGroupByModel = (model) => { - if (model._parent && model._derivation.find(d => d.op !== 'group')) { - return getRootGroupByModel(model._parent); + while (model._parent && model._derivation.find(d => d.op !== 'group')) { + model 
= model._parent; } return model; }; diff --git a/src/index.spec.js b/src/index.spec.js index 3ead16e..f0c5506 100644 --- a/src/index.spec.js +++ b/src/index.spec.js @@ -66,6 +66,42 @@ describe('DataModel', () => { expect(cloneRelation._colIdentifier).to.equal(dataModel._colIdentifier); expect(cloneRelation._rowDiffset).to.equal(dataModel._rowDiffset); }); + + it('should set parent-child relationship when saveChild is true', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 33, job: 'services', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + const dataModel = new DataModel(data, schema); + + const cloneDm = dataModel.clone(true); + expect(cloneDm.getParent()).to.be.equal(dataModel); + expect(dataModel.getChildren()[0]).to.be.equal(cloneDm); + }); + + it('should remove parent-child relationship when saveChild is false', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 33, job: 'services', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + const dataModel = new DataModel(data, schema); + + const cloneDm = dataModel.clone(false); + expect(cloneDm.getParent()).to.be.null; + expect(dataModel.getChildren().length).to.be.equal(0); + }); }); context('Test for empty DataModel', () => { @@ -425,11 +461,22 @@ describe('DataModel', () => { it('should store derivation criteria info', () => { const dataModel = new DataModel(data, schema); - let projectedDataModel = dataModel.project(['age', 'job'], { saveChild: true }); + const dm = dataModel.select(fields => fields.age.value < 40); + const projectedDataModel = dm.project(['age', 'job']); 
expect(projectedDataModel.getDerivations()[0].op).to.be.equal(DM_DERIVATIVES.PROJECT); + expect(projectedDataModel.getAncestorDerivations()[0].op).to.be.equal(DM_DERIVATIVES.SELECT); + }); - projectedDataModel = dataModel.project(['age', 'job'], { saveChild: false }); - expect(projectedDataModel.getDerivations()[0].op).to.be.equal(DM_DERIVATIVES.PROJECT); + it('should control parent-child relationships on saveChild config', () => { + let rootDm = new DataModel(data, schema); + let dm = rootDm.project(['age', 'job'], { saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data, schema); + dm = rootDm.project(['age', 'job'], { saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); }); }); @@ -671,11 +718,22 @@ describe('DataModel', () => { it('should store derivation criteria info', () => { const dataModel = new DataModel(data, schema); - let selectedDm = dataModel.select(fields => fields.age.value < 40, { saveChild: true }); + const dm = dataModel.project(['age', 'job']); + const selectedDm = dm.select(fields => fields.age.value < 40); expect(selectedDm.getDerivations()[0].op).to.be.equal(DM_DERIVATIVES.SELECT); + expect(selectedDm.getAncestorDerivations()[0].op).to.be.equal(DM_DERIVATIVES.PROJECT); + }); - selectedDm = dataModel.select(fields => fields.age.value < 40, { saveChild: false }); - expect(selectedDm.getDerivations()[0].op).to.be.equal(DM_DERIVATIVES.SELECT); + it('should control parent-child relationships on saveChild config', () => { + let rootDm = new DataModel(data, schema); + let dm = rootDm.select(fields => fields.age.value < 40, { saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data, schema); + dm = rootDm.select(fields => fields.age.value > 40, { saveChild: false }); + expect(dm.getParent()).to.be.null; + 
expect(rootDm.getChildren().length).to.be.equal(0); }); }); @@ -1286,21 +1344,15 @@ describe('DataModel', () => { ]; const dataModel = new DataModel(data1, schema1); - let calDm = dataModel.calculateVariable({ + const dm = dataModel.project(['first', 'second']); + let calDm = dm.calculateVariable({ name: 'NewField', type: 'dimension' }, ['first', 'second', (first, second) => `${first} ${second}` - ], { saveChild: true }); - expect(calDm.getDerivations()[0].op).to.equal(DM_DERIVATIVES.CAL_VAR); - - calDm = dataModel.calculateVariable({ - name: 'NewField2', - type: 'dimension' - }, ['first', 'second', (first, second) => - `${first} ${second}` - ], { saveChild: false }); + ]); expect(calDm.getDerivations()[0].op).to.equal(DM_DERIVATIVES.CAL_VAR); + expect(calDm.getAncestorDerivations()[0].op).to.equal(DM_DERIVATIVES.PROJECT); }); it('should return correct value from the callback function', () => { @@ -1440,6 +1492,37 @@ describe('DataModel', () => { expect(mockedFn).to.throw('country is not a valid column name'); }); + + it('should control parent-child relationships on saveChild config', () => { + const data1 = [ + { profit: 10, sales: 20, city: 'a', state: 'aa' }, + { profit: 15, sales: 25, city: 'b', state: 'bb' }, + { profit: 10, sales: 20, city: 'a', state: 'ab' }, + { profit: 15, sales: 25, city: 'b', state: 'ba' }, + ]; + const schema1 = [ + { name: 'profit', type: 'measure' }, + { name: 'sales', type: 'measure' }, + { name: 'city', type: 'dimension' }, + { name: 'state', type: 'dimension' }, + ]; + + let rootDm = new DataModel(data1, schema1); + let dm = rootDm.calculateVariable({ + name: 'profitIndex', + type: 'measure' + }, ['profit', (profit, i) => profit * i], { saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data1, schema1); + dm = rootDm.calculateVariable({ + name: 'profitIndex2', + type: 'measure' + }, ['profit', (profit, i) => profit * i], { saveChild: false 
}); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); + }); }); describe('#propagate', () => { @@ -1753,11 +1836,22 @@ describe('DataModel', () => { it('should store derivation criteria info', () => { const rootDm = new DataModel(data1, schema1); - let groupedDm = rootDm.groupBy(['first'], { profit: 'avg' }, { saveChild: true }); + const dm = rootDm.select(fields => fields.profit.value > 15); + const groupedDm = dm.groupBy(['first'], { profit: 'avg' }); expect(groupedDm.getDerivations()[0].op).to.eql(DM_DERIVATIVES.GROUPBY); + expect(groupedDm.getAncestorDerivations()[0].op).to.eql(DM_DERIVATIVES.SELECT); + }); - groupedDm = rootDm.groupBy(['first'], { profit: 'avg' }, { saveChild: false }); - expect(groupedDm.getDerivations()[0].op).to.eql(DM_DERIVATIVES.GROUPBY); + it('should control parent-child relationships on saveChild config', () => { + let rootDm = new DataModel(data1, schema1); + let dm = rootDm.groupBy(['first'], { profit: 'avg' }, { saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data1, schema1); + dm = rootDm.groupBy(['first'], { profit: 'avg' }, { saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); }); }); }); @@ -1790,42 +1884,31 @@ describe('DataModel', () => { ]; const dataModel = new DataModel(data1, schema1); describe('#dispose', () => { - it('Should remove child on calling dispose', () => { - let dm2 = dataModel.select(fields => fields.profit.value < 150); - expect(dataModel._children.length).to.equal(1); + it('Should remove all references as gc can detect it as free object', () => { + const rootDm = new DataModel(data1, schema1); + const dm2 = rootDm.select(fields => fields.profit.value < 150); + const dm3 = dm2.project(['profit', 'sales']); dm2.dispose(); - expect(dataModel._children.length).to.equal(0); + 
expect(rootDm.getChildren().length).to.equal(0); + expect(dm3.getParent()).to.be.null; }); }); - describe('#addParent', () => { - it('Adding parent should save criteria in parent', () => { - let dm2 = dataModel.select(fields => fields.profit.value < 150); - let dm3 = dm2.groupBy(['sales'], { - profit: null - }); - let dm4 = dm3.project(['sales']); - let data = dm4.getData(); - let projFields = ['first']; - let projectConfig = {}; - let normalizedprojFields = []; - let criteriaQueue = [ - { - op: 'select', - meta: '', - criteria: fields => fields.profit.value < 150 - }, - { - op: 'project', - meta: { projFields, projectConfig, normalizedprojFields }, - criteria: null - } - ]; - dm3.dispose(); - dm4.addParent(dm2, criteriaQueue); - expect(dm2._children.length).to.equal(1); - expect(dm2._children[0].getData()).to.deep.equal(data); - expect(dm4._parent).to.equal(dm2); + describe('#setParent', () => { + it('should change parent and child relationships', () => { + const dm2 = dataModel.select(fields => fields.profit.value < 150); + const dm3 = dm2.groupBy(['sales'], { profit: 'avg' }, { saveChild: false }); + dm3.setParent(dm2); + expect(dm3._parent).to.be.equal(dm2); + expect(dm2._children[0]).to.be.equal(dm3); + }); + + it('should reset parent-child relationships when passing null as parent', () => { + const dm2 = dataModel.select(fields => fields.profit.value < 150); + const dm3 = dm2.groupBy(['sales'], { profit: 'avg' }, { saveChild: true }); + dm3.setParent(null); + expect(dm3._parent).to.be.null; + expect(dm2._children.length).to.be.equal(0); }); }); }); @@ -1910,6 +1993,29 @@ describe('DataModel', () => { }); }); + describe('#getAncestorDerivations', () => { + it('should return in-between ancestor derivative operations', () => { + const schema = [ + { name: 'Name', type: 'dimension' }, + { name: 'HorsePower', type: 'measure' }, + { name: 'Origin', type: 'dimension' } + ]; + const data = [ + { Name: 'chevrolet chevelle malibu', Horsepower: 130, Origin: 'USA' }, + { 
Name: 'citroen ds-21 pallas', Horsepower: 115, Origin: 'Europe' }, + { Name: 'datsun pl510', Horsepower: 88, Origin: 'Japan' }, + { Name: 'amc rebel sst', Horsepower: 150, Origin: 'USA' }, + ]; + const dt = new DataModel(data, schema); + const dt2 = dt.select(fields => fields.Origin.value === 'USA'); + const dt3 = dt2.groupBy(['Origin'], { HorsePower: 'avg' }); + const ancDerivations = dt3.getAncestorDerivations(); + expect(Array.isArray(ancDerivations)).to.be.true; + expect(ancDerivations.length).to.be.equal(1); + expect(ancDerivations[0].op).to.be.equal(DM_DERIVATIVES.SELECT); + }); + }); + describe('#detachedRoot', () => { const schema = [ { @@ -2457,8 +2563,35 @@ describe('DataModel', () => { ]; const dataModel = new DataModel(data1, schema1); - const binnedDm = dataModel.bin('profit', { binSize: 10, name: 'BinnedField' }); + const dm = dataModel.project(['profit', 'sales']); + const binnedDm = dm.bin('profit', { binSize: 10, name: 'BinnedField' }); expect(binnedDm.getDerivations()[0].op).to.be.equal(DM_DERIVATIVES.BIN); + expect(binnedDm.getAncestorDerivations()[0].op).to.be.equal(DM_DERIVATIVES.PROJECT); + }); + + it('should control parent-child relationships on saveChild config', () => { + const data1 = [ + { profit: 10, sales: 20, first: 'Hey', second: 'Jude' }, + { profit: 15, sales: 25, first: 'Norwegian', second: 'Wood' }, + { profit: 15, sales: 25, first: 'Norwegian', second: 'Wood' }, + { profit: 15, sales: 25, first: 'Norwegian', second: 'Wood' } + ]; + const schema1 = [ + { name: 'profit', type: 'measure' }, + { name: 'sales', type: 'measure' }, + { name: 'first', type: 'dimension' }, + { name: 'second', type: 'dimension' }, + ]; + + let rootDm = new DataModel(data1, schema1); + let dm = rootDm.bin('profit', { binSize: 10, name: 'binnedProfit', saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data1, schema1); + dm = rootDm.bin('sales', { binSize: 12, name: 
'binnedSales', saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); }); }); }); diff --git a/src/operator/compose.js b/src/operator/compose.js index 06cf7be..0fb9f36 100644 --- a/src/operator/compose.js +++ b/src/operator/compose.js @@ -1,3 +1,5 @@ +import { persistDerivation, persistAncestorDerivation } from '../helper'; +import { DM_DERIVATIVES } from '../constants'; /** * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable @@ -214,21 +216,30 @@ export const groupBy = (...args) => dm => dm.groupBy(...args); export const compose = (...operations) => (dm, config = { saveChild: true }) => { let currentDM = dm; - let frstChild; + let firstChild; const derivations = []; - const saveChild = config.saveChild; operations.forEach((operation) => { currentDM = operation(currentDM); derivations.push(...currentDM._derivation); - if (!frstChild) { - frstChild = currentDM; + if (!firstChild) { + firstChild = currentDM; } }); - saveChild && currentDM.addParent(dm, derivations); - if (derivations.length > 1) { - frstChild.dispose(); + if (firstChild && firstChild !== currentDM) { + firstChild.dispose(); + } + + persistDerivation(currentDM, DM_DERIVATIVES.COMPOSE, null, derivations); + // reset all ancestorDerivation saved in-between compose + currentDM._ancestorDerivation = []; + persistAncestorDerivation(dm, currentDM); + + if (config.saveChild) { + currentDM.setParent(dm); + } else { + currentDM.setParent(null); } return currentDM; diff --git a/src/relation.js b/src/relation.js index c6f7866..aa45556 100644 --- a/src/relation.js +++ b/src/relation.js @@ -1,8 +1,7 @@ import { FilteringMode } from './enums'; import { getUniqueId } from './utils'; -import { persistDerivation, updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper'; +import { updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper'; import { crossProduct, 
difference, naturalJoinFilter, union } from './operator'; -import { DM_DERIVATIVES } from './constants'; /** * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*, @@ -32,6 +31,7 @@ class Relation { this._parent = null; this._derivation = []; + this._ancestorDerivation = []; this._children = []; if (params.length === 1 && ((source = params[0]) instanceof Relation)) { @@ -317,31 +317,14 @@ class Relation { * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance. * @return {DataModel} - Returns the newly cloned DataModel instance. */ - clone (saveChild = true, linkParent = true) { - let retDataModel; - if (linkParent === false) { - const dataObj = this.getData({ - getAllFields: true - }); - const data = dataObj.data; - const schema = dataObj.schema; - const jsonData = data.map((row) => { - const rowObj = {}; - schema.forEach((field, i) => { - rowObj[field.name] = row[i]; - }); - return rowObj; - }); - retDataModel = new this.constructor(jsonData, schema); - } - else { - retDataModel = new this.constructor(this); - } - + clone (saveChild = true) { + const clonedDm = new this.constructor(this); if (saveChild) { - this._children.push(retDataModel); + clonedDm.setParent(this); + } else { + clonedDm.setParent(null); } - return retDataModel; + return clonedDm; } /** @@ -450,8 +433,14 @@ class Relation { * @public */ dispose () { - this._parent.removeChild(this); + this._parent && this._parent.removeChild(this); this._parent = null; + this._children.forEach((child) => { + if (child._parent === this) { + child._parent = null; + } + }); + this._children = []; } /** @@ -486,18 +475,14 @@ class Relation { } /** - * Adds the specified {@link DataModel} as a parent for the current {@link DataModel} instance. - * - * The optional criteriaQueue is an array containing the history of transaction performed on parent - * {@link DataModel} to get the current one. 
+ * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance. * * @param {DataModel} parent - The datamodel instance which will act as parent. - * @param {Array} criteriaQueue - Queue contains in-between operation meta-data. */ - addParent (parent, criteriaQueue = []) { - persistDerivation(this, DM_DERIVATIVES.COMPOSE, null, criteriaQueue); + setParent (parent) { + this._parent && this._parent.removeChild(this); this._parent = parent; - parent._children.push(this); + parent && parent._children.push(this); } /** @@ -553,7 +538,7 @@ class Relation { * * @return {DataModel[]} Returns the immediate child DataModel instances. */ - getChildren() { + getChildren () { return this._children; } @@ -581,9 +566,37 @@ class Relation { * * @return {Any[]} Returns the derivation meta data. */ - getDerivations() { + getDerivations () { return this._derivation; } + + /** + * Returns the in-between operation meta data happened from root {@link DataModel} to current instance. + * + * @example + * const schema = [ + * { name: 'Name', type: 'dimension' }, + * { name: 'HorsePower', type: 'measure' }, + * { name: "Origin", type: 'dimension' } + * ]; + * + * const data = [ + * { Name: "chevrolet chevelle malibu", Horsepower: 130, Origin: "USA" }, + * { Name: "citroen ds-21 pallas", Horsepower: 115, Origin: "Europe" }, + * { Name: "datsun pl510", Horsepower: 88, Origin: "Japan" }, + * { Name: "amc rebel sst", Horsepower: 150, Origin: "USA"}, + * ] + * + * const dt = new DataModel(data, schema); + * const dt2 = dt.select(fields => fields.Origin.value === "USA"); + * const dt3 = dt2.groupBy(["Origin"]); + * const ancDerivations = dt3.getAncestorDerivations(); + * + * @return {Any[]} Returns the previous derivation meta data. 
+ */ + getAncestorDerivations () { + return this._ancestorDerivation; + } } export default Relation; From f0a4b65804e0230165ca2d49d2806e1dda9e19c9 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Thu, 7 Feb 2019 16:10:03 +0530 Subject: [PATCH 02/21] Consider all derivations while doing applyExistingOperationOnModel --- src/helper.js | 78 ++++++++++++++++++++++++++++++++------------------- 1 file changed, 49 insertions(+), 29 deletions(-) diff --git a/src/helper.js b/src/helper.js index 04991d5..3c38532 100644 --- a/src/helper.js +++ b/src/helper.js @@ -86,6 +86,20 @@ export const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => return newRowDiffSet.join(','); }; +export const cloneWithAllFields = (model) => { + const clonedDm = model.clone(false); + const partialFieldspace = model.getPartialFieldspace(); + clonedDm._colIdentifier = model.partialFieldspace.fields.map(f => f.name()).join(','); + + // flush out cached namespace values on addition of new fields + partialFieldspace._cachedFieldsObj = null; + partialFieldspace._cachedDimension = null; + partialFieldspace._cachedMeasure = null; + clonedDm.__calculateFieldspace().calculateFieldsConfig(); + + return clonedDm; +}; + export const filterPropagationModel = (model, propModels, config = {}) => { const operation = config.operation || LOGICAL_OPERATORS.AND; const filterByMeasure = config.filterByMeasure || false; @@ -127,12 +141,12 @@ export const filterPropagationModel = (model, propModels, config = {}) => { let filteredModel; if (operation === LOGICAL_OPERATORS.AND) { - filteredModel = model.select(fields => fns.every(fn => fn(fields)), { + filteredModel = cloneWithAllFields(model).select(fields => fns.every(fn => fn(fields)), { saveChild: false, mode: FilteringMode.ALL }); } else { - filteredModel = model.select(fields => fns.some(fn => fn(fields)), { + filteredModel = cloneWithAllFields(model).select(fields => fns.some(fn => fn(fields)), { mode: FilteringMode.ALL, saveChild: false }); @@ 
-242,26 +256,23 @@ export const fieldInSchema = (schema, field) => { }; -export const getOperationArguments = (child) => { - const derivation = child._derivation; +export const getDerivationArguments = (derivation) => { let params = []; let operation; - if (derivation && derivation.length === 1) { - operation = derivation[0].op; - switch (operation) { - case DM_DERIVATIVES.SELECT: - params = [derivation[0].criteria]; - break; - case DM_DERIVATIVES.PROJECT: - params = [derivation[0].meta.actualProjField]; - break; - case DM_DERIVATIVES.GROUPBY: - operation = 'groupBy'; - params = [derivation[0].meta.groupByString.split(','), derivation[0].criteria]; - break; - default: - break; - } + operation = derivation.op; + switch (operation) { + case DM_DERIVATIVES.SELECT: + params = [derivation[0].criteria]; + break; + case DM_DERIVATIVES.PROJECT: + params = [derivation[0].meta.actualProjField]; + break; + case DM_DERIVATIVES.GROUPBY: + operation = 'groupBy'; + params = [derivation[0].meta.groupByString.split(','), derivation[0].criteria]; + break; + default: + operation = null; } return { @@ -271,17 +282,26 @@ export const getOperationArguments = (child) => { }; const applyExistingOperationOnModel = (propModel, dataModel) => { - const { operation, params } = getOperationArguments(dataModel); + const derivations = dataModel.getDerivations(); let selectionModel = propModel[0]; let rejectionModel = propModel[1]; - if (operation && params.length) { - selectionModel = propModel[0][operation](...params, { - saveChild: false - }); - rejectionModel = propModel[1][operation](...params, { - saveChild: false - }); - } + + derivations.forEach((derivation) => { + if (!derivation) { + return; + } + + const { operation, params } = getDerivationArguments(derivation); + if (operation) { + selectionModel = selectionModel[operation](...params, { + saveChild: false + }); + rejectionModel = rejectionModel[operation](...params, { + saveChild: false + }); + } + }); + return [selectionModel, 
rejectionModel]; }; From d831a0572bad2d95bfe25d0500a895f5d675d1c6 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Thu, 7 Feb 2019 16:19:14 +0530 Subject: [PATCH 03/21] Do some code refactor --- src/helper.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/helper.js b/src/helper.js index 3c38532..55154f1 100644 --- a/src/helper.js +++ b/src/helper.js @@ -89,7 +89,7 @@ export const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => export const cloneWithAllFields = (model) => { const clonedDm = model.clone(false); const partialFieldspace = model.getPartialFieldspace(); - clonedDm._colIdentifier = model.partialFieldspace.fields.map(f => f.name()).join(','); + clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(','); // flush out cached namespace values on addition of new fields partialFieldspace._cachedFieldsObj = null; @@ -262,14 +262,14 @@ export const getDerivationArguments = (derivation) => { operation = derivation.op; switch (operation) { case DM_DERIVATIVES.SELECT: - params = [derivation[0].criteria]; + params = [derivation.criteria]; break; case DM_DERIVATIVES.PROJECT: - params = [derivation[0].meta.actualProjField]; + params = [derivation.meta.actualProjField]; break; case DM_DERIVATIVES.GROUPBY: operation = 'groupBy'; - params = [derivation[0].meta.groupByString.split(','), derivation[0].criteria]; + params = [derivation.meta.groupByString.split(','), derivation.criteria]; break; default: operation = null; From 2b4857d17db9cf2f958d7aab6d9c10c2e292b799 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Fri, 8 Feb 2019 13:08:05 +0530 Subject: [PATCH 04/21] Do some code refactoring --- src/relation.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/relation.js b/src/relation.js index aa45556..60b3a6f 100644 --- a/src/relation.js +++ b/src/relation.js @@ -436,9 +436,7 @@ class Relation { this._parent && this._parent.removeChild(this); this._parent = null; 
this._children.forEach((child) => { - if (child._parent === this) { - child._parent = null; - } + child._parent = null; }); this._children = []; } From baadee304536d85d23d94a757749cd6bc4893328 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Fri, 8 Feb 2019 13:10:37 +0530 Subject: [PATCH 05/21] Add enum for 'group' string --- src/helper.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/helper.js b/src/helper.js index 55154f1..44914d3 100644 --- a/src/helper.js +++ b/src/helper.js @@ -333,7 +333,7 @@ const propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = }; export const getRootGroupByModel = (model) => { - while (model._parent && model._derivation.find(d => d.op !== 'group')) { + while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) { model = model._parent; } return model; From ca2b1ad863c5ea99b7b360ac676a89c8590c8c8f Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Fri, 8 Feb 2019 14:04:19 +0530 Subject: [PATCH 06/21] Add unit testcases for some helper methods --- src/helper.spec.js | 76 ++++++++++++++++++++++++++++++++++++ src/operator/compose.spec.js | 24 ++++++++++++ 2 files changed, 100 insertions(+) create mode 100644 src/helper.spec.js diff --git a/src/helper.spec.js b/src/helper.spec.js new file mode 100644 index 0000000..121235c --- /dev/null +++ b/src/helper.spec.js @@ -0,0 +1,76 @@ +/* global describe, it */ + +import { expect } from 'chai'; +import DataModel from './index'; +import { getRootGroupByModel, getRootDataModel, getPathToRootModel } from './helper'; + +describe('getRootGroupByModel', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 10, job: 'services', marital: 'married' }, + { age: 22, job: 'self-employed', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + + it('should return nearest groupBy DataModel', () => { + 
const dm = new DataModel(data, schema); + const dm1 = dm.groupBy(['job', 'marital']); + const dm2 = dm1.select(fields => fields.age.value > 15); + const dm3 = dm2.project(['age', 'job', 'marital']); + + expect(getRootGroupByModel(dm3)).to.be.equal(dm1); + }); +}); + +describe('getRootDataModel', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 10, job: 'services', marital: 'married' }, + { age: 22, job: 'self-employed', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + + it('should return root DataModel', () => { + const dm = new DataModel(data, schema); + const dm1 = dm.groupBy(['job', 'marital']); + const dm2 = dm1.select(fields => fields.age.value > 15); + const dm3 = dm2.project(['age', 'job', 'marital']); + + expect(getRootDataModel(dm3)).to.be.equal(dm); + }); +}); + + +describe('getPathToRootModel', () => { + const data = [ + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 10, job: 'services', marital: 'married' }, + { age: 22, job: 'self-employed', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + + it('should return root DataModel', () => { + const dm = new DataModel(data, schema); + const dm1 = dm.groupBy(['job', 'marital']); + const dm2 = dm1.select(fields => fields.age.value > 15); + const dm3 = dm2.project(['age', 'job', 'marital']); + const paths = getPathToRootModel(dm3); + + expect(paths.length).to.be.equal(3); + expect(paths[0]).to.be.equal(dm3); + expect(paths[1]).to.be.equal(dm2); + expect(paths[2]).to.be.equal(dm1); + }); +}); diff --git a/src/operator/compose.spec.js b/src/operator/compose.spec.js index aca7f32..a5d20fb 100644 --- a/src/operator/compose.spec.js +++ b/src/operator/compose.spec.js @@ -194,5 +194,29 @@ describe('Testing compose functionality', () => 
{ composedDm = nestedComposedFn2(dataModel2); expect(normalDm.getData()).to.deep.equal(composedDm.getData()); }); + + it('should keep child-parent relationship when saveChild is true', () => { + const dataModel = new DataModel(data1, schema1); + const composedFn = compose( + select(fields => fields.profit.value <= 15), + project(['id', 'profit', 'sales']), + ); + + const dm = composedFn(dataModel, { saveChild: true }); + expect(dm.getParent()).to.be.equal(dataModel); + expect(dataModel.getChildren()[0]).to.be.equal(dm); + }); + + it('should remove child-parent relationship when saveChild is false', () => { + const dataModel = new DataModel(data1, schema1); + const composedFn = compose( + select(fields => fields.profit.value <= 15), + project(['id', 'profit', 'sales']), + ); + + const dm = composedFn(dataModel, { saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(dataModel.getChildren().length).to.be.equal(0); + }); }); }); From 85e818878c1cc5f880935129ad8ca4be046814d7 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Fri, 8 Feb 2019 14:25:50 +0530 Subject: [PATCH 07/21] Add unit testcase for getData() getAllFields() arg --- src/index.spec.js | 56 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/src/index.spec.js b/src/index.spec.js index f0c5506..44fc053 100644 --- a/src/index.spec.js +++ b/src/index.spec.js @@ -331,6 +331,62 @@ describe('DataModel', () => { expect(dataModel.getData({ withUid: true })).to.deep.equal(expected); }); + + it('should return all field data when getAllFields is true', () => { + const schema = [ + { name: 'name', type: 'dimension' }, + { name: 'birthday', type: 'dimension', subtype: 'temporal', format: '%Y-%m-%d' }, + { name: 'roll', type: 'measure' }, + ]; + + const data = [ + { name: 'Rousan', birthday: '1995-07-05', roll: 10 }, + { name: 'Sumant', birthday: '1996-08-04', roll: 14 }, + { name: 'Akash', birthday: '1994-01-03', roll: 11 } + ]; + const dataModel = new 
DataModel(data, schema); + const dm = dataModel.project(['name', 'roll']); + const expected = { + schema: [ + { + name: 'name', + type: 'dimension', + subtype: 'categorical' + }, + { + name: 'birthday', + type: 'dimension', + subtype: 'temporal', + format: '%Y-%m-%d' + }, + { + name: 'roll', + type: 'measure', + subtype: 'continuous' + } + ], + data: [ + [ + 'Rousan', + 804882600000, + 10 + ], + [ + 'Sumant', + 839097000000, + 14 + ], + [ + 'Akash', + 757535400000, + 11 + ] + ], + uids: [0, 1, 2] + }; + + expect(dm.getData({ getAllFields: true })).to.deep.equal(expected); + }); }); describe('#project', () => { From 2687a428e8476edfa9d5f7bf2bc85bc58b2113cd Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Fri, 8 Feb 2019 14:33:40 +0530 Subject: [PATCH 08/21] Update unit testcase for getData() getAllFields() arg --- src/index.spec.js | 61 ++++++++++++++++++++++++----------------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/src/index.spec.js b/src/index.spec.js index 44fc053..77bec97 100644 --- a/src/index.spec.js +++ b/src/index.spec.js @@ -333,58 +333,59 @@ describe('DataModel', () => { }); it('should return all field data when getAllFields is true', () => { - const schema = [ - { name: 'name', type: 'dimension' }, - { name: 'birthday', type: 'dimension', subtype: 'temporal', format: '%Y-%m-%d' }, - { name: 'roll', type: 'measure' }, - ]; - const data = [ - { name: 'Rousan', birthday: '1995-07-05', roll: 10 }, - { name: 'Sumant', birthday: '1996-08-04', roll: 14 }, - { name: 'Akash', birthday: '1994-01-03', roll: 11 } + { age: 30, job: 'unemployed', marital: 'married' }, + { age: 33, job: 'services', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, ]; const dataModel = new DataModel(data, schema); - const dm = dataModel.project(['name', 'roll']); + const dm = 
dataModel.project(['age', 'job']); const expected = { schema: [ { - name: 'name', - type: 'dimension', - subtype: 'categorical' + name: 'age', + type: 'measure', + subtype: 'continuous' }, { - name: 'birthday', + name: 'job', type: 'dimension', - subtype: 'temporal', - format: '%Y-%m-%d' + subtype: 'categorical' }, { - name: 'roll', - type: 'measure', - subtype: 'continuous' + name: 'marital', + type: 'dimension', + subtype: 'categorical' } ], data: [ [ - 'Rousan', - 804882600000, - 10 + 30, + 'unemployed', + 'married' ], [ - 'Sumant', - 839097000000, - 14 + 33, + 'services', + 'married' ], [ - 'Akash', - 757535400000, - 11 + 35, + 'management', + 'single' ] ], - uids: [0, 1, 2] + uids: [ + 0, + 1, + 2 + ] }; - expect(dm.getData({ getAllFields: true })).to.deep.equal(expected); }); }); From 1207832b50677608b90407e4814a5015262855c9 Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Thu, 21 Feb 2019 15:39:19 +0530 Subject: [PATCH 09/21] persisted derivations for sort --- dist/datamodel.js | 2 - dist/datamodel.js.map | 1 - example/samples/example5.js | 308 +++++++++++++++++++----------------- src/constants/index.js | 3 +- src/datamodel.js | 16 +- 5 files changed, 180 insertions(+), 150 deletions(-) delete mode 100644 dist/datamodel.js delete mode 100644 dist/datamodel.js.map diff --git a/dist/datamodel.js b/dist/datamodel.js deleted file mode 100644 index 669600c..0000000 --- a/dist/datamodel.js +++ /dev/null @@ -1,2 +0,0 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var a=t[r]={i:r,l:!1,exports:{}};return e[r].call(a.exports,a,a.exports,n),a.l=!0,a.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof 
Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)n.d(r,a,function(t){return e[t]}.bind(null,a));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports={name:"datamodel",description:"Relational algebra compliant in-memory tabular data store",homepage:"https://github.com/chartshq/datamodel",version:"2.0.2",license:"MIT",main:"dist/datamodel.js",author:"Charts.com ",keywords:["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],repository:{type:"git",url:"https://github.com/chartshq/datamodel.git"},contributors:[{name:"Akash Goswami",email:"akash@charts.com"},{name:"Subhash Haldar",email:"subhash@charts.com"},{name:"Rousan Ali",email:"rousan@charts.com",url:"https://rousan.io"},{name:"Ujjal Kumar 
Dutta",email:"ujjal@charts.com"}],dependencies:{"d3-dsv":"^1.0.8"},devDependencies:{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0",chai:"3.5.0","cross-env":"^5.0.5",eslint:"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0",jsdoc:"3.5.5",json2yaml:"^1.1.0",karma:"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3",marked:"^0.5.0",mocha:"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0",webpack:"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},scripts:{test:"npm run lint && npm run ut",ut:"karma start karma.conf.js",utd:"karma start --single-run false --browsers Chrome karma.conf.js ",build:"webpack --mode production",start:"webpack-dev-server --config webpack.config.dev.js --mode development --open",lint:"eslint ./src","lint-errors":"eslint --quiet ./src",docs:"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l});var a={};n.r(a),n.d(a,"DSVArr",function(){return Ve}),n.d(a,"DSVStr",function(){return $e}),n.d(a,"FlatJSON",function(){return Qe}),n.d(a,"Auto",function(){return Ze});var i={};n.r(i),n.d(i,"sum",function(){return _t}),n.d(i,"avg",function(){return Ot}),n.d(i,"min",function(){return Et}),n.d(i,"max",function(){return At}),n.d(i,"first",function(){return 
jt}),n.d(i,"last",function(){return St}),n.d(i,"count",function(){return Ft}),n.d(i,"sd",function(){return kt});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"};function s(e){return e instanceof Date?e:new Date(e)}function d(e){return e<10?"0"+e:e}function p(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},p.TOKEN_PREFIX="%",p.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},p.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},p.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;a=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},p.formatAs=function(e,t){var n,r=s(e),a=p.findTokens(t),i=p.getTokenDefinitions(),o=String(t),u=p.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var T=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}(),R=function(e,t,n){for(var r=[],a=t;a=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 
0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:V.CROSS,i=[],o=[],u=n||B,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=M(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(n){var d=!1,h=void 0;D(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var b=nt(y[l]),g=nt(y[s]);if(u(b,g,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var w={};m.forEach(function(e,t){w[i[t].name]=e}),d&&V.CROSS!==a?o[h]=w:(o.push(w),d=!0,h=n)}else if((a===V.LEFTOUTER||a===V.RIGHTOUTER)&&!d){var _={},O=c.fields.length-1;m.forEach(function(e,t){_[i[t].name]=t<=O?e:null}),d=!0,h=n,o.push(_)}})}),new wt(o,i,{name:d})}function G(e,t){var n=""+e,r=""+t;return nr?1:0}function K(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:G;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function W(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function X(e,t,n){var r={label:e[0]};return 
t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function $(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=st(r,a))&&("function"==typeof i?K(n,function(e,t){return i(e[o.index],t[o.index])}):O(i)?function(){var e=q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return st(r,e)});e.forEach(function(e){e.push(X(e,a,u))}),K(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,W(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",K(n,z(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,W(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function Q(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new wt(i,r,{name:l})}function Z(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=oe.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=oe.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=re)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var 
t=ue(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},b=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],b,y)})}),r?(r.__calculateFieldspace(),v=r):v=new Dt(h,d,{name:c}),v}function fe(e,t){var n=M(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function le(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new Dt(i,r,{name:l})}function se(e,t,n){return J(e,t,n,!1,V.LEFTOUTER)}function de(e,t,n){return J(t,e,n,!1,V.RIGHTOUTER)}var pe=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),Fe=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===Ge?f=!0:r===Ke&&(f=!0,e.charCodeAt(o)===Ge&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3],i=void 0;t!==U?(i={op:t,meta:r,criteria:a},e._derivation.push(i)):(i=[].concat(tt(a)),e._derivation.length=0,(n=e._derivation).push.apply(n,tt(i)))},it=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 
0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new N(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},D(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},ot=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||Y,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return n=(t=e).getData(),r=n.schema,i=t.getFieldsConfig(),o=t.getFieldspace().fieldsObj(),u=n.data,c=Object.values(i).reduce(function(e,t){return e[t.def.name]=o[t.def.name].domain(),e},{}),function(e){return!!u.length&&u.some(function(t){return r.every(function(n){if(!(n.name in e))return!0;var r=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return r>=c[n.name][0]&&r<=c[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=i[n.name].index;return t[o]===e[n.name].valueOf()})})};var t,n,r,i,o,u,c}):[function(){return!1}];var o=void 0;r===Y?o=e.clone(!1,!1).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):o=e.clone(!1,!1).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1});return o},ut=function(e,t,n,r){var a=e.clone(r.saveChild),i=it(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),r.saveChild&&at(a,x,{config:n},t),a},ct=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return 
n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),n.saveChild&&at(a,C,{projField:t,config:n,actualProjField:i},null),a},ft=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},lt=function(e,t,n,r){n=function(e){return e.map(function(e){return ft(e)})}(n),r=Object.assign(Object.assign({},He),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=et(u,2),f=c[0],l=c[1],s=Le(l,n,f),d=F.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?S(t):r.dataFormat,e},st=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=dt(n,t),o=et(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},ht=function e(t){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];return null!==t._parent&&(n.push(t),e(t._parent,n)),n},vt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var 
t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:ht(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(tt(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,tt(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),b=t.groupByModel;f&&b&&(i=ot(b,a,{filterByMeasure:f}),pt(b,i,y)),l.forEach(function(e){var t=ot(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=void 0;if(!1===(!(arguments.length>1&&void 0!==arguments[1])||arguments[1])){var n=this.getData({getAllFields:!0}),r=n.data,a=n.schema,i=r.map(function(e){var t={};return a.forEach(function(n,r){t[n.name]=e[r]}),t});t=new this.constructor(i,a)}else t=new this.constructor(this);return e&&this._children.push(t),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:[];at(this,U,null,t),this._parent=e,e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}}]),e}(),bt=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable 
instance")}}(),gt=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=ce.apply(void 0,a);return n.saveChild&&(this._children.push(i),at(i,I,{fieldsArr:e,groupByString:r,defaultReducer:oe.defaultReducer()},t)),i._parent=this,i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=ft(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];D(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function e(t){return t._parent?e(t._parent):t}(this),c=u._propagationNameSpace,f={groupByModel:function e(t){return 
t._parent&&t._derivation.find(function(e){return"group"!==e.op})?e(t._parent):t}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),vt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;vt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=T(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[],o=n.saveChild;return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&a.dispose(),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;n {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === 
undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 
12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: 
DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentYear = Math.trunc(((new 
Date()).getFullYear()) / 100);\n if (parsedVal instanceof Number) {\n parsedVal = (presentYear * 100) + parsedVal;\n } else {\n parsedVal = `${presentYear}${parsedVal}`;\n }\n return parsedVal;\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = tokenResolver[resolverKey];\n resolverFn = 
resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n return dtParamArr;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new 
RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n if (dateTimeStamp instanceof Date) {\n return dateTimeStamp;\n } else if (isFinite(dateTimeStamp) && !!this.format) {\n return new Date(dateTimeStamp);\n }\n\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n\n dtParams.unshift(null);\n this.nativeDate = new (Function.prototype.bind.apply(Date, dtParams))();\n return this.nativeDate;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = 
parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? [] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = 
tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? [] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return 
[...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type 
=== FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (val, field) {\n Object.defineProperty(this, '_value', {\n enumerable: false,\n configurable: false,\n writable: false,\n value: val\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = (start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? 
(dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum === null) {\n binnedData.push(null);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 
'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = 
`${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = field.partialField.data[i];\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = field.partialField.data[ii];\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n 
tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n 
sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray, } from '../utils';\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @param {integer} index - The index of the data which will be sorted.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType, index) {\n let retFunc;\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'desc') {\n retFunc = (a, b) => b[index] - a[index];\n } else {\n retFunc = (a, b) => a[index] - b[index];\n }\n break;\n default:\n retFunc = (a, b) => {\n const a1 = `${a[index]}`;\n const b1 = `${b[index]}`;\n if (a1 < b1) {\n return sortType === 'desc' ? 1 : -1;\n }\n if (a1 > b1) {\n return sortType === 'desc' ? 
-1 : 1;\n }\n return 0;\n };\n }\n return retFunc;\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData(data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data before return in dataBuilder.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction sortData(dataObj, sortingDetails) {\n const { data, schema } = dataObj;\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n 
if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n sortMeta = String(sortMeta).toLowerCase() === 'desc' ? 'desc' : 'asc';\n mergeSort(data, getSortFn(fDetails.type, sortMeta, fDetails.index));\n }\n }\n\n dataObj.uids = [];\n data.forEach((value) => {\n dataObj.uids.push(value.pop());\n });\n}\n\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. 
*/\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should 
match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\n\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr)) {\n const totalSum = arr.reduce((acc, curr) =>\n ((curr === null || curr === undefined) ? acc : acc + +curr)\n , null);\n\n return Number.isNaN(totalSum) ? 
null : totalSum;\n }\n return null;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum === null) ? null : totalSum / len;\n }\n return null;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr)) {\n // Filter out undefined, null and NaN values\n const filteredValues = arr.filter(num =>\n !(num === undefined || num === null || Number.isNaN(+num)));\n\n return (filteredValues.length) ? Math.min(...filteredValues) : null;\n }\n return null;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr)) {\n // Filter out undefined, null and NaN values\n const filteredValues = arr.filter(num =>\n !(num === undefined || num === null || Number.isNaN(+num)));\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : null;\n }\n return null;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return null;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n sum,\n avg,\n min,\n max,\n first,\n last,\n count,\n std\n};\n\nconst defaultReducerName = 'sum';\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].value ===\n dm2Fields[fieldName].value && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = 
value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} 
rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n 
*/\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum === null) {\n return;\n }\n\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends 
Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum === null) {\n return;\n }\n\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * 
Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum === null) {\n data.push(null);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, this.format()));\n }\n });\n return data;\n }\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of 
the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum === null) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class 
FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n return (val === undefined || val === null) ? null : String(val).trim();\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = null;\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n if (val === null || val === undefined) {\n return null;\n }\n\n if (this.schema.format) {\n this._dtf = this._dtf || new DateTimeFormatter(this.schema.format);\n return this._dtf.getNativeDate(val).getTime();\n }\n\n // If format is not present which means the value is such that\n // it could be directly passed to Date constructor.\n return +new Date(val);\n }\n}\n","import FieldParser from '../field-parser';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default 
class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n if (val === null || val === undefined) {\n return null;\n }\n\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n\n const matched = val.match(regex);\n if (!matched) {\n return null;\n }\n\n return `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`;\n }\n}\n","import FieldParser from '../field-parser';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n val = parseFloat(val, 10);\n return Number.isNaN(val) ? null : val;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case 
DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n 
*\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr (arr, options) {\n const defaultOption = {\n firstRowHeader: true,\n };\n options = Object.assign({}, defaultOption, options);\n\n let header;\n const columns = [];\n const push = columnMajor(columns);\n\n if (options.firstRowHeader) {\n // If header present then mutate the array.\n // Do in-place mutation to save space.\n header = arr.splice(0, 1)[0];\n } else {\n header = [];\n }\n\n arr.forEach(field => push(...field));\n\n return [header, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return 
JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) 
{ eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n })).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(text) {\n return text == null ? \"\"\n : reFormat.test(text += \"\") ? \"\\\"\" + text.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : text;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of 
the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr) {\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n\n arr.forEach((item) => {\n const fields = [];\n for (let key in item) {\n if (key in header) {\n insertionIndex = header[key];\n } else {\n header[key] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[key];\n }\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config 
specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, i) {\n const resp = {};\n for (let field of fields) {\n resp[field.name()] = new Value(field.partialField.data[i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n Object.keys(fields).forEach((key) => { resp[key] = new Value(fields[key], key); });\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistDerivation = (model, operation, config = {}, criteriaFn) => {\n let derivative;\n if (operation !== DM_DERIVATIVES.COMPOSE) {\n derivative = {\n op: operation,\n meta: config,\n criteria: criteriaFn\n };\n model._derivation.push(derivative);\n }\n else {\n derivative = [...criteriaFn];\n model._derivation.length = 0;\n model._derivation.push(...derivative);\n }\n};\n\nexport const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => {\n const newRowDiffSet = [];\n let lastInsertedValue = -1;\n let { mode } = config;\n let li;\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n let checker;\n if (mode === FilteringMode.INVERSE) {\n checker = index => !selectorHelperFn(index);\n } else {\n checker = index => selectorHelperFn(index);\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n li = newRowDiffSet.length - 1;\n newRowDiffSet[li] = `${newRowDiffSet[li].split('-')[0]}-${i}`;\n } else {\n newRowDiffSet.push(`${i}`);\n }\n lastInsertedValue = i;\n }\n });\n return newRowDiffSet.join(',');\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n let fns = [];\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n const dataObj = dataModel.getData();\n const schema = dataObj.schema;\n const fieldsConfig = 
dataModel.getFieldsConfig();\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = Object.values(fieldsConfig).reduce((acc, v) => {\n acc[v.def.name] = fieldsSpace[v.def.name].domain();\n return acc;\n }, {});\n\n return (fields) => {\n const include = !data.length ? false : data.some(row => schema.every((propField) => {\n if (!(propField.name in fields)) {\n return true;\n }\n const value = fields[propField.name].valueOf();\n if (filterByMeasure && propField.type === FieldType.MEASURE) {\n return value >= domain[propField.name][0] && value <= domain[propField.name][1];\n }\n\n if (propField.type !== FieldType.DIMENSION) {\n return true;\n }\n const idx = fieldsConfig[propField.name].index;\n return row[idx] === fields[propField.name].valueOf();\n }));\n return include;\n };\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n const clonedModel = model.clone(false, false);\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false,\n mode: FilteringMode.ALL\n });\n } else {\n filteredModel = model.clone(false, false).select(fields => fns.some(fn => fn(fields)), {\n mode: FilteringMode.ALL,\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const rowDiffset = selectHelper(\n cloned._rowDiffset,\n cloned.getPartialFieldspace().fields,\n selectFn,\n selectConfig,\n sourceDm\n );\n cloned._rowDiffset = rowDiffset;\n cloned.__calculateFieldspace().calculateFieldsConfig();\n // Store reference to child model and selector function\n if (cloneConfig.saveChild) {\n persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn);\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = 
sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n // Store reference to child model and projection fields\n if (config.saveChild) {\n persistDerivation(\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n }\n\n return cloned;\n};\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema));\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, options);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n // If data is 
provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n type: schema[i].subtype || schema[i].type,\n index: i\n };\n }\n }\n return null;\n};\n\n\nexport const getOperationArguments = (child) => {\n const derivation = child._derivation;\n let params = [];\n let operation;\n if (derivation && derivation.length === 1) {\n operation = derivation[0].op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation[0].criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation[0].meta.actualProjField];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation[0].meta.groupByString.split(','), derivation[0].criteria];\n break;\n default:\n break;\n }\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const { operation, params } = getOperationArguments(dataModel);\n let selectionModel = propModel[0];\n let rejectionModel = propModel[1];\n if (operation && params.length) {\n selectionModel = propModel[0][operation](...params, {\n saveChild: false\n });\n rejectionModel = propModel[1][operation](...params, {\n saveChild: false\n });\n }\n return [selectionModel, rejectionModel];\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const 
nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n let [selectionModel, rejectionModel] = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, [selectionModel, rejectionModel], config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n if (model._parent && model._derivation.find(d => d.op !== 'group')) {\n return getRootGroupByModel(model._parent);\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n if (model._parent) {\n return getRootDataModel(model._parent);\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n if (model._parent !== null) {\n path.push(model);\n getPathToRootModel(model._parent, path);\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n 
const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const 
actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport { persistDerivation, updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\nimport { DM_DERIVATIVES } from './constants';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input 
tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. 
If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n\n const cloneConfig = { saveChild: config.saveChild };\n let oDm;\n\n if (config.mode === FilteringMode.ALL) {\n const selectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.NORMAL },\n cloneConfig\n );\n const rejectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.INVERSE },\n cloneConfig\n );\n oDm = [selectDm, rejectDm];\n } else {\n oDm = cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n return oDm;\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true, linkParent = true) {\n let retDataModel;\n if (linkParent === false) {\n const dataObj = this.getData({\n getAllFields: true\n });\n const data = dataObj.data;\n const schema = 
dataObj.schema;\n const jsonData = data.map((row) => {\n const rowObj = {};\n schema.forEach((field, i) => {\n rowObj[field.name] = row[i];\n });\n return rowObj;\n });\n retDataModel = new this.constructor(jsonData, schema);\n }\n else {\n retDataModel = new this.constructor(this);\n }\n\n if (saveChild) {\n this._children.push(retDataModel);\n }\n return retDataModel;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n\n let normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n\n normalizedProjField = Array.from(new Set(normalizedProjField)).map(field => field.trim());\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldDef, i) => {\n acc[fieldDef.name()] = {\n index: i,\n def: { name: fieldDef.name(), type: fieldDef.type(), subtype: fieldDef.subtype() }\n };\n return acc;\n }, {});\n return this;\n }\n\n\n 
/**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent.removeChild(this);\n this._parent = null;\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? 
this._children.splice(idx, 1) : true;\n }\n\n /**\n * Adds the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * The optional criteriaQueue is an array containing the history of transaction performed on parent\n * {@link DataModel} to get the current one.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n * @param {Array} criteriaQueue - Queue contains in-between operation meta-data.\n */\n addParent (parent, criteriaQueue = []) {\n persistDerivation(this, DM_DERIVATIVES.COMPOSE, null, criteriaQueue);\n this._parent = parent;\n parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", 
Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren() {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations() {\n return this._derivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat } from './enums';\nimport {\n persistDerivation,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } 
from './field-creator';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. 
If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n this._sortingDetails = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. 
The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n if (config.saveChild) {\n this._children.push(newDataModel);\n persistDerivation(\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n }\n\n newDataModel._parent = this;\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order in first level 
followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n sortedDm._sortingDetails = sortingDetails;\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current 
{@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = 
serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone();\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n if (config.saveChild) {\n persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn);\n }\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n /**\n * 
Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone();\n clone.addField(binField);\n\n persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null);\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return new DataModel(data, schema);\n 
}\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode } from './constants';\nimport pkg from '../package.json';\n\nDataModel.Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\nDataModel.Stats = Stats;\nObject.assign(DataModel, enums);\nDataModel.DateTimeFormatter = DateTimeFormatter;\nDataModel.DataFormat = DataFormat;\nDataModel.FilteringMode = FilteringMode;\nDataModel.version = pkg.version;\n\nexport default DataModel;\n","\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. 
{@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. 
{@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. 
Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. 
The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let frstChild;\n const derivations = [];\n const saveChild = config.saveChild;\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!frstChild) {\n frstChild = currentDM;\n }\n });\n\n saveChild && currentDM.addParent(dm, derivations);\n if (derivations.length > 1) {\n frstChild.dispose();\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on 
calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file diff --git a/example/samples/example5.js b/example/samples/example5.js index c4a977f..4277d78 100644 --- a/example/samples/example5.js +++ b/example/samples/example5.js @@ -2,152 +2,170 @@ fetch("/data/cars.json") .then(resp => resp.json()) .then(data => { - const schema = [ - // { - // "name": "Ticket", - // "type": "dimension" - // }, - // { - // "name": "Organisation", - // "type": "dimension" - // }, - // { - // "name": "Name", - // "type": "dimension" - // }, - // { - // "name": "Email ID", - // "type": "dimension" - // }, - // { - // "name": "Country", - // "type": "dimension" - // }, - // { - // "name": "Medium", - // "type": "dimension" - // }, - // { - // "name": "Member", - // "type": "dimension" - // }, - // { - // "name": "Shared with Member", - // "type": "dimension" - // }, - // { - // "name": "Partner", - // "type": "dimension" - // }, - // { - // "name": "Partner Name", - // "type": "dimension" - // }, - // { - // "name": "Partner Email ID", - // "type": "dimension" - // }, - // { - // "name": "Product", - // "type": "dimension" - // }, - // { - // "name": "New / Renewal", - 
// "type": "dimension" - // }, - // { - // "name": "Industry", - // "type": "dimension" - // }, - // { - // "name": "Trade Discount", - // "type": "measure" - // }, - // { - // "name": "Reseller Discount", - // "type": "measure" - // }, - { - name: 'Qty', - type: 'measure' - }, - // { - // "name": "Price", - // "type": "measure" - // }, - // { - // "name": "Gross Value", - // "type": "measure" - // }, - // { - // "name": "Net Value", - // "type": "measure" - // }, - // { - // "name": "PO Number", - // "type": "measure" - // }, - { - name: 'Date of Order', - type: 'dimension', - subtype: 'temporal', - format: '%Y-%m-%d' - }, - // { - // "name": "Month", - // "type": "dimension", - // "subtype": "temporal", - // "format": "%Y-%m-%d" - // }, - // { - // "name": "Quarter", - // "type": "dimension" - // }, - { - "name": "Date of Payment", - "type": "dimension", - "subtype": "temporal", - "format": "%Y-%m-%d" - }, - // { - // "name": "Payment Mode", - // "type": "dimension" - // }, - // { - // "name": "Source/Ref No.", - // "type": "measure" - // }, - // { - // "name": "Payment Due Date", - // "type": "dimension", - // "subtype": "temporal", - // "format": "%Y-%m-%d" - // }, - // { - // "name": "Lead in Date", - // "type": "dimension" - // }, - // { - // "name": "Lead out Date", - // "type": "dimension" - // }, - // { - // name: 'Days Taken', - // type: 'measure' - // }, - // { - // name: 'Status', - // type: 'dimension' - // } - ]; + // const schema = [ + // // { + // // "name": "Ticket", + // // "type": "dimension" + // // }, + // // { + // // "name": "Organisation", + // // "type": "dimension" + // // }, + // // { + // // "name": "Name", + // // "type": "dimension" + // // }, + // // { + // // "name": "Email ID", + // // "type": "dimension" + // // }, + // // { + // // "name": "Country", + // // "type": "dimension" + // // }, + // // { + // // "name": "Medium", + // // "type": "dimension" + // // }, + // // { + // // "name": "Member", + // // "type": "dimension" + // 
// }, + // // { + // // "name": "Shared with Member", + // // "type": "dimension" + // // }, + // // { + // // "name": "Partner", + // // "type": "dimension" + // // }, + // // { + // // "name": "Partner Name", + // // "type": "dimension" + // // }, + // // { + // // "name": "Partner Email ID", + // // "type": "dimension" + // // }, + // // { + // // "name": "Product", + // // "type": "dimension" + // // }, + // // { + // // "name": "New / Renewal", + // // "type": "dimension" + // // }, + // // { + // // "name": "Industry", + // // "type": "dimension" + // // }, + // // { + // // "name": "Trade Discount", + // // "type": "measure" + // // }, + // // { + // // "name": "Reseller Discount", + // // "type": "measure" + // // }, + // { + // name: 'Qty', + // type: 'measure' + // }, + // // { + // // "name": "Price", + // // "type": "measure" + // // }, + // // { + // // "name": "Gross Value", + // // "type": "measure" + // // }, + // // { + // // "name": "Net Value", + // // "type": "measure" + // // }, + // // { + // // "name": "PO Number", + // // "type": "measure" + // // }, + // { + // name: 'Date of Order', + // type: 'dimension', + // subtype: 'temporal', + // format: '%Y-%m-%d' + // }, + // // { + // // "name": "Month", + // // "type": "dimension", + // // "subtype": "temporal", + // // "format": "%Y-%m-%d" + // // }, + // // { + // // "name": "Quarter", + // // "type": "dimension" + // // }, + // { + // "name": "Date of Payment", + // "type": "dimension", + // "subtype": "temporal", + // "format": "%Y-%m-%d" + // }, + // // { + // // "name": "Payment Mode", + // // "type": "dimension" + // // }, + // // { + // // "name": "Source/Ref No.", + // // "type": "measure" + // // }, + // // { + // // "name": "Payment Due Date", + // // "type": "dimension", + // // "subtype": "temporal", + // // "format": "%Y-%m-%d" + // // }, + // // { + // // "name": "Lead in Date", + // // "type": "dimension" + // // }, + // // { + // // "name": "Lead out Date", + // // "type": 
"dimension" + // // }, + // // { + // // name: 'Days Taken', + // // type: 'measure' + // // }, + // // { + // // name: 'Status', + // // type: 'dimension' + // // } + // ]; - // DataModel.configureInvalidAwareTypes({ - // "": DataModel.InvalidAwareTypes.NULL, - // }); - const dm = new DataModel(data, schema); - const dmData = dm.getData().data; - const selected = dm.select(fields => fields['Date of Payment'].value === DataModel.InvalidAwareTypes.NULL); + // // DataModel.configureInvalidAwareTypes({ + // // "": DataModel.InvalidAwareTypes.NULL, + // // }); + // const dm = new DataModel(data, schema); + // const dmData = dm.getData().data; + // const selected = dm.select(fields => fields['Date of Payment'].value === DataModel.InvalidAwareTypes.NULL); - const compData = dm.groupBy(['name']).getData(); + // const compData = dm.groupBy(['name']).getData(); + + const data1 = [ + { profit: 10, sales: 25, city: 'a', state: 'aa' }, + { profit: 15, sales: 20, city: 'b', state: 'bb' }, + { profit: 10, sales: 25, city: 'a', state: 'ab' }, + { profit: 15, sales: 20, city: 'b', state: 'ba' }, + ]; + const schema1 = [ + { name: 'profit', type: 'measure' }, + { name: 'sales', type: 'measure' }, + { name: 'city', type: 'dimension' }, + { name: 'state', type: 'dimension' }, + ]; + const dataModel = new DataModel(data1, schema1, { name: 'Yo' }); + + kk = dataModel.project(['profit','sales']) + + mm = kk.sort(['sales'],{saveChild: true}) }) diff --git a/src/constants/index.js b/src/constants/index.js index 86c5bb0..512970f 100644 --- a/src/constants/index.js +++ b/src/constants/index.js @@ -18,7 +18,8 @@ export const DM_DERIVATIVES = { GROUPBY: 'group', COMPOSE: 'compose', CAL_VAR: 'calculatedVariable', - BIN: 'bin' + BIN: 'bin', + SORT: 'sort' }; export const JOINS = { diff --git a/src/datamodel.js b/src/datamodel.js index cba7b8f..1ed5693 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ -305,7 +305,13 @@ class DataModel extends Relation { * @param {Array.} sortingDetails - 
Sorting details based on which the sorting will be performed. * @return {DataModel} Returns a new instance of DataModel with sorted data. */ - sort (sortingDetails) { + sort (sortingDetails, config = { saveChild: false }) { + if (this._sortingDetails.length) { + const parent = this._parent; + this.dispose(); + return parent.sort(sortingDetails, config); + } + const rawData = this.getData({ order: 'row', sort: sortingDetails @@ -315,6 +321,14 @@ class DataModel extends Relation { const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' }); sortedDm._sortingDetails = sortingDetails; + sortedDm._derivation = [...this._derivation]; + + persistDerivation(sortedDm, DM_DERIVATIVES.SORT, config, sortingDetails); + + if (config.saveChild) { + this._children.push(sortedDm); + } + sortedDm._parent = this; return sortedDm; } From 0387b3d7089eb5d6ce8cac6d34e5c1c7447bb640 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Tue, 12 Mar 2019 19:23:19 +0530 Subject: [PATCH 10/21] Rename charts.com to muzejs.org --- LICENSE | 2 +- README.md | 4 ++-- dist/datamodel.js | 2 +- dist/datamodel.js.map | 2 +- example/js/datamodel.js | 2 +- package.json | 10 ++++------ 6 files changed, 10 insertions(+), 12 deletions(-) diff --git a/LICENSE b/LICENSE index d8c182b..b84531d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2018 Charts.com +Copyright (c) 2018 Muzejs.org Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index 8e8df01..b9f046f 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ DataModel can be used if you need an in-browser tabular data store for data anal Insert the DataModel build into the ``: ```html - + ``` ### NPM @@ -144,7 +144,7 @@ console.log(projectDm.getData().schema); ## Documentation -Find detailed documentation and API reference from 
[here](https://www.charts.com/muze/docs/introduction-to-datamodel). +Find detailed documentation and API reference from [here](https://muzejs.org/docs/introduction-to-datamodel). ## Contributing diff --git a/dist/datamodel.js b/dist/datamodel.js index ea3c2ae..1ad9c86 100644 --- a/dist/datamodel.js +++ b/dist/datamodel.js @@ -1,2 +1,2 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var a=t[r]={i:r,l:!1,exports:{}};return e[r].call(a.exports,a,a.exports,n),a.l=!0,a.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)n.d(r,a,function(t){return e[t]}.bind(null,a));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports={name:"datamodel",description:"Relational algebra compliant in-memory tabular data store",homepage:"https://github.com/chartshq/datamodel",version:"2.1.0",license:"MIT",main:"dist/datamodel.js",author:"Charts.com ",keywords:["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],repository:{type:"git",url:"https://github.com/chartshq/datamodel.git"},contributors:[{name:"Akash Goswami",email:"akash@charts.com"},{name:"Subhash 
Haldar",email:"subhash@charts.com"},{name:"Rousan Ali",email:"rousan@charts.com",url:"https://rousan.io"},{name:"Ujjal Kumar Dutta",email:"ujjal@charts.com"}],dependencies:{"d3-dsv":"^1.0.8"},devDependencies:{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0",chai:"3.5.0","cross-env":"^5.0.5",eslint:"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0",jsdoc:"3.5.5",json2yaml:"^1.1.0",karma:"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3",marked:"^0.5.0",mocha:"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0",webpack:"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},scripts:{test:"npm run lint && npm run ut",ut:"karma start karma.conf.js",utd:"karma start --single-run false --browsers Chrome karma.conf.js ",build:"webpack --mode production",start:"webpack-dev-server --config webpack.config.dev.js --mode development --open",lint:"eslint ./src","lint-errors":"eslint --quiet ./src",docs:"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l});var a={};n.r(a),n.d(a,"DSVArr",function(){return Ge}),n.d(a,"DSVStr",function(){return tt}),n.d(a,"FlatJSON",function(){return nt}),n.d(a,"Auto",function(){return rt});var i={};n.r(i),n.d(i,"sum",function(){return jt}),n.d(i,"avg",function(){return 
St}),n.d(i,"min",function(){return Nt}),n.d(i,"max",function(){return kt}),n.d(i,"first",function(){return Ft}),n.d(i,"last",function(){return Dt}),n.d(i,"count",function(){return Tt}),n.d(i,"sd",function(){return Rt});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"};function s(e){return e instanceof Date?e:new Date(e)}function d(e){return e<10?"0"+e:e}function p(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},p.TOKEN_PREFIX="%",p.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},p.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},p.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;aa.getFullYear()&&(t=""+(i-1)+r),s(t).getFullYear()},formatter:function(e){var t=s(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:p.defaultNumberParser(),formatter:function(e){return s(e).getFullYear().toString()}}}},p.getTokenFormalNames=function(){var e=p.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},p.tokenResolver=function(){var e=p.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},p.formatAs=function(e,t){var n,r=s(e),a=p.findTokens(t),i=p.getTokenDefinitions(),o=String(t),u=p.TOKEN_PREFIX,c=void 0,f=void 
0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var T=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:J.CROSS,i=[],o=[],u=n||K,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=C(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(n){var d=!1,h=void 0;D(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var g=ot(y[l]),b=ot(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var w={};m.forEach(function(e,t){w[i[t].name]=e}),d&&J.CROSS!==a?o[h]=w:(o.push(w),d=!0,h=n)}else if((a===J.LEFTOUTER||a===J.RIGHTOUTER)&&!d){var _={},O=c.fields.length-1;m.forEach(function(e,t){_[i[t].name]=t<=O?e:null}),d=!0,h=n,o.push(_)}})}),new At(o,i,{name:d})}function z(e,t){var n=""+e,r=""+t;return nr?1:0}function q(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:z;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var 
c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function X(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function Q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function Z(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function ee(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=vt(r,a))&&("function"==typeof i?q(n,function(e,t){return i(e[o.index],t[o.index])}):O(i)?function(){var e=Q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return vt(r,e)});e.forEach(function(e){e.push(Z(e,a,u))}),q(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,X(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",q(n,$(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,X(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function te(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new At(i,r,{name:l})}function 
ne(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=le.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=le.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=ue)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=se(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new Mt(h,d,{name:c}),v}function pe(e,t){var n=C(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function he(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new Mt(i,r,{name:l})}function ve(e,t,n){return W(e,t,n,!1,J.LEFTOUTER)}function me(e,t,n){return W(t,e,n,!1,J.RIGHTOUTER)}var ye=function(){function e(e,t){for(var 
n=0;nn&&(n=a))}),[t,n]}}]),t}(),Te=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===qe?f=!0:r===Xe&&(f=!0,e.charCodeAt(o)===qe&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3],i=void 0;t!==H?(i={op:t,meta:r,criteria:a},e._derivation.push(i)):(i=[].concat(it(a)),e._derivation.length=0,(n=e._derivation).push.apply(n,it(i)))},ft=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new F(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},D(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},lt=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||G,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return n=(t=e).getData(),r=n.schema,i=t.getFieldsConfig(),o=t.getFieldspace().fieldsObj(),u=n.data,c=Object.values(i).reduce(function(e,t){return e[t.def.name]=o[t.def.name].domain(),e},{}),function(e){return!!u.length&&u.some(function(t){return r.every(function(n){if(!(n.name in e))return!0;var r=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return r>=c[n.name][0]&&r<=c[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=i[n.name].index;return t[o]===e[n.name].valueOf()})})};var t,n,r,i,o,u,c}):[function(){return!1}];var o=void 0;r===G?o=e.clone(!1,!1).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):o=e.clone(!1,!1).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1});return o},st=function(e,t,n,r){var 
a=e.clone(r.saveChild),i=ft(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),ct(a,L,{config:n},t),a},dt=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),ct(a,U,{projField:t,config:n,actualProjField:i},null),a},pt=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},ht=function(e,t,n,r){n=function(e){return e.map(function(e){return pt(e)})}(n),r=Object.assign(Object.assign({},Je),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=at(u,2),f=c[0],l=c[1],s=Be(l,n,f),d=N.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?S(t):r.dataFormat,e},vt=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=mt(n,t),o=at(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},gt=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},bt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var 
p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:gt(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(it(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,it(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=lt(g,a,{filterByMeasure:f}),yt(g,i,y)),l.forEach(function(e){var t=lt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=void 0;if(!1===(!(arguments.length>1&&void 0!==arguments[1])||arguments[1])){var n=this.getData({getAllFields:!0}),r=n.data,a=n.schema,i=r.map(function(e){var t={};return a.forEach(function(n,r){t[n.name]=e[r]}),t});t=new this.constructor(i,a)}else t=new this.constructor(this);return e&&this._children.push(t),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:[];ct(this,H,null,t),this._parent=e,e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}}]),e}(),Ot=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var 
o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}(),Et=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=de.apply(void 0,a);return ct(i,V,{fieldsArr:e,groupByString:r,defaultReducer:le.defaultReducer()},t),n.saveChild&&this._children.push(i),i._parent=this,i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=pt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];D(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 
0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return e}(this),c=u._propagationNameSpace,f={groupByModel:function e(t){return t._parent&&t._derivation.find(function(e){return"group"!==e.op})?e(t._parent):t}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),bt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;bt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=M(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[],o=n.saveChild;return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var 
t=0,n=Array(e.length);t1&&a.dispose(),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;na.getFullYear()&&(t=""+(i-1)+r),s(t).getFullYear()},formatter:function(e){var t=s(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:p.defaultNumberParser(),formatter:function(e){return s(e).getFullYear().toString()}}}},p.getTokenFormalNames=function(){var e=p.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},p.tokenResolver=function(){var e=p.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},p.formatAs=function(e,t){var n,r=s(e),a=p.findTokens(t),i=p.getTokenDefinitions(),o=String(t),u=p.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var T=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:J.CROSS,i=[],o=[],u=n||K,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=C(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var 
t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(n){var d=!1,h=void 0;D(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var g=ot(y[l]),b=ot(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var w={};m.forEach(function(e,t){w[i[t].name]=e}),d&&J.CROSS!==a?o[h]=w:(o.push(w),d=!0,h=n)}else if((a===J.LEFTOUTER||a===J.RIGHTOUTER)&&!d){var _={},O=c.fields.length-1;m.forEach(function(e,t){_[i[t].name]=t<=O?e:null}),d=!0,h=n,o.push(_)}})}),new At(o,i,{name:d})}function z(e,t){var n=""+e,r=""+t;return nr?1:0}function q(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:z;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function X(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function Q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function Z(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function ee(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=vt(r,a))&&("function"==typeof i?q(n,function(e,t){return i(e[o.index],t[o.index])}):O(i)?function(){var 
e=Q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return vt(r,e)});e.forEach(function(e){e.push(Z(e,a,u))}),q(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,X(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",q(n,$(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,X(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function te(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new At(i,r,{name:l})}function ne(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=le.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=le.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=ue)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=se(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 
0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new Mt(h,d,{name:c}),v}function pe(e,t){var n=C(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function he(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new Mt(i,r,{name:l})}function ve(e,t,n){return W(e,t,n,!1,J.LEFTOUTER)}function me(e,t,n){return W(t,e,n,!1,J.RIGHTOUTER)}var ye=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),Te=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===qe?f=!0:r===Xe&&(f=!0,e.charCodeAt(o)===qe&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3],i=void 0;t!==H?(i={op:t,meta:r,criteria:a},e._derivation.push(i)):(i=[].concat(it(a)),e._derivation.length=0,(n=e._derivation).push.apply(n,it(i)))},ft=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new 
F(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},D(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},lt=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||G,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return n=(t=e).getData(),r=n.schema,i=t.getFieldsConfig(),o=t.getFieldspace().fieldsObj(),u=n.data,c=Object.values(i).reduce(function(e,t){return e[t.def.name]=o[t.def.name].domain(),e},{}),function(e){return!!u.length&&u.some(function(t){return r.every(function(n){if(!(n.name in e))return!0;var r=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return r>=c[n.name][0]&&r<=c[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=i[n.name].index;return t[o]===e[n.name].valueOf()})})};var t,n,r,i,o,u,c}):[function(){return!1}];var o=void 0;r===G?o=e.clone(!1,!1).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):o=e.clone(!1,!1).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1});return o},st=function(e,t,n,r){var a=e.clone(r.saveChild),i=ft(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),ct(a,L,{config:n},t),a},dt=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),ct(a,U,{projField:t,config:n,actualProjField:i},null),a},pt=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},ht=function(e,t,n,r){n=function(e){return e.map(function(e){return 
pt(e)})}(n),r=Object.assign(Object.assign({},Je),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=at(u,2),f=c[0],l=c[1],s=Be(l,n,f),d=k.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?S(t):r.dataFormat,e},vt=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=mt(n,t),o=at(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},gt=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},bt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:gt(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(it(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,it(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=lt(g,a,{filterByMeasure:f}),yt(g,i,y)),l.forEach(function(e){var 
t=lt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=void 0;if(!1===(!(arguments.length>1&&void 0!==arguments[1])||arguments[1])){var n=this.getData({getAllFields:!0}),r=n.data,a=n.schema,i=r.map(function(e){var t={};return a.forEach(function(n,r){t[n.name]=e[r]}),t});t=new this.constructor(i,a)}else t=new this.constructor(this);return e&&this._children.push(t),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:[];ct(this,H,null,t),this._parent=e,e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}}]),e}(),Ot=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}(),Et=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=de.apply(void 0,a);return ct(i,V,{fieldsArr:e,groupByString:r,defaultReducer:le.defaultReducer()},t),n.saveChild&&this._children.push(i),i._parent=this,i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return 
r._sortingDetails=e,r}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=pt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];D(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return e}(this),c=u._propagationNameSpace,f={groupByModel:function e(t){return t._parent&&t._derivation.find(function(e){return"group"!==e.op})?e(t._parent):t}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),bt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var 
i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;bt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=M(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[],o=n.saveChild;return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&a.dispose(),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;n {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. 
This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 
12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d 
= convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n 
result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need 
to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (val, field) {\n Object.defineProperty(this, '_value', {\n enumerable: false,\n configurable: false,\n writable: false,\n value: val\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 
'bin'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, 
field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = field.partialField.data[i];\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = field.partialField.data[ii];\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n 
tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort 
(arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray, } from '../utils';\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @param {integer} index - The index of the data which will be sorted.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType, index) {\n let retFunc;\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'desc') {\n retFunc = (a, b) => b[index] - a[index];\n } else {\n retFunc = (a, b) => a[index] - b[index];\n }\n break;\n default:\n retFunc = (a, b) => {\n const a1 = `${a[index]}`;\n const b1 = `${b[index]}`;\n if (a1 < b1) {\n return sortType === 'desc' ? 1 : -1;\n }\n if (a1 > b1) {\n return sortType === 'desc' ? 
-1 : 1;\n }\n return 0;\n };\n }\n return retFunc;\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData(data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data before return in dataBuilder.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction sortData(dataObj, sortingDetails) {\n const { data, schema } = dataObj;\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n 
if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n sortMeta = String(sortMeta).toLowerCase() === 'desc' ? 'desc' : 'asc';\n mergeSort(data, getSortFn(fDetails.type, sortMeta, fDetails.index));\n }\n }\n\n dataObj.uids = [];\n data.forEach((value) => {\n dataObj.uids.push(value.pop());\n });\n}\n\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. 
*/\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should 
match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns 
the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n sum,\n avg,\n min,\n max,\n first,\n last,\n count,\n std\n};\n\nconst defaultReducerName = 'sum';\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].value ===\n dm2Fields[fieldName].value && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = 
value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} 
rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n 
*/\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * 
@extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return 
this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n data.push(datum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, this.format()));\n }\n });\n return data;\n }\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n 
*\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n 
}\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = 
this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case 
DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n 
*\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr (arr, options) {\n const defaultOption = {\n firstRowHeader: true,\n };\n options = Object.assign({}, defaultOption, options);\n\n let header;\n const columns = [];\n const push = columnMajor(columns);\n\n if (options.firstRowHeader) {\n // If header present then mutate the array.\n // Do in-place mutation to save space.\n header = arr.splice(0, 1)[0];\n } else {\n header = [];\n }\n\n arr.forEach(field => push(...field));\n\n return [header, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return 
JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) 
{ eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n })).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(text) {\n return text == null ? \"\"\n : reFormat.test(text += \"\") ? \"\\\"\" + text.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : text;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of 
the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr) {\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n\n arr.forEach((item) => {\n const fields = [];\n for (let key in item) {\n if (key in header) {\n insertionIndex = header[key];\n } else {\n header[key] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[key];\n }\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config 
specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, i) {\n const resp = {};\n for (let field of fields) {\n resp[field.name()] = new Value(field.partialField.data[i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n Object.keys(fields).forEach((key) => { resp[key] = new Value(fields[key], key); });\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistDerivation = (model, operation, config = {}, criteriaFn) => {\n let derivative;\n if (operation !== DM_DERIVATIVES.COMPOSE) {\n derivative = {\n op: operation,\n meta: config,\n criteria: criteriaFn\n };\n model._derivation.push(derivative);\n }\n else {\n derivative = [...criteriaFn];\n model._derivation.length = 0;\n model._derivation.push(...derivative);\n }\n};\n\nexport const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => {\n const newRowDiffSet = [];\n let lastInsertedValue = -1;\n let { mode } = config;\n let li;\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n let checker;\n if (mode === FilteringMode.INVERSE) {\n checker = index => !selectorHelperFn(index);\n } else {\n checker = index => selectorHelperFn(index);\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n li = newRowDiffSet.length - 1;\n newRowDiffSet[li] = `${newRowDiffSet[li].split('-')[0]}-${i}`;\n } else {\n newRowDiffSet.push(`${i}`);\n }\n lastInsertedValue = i;\n }\n });\n return newRowDiffSet.join(',');\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n let fns = [];\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n const dataObj = dataModel.getData();\n const schema = dataObj.schema;\n const fieldsConfig = 
dataModel.getFieldsConfig();\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = Object.values(fieldsConfig).reduce((acc, v) => {\n acc[v.def.name] = fieldsSpace[v.def.name].domain();\n return acc;\n }, {});\n\n return (fields) => {\n const include = !data.length ? false : data.some(row => schema.every((propField) => {\n if (!(propField.name in fields)) {\n return true;\n }\n const value = fields[propField.name].valueOf();\n if (filterByMeasure && propField.type === FieldType.MEASURE) {\n return value >= domain[propField.name][0] && value <= domain[propField.name][1];\n }\n\n if (propField.type !== FieldType.DIMENSION) {\n return true;\n }\n const idx = fieldsConfig[propField.name].index;\n return row[idx] === fields[propField.name].valueOf();\n }));\n return include;\n };\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n const clonedModel = model.clone(false, false);\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false,\n mode: FilteringMode.ALL\n });\n } else {\n filteredModel = model.clone(false, false).select(fields => fns.some(fn => fn(fields)), {\n mode: FilteringMode.ALL,\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const rowDiffset = selectHelper(\n cloned._rowDiffset,\n cloned.getPartialFieldspace().fields,\n selectFn,\n selectConfig,\n sourceDm\n );\n cloned._rowDiffset = rowDiffset;\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn);\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n 
projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema));\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, options);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? 
`0-${formattedData[0].length - 1}` : '';\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n type: schema[i].subtype || schema[i].type,\n index: i\n };\n }\n }\n return null;\n};\n\n\nexport const getOperationArguments = (child) => {\n const derivation = child._derivation;\n let params = [];\n let operation;\n if (derivation && derivation.length === 1) {\n operation = derivation[0].op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation[0].criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation[0].meta.actualProjField];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation[0].meta.groupByString.split(','), derivation[0].criteria];\n break;\n default:\n break;\n }\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const { operation, params } = getOperationArguments(dataModel);\n let selectionModel = propModel[0];\n let rejectionModel = propModel[1];\n if (operation && params.length) {\n selectionModel = propModel[0][operation](...params, {\n saveChild: false\n });\n rejectionModel = propModel[1][operation](...params, {\n saveChild: false\n });\n }\n return [selectionModel, rejectionModel];\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel 
=== nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n let [selectionModel, rejectionModel] = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, [selectionModel, rejectionModel], config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n if (model._parent && model._derivation.find(d => d.op !== 'group')) {\n return getRootGroupByModel(model._parent);\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n 
sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const 
filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport { persistDerivation, updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\nimport { DM_DERIVATIVES } from './constants';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} 
[options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. 
If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n\n const cloneConfig = { saveChild: config.saveChild };\n let oDm;\n\n if (config.mode === FilteringMode.ALL) {\n const selectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.NORMAL },\n cloneConfig\n );\n const rejectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.INVERSE },\n cloneConfig\n );\n oDm = [selectDm, rejectDm];\n } else {\n oDm = cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n return oDm;\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true, linkParent = true) {\n let retDataModel;\n if (linkParent === false) {\n const dataObj = this.getData({\n getAllFields: true\n });\n const data = dataObj.data;\n const schema = 
dataObj.schema;\n const jsonData = data.map((row) => {\n const rowObj = {};\n schema.forEach((field, i) => {\n rowObj[field.name] = row[i];\n });\n return rowObj;\n });\n retDataModel = new this.constructor(jsonData, schema);\n }\n else {\n retDataModel = new this.constructor(this);\n }\n\n if (saveChild) {\n this._children.push(retDataModel);\n }\n return retDataModel;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n\n let normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n\n normalizedProjField = Array.from(new Set(normalizedProjField)).map(field => field.trim());\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldDef, i) => {\n acc[fieldDef.name()] = {\n index: i,\n def: { name: fieldDef.name(), type: fieldDef.type(), subtype: fieldDef.subtype() }\n };\n return acc;\n }, {});\n return this;\n }\n\n\n 
/**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent.removeChild(this);\n this._parent = null;\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? 
this._children.splice(idx, 1) : true;\n }\n\n /**\n * Adds the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * The optional criteriaQueue is an array containing the history of transaction performed on parent\n * {@link DataModel} to get the current one.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n * @param {Array} criteriaQueue - Queue contains in-between operation meta-data.\n */\n addParent (parent, criteriaQueue = []) {\n persistDerivation(this, DM_DERIVATIVES.COMPOSE, null, criteriaQueue);\n this._parent = parent;\n parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", 
Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren() {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations() {\n return this._derivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat } from './enums';\nimport {\n persistDerivation,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } 
from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. 
Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n this._sortingDetails = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 
'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivation(\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n this._children.push(newDataModel);\n }\n newDataModel._parent = this;\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order in first level 
followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n sortedDm._sortingDetails = sortingDetails;\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current 
{@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = 
serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone();\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn);\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n /**\n * Associates a callback with an 
event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone();\n clone.addField(binField);\n\n persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null);\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return new DataModel(data, schema);\n 
}\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nDataModel.Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\nDataModel.Stats = Stats;\nObject.assign(DataModel, enums);\nDataModel.DateTimeFormatter = DateTimeFormatter;\nDataModel.DataFormat = DataFormat;\nDataModel.FilteringMode = FilteringMode;\nDataModel.InvalidAwareTypes = InvalidAwareTypes;\nDataModel.version = pkg.version;\n\nexport default DataModel;\n","\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. 
{@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. 
{@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. 
Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. 
The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let frstChild;\n const derivations = [];\n const saveChild = config.saveChild;\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!frstChild) {\n frstChild = currentDM;\n }\n });\n\n saveChild && currentDM.addParent(dm, derivations);\n if (derivations.length > 1) {\n frstChild.dispose();\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on 
calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file +{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./src/index.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/value.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/data
-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataModel/./src/operator/union.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/converter/dsv-arr.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/converter/dsv-str.js","webpack://DataModel/./src/converter/flat-json.js","webpack://DataModel/./src/converter/auto-resolver.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/export.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/pure-operators.js","webpack://DataModel/./src/operator/natural-join.js"],"names":["root","factory",
"exports","module","define","amd","window","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","object","property","prototype","hasOwnProperty","p","s","DataModel","require","default","DataFormat","FLAT_JSON","DSV_STR","DSV_ARR","AUTO","DimensionSubtype","CATEGORICAL","TEMPORAL","GEO","BINNED","MeasureSubtype","CONTINUOUS","FieldType","MEASURE","DIMENSION","FilteringMode","NORMAL","INVERSE","ALL","convertToNativeDate","date","Date","pad","DateTimeFormatter","format","this","dtParams","undefined","nativeDate","RegExp","escape","text","replace","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","val","parsedVal","isFinite","parseInt","defaultRangeParser","range","nVal","toLowerCase","length","getTokenDefinitions","daysDef","short","long","monthsDef","H","index","extract","parser","formatter","getHours","toString","hours","P","M","getMinutes","S","getSeconds","K","getMilliseconds","a","join","day","getDay","A","e","getDate","b","month","getMonth","B","y","result","substring","presentDate","presentYear","Math","trunc","getFullYear","year","Y","getTokenFormalNames","definitions","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","arg","targetParam","arguments","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","keys","occurrence","forwardChar","indexOf","push","token","formatAs","nDate","formattedStr","String","formattedVal","parse","dateTimeStamp","options","extractTokenValue","dtParamSeq","noBreak","dtParamArr","args","resolverKey","resolverParams","resolverFn","param","resolvedVa
l","splice","apply","checkIfOnlyYear","unshift","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","map","obj","occurrenceLength","extractValues","match","shift","getNativeDate","Number","Function","concat","_toConsumableArray","len","column_major","store","_len","fields","Array","_key","forEach","fieldIndex","from","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","extend2","obj1","obj2","skipUndef","_typeof","merge","tgtArr","srcArr","item","srcVal","tgtVal","str","cRef","isArray","getUniqueId","getTime","round","random","isArrEqual","arr1","arr2","formatNumber","detectDataFormat","data","isObject","fieldStore","createNamespace","fieldArr","dataId","fieldsObj","_cachedFieldsObj","field","getMeasure","measureFields","_cachedMeasure","schema","type","getDimension","dimensionFields","_cachedDimension","Value","_classCallCheck","configurable","writable","_value","rowDiffsetIterator","rowDiffset","callback","split","diffStr","diffStsArr","start","end","InvalidAwareTypes","invalid_aware_types_classCallCheck","config","assign","_invalidAwareValsMap","invalidAwareVals","NULL","NA","NIL","invalid","nil","null","generateBuckets","binSize","buckets","next","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","getCommonSchema","fs1","fs2","retArr","fs1Arr","DM_DERIVATIVES","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","defaultFilterFn","crossProduct","dm1","dm2","filterFn","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","getFieldspace","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","Error","tmpSchema","_rowDiffset","rowAdded","rowPosition","ii","tuple","userArg","partialField","dm1Fields","prepareJoinData","dm2Fields","detachedRoot","tupleObj","cellVal","iii","defSortFn","a1","b1","mergeSort","arr","sortFn","sort","lo","hi","mid","mainArr","auxArr","getSortFn","dataType","sortType","retFu
nc","groupData","hashMap","Map","groupedData","datum","fieldVal","has","set","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","label","reduce","acc","idx","dataBuilder","colIdentifier","sortingDetails","addUid","columnWise","retObj","uids","reqSorting","tmpDataArr","colName","insertInd","dataObj","fieldName","sortMeta","fDetails","fieldInSchema","sortingFn","slice","f","data_builder_toConsumableArray","pop","sortData","tmpData","difference","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","_colIdentifier","prepareDataHelper","dm","addData","hashData","schemaName","getFilteredValues","filter","sum","filteredNumber","curr","avg","totalSum","isNaN","fnList","min","filteredValues","group_by_function_toConsumableArray","max","first","last","count","std","sqrt","mean","num","pow","variance","defaultReducerName","ReducerStore","_this","reducer_store_classCallCheck","defReducer","entries","reducer","_this2","__unregister","delete","reducerStore","groupBy","dataModel","reducers","existingDataModel","sFieldArr","dimensions","_ref","group_by_slicedToArray","getFieldArr","reducerObj","measures","defaultReducer","measureName","defAggFn","reducerFn","resolve","getReducerObj","fieldStoreObj","dbName","dimensionArr","measureArr","newDataModel","_ref3","_ref4","rowCount","hash","_","cachedStore","cloneProvider","row","__calculateFieldspace","naturalJoinFilter","commonSchemaArr","retainTuple","union","leftOuterJoin","dataModel1","dataModel2","rightOuterJoin","Field","field_classCallCheck","subtype","description","displayName","Dimension","_cachedDomain","calculateDataDomain","Categorical","Set","domain","add","Temporal","temporal_classCallCheck","temporal_possibleConstructorReturn","__proto__","getPrototypeOf","_cachedMinDiff","sortedData","arrLn","minDiff","POSITIVE_INFINITY","prevDatum","nextDatum","processedCount","_this3","Binned","binsArr","bins","Measure","unit","numberFormat","Continuous","NEGATIVE_INFINITY","FieldParser","Cate
goricalParser","isInvalid","getInvalidType","trim","TemporalParser","temporal_parser_classCallCheck","temporal_parser_possibleConstructorReturn","_dtf","BinnedParser","matched","parseFloat","ContinuousParser","PartialField","partial_field_classCallCheck","_sanitize","createFields","dataColumn","headers","headersObj","header","createUnitField","default_config","dataFormat","DSVArr","firstRowHeader","columns","columnMajor","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","JSON","stringify","src_dsv","delimiter","reFormat","DELIMITER","charCodeAt","parseRows","rows","N","I","eof","eol","j","formatRow","formatValue","test","convert","customConverter","columnSet","column","inferColumns","formatRows","csv","dsv","tsv","DSVStr","fieldSeparator","d3Dsv","FlatJSON","insertionIndex","Auto","converters","resp","updateFields","partialFieldspace","fieldStoreName","_ref2","helper_slicedToArray","collID","partialFieldMap","newFields","coll","createUnitFieldFromPartial","persistDerivation","model","operation","_model$_derivation","criteriaFn","derivative","op","meta","criteria","_derivation","src_helper_toConsumableArray","selectHelper","selectFn","sourceDm","newRowDiffSet","lastInsertedValue","li","selectorHelperFn","_iteratorNormalCompletion","_didIteratorError","_iteratorError","_step","_iterator","iterator","done","err","return","prepareSelectionData","checker","filterPropagationModel","propModels","filterByMeasure","fns","propModel","getData","fieldsConfig","getFieldsConfig","fieldsSpace","values","v","def","some","every","propField","valueOf","filteredModel","clone","select","fn","saveChild","cloneWithSelect","selectConfig","cloneConfig","cloned","getPartialFieldspace","calculateFieldsConfig","cloneWithProject","projField","allFields","projectionSet","actualProjField","sanitizeUnitSchema","unitSchema","updateData","relation","sanitizeSchema","defaultConfig","converterFn","converter","_converterFn","_converterFn2","formattedData","nameSpace","_partialFieldspace","_data
Format","applyExistingOperationOnModel","_propModel$","_propModel$2","_getOperationArgument","child","derivation","params","groupByString","getOperationArguments","selectionModel","rejectionModel","propagateIdentifiers","propModelInf","nonTraversingModel","excludeModels","handlePropagation","_children","_applyExistingOperati","_applyExistingOperati2","getPathToRootModel","path","_parent","propagateToAllDataModels","identifiers","rootModels","propagationInf","propagationNameSpace","propagateToSource","propagationSourceId","sourceId","propagateInterpolatedValues","criterias","persistent","actionCriterias","mutableActions","filteredCriteria","entry","action","sourceActionCriterias","actionInf","actionConf","applyOnSource","models","rootModel","propConfig","sourceIdentifiers","rootGroupByModel","groupByModel","inf","propagationModel","getFilteredModel","reverse","Relation","relation_classCallCheck","source","_fieldStoreName","_propagationNameSpace","immutableActions","_fieldspace","joinWith","unionWith","differenceWith","defConfig","oDm","retDataModel","getAllFields","jsonData","rowObj","constructor","fieldConfig","normalizedProjField","relation_toConsumableArray","search","_fieldConfig","fieldDef","removeChild","findIndex","sibling","parent","criteriaQueue","datamodel_classCallCheck","datamodel_possibleConstructorReturn","_onPropagation","_sortingDetails","order","withUid","dataGenerated","fieldNames","fmtFieldIdx","elem","fIdx","fmtFn","datumIdx","fieldsArr","rawData","dataInCSVArr","sortedDm","colData","rowsCount","serializedData","rowIdx","colIdx","fieldinst","dependency","replaceVar","depVars","retrieveFn","depFieldIndices","fieldSpec","fs","suppliedFields","computedValues","fieldsData","_createFields","datamodel_slicedToArray","addField","addToNameSpace","isMutableAction","payload","getRootDataModel","getRootGroupByModel","find","sourceNamespace","addToPropNamespace","filterImmutableAction","criteriaModel","propagateImmutableActions","eventName","measureFieldName"
,"binFieldName","_createBinnedFieldDat","measureField","binsCount","_measureField$domain","_measureField$domain2","_slicedToArray","dMin","dMax","ceil","abs","binnedData","createBinnedFieldData","binField","serialize","getSchema","sd","Operators","compose","_len5","operations","_key5","currentDM","frstChild","derivations","compose_toConsumableArray","addParent","dispose","bin","_len3","_key3","project","_len2","_key2","_len4","_key4","calculateVariable","naturalJoin","fullOuterJoin","Stats","enums","version","pkg"],"mappings":"CAAA,SAAAA,EAAAC,GACA,iBAAAC,SAAA,iBAAAC,OACAA,OAAAD,QAAAD,IACA,mBAAAG,eAAAC,IACAD,OAAA,eAAAH,GACA,iBAAAC,QACAA,QAAA,UAAAD,IAEAD,EAAA,UAAAC,IARA,CASCK,OAAA,WACD,mBCTA,IAAAC,EAAA,GAGA,SAAAC,EAAAC,GAGA,GAAAF,EAAAE,GACA,OAAAF,EAAAE,GAAAP,QAGA,IAAAC,EAAAI,EAAAE,GAAA,CACAC,EAAAD,EACAE,GAAA,EACAT,QAAA,IAUA,OANAU,EAAAH,GAAAI,KAAAV,EAAAD,QAAAC,IAAAD,QAAAM,GAGAL,EAAAQ,GAAA,EAGAR,EAAAD,QA0DA,OArDAM,EAAAM,EAAAF,EAGAJ,EAAAO,EAAAR,EAGAC,EAAAQ,EAAA,SAAAd,EAAAe,EAAAC,GACAV,EAAAW,EAAAjB,EAAAe,IACAG,OAAAC,eAAAnB,EAAAe,EAAA,CAA0CK,YAAA,EAAAC,IAAAL,KAK1CV,EAAAgB,EAAA,SAAAtB,GACA,oBAAAuB,eAAAC,aACAN,OAAAC,eAAAnB,EAAAuB,OAAAC,YAAA,CAAwDC,MAAA,WAExDP,OAAAC,eAAAnB,EAAA,cAAiDyB,OAAA,KAQjDnB,EAAAoB,EAAA,SAAAD,EAAAE,GAEA,GADA,EAAAA,IAAAF,EAAAnB,EAAAmB,IACA,EAAAE,EAAA,OAAAF,EACA,KAAAE,GAAA,iBAAAF,QAAAG,WAAA,OAAAH,EACA,IAAAI,EAAAX,OAAAY,OAAA,MAGA,GAFAxB,EAAAgB,EAAAO,GACAX,OAAAC,eAAAU,EAAA,WAAyCT,YAAA,EAAAK,UACzC,EAAAE,GAAA,iBAAAF,EAAA,QAAAM,KAAAN,EAAAnB,EAAAQ,EAAAe,EAAAE,EAAA,SAAAA,GAAgH,OAAAN,EAAAM,IAAqBC,KAAA,KAAAD,IACrI,OAAAF,GAIAvB,EAAA2B,EAAA,SAAAhC,GACA,IAAAe,EAAAf,KAAA2B,WACA,WAA2B,OAAA3B,EAAA,SAC3B,WAAiC,OAAAA,GAEjC,OADAK,EAAAQ,EAAAE,EAAA,IAAAA,GACAA,GAIAV,EAAAW,EAAA,SAAAiB,EAAAC,GAAsD,OAAAjB,OAAAkB,UAAAC,eAAA1B,KAAAuB,EAAAC,IAGtD7B,EAAAgC,EAAA,GAIAhC,IAAAiC,EAAA,62DClFA,IAAMC,EAAYC,EAAQ,GAE1BxC,EAAOD,QAAUwC,EAAUE,QAAUF,EAAUE,QAAUF,ouBCKzD,IAOeG,EAPI,CACfC,UAAW,WACXC,QAAS,SACTC,QAAS,SACTC,KAAM,QCEKC,EAPU,CACrBC,YAAa,cACbC,SAAU,WACVC,IAAK,MACLC,OAAQ,UCAGC,EAJQ,CAC
nBC,WAAY,cCKDC,EALG,CACdC,QAAS,UACTC,UAAW,aCGAC,EANO,CAClBC,OAAQ,SACRC,QAAS,UACTC,IAAK,OCHT,SAASC,EAAqBC,GAC1B,OAAIA,aAAgBC,KACTD,EAGJ,IAAIC,KAAKD,GASpB,SAASE,EAAKhC,GACV,OAAQA,EAAI,GAAL,IAAgBA,EAAOA,EA8BP,SAASiC,EAAmBC,GACnDC,KAAKD,OAASA,EACdC,KAAKC,cAAWC,EAChBF,KAAKG,gBAAaD,EAftBE,OAAOC,OAAS,SAAUC,GACtB,OAAOA,EAAKC,QAAQ,2BAA4B,SAkBpDT,EAAkBU,aAAe,IAIjCV,EAAkBW,wBAA0B,CACxCC,KAAM,EACNC,MAAO,EACPC,IAAK,EACLC,KAAM,EACNC,OAAQ,EACRC,OAAQ,EACRC,YAAa,GAUjBlB,EAAkBmB,oBAAsB,SAAUC,GAC9C,OAAO,SAAUC,GACb,IAAIC,EACJ,OAAIC,SAASD,EAAYE,SAASH,EAAK,KAC5BC,EAGJF,IAYfpB,EAAkByB,mBAAqB,SAAUC,EAAON,GACpD,OAAO,SAACC,GACJ,IACI9E,EADAD,SAGJ,IAAK+E,EAAO,OAAOD,EAEnB,IAAMO,EAAON,EAAIO,cAEjB,IAAKtF,EAAI,EAAGC,EAAImF,EAAMG,OAAQvF,EAAIC,EAAGD,IACjC,GAAIoF,EAAMpF,GAAGsF,gBAAkBD,EAC3B,OAAOrF,EAIf,YAAU8D,IAAN9D,EACO8E,EAEJ,OAqBfpB,EAAkB8B,oBAAsB,WACpC,IAAMC,EAAU,CACZC,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,SACA,SACA,UACA,YACA,WACA,SACA,aAGFC,EAAY,CACdF,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,UACA,WACA,QACA,QACA,MACA,OACA,OACA,SACA,YACA,UACA,WACA,aAsPR,MAlPoB,CAChBE,EAAG,CAECtF,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAGP,OAFUzB,EAAoByB,GAErBmB,WAAWC,aAG5BlG,EAAG,CAECM,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GACP,IACMqB,EADI9C,EAAoByB,GACdmB,WAAa,GAE7B,OAAkB,IAAVE,EAAc,GAAKA,GAAOD,aAG1CrE,EAAG,CAECvB,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCG,EAAG,CAEC9F,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCI,EAAG,CAEC/F,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACfwB,gBAKvBC,EAAG,CAECjG,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,
GACZ0B,gBAK1BC,EAAG,CAECnG,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACjB4B,kBAEHR,aAGlBS,EAAG,CAECrG,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQC,MAAMmB,KAAK,KAA9B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQC,OACrDO,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQC,MAAMoB,GAAMX,aAGpCa,EAAG,CAECzG,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQE,KAAKkB,KAAK,KAA7B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQE,MACrDM,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQE,KAAKmB,GAAMX,aAGnCc,EAAG,CAEC1G,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GAChBmC,UAEHf,aAGnB7F,EAAG,CAECC,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GAChBmC,aAKtBC,EAAG,CAEC5G,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUF,MAAMmB,KAAK,KAAhC,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUF,OACvDO,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUF,MAAM0B,GAAQjB,aAGxCmB,EAAG,CAEC/G,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUD,KAAKkB,KAAK,KAA/B,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUD,MACvDM,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUD,KAAKyB,GAAQjB,aAGvC/F,EAAG,CAECG,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OALD,SAKSjB,GAAO,OAAOrB,EAAkBmB,qBAAlBnB,CAAwCqB,GAAO,GACrEkB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACdsC,WAEG,KAG3BE,EAAG,CAEChH,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OALD,SAKSjB,GACJ,IAAIyC,SACJ,GAAIzC,EAAK,CACL,IAAM9E,EAAI8E,EAAIQ,OACdR,EAAMA,EAAI0C,UAAUxH,EAAI,EAAGA,GAE/B,IAAI+E,EAAYtB,EAAkBmB,qBAAlBnB,CAAwCqB,GACpD2C,EAAc,IAAIlE,KAClBmE,EAAcC,KAAKC,MAAOH,EAAYI,cAAiB,KAO3D,OAHIxE,EAFJkE,KAAYG,EAAc3C,GAEM8C,cAAgBJ,EAAYI,gBACxDN,MAAYG,EAAc,GAAI3C,GAE3B1B,EAAoBkE,GAAQM,eAEvC7B,UAtBD,SAsBYlB,GACP,IACIgD,EADMzE,EAAoByB,GACjB+C,cAAc3B,WACvBlG,SAOJ,OALI8H,IACA9H,EAAI8H,EAAKxC,OACTwC,EAAOA,EAAKN,UAAUxH,EAAI,EAAGA,IAG1B8H,IAGfC,EAAG,CAECzH,KAAM,IACNuF,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OAAQtC,EAAkBmB,sBA
C1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACf+C,cAAc3B,eAgB7CzC,EAAkBuE,oBAAsB,WACpC,IAAMC,EAAcxE,EAAkB8B,sBAEtC,MAAO,CACHf,KAAMyD,EAAYrC,EAClBsC,QAASD,EAAYjI,EACrBmI,eAAgBF,EAAYpG,EAC5BuG,eAAgBH,EAAY7B,EAC5B3B,OAAQwD,EAAY5B,EACpB3B,OAAQuD,EAAY1B,EACpB8B,UAAWJ,EAAYtB,EACvB2B,SAAUL,EAAYlB,EACtBwB,aAAcN,EAAYjB,EAC1BwB,4BAA6BP,EAAY5H,EACzCoI,YAAaR,EAAYf,EACzBwB,WAAYT,EAAYZ,EACxBsB,cAAeV,EAAY9H,EAC3ByI,WAAYX,EAAYX,EACxBuB,UAAWZ,EAAYF,IAW/BtE,EAAkBqF,cAAgB,WAC9B,IAAMb,EAAcxE,EAAkB8B,sBAChCwD,EAAkB,WAMpB,IALA,IAAIhJ,EAAI,EACJiJ,SACAC,SACEjJ,EAAIkJ,UAAK5D,OAERvF,EAAIC,EAAGD,IACViJ,oBAAWjJ,OAAX8D,EAAAqF,UAAWnJ,IACXmJ,UAAA5D,QAASvF,OAAT8D,EAAAqF,UAASnJ,MACLkJ,EAAcD,GAItB,OAAKC,EAEEA,EAAY,GAAGlD,OAAOkD,EAAY,IAFd,MAK/B,MAAO,CACH5E,KAAM,CAAC4D,EAAYX,EAAGW,EAAYF,EAC9BgB,GAEJzE,MAAO,CAAC2D,EAAYf,EAAGe,EAAYZ,EAAGY,EAAY9H,EAC9C4I,GAEJxE,IAAK,CAAC0D,EAAYtB,EAAGsB,EAAYlB,EAAGkB,EAAYjB,EAAGiB,EAAY5H,EAC3D0I,GAEJvE,KAAM,CAACyD,EAAYrC,EAAGqC,EAAYjI,EAAGiI,EAAYpG,EAAGoG,EAAY7B,EAC5D,SAAU+C,EAAcC,EAAcC,EAAWC,GAC7C,IAAIL,SACAM,SACAC,SACA1E,SAcJ,OAZIsE,IAAiBG,EAAUF,GAAaC,IACJ,OAAhCC,EAAO,GAAGxD,OAAOwD,EAAO,MACxBC,GAAO,GAGXP,EAAcG,GAEdH,EADOG,GAGOD,EAGbF,GAELnE,EAAMmE,EAAY,GAAGlD,OAAOkD,EAAY,IACpCO,IACA1E,GAAO,IAEJA,GANoB,OASnCL,OAAQ,CAACwD,EAAY5B,EACjB0C,GAEJrE,OAAQ,CAACuD,EAAY1B,EACjBwC,KAUZtF,EAAkBgG,WAAa,SAAU/F,GAQrC,IAPA,IAAMgG,EAAcjG,EAAkBU,aAChC8D,EAAcxE,EAAkB8B,sBAChCoE,EAAgBlJ,OAAOmJ,KAAK3B,GAC5B4B,EAAa,GACf9J,SACA+J,UAEI/J,EAAI2D,EAAOqG,QAAQL,EAAa3J,EAAI,KAAO,GAC/C+J,EAAcpG,EAAO3D,EAAI,IACmB,IAAxC4J,EAAcI,QAAQD,IAE1BD,EAAWG,KAAK,CACZnE,MAAO9F,EACPkK,MAAOH,IAIf,OAAOD,GASXpG,EAAkByG,SAAW,SAAU5G,EAAMI,GACzC,IAQI1D,EAREmK,EAAQ9G,EAAoBC,GAC5BuG,EAAapG,EAAkBgG,WAAW/F,GAC1CuE,EAAcxE,EAAkB8B,sBAClC6E,EAAeC,OAAO3G,GACpBgG,EAAcjG,EAAkBU,aAClC8F,SACAK,SACAvK,SAGJ,IAAKA,EAAI,EAAGC,EAAI6J,EAAWvE,OAAQvF,EAAIC,EAAGD,IAEtCuK,EAAerC,EADfgC,EAAQJ,EAAW9J,GAAGkK,OACYjE,UAAUmE,GAC5CC,EAAeA,EAAalG,QAAQ,IAAIH,OAAO2F,EAAcO,EAAO,KAAMK,GAG9E,OAAOF,GAQX3G,EAAkB9B,UAAU4I,MAAQ,SAAUC,EAAeC,GACzD,IAAM3B,EAAgBrF,EAA
kBqF,gBAClClF,EAAWD,KAAK+G,kBAAkBF,GAClCG,EAAalH,EAAkBW,wBAC/BwG,EAAUH,GAAWA,EAAQG,QAC7BC,EAAa,GACbC,EAAO,GACTC,SACAC,SACAC,SACAnG,SACA/E,SACAmL,SACAC,SACAnL,SACAuH,EAAS,GAEb,IAAKwD,KAAejC,EAChB,GAAK,GAAGlH,eAAe1B,KAAK4I,EAAeiC,GAA3C,CAMA,IAJAD,EAAKxF,OAAS,EAEd2F,GADAD,EAAiBlC,EAAciC,IACHK,OAAOJ,EAAe1F,OAAS,EAAG,GAAG,GAE5DvF,EAAI,EAAGC,EAAIgL,EAAe1F,OAAQvF,EAAIC,EAAGD,SAI9B8D,KAFZiB,EAAMlB,GADNsH,EAAQF,EAAejL,IACFO,OAGjBwK,EAAKd,KAAK,MAEVc,EAAKd,KAAK,CAACkB,EAAOpG,IAM1B,GAAI,OAFJqG,EAAcF,EAAWI,MAAM1H,KAAMmH,MAEuBF,EACxD,MAGJC,EAAWF,EAAWI,IAAgBI,EAU1C,OAPIN,EAAWvF,QAAU3B,KAAK2H,gBAAgBT,EAAWvF,QAErDiC,EAAOgE,QAAQV,EAAW,GAAI,EAAG,GAEjCtD,EAAOgE,QAAPF,MAAA9D,EAAkBsD,GAGftD,GAQX9D,EAAkB9B,UAAU+I,kBAAoB,SAAUF,GACtD,IAYIxK,EAZE0D,EAASC,KAAKD,OACduE,EAAcxE,EAAkB8B,sBAChCmE,EAAcjG,EAAkBU,aAChC0F,EAAapG,EAAkBgG,WAAW/F,GAC1C8H,EAAW,GAEbC,SACAC,SACAC,SACAC,SACAC,SAGA9L,SAEJ8L,EAAcxB,OAAO3G,GAErB,IAAMoI,EAAWjC,EAAWkC,IAAI,SAAAC,GAAA,OAAOA,EAAI/B,QACrCgC,EAAmBpC,EAAWvE,OACpC,IAAKvF,EAAIkM,EAAmB,EAAGlM,GAAK,EAAGA,KACnC4L,EAAW9B,EAAW9J,GAAG8F,OAEV,IAAMgG,EAAYvG,OAAS,QAKdzB,IAAxB4H,IACAA,EAAsBI,EAAYvG,QAGtCsG,EAAaC,EAAYrE,UAAUmE,EAAW,EAAGF,GACjDI,EAAcA,EAAYrE,UAAU,EAAGmE,EAAW,GAC9C5H,OAAOC,OAAO4H,GACdC,EAAYrE,UAAUiE,EAAqBI,EAAYvG,QAE3DmG,EAAsBE,GAblBF,EAAsBE,EAgB9B,IAAK5L,EAAI,EAAGA,EAAIkM,EAAkBlM,IAC9B2L,EAAS7B,EAAW9J,GACpB8L,EAAcA,EAAY3H,QAAQwF,EAAcgC,EAAOzB,MAAOhC,EAAYyD,EAAOzB,OAAOnE,WAG5F,IAAMoG,EAAgB1B,EAAc2B,MAAM,IAAIpI,OAAO8H,KAAiB,GAGtE,IAFAK,EAAcE,QAETrM,EAAI,EAAGC,EAAI8L,EAASxG,OAAQvF,EAAIC,EAAGD,IACpCyL,EAASM,EAAS/L,IAAMmM,EAAcnM,GAE1C,OAAOyL,GAQX/H,EAAkB9B,UAAU0K,cAAgB,SAAU7B,GAClD,IAAIlH,EAAO,KACX,GAAIgJ,OAAOtH,SAASwF,GAChBlH,EAAO,IAAIC,KAAKiH,QACb,IAAK7G,KAAKD,QAAUH,KAAKgH,MAAMC,GAClClH,EAAO,IAAIC,KAAKiH,OAEf,CACD,IAAM5G,EAAWD,KAAKC,SAAWD,KAAK4G,MAAMC,GACxC5G,EAAS0B,SACT3B,KAAKG,WAAL,IAAAyI,SAAA5K,UAAAJ,KAAA8J,MAAsB9H,KAAtB,OAAAiJ,6HAAAC,CAA8B7I,MAC9BN,EAAOK,KAAKG,YAGpB,OAAOR,GAGXG,EAAkB9B,UAAU2J,gBAAkB,SAASoB,GACnD,OAAe,IAARA,GAAa/I,KAAKD,OAAOyI,MAAM,QAAQ7G,QASlD7B,E
AAkB9B,UAAUuI,SAAW,SAAUxG,EAAQ8G,GACrD,IAAI1G,SAQJ,OANI0G,EACA1G,EAAaH,KAAKG,WAAaH,KAAK0I,cAAc7B,IACzC1G,EAAaH,KAAKG,cAC3BA,EAAaH,KAAK0I,cAAc7B,IAG7B/G,EAAkByG,SAASpG,EAAYJ,ICruBnC,IAAAiJ,EAAA,SAACC,GACZ,IAAI7M,EAAI,EACR,OAAO,WAAe,QAAA8M,EAAA3D,UAAA5D,OAAXwH,EAAWC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAXF,EAAWE,GAAA9D,UAAA8D,GAClBF,EAAOG,QAAQ,SAACnI,EAAKoI,GACXN,EAAMM,aAAuBH,QAC/BH,EAAMM,GAAcH,MAAMI,KAAK,CAAE7H,OAAQvF,KAE7C6M,EAAMM,GAAYlD,KAAKlF,KAE3B/E,kNCdFqN,EAAe,SACfC,EAAgB5M,OAAOkB,UAAUuE,SACjCoH,EAAc,kBACdC,EAAa,iBAEnB,SAASC,EAAexB,EAAKyB,GAIzB,IAHA,IAAI1N,EAAI0N,EAAUnI,OACdoI,GAAU,EAEP3N,GAAG,CACN,GAAIiM,IAAQyB,EAAU1N,GAElB,OADA2N,EAAS3N,EAGbA,GAAK,EAGT,OAAO2N,EA2GX,SAASC,EAASC,EAAMC,EAAMC,GAE1B,YAAI,IAAOF,EAAP,YAAAG,EAAOH,MAASR,SAAgB,IAAOS,EAAP,YAAAE,EAAOF,MAAST,EACzC,WAGP,IAAOS,EAAP,YAAAE,EAAOF,MAAST,GAAyB,OAATS,EACzBD,SAGP,IAAOA,EAAP,YAAAG,EAAOH,MAASR,IAChBQ,EAAOC,aAAgBd,MAAQ,GAAK,IAnH5C,SAASiB,EAAMJ,EAAMC,EAAMC,EAAWG,EAAQC,GAC1C,IAAIC,EACAC,EACAC,EACAC,EACAC,EAcJ,GATKL,GAKDD,EAAOjE,KAAK4D,GACZM,EAAOlE,KAAK6D,KALZI,EAAS,CAACL,GACVM,EAAS,CAACL,IAOVA,aAAgBd,MAChB,IAAKoB,EAAO,EAAGA,EAAON,EAAKvI,OAAQ6I,GAAQ,EAAG,CAC1C,IACIC,EAASR,EAAKO,GACdE,EAASR,EAAKM,GAElB,MAAOnH,GACH,eAGA,IAAOqH,EAAP,YAAAN,EAAOM,MAAWjB,EACZU,QAAwBjK,IAAXwK,IACfT,EAAKO,GAAQE,IAIF,OAAXD,SAAmB,IAAOA,EAAP,YAAAL,EAAOK,MAAWhB,IACrCgB,EAASR,EAAKO,GAAQE,aAAkBtB,MAAQ,GAAK,KAG3C,KADdwB,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,SAMrD,IAAKC,KAAQN,EAAM,CACf,IACIO,EAASR,EAAKO,GACdE,EAASR,EAAKM,GAElB,MAAOnH,GACH,SAGJ,GAAe,OAAXqH,SAAmB,IAAOA,EAAP,YAAAN,EAAOM,MAAWjB,GAKrCkB,EAAMjB,EAAcnN,KAAKmO,MACbf,GACO,OAAXc,SAAmB,IAAOA,EAAP,YAAAL,EAAOK,MAAWhB,IACrCgB,EAASR,EAAKO,GAAQ,KAGZ,KADdI,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,IAGxCI,IAAQf,GACE,OAAXa,GAAqBA,aAAkBrB,QACvCqB,EAASR,EAAKO,GAAQ,KAGZ,KADdI,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,IAI7CN,EAAKO,GAAQE,MAGhB,CACD,GAAIP,QAAw
BjK,IAAXwK,EACb,SAEJT,EAAKO,GAAQE,GAIzB,OAAOT,EAiBPI,CAAMJ,EAAMC,EAAMC,GACXF,GCnIJ,SAASY,EAAS1J,GACrB,OAAOiI,MAAMyB,QAAQ1J,GA2ClB,IAAM2J,EAAc,wBAAY,IAAIlL,MAAOmL,UAAY/G,KAAKgH,MAAsB,IAAhBhH,KAAKiH,WASvE,SAASC,EAAWC,EAAMC,GAC7B,IAAKP,EAAQM,KAAUN,EAAQO,GAC3B,OAAOD,IAASC,EAGpB,GAAID,EAAKxJ,SAAWyJ,EAAKzJ,OACrB,OAAO,EAGX,IAAK,IAAIvF,EAAI,EAAGA,EAAI+O,EAAKxJ,OAAQvF,IAC7B,GAAI+O,EAAK/O,KAAOgP,EAAKhP,GACjB,OAAO,EAIf,OAAO,EASJ,SAASiP,EAAalK,GACzB,OAAOA,EASJ,IAAMmK,EAAmB,SAACC,GAC7B,MAnEsB,iBAmETA,EACFhN,EAAWE,QACXoM,EAAQU,IAASV,EAAQU,EAAK,IAC9BhN,EAAWG,QACXmM,EAAQU,KAA0B,IAAhBA,EAAK5J,QAlF/B,SAAmBR,GACtB,OAAOA,IAAQrE,OAAOqE,GAiF4BqK,CAASD,EAAK,KACrDhN,EAAWC,UAEf,MChDIiN,EApDI,CACfF,KAAM,GAENG,gBAHe,SAGEC,EAAUhP,GACvB,IAAMiP,EAASjP,GAAQmO,IA4CvB,OA1CA9K,KAAKuL,KAAKK,GAAU,CAChBjP,KAAMiP,EACNzC,OAAQwC,EAERE,UAJgB,WAKZ,IAAIA,EAAY7L,KAAK8L,iBAQrB,OANKD,IACDA,EAAY7L,KAAK8L,iBAAmB,GACpC9L,KAAKmJ,OAAOG,QAAQ,SAACyC,GACjBF,EAAUE,EAAMpP,QAAUoP,KAG3BF,GAEXG,WAfgB,WAgBZ,IAAIC,EAAgBjM,KAAKkM,eAUzB,OARKD,IACDA,EAAgBjM,KAAKkM,eAAiB,GACtClM,KAAKmJ,OAAOG,QAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASjN,EAAUC,UAClC6M,EAAcF,EAAMpP,QAAUoP,MAInCE,GAEXI,aA5BgB,WA6BZ,IAAIC,EAAkBtM,KAAKuM,iBAU3B,OARKvM,KAAKuM,mBACND,EAAkBtM,KAAKuM,iBAAmB,GAC1CvM,KAAKmJ,OAAOG,QAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASjN,EAAUE,YAClCiN,EAAgBP,EAAMpP,QAAUoP,MAIrCO,IAGRtM,KAAKuL,KAAKK,8PCKVY,aA1CX,SAAAA,EAAarL,EAAK4K,gGAAOU,CAAAzM,KAAAwM,GACrB1P,OAAOC,eAAeiD,KAAM,SAAU,CAClChD,YAAY,EACZ0P,cAAc,EACdC,UAAU,EACVtP,MAAO8D,IAGXnB,KAAK+L,MAAQA,+CAoBb,OAAOrF,OAAO1G,KAAK3C,yCAUnB,OAAO2C,KAAK3C,oCArBZ,OAAO2C,KAAK4M,gBCxBb,SAASC,EAAoBC,EAAYC,GACxCD,EAAWnL,OAAS,GACDmL,EAAWE,MAAM,KACzB1D,QAAQ,SAAC2D,GAChB,IAAMC,EAAaD,EAAQD,MAAM,KAC3BG,GAAUD,EAAW,GACrBE,IAAQF,EAAW,IAAMA,EAAW,IAC1C,GAAIE,GAAOD,EACP,IAAK,IAAI/Q,EAAI+Q,EAAO/Q,GAAKgR,EAAKhR,GAAK,EAC/B2Q,EAAS3Q,kQCVvBiR,aAqBF,SAAAA,EAAahQ,gGAAOiQ,CAAAtN,KAAAqN,GAChBrN,KAAK4M,OAASvP,0DAdOkQ,GACrB,OAAKA,EAGEzQ,OAAO0Q,OAAOH,EAAkBI,qBAAsBF,GAFlDF,EAAkBI,4DAsB7B,OAAOzN,KAAK4M,0CAUZ,OAAOlG,OAAO1G,KAAK4M,4CAGNzL,GAC
b,OAAQA,aAAekM,KAAwBA,EAAkBK,mBAAmBvM,0CAGlEA,GAClB,OAAOA,aAAekM,EAAoBlM,EAAMkM,EAAkBK,mBAAmBvM,YAO7FkM,EAAkBM,KAAO,IAAIN,EAAkB,QAC/CA,EAAkBO,GAAK,IAAIP,EAAkB,MAC7CA,EAAkBQ,IAAM,IAAIR,EAAkB,OAO9CA,EAAkBI,qBAAuB,CACrCK,QAAST,EAAkBO,GAC3BG,IAAKV,EAAkBQ,IACvBG,KAAMX,EAAkBM,KACxBzN,UAAWmN,EAAkBO,IAGlBP,2aC5ETY,EAAkB,SAACC,EAASf,EAAOC,GAIrC,IAHA,IAAMe,EAAU,GACZC,EAAOjB,EAEJiB,EAAOhB,GACVe,EAAQ9H,KAAK+H,GACbA,GAAQF,EAIZ,OAFAC,EAAQ9H,KAAK+H,GAEND,GAGLE,EAAkB,SAACC,EAAcjR,GAOnC,IANA,IAAIkR,EAAU,EACVC,EAAWF,EAAa3M,OAAS,EACjC8M,SACAjN,SAGG+M,GAAWC,GAAU,CAIxB,GAAInR,IAFJmE,EAAQ8M,EADRG,EAASF,EAAUvK,KAAK0K,OAAOF,EAAWD,GAAW,KAGlCpB,OAAS9P,EAAQmE,EAAM4L,IACtC,OAAO5L,EACAnE,GAASmE,EAAM4L,IACtBmB,EAAUE,EAAS,EACZpR,EAAQmE,EAAM2L,QACrBqB,EAAWC,EAAS,GAI5B,OAAO,MC5BJ,SAASE,EAAiBC,EAAKC,GAClC,IAAMC,EAAS,GACTC,EAAS,GASf,OARAH,EAAIzF,OAAOG,QAAQ,SAACyC,GAChBgD,EAAO1I,KAAK0F,EAAMI,SAASxP,QAE/BkS,EAAI1F,OAAOG,QAAQ,SAACyC,IAC6B,IAAzCgD,EAAO3I,QAAQ2F,EAAMI,SAASxP,OAC9BmS,EAAOzI,KAAK0F,EAAMI,SAASxP,QAG5BmS,ECfJ,IAUME,EACD,SADCA,EAEA,UAFAA,EAGA,QAHAA,EAIA,UAJAA,EAKA,qBALAA,EAMJ,MAGIC,EAAQ,CACjBC,MAAO,QACPC,UAAW,YACXC,WAAY,aACZC,QAAS,UACTC,UAAW,aAGFC,EACJ,MCrBT,SAASC,IAAoB,OAAO,EAY7B,SAASC,EAAcC,EAAKC,EAAKC,GAA+D,IAArDC,EAAqDtK,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,IAAAA,UAAA,GAAxBuK,EAAwBvK,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAb0J,EAAMC,MACtF/C,EAAS,GACTZ,EAAO,GACPwE,EAAqBH,GAAYJ,EACjCQ,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBE,EAAoBH,EAAcrT,KAClCyT,EAAoBF,EAAcvT,KAClCA,EAAUqT,EAAcrT,KAAxB,IAAgCuT,EAAcvT,KAC9C0T,EAAmB1B,EAAgBqB,EAAeE,GAExD,GAAIC,IAAsBC,EACtB,MAAM,IAAIE,MAAM,8CA+EpB,OA5EAN,EAAc7G,OAAOG,QAAQ,SAACyC,GAC1B,IAAMwE,EAAYvG,EAAQ,GAAI+B,EAAMI,WACc,IAA9CkE,EAAiBjK,QAAQmK,EAAU5T,OAAiBkT,IACpDU,EAAU5T,KAAUqT,EAAcrT,KAAlC,IAA0C4T,EAAU5T,MAExDwP,EAAO9F,KAAKkK,KAEhBL,EAAc/G,OAAOG,QAAQ,SAACyC,GAC1B,IAAMwE,EAAYvG,EAAQ,GAAI+B,EAAMI,WACc,IAA9CkE,EAAiBjK,QAAQmK,EAAU5T,MAC9BkT,IACDU,EAAU5T,KAAUuT,EAAcvT,KAAlC,IAA0C4T,EAAU5T,KACpDwP,EAAO9F,KAAKkK,IAGhBpE,EAAO9F,KAAKkK,KAKpB1D,EAAmB6C,EAAIc,YAAa,SAAC
pU,GACjC,IAAIqU,GAAW,EACXC,SACJ7D,EAAmB8C,EAAIa,YAAa,SAACG,GACjC,IAAMC,EAAQ,GACRC,EAAU,GAChBA,EAAQV,GAAqB,GAC7BU,EAAQT,GAAqB,GAC7BJ,EAAc7G,OAAOG,QAAQ,SAACyC,GAC1B6E,EAAMvK,KAAK0F,EAAM+E,aAAavF,KAAKnP,IACnCyU,EAAQV,GAAmBpE,EAAMpP,QAAUoP,EAAM+E,aAAavF,KAAKnP,KAEvE8T,EAAc/G,OAAOG,QAAQ,SAACyC,IAC+B,IAAnDsE,EAAiBjK,QAAQ2F,EAAMI,SAASxP,OAAgBkT,GAC1De,EAAMvK,KAAK0F,EAAM+E,aAAavF,KAAKoF,IAEvCE,EAAQT,GAAmBrE,EAAMpP,QAAUoP,EAAM+E,aAAavF,KAAKoF,KAGvE,IAIMI,EAAYC,GAAgBH,EAAQV,IACpCc,EAAYD,GAAgBH,EAAQT,IAC1C,GAAIL,EAAmBgB,EAAWE,EALb,kBAAMvB,EAAIwB,gBACV,kBAAMvB,EAAIuB,gBAFb,IAMyE,CACvF,IAAMC,EAAW,GACjBP,EAAMtH,QAAQ,SAAC8H,EAASC,GACpBF,EAAShF,EAAOkF,GAAK1U,MAAQyU,IAE7BX,GAAYxB,EAAMC,QAAUY,EAC5BvE,EAAKmF,GAAeS,GAGpB5F,EAAKlF,KAAK8K,GACVV,GAAW,EACXC,EAActU,QAEf,IAAK0T,IAAab,EAAME,WAAaW,IAAab,EAAMG,cAAgBqB,EAAU,CACrF,IAAMU,EAAW,GACbpI,EAAMiH,EAAc7G,OAAOxH,OAAS,EACxCiP,EAAMtH,QAAQ,SAAC8H,EAASC,GAEhBF,EAAShF,EAAOkF,GAAK1U,MADrB0U,GAAOtI,EACsBqI,EAGA,OAGrCX,GAAW,EACXC,EAActU,EACdmP,EAAKlF,KAAK8K,QAKf,IAAI/S,GAAUmN,EAAMY,EAAQ,CAAExP,SC3GzC,SAAS2U,EAAWtO,EAAGO,GACnB,IAAMgO,KAAQvO,EACRwO,KAAQjO,EACd,OAAIgO,EAAKC,GACG,EAERD,EAAKC,EACE,EAEJ,EAqEJ,SAASC,EAAWC,GAAyB,IAApBC,EAAoBpM,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAX+L,EAIrC,OAHII,EAAI/P,OAAS,GArBrB,SAASiQ,EAAMF,EAAKG,EAAIC,EAAIH,GACxB,GAAIG,IAAOD,EAAM,OAAOH,EAExB,IAAMK,EAAMF,EAAK7N,KAAK0K,OAAOoD,EAAKD,GAAM,GAKxC,OAJAD,EAAKF,EAAKG,EAAIE,EAAKJ,GACnBC,EAAKF,EAAKK,EAAM,EAAGD,EAAIH,GAzC3B,SAAgBD,EAAKG,EAAIE,EAAKD,EAAIH,GAG9B,IAFA,IAAMK,EAAUN,EACVO,EAAS,GACN7V,EAAIyV,EAAIzV,GAAK0V,EAAI1V,GAAK,EAC3B6V,EAAO7V,GAAK4V,EAAQ5V,GAKxB,IAHA,IAAI4G,EAAI6O,EACJtO,EAAIwO,EAAM,EAEL3V,EAAIyV,EAAIzV,GAAK0V,EAAI1V,GAAK,EACvB4G,EAAI+O,GACJC,EAAQ5V,GAAK6V,EAAO1O,GACpBA,GAAK,GACEA,EAAIuO,GACXE,EAAQ5V,GAAK6V,EAAOjP,GACpBA,GAAK,GACE2O,EAAOM,EAAOjP,GAAIiP,EAAO1O,KAAO,GACvCyO,EAAQ5V,GAAK6V,EAAOjP,GACpBA,GAAK,IAELgP,EAAQ5V,GAAK6V,EAAO1O,GACpBA,GAAK,GAqBb8G,CAAMqH,EAAKG,EAAIE,EAAKD,EAAIH,GAEjBD,EAcHE,CAAKF,EAAK,EAAGA,EAAI/P,OAAS,EAAGgQ,GAE1BD,0HC3EX,SAASQ,EAAWC,
EAAUC,EAAUlQ,GACpC,IAAImQ,SACJ,OAAQF,GACR,KAAKlT,EAAeC,WACpB,KAAKN,EAAiBE,SAEduT,EADa,SAAbD,EACU,SAACpP,EAAGO,GAAJ,OAAUA,EAAErB,GAASc,EAAEd,IAEvB,SAACc,EAAGO,GAAJ,OAAUP,EAAEd,GAASqB,EAAErB,IAErC,MACJ,QACImQ,EAAU,SAACrP,EAAGO,GACV,IAAMgO,KAAQvO,EAAEd,GACVsP,KAAQjO,EAAErB,GAChB,OAAIqP,EAAKC,EACe,SAAbY,EAAsB,GAAK,EAElCb,EAAKC,EACe,SAAbY,GAAuB,EAAI,EAE/B,GAGf,OAAOC,EAUX,SAASC,EAAU/G,EAAMhC,GACrB,IAAMgJ,EAAU,IAAIC,IACdC,EAAc,GAYpB,OAVAlH,EAAKjC,QAAQ,SAACoJ,GACV,IAAMC,EAAWD,EAAMnJ,GACnBgJ,EAAQK,IAAID,GACZF,EAAYF,EAAQtV,IAAI0V,IAAW,GAAGtM,KAAKqM,IAE3CD,EAAYpM,KAAK,CAACsM,EAAU,CAACD,KAC7BH,EAAQM,IAAIF,EAAUF,EAAY9Q,OAAS,MAI5C8Q,EAYX,SAASK,EAAmBC,EAAcC,EAAcC,GACpD,IAAM5N,EAAM,CACR6N,MAAOH,EAAa,IAQxB,OALAC,EAAaG,OAAO,SAACC,EAAKhF,EAAMiF,GAE5B,OADAD,EAAIhF,GAAQ2E,EAAa,GAAG3K,IAAI,SAAAsK,GAAA,OAASA,EAAMO,EAAmBI,GAAKnR,SAChEkR,GACR/N,GAEIA,EA0EJ,SAASiO,GAAa7H,EAAYqB,EAAYyG,EAAeC,EAAgB1M,GAKhFA,EAAUhK,OAAO0Q,OAAO,GAJL,CACfiG,QAAQ,EACRC,YAAY,GAEwB5M,GAExC,IAAM6M,EAAS,CACXxH,OAAQ,GACRZ,KAAM,GACNqI,KAAM,IAEJH,EAAS3M,EAAQ2M,OACjBI,EAAaL,GAAkBA,EAAe7R,OAAS,EAEvDmS,EAAa,GAiDnB,GA/CgBP,EAAcvG,MAAM,KAE5B1D,QAAQ,SAACyK,GACb,IAAK,IAAI3X,EAAI,EAAGA,EAAIqP,EAAW9J,OAAQvF,GAAK,EACxC,GAAIqP,EAAWrP,GAAGO,SAAWoX,EAAS,CAClCD,EAAWzN,KAAKoF,EAAWrP,IAC3B,SAMZ0X,EAAWxK,QAAQ,SAACyC,GAEhB4H,EAAOxH,OAAO9F,KAAK0F,EAAMI,YAGzBsH,GACAE,EAAOxH,OAAO9F,KAAK,CACf1J,KAAM,MACNyP,KAAM,eAIdS,EAAmBC,EAAY,SAAC1Q,GAC5BuX,EAAOpI,KAAKlF,KAAK,IACjB,IAAM2N,EAAYL,EAAOpI,KAAK5J,OAAS,EAEvCmS,EAAWxK,QAAQ,SAACyC,EAAO4E,GACvBgD,EAAOpI,KAAKyI,GAAWrD,EAFf,GAE6B5E,EAAM+E,aAAavF,KAAKnP,KAE7DqX,IACAE,EAAOpI,KAAKyI,GAAWF,EAAWnS,QAAUvF,GAGhDuX,EAAOC,KAAKvN,KAAKjK,GAIbyX,GAAcF,EAAOpI,KAAKyI,GAAW3N,KAAKjK,KAI9CyX,GA7HR,SAAkBI,EAAST,GAOvB,IAPuC,IAC/BjI,EAAiB0I,EAAjB1I,KAAMY,EAAW8H,EAAX9H,OACV+H,SACAC,SACAC,SACAhY,EAAIoX,EAAe7R,OAAS,EAEzBvF,GAAK,EAAGA,IACX8X,EAAYV,EAAepX,GAAG,GAC9B+X,EAAWX,EAAepX,GAAG,IAC7BgY,EAAWC,GAAclI,EAAQ+H,MVhEf,mBUuEHC,EAEX1C,EAAUlG,EAAM,SAACvI,EAAGO,GAAJ,OAAU4Q,EAASnR,EAAEoR,EAASlS,OAAQqB,EAAE6Q,EAASlS,UAC1D2I,EAAQs
J,GAAW,WAC1B,IAAM1B,EAAcH,EAAU/G,EAAM6I,EAASlS,OACvCoS,EAAYH,EAASA,EAASxS,OAAS,GACvCqR,EAAemB,EAASI,MAAM,EAAGJ,EAASxS,OAAS,GACnDsR,EAAqBD,EAAa5K,IAAI,SAAAoM,GAAA,OAAKH,GAAclI,EAAQqI,KAEvE/B,EAAYnJ,QAAQ,SAACyJ,GACjBA,EAAa1M,KAAKyM,EAAmBC,EAAcC,EAAcC,MAGrExB,EAAUgB,EAAa,SAACzP,EAAGO,GACvB,IAAM/G,EAAIwG,EAAE,GACNnF,EAAI0F,EAAE,GACZ,OAAO+Q,EAAU9X,EAAGqB,KAIxB0N,EAAK5J,OAAS,EACd8Q,EAAYnJ,QAAQ,SAACoJ,GACjBnH,EAAKlF,KAALqB,MAAA6D,EAAAkJ,EAAa/B,EAAM,OAnBG,IAsB1ByB,EAA8C,SAAnCzN,OAAOyN,GAAUzS,cAA2B,OAAS,MAChE+P,EAAUlG,EAAM2G,EAAUkC,EAAShI,KAAM+H,EAAUC,EAASlS,UAIpE+R,EAAQL,KAAO,GACfrI,EAAKjC,QAAQ,SAACjM,GACV4W,EAAQL,KAAKvN,KAAKhJ,EAAMqX,SA6ExBC,CAAShB,EAAQH,GAGjB1M,EAAQ4M,WAAY,CACpB,IAAMkB,EAAUxL,qBAASA,MAAMuK,EAAOxH,OAAOxK,UAASyG,IAAI,iBAAM,KAChEuL,EAAOpI,KAAKjC,QAAQ,SAACsH,GACjBA,EAAMtH,QAAQ,SAACiC,EAAMnP,GACjBwY,EAAQxY,GAAGiK,KAAKkF,OAGxBoI,EAAOpI,KAAOqJ,EAGlB,OAAOjB,EC1NJ,SAASkB,GAAYnF,EAAKC,GAC7B,IAAMmF,EAAY,GACZ3I,EAAS,GACT4I,EAAgB,GAChBxJ,EAAO,GACPyE,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpB+E,EAAwBhF,EAAcnE,YACtCoJ,EAAwB/E,EAAcrE,YACtClP,EAAUqT,EAAcrT,KAAxB,UAAsCuT,EAAcvT,KAG1D,IAAKuO,EAAWwE,EAAIwF,eAAelI,MAAM,KAAK4E,OAAQjC,EAAIuF,eAAelI,MAAM,KAAK4E,QAChF,OAAO,KAiBX,SAASuD,EAAkBC,EAAIvJ,EAAWwJ,GACtCxI,EAAmBuI,EAAG5E,YAAa,SAACpU,GAChC,IAAMwU,EAAQ,GACV0E,EAAW,GACfP,EAAczL,QAAQ,SAACiM,GACnB,IAAMlY,EAAQwO,EAAU0J,GAAYzE,aAAavF,KAAKnP,GACtDkZ,OAAgBjY,EAChBuT,EAAM2E,GAAclY,IAEnByX,EAAUQ,KACPD,GAAW9J,EAAKlF,KAAKuK,GACzBkE,EAAUQ,IAAY,KASlC,OAjCC5F,EAAIwF,eAAelI,MAAM,KAAM1D,QAAQ,SAAC4K,GACrC,IAAMnI,EAAQiJ,EAAsBd,GACpC/H,EAAO9F,KAAK2D,EAAQ,GAAI+B,EAAMI,WAC9B4I,EAAc1O,KAAK0F,EAAMI,SAASxP,QA2BtCwY,EAAkBxF,EAAKsF,GAAuB,GAC9CE,EAAkBzF,EAAKsF,GAAuB,GAEvC,IAAI5W,GAAUmN,EAAMY,EAAQ,CAAExP,kIC5DzC,SAAS6Y,GAAkB9D,GACvB,OAAOA,EAAI+D,OAAO,SAAAjL,GAAA,QAAUA,aAAgB6C,KAShD,SAASqI,GAAKhE,GACV,GAAI7G,EAAQ6G,MAAUA,EAAI,aAActI,OAAQ,CAC5C,IAAMuM,EAAiBH,GAAkB9D,GAIzC,OAHiBiE,EAAehU,OACZgU,EAAexC,OAAO,SAACC,EAAKwC,GAAN,OAAexC,EAAMwC,GAAM,GAC/CvI,EAAkBM,KAG5C,OAAON,EAAkBM,KAU7B,SAASkI,GAAKnE,GACV,GAAI7
G,EAAQ6G,MAAUA,EAAI,aAActI,OAAQ,CAC5C,IAAM0M,EAAWJ,GAAIhE,GACf3I,EAAM2I,EAAI/P,QAAU,EAC1B,OAAQgH,OAAOoN,MAAMD,IAAaA,aAAoBzI,EAC7CA,EAAkBM,KAAOmI,EAAW/M,EAEjD,OAAOsE,EAAkBM,KAgG7B,IAAMqI,GAAS,CACXN,OACAG,OACAI,IAzFJ,SAAcvE,GACV,GAAI7G,EAAQ6G,MAAUA,EAAI,aAActI,OAAQ,CAE5C,IAAM8M,EAAiBV,GAAkB9D,GAEzC,OAAQwE,EAAevU,OAAUqC,KAAKiS,IAALvO,MAAA1D,KAAAmS,GAAYD,IAAkB7I,EAAkBM,KAErF,OAAON,EAAkBM,MAmFzByI,IAzEJ,SAAc1E,GACV,GAAI7G,EAAQ6G,MAAUA,EAAI,aAActI,OAAQ,CAE5C,IAAM8M,EAAiBV,GAAkB9D,GAEzC,OAAQwE,EAAevU,OAAUqC,KAAKoS,IAAL1O,MAAA1D,KAAAmS,GAAYD,IAAkB7I,EAAkBM,KAErF,OAAON,EAAkBM,MAmEzB0I,MAzDJ,SAAgB3E,GACZ,OAAOA,EAAI,IAyDX4E,KA/CJ,SAAe5E,GACX,OAAOA,EAAIA,EAAI/P,OAAS,IA+CxB4U,MArCJ,SAAgB7E,GACZ,OAAI7G,EAAQ6G,GACDA,EAAI/P,OAER0L,EAAkBM,MAkCzB6I,IAbJ,SAAc9E,GACV,OAAO1N,KAAKyS,KAbhB,SAAmB/E,GACf,IAAIgF,EAAOb,GAAInE,GACf,OAAOmE,GAAInE,EAAItJ,IAAI,SAAAuO,GAAA,OAAA3S,KAAA4S,IAAQD,EAAMD,EAAS,MAWzBG,CAASnF,MAexBoF,GAAqB,gQCxIrBC,cACF,SAAAA,IAAe,IAAAC,EAAAhX,kGAAAiX,CAAAjX,KAAA+W,GACX/W,KAAKiJ,MAAQ,IAAIuJ,IACjBxS,KAAKiJ,MAAM4J,IAAI,aAAcqE,IAE7Bpa,OAAOqa,QAAQnB,IAAQ1M,QAAQ,SAAC3L,GAC5BqZ,EAAK/N,MAAM4J,IAAIlV,EAAI,GAAIA,EAAI,0DAc/B,IAAK4H,UAAO5D,OACR,OAAO3B,KAAKiJ,MAAMhM,IAAI,cAG1B,IAAIma,0CAEJ,GAAuB,mBAAZA,EACPpX,KAAKiJ,MAAM4J,IAAI,aAAcuE,OAC1B,CAEH,GADAA,EAAU1Q,OAAO0Q,IAC6B,IAA1Cta,OAAOmJ,KAAK+P,IAAQ5P,QAAQgR,GAG5B,MAAM,IAAI9G,MAAJ,WAAqB8G,EAArB,0BAFNpX,KAAKiJ,MAAM4J,IAAI,aAAcmD,GAAOoB,IAK5C,OAAOpX,sCAmCDrD,EAAMya,GAAS,IAAAC,EAAArX,KACrB,GAAuB,mBAAZoX,EACP,MAAM,IAAI9G,MAAM,gCAMpB,OAHA3T,EAAO+J,OAAO/J,GACdqD,KAAKiJ,MAAM4J,IAAIlW,EAAMya,GAEd,WAAQC,EAAKC,aAAa3a,yCAGvBA,GACNqD,KAAKiJ,MAAM2J,IAAIjW,IACfqD,KAAKiJ,MAAMsO,OAAO5a,mCAIjBA,GACL,OAAIA,aAAgBiM,SACTjM,EAEJqD,KAAKiJ,MAAMhM,IAAIN,YAgBf6a,GAZO,WAClB,IAAIvO,EAAQ,KAQZ,OALkB,OAAVA,IACAA,EAAQ,IAAI8N,IAET9N,EAPO,uaCrCtB,SAASwO,GAASC,EAAW/L,EAAUgM,EAAUC,GAC7C,IAAMC,EAxDV,SAAsBH,EAAW/L,GAC7B,IAAMmD,EAAS,GAETgJ,EADaJ,EAAUzH,gBACC5D,eAY9B,OAVAvP,OAAOqa,QAAQW,GAAYxO,QAAQ,SAAAyO,GAAW,IAATpa,EAASqa,GAAAD,EAAA,MACtCpM,GAAYA,EAAShK,QACU,IAA3BgK,EAASvF
,QAAQzI,IACjBmR,EAAOzI,KAAK1I,GAGhBmR,EAAOzI,KAAK1I,KAIbmR,EAyCWmJ,CAAYP,EAAW/L,GACnCuM,EAhCV,SAAwBR,GAA0B,IAAfC,EAAepS,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAJ,GACpCoO,EAAS,GAETwE,EADaT,EAAUzH,gBACDjE,aACtBkL,EAAaM,GAAaY,iBAchC,OAZAtb,OAAOmJ,KAAKkS,GAAU7O,QAAQ,SAAC+O,GACU,iBAA1BV,EAASU,KAChBV,EAASU,GAAeF,EAASE,GAAaC,YAElD,IAAMC,EAAYf,GAAagB,QAAQb,EAASU,IAC5CE,EACA5E,EAAO0E,GAAeE,GAEtB5E,EAAO0E,GAAenB,EACtBS,EAASU,GAAevB,MAGzBnD,EAcY8E,CAAcf,EAAWC,GACtClM,EAAaiM,EAAUzH,gBACvByI,EAAgBjN,EAAWI,YAC3B8M,EAASlN,EAAW9O,KACpBic,EAAe,GACfC,EAAa,GACb1M,EAAS,GACToG,EAAU,GACVhH,EAAO,GACTuN,SAGJhc,OAAOqa,QAAQuB,GAAepP,QAAQ,SAAAyP,GAAkB,IAAAC,EAAAhB,GAAAe,EAAA,GAAhBpb,EAAgBqb,EAAA,GAAX3b,EAAW2b,EAAA,GACpD,IAAgC,IAA5BnB,EAAUzR,QAAQzI,IAAeua,EAAWva,GAG5C,OAFAwO,EAAO9F,KAAK2D,EAAQ,GAAI3M,EAAM8O,WAEtB9O,EAAM8O,SAASC,MACvB,KAAKjN,EAAUC,QACXyZ,EAAWxS,KAAK1I,GAChB,MACJ,QACA,KAAKwB,EAAUE,UACXuZ,EAAavS,KAAK1I,MAK9B,IAAIsb,EAAW,EACfpM,EAAmB6K,EAAUlH,YAAa,SAACpU,GACvC,IAAI8c,EAAO,GACXN,EAAatP,QAAQ,SAAC6P,GAClBD,EAAUA,EAAV,IAAkBR,EAAcS,GAAGrI,aAAavF,KAAKnP,UAEnC8D,IAAlBqS,EAAQ2G,IACR3G,EAAQ2G,GAAQD,EAChB1N,EAAKlF,KAAK,IACVuS,EAAatP,QAAQ,SAAC6P,GAClB5N,EAAK0N,GAAUE,GAAKT,EAAcS,GAAGrI,aAAavF,KAAKnP,KAE3Dyc,EAAWvP,QAAQ,SAAC6P,GAChB5N,EAAK0N,GAAUE,GAAK,CAACT,EAAcS,GAAGrI,aAAavF,KAAKnP,MAE5D6c,GAAY,GAEZJ,EAAWvP,QAAQ,SAAC6P,GAChB5N,EAAKgH,EAAQ2G,IAAOC,GAAG9S,KAAKqS,EAAcS,GAAGrI,aAAavF,KAAKnP,QAM3E,IAAIgd,EAAc,GACdC,EAAgB,kBAAM3B,EAAUxG,gBAcpC,OAbA3F,EAAKjC,QAAQ,SAACgQ,GACV,IAAM1I,EAAQ0I,EACdT,EAAWvP,QAAQ,SAAC6P,GAChBvI,EAAMuI,GAAKjB,EAAWiB,GAAGG,EAAIH,GAAIE,EAAeD,OAGpDxB,GACAA,EAAkB2B,wBAClBT,EAAelB,GAGfkB,EAAe,IAAI1a,GAAUmN,EAAMY,EAAQ,CAAExP,KAAMgc,IAEhDG,EC9HJ,SAASU,GAAmB9J,EAAKC,GACpC,IAIM8J,EAAkB9K,EAJFe,EAAIO,gBACJN,EAAIM,iBAK1B,OAAO,SAACc,EAAWE,GACf,IAAIyI,GAAc,EASlB,OARAD,EAAgBnQ,QAAQ,SAAC4K,GAGjBwF,IAFA3I,EAAUmD,GAAW7W,QACrB4T,EAAUiD,GAAW7W,QAASqc,KAM/BA,GCjBR,SAASC,GAAOjK,EAAKC,GACxB,IAAMmF,EAAY,GACZ3I,EAAS,GACT4I,EAAgB,GAChBxJ,EAAO,GACPyE,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpB+E
,EAAwBhF,EAAcnE,YACtCoJ,EAAwB/E,EAAcrE,YACtClP,EAAUqT,EAAcrT,KAAxB,UAAsCuT,EAAcvT,KAG1D,IAAKuO,EAAWwE,EAAIwF,eAAelI,MAAM,KAAK4E,OAAQjC,EAAIuF,eAAelI,MAAM,KAAK4E,QAChF,OAAO,KAgBX,SAASuD,EAAmBC,EAAIvJ,GAC5BgB,EAAmBuI,EAAG5E,YAAa,SAACpU,GAChC,IAAMwU,EAAQ,GACV0E,EAAW,GACfP,EAAczL,QAAQ,SAACiM,GACnB,IAAMlY,EAAQwO,EAAU0J,GAAYzE,aAAavF,KAAKnP,GACtDkZ,OAAgBjY,EAChBuT,EAAM2E,GAAclY,IAEnByX,EAAUQ,KACX/J,EAAKlF,KAAKuK,GACVkE,EAAUQ,IAAY,KASlC,OAhCC5F,EAAIwF,eAAelI,MAAM,KAAM1D,QAAQ,SAAC4K,GACrC,IAAMnI,EAAQiJ,EAAsBd,GACpC/H,EAAO9F,KAAK2D,EAAQ,GAAI+B,EAAMI,WAC9B4I,EAAc1O,KAAK0F,EAAMI,SAASxP,QA0BtCwY,EAAkBzF,EAAKsF,GACvBG,EAAkBxF,EAAKsF,GAEhB,IAAI7W,GAAUmN,EAAMY,EAAQ,CAAExP,SCvDlC,SAASid,GAAeC,EAAYC,EAAYlK,GACnD,OAAOH,EAAaoK,EAAYC,EAAYlK,GAAU,EAAOX,EAAME,WAGhE,SAAS4K,GAAgBF,EAAYC,EAAYlK,GACpD,OAAOH,EAAaqK,EAAYD,EAAYjK,GAAU,EAAOX,EAAMG,0QCWlD4K,cAQjB,SAAAA,EAAalJ,EAAchE,gGAAYmN,CAAAja,KAAAga,GACnCha,KAAK8Q,aAAeA,EACpB9Q,KAAK8M,WAAaA,8CAUlB,MAAM,IAAIwD,MAAM,wDAUhB,OAAOtQ,KAAK8Q,aAAa3E,sCAUzB,OAAOnM,KAAK8Q,aAAanU,oCAUzB,OAAOqD,KAAK8Q,aAAa3E,OAAOC,uCAUhC,OAAOpM,KAAK8Q,aAAa3E,OAAO+N,8CAUhC,OAAOla,KAAK8Q,aAAa3E,OAAOgO,kDAUhC,OAAOna,KAAK8Q,aAAa3E,OAAOiO,aAAepa,KAAK8Q,aAAa3E,OAAOxP,oCASpE,IAAAqa,EAAAhX,KACEuL,EAAO,GAIb,OAHAsB,EAAmB7M,KAAK8M,WAAY,SAAC1Q,GACjCmP,EAAKlF,KAAK2Q,EAAKlG,aAAavF,KAAKnP,MAE9BmP,0CAUP,MAAM,IAAI+E,MAAM,0RCpHH+J,irBAAkBL,yCAY/B,OAHKha,KAAKsa,gBACNta,KAAKsa,cAAgBta,KAAKua,uBAEvBva,KAAKsa,4DAUZ,MAAM,IAAIhK,MAAM,+DAWhB,OAAOtQ,KAAKuL,0QChCCiP,irBAAoBH,0CASjC,OAAOzb,EAAiBC,0DAUL,IAAAwY,EAAArX,KACbkZ,EAAO,IAAIuB,IACXC,EAAS,GAUf,OAPA7N,EAAmB7M,KAAK8M,WAAY,SAAC1Q,GACjC,IAAMsW,EAAQ2E,EAAKvG,aAAavF,KAAKnP,GAChC8c,EAAKtG,IAAIF,KACVwG,EAAKyB,IAAIjI,GACTgI,EAAOrU,KAAKqM,MAGbgI,qQC7BME,eAQjB,SAAAA,EAAa9J,EAAchE,gGAAY+N,CAAA7a,KAAA4a,GAAA,IAAA5D,mKAAA8D,CAAA9a,MAAA4a,EAAAG,WAAAje,OAAAke,eAAAJ,IAAAre,KAAAyD,KAC7B8Q,EAAchE,IADe,OAGnCkK,EAAKiE,eAAiB,KAHajE,qUARLqD,sDAqBX,IAAAhD,EAAArX,KACbkZ,EAAO,IAAIuB,IACXC,EAAS,GAYf,OARA7N,EAAmB7M,KAAK8M,WAAY,SAAC1Q,GACjC,IAAMsW,EAAQ2E,EAAKvG,aAAavF,
KAAKnP,GAChC8c,EAAKtG,IAAIF,KACVwG,EAAKyB,IAAIjI,GACTgI,EAAOrU,KAAKqM,MAIbgI,yDAWP,GAAI1a,KAAKib,eACL,OAAOjb,KAAKib,eAUhB,IAPA,IAAMC,EAAalb,KAAKuL,OAAOkK,OAAO,SAAAjL,GAAA,QAAUA,aAAgB6C,KAAoBuE,KAAK,SAAC5O,EAAGO,GAAJ,OAAUP,EAAIO,IACjG4X,EAAQD,EAAWvZ,OACrByZ,EAAUzS,OAAO0S,kBACjBC,SACAC,SACAC,EAAiB,EAEZpf,EAAI,EAAGA,EAAI+e,EAAO/e,IACvBkf,EAAYJ,EAAW9e,EAAI,IAC3Bmf,EAAYL,EAAW9e,MAELkf,IAIlBF,EAAUpX,KAAKiS,IAAImF,EAASG,EAAYL,EAAW9e,EAAI,IACvDof,KAQJ,OALKA,IACDJ,EAAU,MAEdpb,KAAKib,eAAiBG,EAEfpb,KAAKib,gDAUZ,OAAOjb,KAAK8Q,aAAa3E,OAAOpM,+CAUnB,IAAA0b,EAAAzb,KACPuL,EAAO,GASb,OARAsB,EAAmB7M,KAAK8M,WAAY,SAAC1Q,GACjC,IAAMsW,EAAQ+I,EAAK3K,aAAavF,KAAKnP,GACjCsW,aAAiBrF,EACjB9B,EAAKlF,KAAKqM,GAEVnH,EAAKlF,KAAKvG,EAAkByG,SAASmM,EAAO+I,EAAK1b,aAGlDwL,qQC3GMmQ,irBAAerB,sDAS5B,IAAMsB,EAAU3b,KAAK8Q,aAAa3E,OAAOyP,KACzC,MAAO,CAACD,EAAQ,GAAIA,EAAQA,EAAQha,OAAS,mCAU7C,OAAO3B,KAAK8Q,aAAa3E,OAAOyP,wQClBnBC,irBAAgB7B,yCAY7B,OAHKha,KAAKsa,gBACNta,KAAKsa,cAAgBta,KAAKua,uBAEvBva,KAAKsa,6CAUZ,OAAOta,KAAK8Q,aAAa3E,OAAO2P,wCAUhC,OAAO9b,KAAK8Q,aAAa3E,OAAOmM,UAAYxB,0CAShC,IACJiF,EAAiB/b,KAAK8Q,aAAa3E,OAAnC4P,aACR,OAAOA,aAAwBnT,SAAWmT,EAAe1Q,gDAUzD,MAAM,IAAIiF,MAAM,+DAWhB,OAAOtQ,KAAKuL,0QC/DCyQ,irBAAmBH,0CAShC,OAAO5c,EAAeC,yDAUH,IAAAmY,EAAArX,KACfiW,EAAMtN,OAAO0S,kBACbjF,EAAMzN,OAAOsT,kBAiBjB,OAdApP,EAAmB7M,KAAK8M,WAAY,SAAC1Q,GACjC,IAAMsW,EAAQ2E,EAAKvG,aAAavF,KAAKnP,GACjCsW,aAAiBrF,IAIjBqF,EAAQuD,IACRA,EAAMvD,GAENA,EAAQ0D,IACRA,EAAM1D,MAIP,CAACuD,EAAKG,sQC5CA8F,4KAQb,MAAM,IAAI5L,MAAM,0RCJH6L,irBAA0BD,sCAQpC/a,GAQH,OALKkM,EAAkB+O,UAAUjb,GAGpBkM,EAAkBgP,eAAelb,GAFjCuF,OAAOvF,GAAKmb,0QCXZC,eAOjB,SAAAA,EAAapQ,gGAAQqQ,CAAAxc,KAAAuc,GAAA,IAAAvF,mKAAAyF,CAAAzc,MAAAuc,EAAAxB,WAAAje,OAAAke,eAAAuB,IAAAhgB,KAAAyD,OAAA,OAEjBgX,EAAK7K,OAASA,EACd6K,EAAK0F,KAAO,IAAI5c,EAAkBkX,EAAK7K,OAAOpM,QAH7BiX,qUAPmBkF,sCAoBjC/a,GACH,IAAIyC,SAEJ,GAAKyJ,EAAkB+O,UAAUjb,GAI7ByC,EAASyJ,EAAkBgP,eAAelb,OAJP,CACnC,IAAIhB,EAAaH,KAAK0c,KAAKhU,cAAcvH,GACzCyC,EAASzD,EAAaA,EAAW4K,UAAYsC,EAAkBO,GAInE,OAAOhK,qQC9BM+Y,irBAAqBT,sCAQ/B/a,GAEHA,EA
AMuF,OAAOvF,GACb,IAAIyC,SAEJ,GAAKyJ,EAAkB+O,UAAUjb,GAK7ByC,EAASyJ,EAAkBgP,eAAelb,OALP,CACnC,IAAIyb,EAAUzb,EAAIqH,MALR,2DAMV5E,EAASgZ,EAAajU,OAAOkU,WAAWD,EAAQ,IAAvC,IAA8CjU,OAAOkU,WAAWD,EAAQ,IAC9DvP,EAAkBO,GAIzC,OAAOhK,qQCpBMkZ,irBAAyBZ,sCAQnC/a,GACH,IAAIyC,SAEJ,GAAKyJ,EAAkB+O,UAAUjb,GAI7ByC,EAASyJ,EAAkBgP,eAAelb,OAJP,CACnC,IAAIC,EAAYyb,WAAW1b,EAAK,IAChCyC,EAAS+E,OAAOoN,MAAM3U,GAAaiM,EAAkBO,GAAKxM,EAI9D,OAAOwC,qQCnBMmZ,cAUjB,SAAAA,EAAapgB,EAAM4O,EAAMY,EAAQ/J,gGAAQ4a,CAAAhd,KAAA+c,GACrC/c,KAAKrD,KAAOA,EACZqD,KAAKmM,OAASA,EACdnM,KAAKoC,OAASA,EACdpC,KAAKuL,KAAOvL,KAAKid,UAAU1R,gDAUpBA,GAAM,IAAAyL,EAAAhX,KACb,OAAOuL,EAAKnD,IAAI,SAAAsK,GAAA,OAASsE,EAAK5U,OAAOwE,MAAM8L,cCiE5C,SAASwK,GAAaC,EAAYhR,EAAQiR,GAC7C,IAAMC,EAAa,GAUnB,OARMD,GAAWA,EAAQzb,SACrByb,EAAUjR,EAAO/D,IAAI,SAAAoC,GAAA,OAAQA,EAAK7N,QAGtCygB,EAAQ9T,QAAQ,SAACgU,EAAQlhB,GACrBihB,EAAWC,GAAUlhB,IAGlB+P,EAAO/D,IAAI,SAAAoC,GAAA,OAzFtB,SAAyBe,EAAMY,GAC3BZ,EAAOA,GAAQ,GACf,IAAIuF,SAEJ,OAAQ3E,EAAOC,MACf,KAAKjN,EAAUC,QACX,OAAQ+M,EAAO+N,SACf,KAAKjb,EAAeC,WAGpB,QAEI,OADA4R,EAAe,IAAIiM,GAAa5Q,EAAOxP,KAAM4O,EAAMY,EAAQ,IAAI2Q,IACxD,IAAId,GAAWlL,EAAf,MAAkCvF,EAAK5J,OAAS,IAE/D,KAAKxC,EAAUE,UACX,OAAQ8M,EAAO+N,SACf,KAAKtb,EAAiBC,YAElB,OADAiS,EAAe,IAAIiM,GAAa5Q,EAAOxP,KAAM4O,EAAMY,EAAQ,IAAIgQ,IACxD,IAAI3B,GAAY1J,EAAhB,MAAmCvF,EAAK5J,OAAS,IAC5D,KAAK/C,EAAiBE,SAElB,OADAgS,EAAe,IAAIiM,GAAa5Q,EAAOxP,KAAM4O,EAAMY,EAAQ,IAAIoQ,GAAepQ,IACvE,IAAIyO,GAAS9J,EAAb,MAAgCvF,EAAK5J,OAAS,IACzD,KAAK/C,EAAiBI,OAElB,OADA8R,EAAe,IAAIiM,GAAa5Q,EAAOxP,KAAM4O,EAAMY,EAAQ,IAAIwQ,IACxD,IAAIjB,GAAO5K,EAAX,MAA8BvF,EAAK5J,OAAS,IACvD,QAEI,OADAmP,EAAe,IAAIiM,GAAa5Q,EAAOxP,KAAM4O,EAAMY,EAAQ,IAAIgQ,IACxD,IAAI3B,GAAY1J,EAAhB,MAAmCvF,EAAK5J,OAAS,IAEhE,QAEI,OADAmP,EAAe,IAAIiM,GAAa5Q,EAAOxP,KAAM4O,EAAMY,EAAQ,IAAIgQ,IACxD,IAAI3B,GAAY1J,EAAhB,MAAmCvF,EAAK5J,OAAS,KA0DlC4b,CAAgBJ,EAAWE,EAAW7S,EAAK7N,OAAQ6N,KC3GlE,IAAAgT,GAAA,CACXC,WAAYlf,EAAWI,MCuCZ+e,OAvBf,SAAiBhM,EAAK5K,GAIlBA,EAAUhK,OAAO0Q,OAAO,GAHF,CAClBmQ,gBAAgB,GAEuB7W,GAE3C,IAAIwW,SACEM,EAAU,GACVvX,EAAOwX,EAAYD,GAYz
B,OAPIN,EAHAxW,EAAQ6W,eAGCjM,EAAIjK,OAAO,EAAG,GAAG,GAEjB,GAGbiK,EAAIpI,QAAQ,SAAAyC,GAAA,OAAS1F,sIAAQ0F,MAEtB,CAACuR,EAAQM,ICvChBE,GAAM,GACNC,GAAM,GACNC,GAAQ,GACRC,GAAU,GACVC,GAAS,GAEb,SAASC,GAAgBP,GACvB,OAAO,IAAIhV,SAAS,IAAK,WAAagV,EAAQxV,IAAI,SAASzL,EAAMP,GAC/D,OAAOgiB,KAAKC,UAAU1hB,GAAQ,OAASP,EAAI,MAC1C6G,KAAK,KAAO,KA0BF,IAAAqb,GAAA,SAASC,GACtB,IAAIC,EAAW,IAAIpe,OAAO,KAAQme,EAAY,SAC1CE,EAAYF,EAAUG,WAAW,GAWrC,SAASC,EAAUre,EAAMkU,GACvB,IAIIlX,EAJAshB,EAAO,GACPC,EAAIve,EAAKqB,OACTmd,EAAI,EACJjhB,EAAI,EAEJkhB,EAAMF,GAAK,EACXG,GAAM,EAMV,SAAS1Y,IACP,GAAIyY,EAAK,OAAOhB,GAChB,GAAIiB,EAAK,OAAOA,GAAM,EAAOlB,GAG7B,IAAI1hB,EAAUK,EAAPwiB,EAAIH,EACX,GAAIxe,EAAKoe,WAAWO,KAAOjB,GAAO,CAChC,KAAOc,IAAMD,GAAKve,EAAKoe,WAAWI,KAAOd,IAAS1d,EAAKoe,aAAaI,KAAOd,KAI3E,OAHK5hB,EAAI0iB,IAAMD,EAAGE,GAAM,GACdtiB,EAAI6D,EAAKoe,WAAWI,QAAUb,GAASe,GAAM,EAC9CviB,IAAMyhB,KAAUc,GAAM,EAAU1e,EAAKoe,WAAWI,KAAOb,MAAWa,GACpExe,EAAKiU,MAAM0K,EAAI,EAAG7iB,EAAI,GAAGmE,QAAQ,MAAO,KAIjD,KAAOue,EAAID,GAAG,CACZ,IAAKpiB,EAAI6D,EAAKoe,WAAWtiB,EAAI0iB,QAAUb,GAASe,GAAM,OACjD,GAAIviB,IAAMyhB,GAAUc,GAAM,EAAU1e,EAAKoe,WAAWI,KAAOb,MAAWa,OACtE,GAAIriB,IAAMgiB,EAAW,SAC1B,OAAOne,EAAKiU,MAAM0K,EAAG7iB,GAIvB,OAAO2iB,GAAM,EAAMze,EAAKiU,MAAM0K,EAAGJ,GAGnC,IA7BIve,EAAKoe,WAAWG,EAAI,KAAOZ,MAAWY,EACtCve,EAAKoe,WAAWG,EAAI,KAAOX,MAAUW,GA4BjCvhB,EAAIgJ,OAAayX,IAAK,CAE5B,IADA,IAAIzE,EAAM,GACHhc,IAAMwgB,IAAOxgB,IAAMygB,IAAKzE,EAAIjT,KAAK/I,GAAIA,EAAIgJ,IAC5CkO,GAA4B,OAAtB8E,EAAM9E,EAAE8E,EAAKzb,OACvB+gB,EAAKvY,KAAKiT,GAGZ,OAAOsF,EAgBT,SAASM,EAAU5F,GACjB,OAAOA,EAAIlR,IAAI+W,GAAalc,KAAKsb,GAGnC,SAASY,EAAY7e,GACnB,OAAe,MAARA,EAAe,GAChBke,EAASY,KAAK9e,GAAQ,IAAM,IAAOA,EAAKC,QAAQ,KAAM,MAAU,IAChED,EAGR,MAAO,CACLsG,MAlFF,SAAetG,EAAMkU,GACnB,IAAI6K,EAASzB,EAASgB,EAAOD,EAAUre,EAAM,SAASgZ,EAAKld,GACzD,GAAIijB,EAAS,OAAOA,EAAQ/F,EAAKld,EAAI,GACrCwhB,EAAUtE,EAAK+F,EAAU7K,EA9B/B,SAAyBoJ,EAASpJ,GAChC,IAAI1W,EAASqgB,GAAgBP,GAC7B,OAAO,SAAStE,EAAKld,GACnB,OAAOoY,EAAE1W,EAAOwb,GAAMld,EAAGwhB,IA2BM0B,CAAgBhG,EAAK9E,GAAK2J,GAAgB7E,KAGzE,OADAsF,EAAKhB,QAAUA,G
AAW,GACnBgB,GA6EPD,UAAWA,EACX5e,OA1BF,SAAgB6e,EAAMhB,GAEpB,OADe,MAAXA,IAAiBA,EA9EzB,SAAsBgB,GACpB,IAAIW,EAAYziB,OAAOY,OAAO,MAC1BkgB,EAAU,GAUd,OARAgB,EAAKtV,QAAQ,SAASgQ,GACpB,IAAK,IAAIkG,KAAUlG,EACXkG,KAAUD,GACd3B,EAAQvX,KAAKkZ,EAAUC,GAAUA,KAKhC5B,EAkE0B6B,CAAab,IACrC,CAAChB,EAAQxV,IAAI+W,GAAalc,KAAKsb,IAAY1V,OAAO+V,EAAKxW,IAAI,SAASkR,GACzE,OAAOsE,EAAQxV,IAAI,SAASoX,GAC1B,OAAOL,EAAY7F,EAAIkG,MACtBvc,KAAKsb,MACNtb,KAAK,OAqBTyc,WAlBF,SAAoBd,GAClB,OAAOA,EAAKxW,IAAI8W,GAAWjc,KAAK,SCzGhC0c,GAAMC,GAAI,KCAVC,IDEkBF,GAAI/Y,MACA+Y,GAAIhB,UACPgB,GAAI5f,OACA4f,GAAID,WCLrBE,GAAI,OAEQC,GAAIjZ,MACAiZ,GAAIlB,UACPkB,GAAI9f,OACA8f,GAAIH,WC4BhBI,OAXf,SAAiBnV,EAAK7D,GAKlBA,EAAUhK,OAAO0Q,OAAO,GAJF,CAClBmQ,gBAAgB,EAChBoC,eAAgB,KAEuBjZ,GAE3C,IAAM8Y,EAAMI,GAAMlZ,EAAQiZ,gBAC1B,OAAOrC,GAAOkC,EAAIjB,UAAUhU,GAAM7D,ICoBvBmZ,OAxBf,SAAmBvO,GACf,IAAM4L,EAAS,GACXlhB,EAAI,EACJ8jB,SACEtC,EAAU,GACVvX,EAAOwX,EAAYD,GAgBzB,OAdAlM,EAAIpI,QAAQ,SAACkB,GACT,IAAMrB,EAAS,GACf,IAAK,IAAIxL,KAAO6M,EACR7M,KAAO2f,EACP4C,EAAiB5C,EAAO3f,IAExB2f,EAAO3f,GAAOvB,IACd8jB,EAAiB9jB,EAAI,GAEzB+M,EAAO+W,GAAkB1V,EAAK7M,GAElC0I,eAAQ8C,KAGL,CAACrM,OAAOmJ,KAAKqX,GAASM,IC1BlBuC,OAXf,SAAe5U,EAAMzE,GACjB,IAAMsZ,EAAa,CAAEH,YAAUH,UAAQpC,WACjCD,EAAanS,EAAiBC,GAEpC,IAAKkS,EACD,MAAM,IAAInN,MAAM,mCAGpB,OAAO8P,EAAW3C,GAAYlS,EAAMzE,iiBCGjC,SAASkK,GAAiB7H,GAC7B,IAAMkX,EAAO,GAEb,OADAvjB,OAAOmJ,KAAKkD,GAAQG,QAAQ,SAAC3L,GAAU0iB,EAAK1iB,GAAO,IAAI6O,EAAMrD,EAAOxL,GAAMA,KACnE0iB,EAGJ,IAAMC,GAAe,SAAAvI,EAA8BwI,EAAmBC,GAAmB,IAAAC,EAAAC,GAAA3I,EAAA,GAAlEjL,EAAkE2T,EAAA,GAAtDlN,EAAsDkN,EAAA,GACxFE,EAASpN,EAAc5R,OAAS4R,EAAcvG,MAAM,KAAO,GAC3D4T,EAAkBL,EAAkB1U,YACpCgV,EAAYF,EAAOvY,IAAI,SAAA0Y,GAAA,OT+BxB,SAAoChQ,EAAchE,GAAY,IACzDX,EAAW2E,EAAX3E,OAER,OAAQA,EAAOC,MACf,KAAKjN,EAAUC,QACX,OAAQ+M,EAAO+N,SACf,KAAKjb,EAAeC,WAEpB,QACI,OAAO,IAAI8c,GAAWlL,EAAchE,GAE5C,KAAK3N,EAAUE,UACX,OAAQ8M,EAAO+N,SACf,KAAKtb,EAAiBC,YAClB,OAAO,IAAI2b,GAAY1J,EAAchE,GACzC,KAAKlO,EAAiBE,SAClB,OAAO,IAAI8b,GAAS9J,EAAchE,GACtC,KAAKlO,EAAiBI,OAClB,OAAO,IAAI0c,GAAO5K,EAAchE,GACpC,QACI,OA
AO,IAAI0N,GAAY1J,EAAchE,GAE7C,QACI,OAAO,IAAI0N,GAAY1J,EAAchE,IStDNiU,CAA2BH,EAAgBE,GAAMhQ,aAAchE,KAClG,OAAOrB,EAAWC,gBAAgBmV,EAAWL,IAGpCQ,GAAoB,SAACC,EAAOC,GAAuC,IAUvEC,EAV2C5T,EAA4BhI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAnB,GAAI6b,EAAe7b,UAAA,GACxE8b,SACAH,IAAclS,GACdqS,EAAa,CACTC,GAAIJ,EACJK,KAAMhU,EACNiU,SAAUJ,GAEdH,EAAMQ,YAAYpb,KAAKgb,KAGvBA,eAAiBD,IACjBH,EAAMQ,YAAY9f,OAAS,GAC3Bwf,EAAAF,EAAMQ,aAAYpb,KAAlBqB,MAAAyZ,EAAAO,GAA0BL,MAIrBM,GAAe,SAAC7U,EAAY3D,EAAQyY,EAAUrU,EAAQsU,GAC/D,IAAMC,EAAgB,GAClBC,GAAqB,EACnBxkB,EAASgQ,EAAThQ,KACFykB,SACA5I,EAAc,GACdC,EAAgB,kBAAMwI,EAAS3Q,gBAC7B+Q,EAAmB,SAAA/f,GAAA,OAAS0f,EA7CtC,SAA+BzY,EAAQ/M,GACnC,IAAMikB,EAAO,GADyB6B,GAAA,EAAAC,GAAA,EAAAC,OAAAliB,EAAA,IAEtC,QAAAmiB,EAAAC,EAAkBnZ,EAAlBhM,OAAAolB,cAAAL,GAAAG,EAAAC,EAAAlU,QAAAoU,MAAAN,GAAA,EAA0B,KAAjBnW,EAAiBsW,EAAAhlB,MACtBgjB,EAAKtU,EAAMpP,QAAU,IAAI6P,EAAMT,EAAM+E,aAAavF,KAAKnP,GAAI2P,IAHzB,MAAA0W,GAAAN,GAAA,EAAAC,EAAAK,EAAA,aAAAP,GAAAI,EAAAI,QAAAJ,EAAAI,SAAA,WAAAP,EAAA,MAAAC,GAKtC,OAAO/B,EAyCHsC,CAAqBxZ,EAAQjH,GAC7BA,EACAmX,EACAD,IAGAwJ,SAkBJ,OAhBIA,EADArlB,IAAS+B,EAAcE,QACb,SAAA0C,GAAA,OAAU+f,EAAiB/f,IAE3B,SAAAA,GAAA,OAAS+f,EAAiB/f,IAGxC2K,EAAmBC,EAAY,SAAC1Q,GACxBwmB,EAAQxmB,MACmB,IAAvB2lB,GAA4B3lB,IAAO2lB,EAAoB,GACvDC,EAAKF,EAAcngB,OAAS,EAC5BmgB,EAAcE,GAASF,EAAcE,GAAIhV,MAAM,KAAK,GAApD,IAA0D5Q,GAE1D0lB,EAAczb,KAAd,GAAsBjK,GAE1B2lB,EAAoB3lB,KAGrB0lB,EAAc7e,KAAK,MAGjB4f,GAAyB,SAAC5B,EAAO6B,GAA4B,IAAhBvV,EAAgBhI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAP,GACzD2b,EAAY3T,EAAO2T,WAAa3R,EAChCwT,EAAkBxV,EAAOwV,kBAAmB,EAC9CC,EAAM,GAINA,EAHCF,EAAWnhB,OAGNmhB,EAAW1a,IAAI,SAAA6a,GAAA,OACXhP,GAD0ByD,EA6BjCuL,GA5B2BC,UACpB/W,EAAS8H,EAAQ9H,OACjBgX,EAAezL,EAAU0L,kBACzBC,EAAc3L,EAAUzH,gBAAgBpE,YACxCN,EAAO0I,EAAQ1I,KACfmP,EAAS5d,OAAOwmB,OAAOH,GAAchQ,OAAO,SAACC,EAAKmQ,GAEpD,OADAnQ,EAAImQ,EAAEC,IAAI7mB,MAAQ0mB,EAAYE,EAAEC,IAAI7mB,MAAM+d,SACnCtH,GACR,IAEI,SAACjK,GAgBJ,QAfiBoC,EAAK5J,QAAiB4J,EAAKkY,KAAK,SAAAnK,GAAA,OAAOnN,EAAOuX,MAAM,SAACC,GAClE,KAAMA,EAAUhnB,QAAQwM,GACpB,OAAO,EAEX,I
AAM9L,EAAQ8L,EAAOwa,EAAUhnB,MAAMinB,UACrC,GAAIb,GAAmBY,EAAUvX,OAASjN,EAAUC,QAChD,OAAO/B,GAASqd,EAAOiJ,EAAUhnB,MAAM,IAAMU,GAASqd,EAAOiJ,EAAUhnB,MAAM,GAGjF,GAAIgnB,EAAUvX,OAASjN,EAAUE,UAC7B,OAAO,EAEX,IAAMgU,EAAM8P,EAAaQ,EAAUhnB,MAAMuF,MACzC,OAAOoX,EAAIjG,KAASlK,EAAOwa,EAAUhnB,MAAMinB,eAzBpB,IAAClM,EAC1BzD,EACA9H,EACAgX,EACAE,EACA9X,EACAmP,IARJ,CAAC,kBAAM,IAkCjB,IAAImJ,SACA3C,IAAc3R,EAEdsU,EADoB5C,EAAM6C,OAAM,GAAO,GACXC,OAAO,SAAA5a,GAAA,OAAU6Z,EAAIU,MAAM,SAAAM,GAAA,OAAMA,EAAG7a,MAAU,CACtE8a,WAAW,EACX1mB,KAAM+B,EAAcG,MAGxBokB,EAAgB5C,EAAM6C,OAAM,GAAO,GAAOC,OAAO,SAAA5a,GAAA,OAAU6Z,EAAIS,KAAK,SAAAO,GAAA,OAAMA,EAAG7a,MAAU,CACnF5L,KAAM+B,EAAcG,IACpBwkB,WAAW,IAInB,OAAOJ,GAGEK,GAAkB,SAACrC,EAAUD,EAAUuC,EAAcC,GAC9D,IAAMC,EAASxC,EAASiC,MAAMM,EAAYH,WACpCnX,EAAa6U,GACf0C,EAAO7T,YACP6T,EAAOC,uBAAuBnb,OAC9ByY,EACAuC,EACAtC,GAOJ,OALAwC,EAAO7T,YAAc1D,EACrBuX,EAAO9K,wBAAwBgL,wBAE/BvD,GAAkBqD,EAAQrV,EAAuB,CAAEzB,OAAQ4W,GAAgBvC,GAEpEyC,GAGEG,GAAmB,SAAC3C,EAAU4C,EAAWlX,EAAQmX,GAC1D,IAAML,EAASxC,EAASiC,MAAMvW,EAAO0W,WACjCU,EAAgBF,EAgBpB,OAfIlX,EAAOhQ,OAAS+B,EAAcE,UAC9BmlB,EAAgBD,EAAUjP,OAAO,SAAAvB,GAAA,OAA+C,IAAlCuQ,EAAUre,QAAQ8N,MAIpEmQ,EAAOnP,eAAiByP,EAAc1hB,KAAK,KAC3CohB,EAAO9K,wBAAwBgL,wBAE/BvD,GACIqD,EACArV,EACA,CAAEyV,YAAWlX,SAAQqX,gBAAiBD,GACtC,MAGGN,GAGEQ,GAAqB,SAACC,GAO/B,IALAA,EAAa9a,EAAQ,GAAI8a,IACT1Y,OACZ0Y,EAAW1Y,KAAOjN,EAAUE,YAG3BylB,EAAW5K,QACZ,OAAQ4K,EAAW1Y,MACnB,KAAKjN,EAAUC,QACX0lB,EAAW5K,QAAUjb,EAAeC,WACpC,MACJ,QACA,KAAKC,EAAUE,UACXylB,EAAW5K,QAAUtb,EAAiBC,YAK9C,OAAOimB,GAKEC,GAAa,SAACC,EAAUzZ,EAAMY,EAAQrF,GAC/CqF,EAH0B,SAAAA,GAAA,OAAUA,EAAO/D,IAAI,SAAA0c,GAAA,OAAcD,GAAmBC,KAGvEG,CAAe9Y,GACxBrF,EAAUhK,OAAO0Q,OAAO1Q,OAAO0Q,OAAO,GAAI0X,IAAgBpe,GAC1D,IAAMqe,EAAcC,EAAUte,EAAQ2W,YAEtC,IAAM0H,GAAsC,mBAAhBA,EACxB,MAAM,IAAI7U,MAAJ,mCAA6CxJ,EAAQ2W,WAArD,WANiD,IAAA4H,EAS3BF,EAAY5Z,EAAMzE,GATSwe,EAAA5E,GAAA2E,EAAA,GASpD/H,EAToDgI,EAAA,GAS5CC,EAT4CD,EAAA,GAUrD3Z,EAAWuR,GAAaqI,EAAepZ,EAAQmR,GAG/CkI,EAAY/Z,EAAWC,gBAAgBC,EAAU7E,EAAQnK,MAM/D,OALAqoB,EAASS,mBAAqBD,EAE9BR,EAASxU,YAAc+U,E
AAc5jB,QAAU4jB,EAAc,GAAG5jB,OAAzC,MAAuD4jB,EAAc,GAAG5jB,OAAS,GAAM,GAC9GqjB,EAAS9P,eAAkB/I,EAAO/D,IAAI,SAAA+Q,GAAA,OAAKA,EAAExc,OAAOsG,OACpD+hB,EAASU,YAAc5e,EAAQ2W,aAAelf,EAAWI,KAAO2M,EAAiBC,GAAQzE,EAAQ2W,WAC1FuH,GAGE3Q,GAAgB,SAAClI,EAAQJ,GAGlC,IAFA,IAAI3P,EAAI,EAEDA,EAAI+P,EAAOxK,SAAUvF,EACxB,GAAI2P,IAAUI,EAAO/P,GAAGO,KACpB,MAAO,CACHyP,KAAMD,EAAO/P,GAAG8d,SAAW/N,EAAO/P,GAAGgQ,KACrClK,MAAO9F,GAInB,OAAO,MAgCLupB,GAAgC,SAAC1C,EAAWvL,GAAc,IAI5BkO,EAAAC,EAJ4BC,EA5B3B,SAACC,GAClC,IAAMC,EAAaD,EAAMtE,YACrBwE,EAAS,GACT/E,SACJ,GAAI8E,GAAoC,IAAtBA,EAAWrkB,OAEzB,OADAuf,EAAY8E,EAAW,GAAG1E,IAE1B,KAAKtS,EACDiX,EAAS,CAACD,EAAW,GAAGxE,UACxB,MACJ,KAAKxS,EACDiX,EAAS,CAACD,EAAW,GAAGzE,KAAKqD,iBAC7B,MACJ,KAAK5V,EACDkS,EAAY,UACZ+E,EAAS,CAACD,EAAW,GAAGzE,KAAK2E,cAAclZ,MAAM,KAAMgZ,EAAW,GAAGxE,UAO7E,MAAO,CACHN,YACA+E,UAK0BE,CAAsBzO,GAA5CwJ,EADoD4E,EACpD5E,UAAW+E,EADyCH,EACzCG,OACfG,EAAiBnD,EAAU,GAC3BoD,EAAiBpD,EAAU,GAC3B/B,GAAa+E,EAAOtkB,SACpBykB,GAAiBR,EAAA3C,EAAU,IAAG/B,GAAbxZ,MAAAke,EAAAlE,GAA2BuE,GAA3Bpd,OAAA,CAAmC,CAChDob,WAAW,MAEfoC,GAAiBR,EAAA5C,EAAU,IAAG/B,GAAbxZ,MAAAme,EAAAnE,GAA2BuE,GAA3Bpd,OAAA,CAAmC,CAChDob,WAAW,OAGnB,MAAO,CAACmC,EAAgBC,IAWtBC,GAAuB,SAAvBA,EAAwB5O,EAAWuL,GAA8C,IAAnC1V,EAAmChI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAA1B,GAAIghB,EAAsBhhB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAP,GACtEihB,EAAqBD,EAAaC,mBAClCC,EAAgBF,EAAaE,eAAiB,GAEhD/O,IAAc8O,MAIAC,EAAc9kB,SAA+C,IAAtC8kB,EAAcrgB,QAAQsR,KAElDA,EAAUgP,kBAAkBzD,EAAW1V,GAEnCmK,EAAUiP,UAClBrd,QAAQ,SAACyc,GAAU,IAAAa,EACejB,GAA8B1C,EAAW8C,GADxDc,EAAAnG,GAAAkG,EAAA,GACnBR,EADmBS,EAAA,GACHR,EADGQ,EAAA,GAExBP,EAAqBP,EAAO,CAACK,EAAgBC,GAAiB9Y,EAAQgZ,OAkBjEO,GAAqB,SAAC7F,GAC/B,IADoD,IAAd8F,EAAcxhB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAP,GACtC0b,EAAM+F,SACTD,EAAK1gB,KAAK4a,GACVA,EAAQA,EAAM+F,QAElB,OAAOD,GAGEE,GAA2B,SAACC,EAAaC,EAAYC,EAAgB7Z,GAC9E,IAAIiU,SACAyB,SACIoE,EAA4CD,EAA5CC,qBAAsBC,EAAsBF,EAAtBE,kBACxBC,EAAsBH,EAAeI,SACrCC,EAA8Bla,EAAOka,4BAMvCC,EAAY,GAEhB,GAAoB,OAAhBR,IAA8C,IAAtB3Z,EAAOoa,WAC/BD,EAAY,CAA
C,CACTlG,SAAU,SAEX,KAAAzI,EACC6O,EAAkB9qB,OAAOwmB,OAAO+D,EAAqBQ,iBAC/B,IAAtBP,IACAM,EAAkBA,EAAgBnS,OAAO,SAAA/Y,GAAA,OAAKA,EAAE6Q,OAAOia,WAAaD,KAGxE,IAAMO,EAAmBF,EAAgBnS,OAjB5B,SAACsS,GAEd,OADexa,EAAOqC,UAAa,kBAAM,IAC3BmY,EAAOxa,KAeqCnF,IAAI,SAAA4f,GAAA,OAAUA,EAAOza,OAAOiU,WAEhFiF,EAAgB,GAEtB,IAA0B,IAAtBa,EAA6B,CAC7B,IAAMW,EAAwBnrB,OAAOwmB,OAAO+D,EAAqBQ,gBAEjEI,EAAsB3e,QAAQ,SAAC4e,GAC3B,IAAMC,EAAaD,EAAU3a,QACI,IAA7B4a,EAAWC,eAA2BD,EAAWH,SAAWza,EAAOya,QAC/DG,EAAWX,WAAaD,IAC5Bd,EAAcpgB,KAAK6hB,EAAUjH,QAC7BO,EAAWyG,EAAsBxS,OAAO,SAAA/Y,GAAA,OAAKA,IAAMwrB,IAAW9f,IAAI,SAAA1L,GAAA,OAAKA,EAAE6Q,OAAOiU,YACvE7f,QAAU+lB,EAAUrhB,KAAK,CAC9Bmb,WACA6G,OAAQH,EAAUjH,MAClB8F,KAAMD,GAAmBoB,EAAUjH,YAOnDO,GAAWzI,EAAA,IAAGlQ,OAAHnB,MAAAqR,EAAA,GAAAlQ,OAAA6Y,GAAiBoG,GAAjB,CAAmCZ,KAAczR,OAAO,SAAA/Y,GAAA,OAAW,OAANA,IACxEgrB,EAAUrhB,KAAK,CACXmb,WACAiF,wBAAmBA,EAAnB/E,GAAqCnU,EAAOkZ,eAAiB,OAIrE,IAAM6B,EAAYnB,EAAWlG,MAEvBsH,EAAazrB,OAAO0Q,OAAO,CAC7Bgb,kBAAmBtB,EACnBK,uBACDha,GAEGkb,EAAmBtB,EAAWuB,aAChCjB,GAA+BgB,IAC/BxF,EAAYJ,GAAuB4F,EAAkBjH,EAAU,CAC3DuB,gBAAiB0E,IAErBnB,GAAqBmC,EAAkBxF,EAAWsF,IAGtDb,EAAUpe,QAAQ,SAACqf,GACf,IAAMC,EAAmB/F,GAAuByF,EAAWK,EAAInH,UACzDuF,EAAO4B,EAAI5B,KAEjB,GAAIA,EAAM,CACN,IAAMlD,EA1HO,SAACZ,EAAW8D,GACjC,IAAK,IAAI3qB,EAAI,EAAG2M,EAAMge,EAAKplB,OAAQvF,EAAI2M,EAAK3M,IAAK,CAC7C,IAAM6kB,EAAQ8F,EAAK3qB,GACnB6mB,EAAY0C,GAA8B1C,EAAWhC,GAEzD,OAAOgC,EAqHuB4F,CAAiBD,EAAkB7B,EAAK+B,WAC9DH,EAAIN,OAAO3B,kBAAkB7C,EAAe0E,QAE5CjC,GAAqBgC,EAAWM,EAAkBL,EAAY,CAC1D9B,cAAekC,EAAIlC,cACnBD,mBAAoBiB,GAA+BgB,iQC+KpDM,cA/iBX,SAAAA,iGAAwBC,CAAAhpB,KAAA+oB,GACpB,IAAIE,SAEJjpB,KAAKgnB,QAAU,KACfhnB,KAAKyhB,YAAc,GACnBzhB,KAAK2mB,UAAY,GALG,QAAAzd,EAAA3D,UAAA5D,OAARskB,EAAQ7c,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAR4c,EAAQ5c,GAAA9D,UAAA8D,GAOE,IAAlB4c,EAAOtkB,SAAkBsnB,EAAShD,EAAO,cAAe8C,GAExD/oB,KAAKkV,eAAiB+T,EAAO/T,eAC7BlV,KAAKwQ,YAAcyY,EAAOzY,YAC1BxQ,KAAK0lB,YAAcuD,EAAOvD,YAC1B1lB,KAAKgnB,QAAUiC,EACfjpB,KAAKylB,mBAAqBzlB,KAAKgnB,QAAQvB,mBACvCzlB,KAAKkpB,gBAAkBpe,IACvB9K,KAAKuZ,wBAAwBgL,0BAE7BQ,G
AAUA,cAAC/kB,MAAX6I,OAAoBod,IACpBjmB,KAAKkpB,gBAAkBlpB,KAAKylB,mBAAmB9oB,KAC/CqD,KAAKuZ,wBAAwBgL,wBAC7BvkB,KAAKmpB,sBAAwB,CACzBtB,eAAgB,GAChBuB,iBAAkB,oDA0B1B,OAAOppB,KAAKiQ,gBAAgB9G,OAAOf,IAAI,SAAA1L,GAAA,OAAKA,EAAEyP,6CAY9C,OAAOnM,KAAKkpB,wDAIZ,OAAOlpB,KAAKqpB,4DAMZ,OAFArpB,KAAKqpB,YAAc/I,GAAa,CAACtgB,KAAKwQ,YAAaxQ,KAAKkV,gBACnDlV,KAAKskB,uBAAwBtkB,KAAKkpB,iBAChClpB,oDAIP,OAAOA,KAAKylB,gDAiCV6D,EAAU1Z,GACZ,OAAOH,EAAazP,KAAMspB,EAAU1Z,uCAuB3B0Z,GACT,OAAO7Z,EAAazP,KAAMspB,EAAU9P,GAAkBxZ,KAAMspB,IAAW,iCAqBpEC,GACH,OAAO5P,GAAM3Z,KAAMupB,sCAoBXC,GACR,OAAO3U,GAAW7U,KAAMwpB,kCAkDpB5H,EAAUrU,GACd,IAAMkc,EAAY,CACdlsB,KAAM+B,EAAcC,OACpB0kB,WAAW,GAITG,EAAc,CAAEH,WAFtB1W,EAASzQ,OAAO0Q,OAAO,GAAIic,EAAWlc,IAEE0W,WACpCyF,SAEAnc,EAAOhQ,OAAS+B,EAAcG,IAa9BiqB,EAAM,CAZWxF,GACblkB,KACA4hB,EACA,CAAErkB,KAAM+B,EAAcC,QACtB6kB,GAEaF,GACblkB,KACA4hB,EACA,CAAErkB,KAAM+B,EAAcE,SACtB4kB,IAIJsF,EAAMxF,GACFlkB,KACA4hB,EACArU,EACA6W,GAIR,OAAOsF,oCAsBP,OAAQ1pB,KAAKwQ,YAAY7O,SAAW3B,KAAKkV,eAAevT,uCAUhB,IAArCsiB,IAAqC1e,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,KAAAA,UAAA,GACpCokB,SACJ,IAAmB,OAFqBpkB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,KAAAA,UAAA,IAEd,CACtB,IAAM0O,EAAUjU,KAAKkjB,QAAQ,CACzB0G,cAAc,IAEZre,EAAO0I,EAAQ1I,KACfY,EAAS8H,EAAQ9H,OACjB0d,EAAWte,EAAKnD,IAAI,SAACkR,GACvB,IAAMwQ,EAAS,GAIf,OAHA3d,EAAO7C,QAAQ,SAACyC,EAAO3P,GACnB0tB,EAAO/d,EAAMpP,MAAQ2c,EAAIld,KAEtB0tB,IAEXH,EAAe,IAAI3pB,KAAK+pB,YAAYF,EAAU1d,QAG9Cwd,EAAe,IAAI3pB,KAAK+pB,YAAY/pB,MAMxC,OAHIikB,GACAjkB,KAAK2mB,UAAUtgB,KAAKsjB,GAEjBA,kCA8CFlF,EAAWlX,GAChB,IAAMkc,EAAY,CACdlsB,KAAM+B,EAAcC,OACpB0kB,WAAW,GAEf1W,EAASzQ,OAAO0Q,OAAO,GAAIic,EAAWlc,GACtC,IAAMyc,EAAchqB,KAAKojB,kBACnBsB,EAAY5nB,OAAOmJ,KAAK+jB,GACtBzsB,EAASgQ,EAAThQ,KAEJ0sB,EAAsBxF,EAAUtR,OAAO,SAACC,EAAKrH,GAM7C,MAL+B,WAA3BA,EAAMge,YAAYptB,KAClByW,EAAI/M,KAAJqB,MAAA0L,wHAAA8W,CAAYxF,EAAUjP,OAAO,SAAAvB,GAAA,OAA0C,IAA7BA,EAAUiW,OAAOpe,OACpDA,KAASie,GAChB5W,EAAI/M,KAAK0F,GAENqH,GACR,IAEH6W,EAAsB7gB,MAAMI,KAAK,IAAIiR,IAAIwP,IAAsB7hB,IAAI,SAAA2D,GAAA,OAASA,EAAMuQ,SAClF,IAAI5E,SAEAna,IAAS+B,EAAcG,I
ASvBiY,EAAY,CARU8M,GAAiBxkB,KAAMiqB,EAAqB,CAC9D1sB,KAAM+B,EAAcC,OACpB0kB,UAAW1W,EAAO0W,WACnBS,GACkBF,GAAiBxkB,KAAMiqB,EAAqB,CAC7D1sB,KAAM+B,EAAcE,QACpBykB,UAAW1W,EAAO0W,WACnBS,IAIHhN,EADsB8M,GAAiBxkB,KAAMiqB,EAAqB1c,EAAQmX,GAI9E,OAAOhN,4CAIP,OAAO1X,KAAKoqB,6DAWZ,OAPApqB,KAAKoqB,aAAepqB,KAAKqpB,YAAYlgB,OAAOgK,OAAO,SAACC,EAAKiX,EAAUjuB,GAK/D,OAJAgX,EAAIiX,EAAS1tB,QAAU,CACnBuF,MAAO9F,EACPonB,IAAK,CAAE7mB,KAAM0tB,EAAS1tB,OAAQyP,KAAMie,EAASje,OAAQ8N,QAASmQ,EAASnQ,YAEpE9G,GACR,IACIpT,uCAWPA,KAAKgnB,QAAQsD,YAAYtqB,MACzBA,KAAKgnB,QAAU,yCA6BNjB,GACT,IAAI1S,EAAMrT,KAAK2mB,UAAU4D,UAAU,SAAAC,GAAA,OAAWA,IAAYzE,KACjD,IAAT1S,GAAarT,KAAK2mB,UAAUlf,OAAO4L,EAAK,qCAYjCoX,GAA4B,IAApBC,EAAoBnlB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAJ,GAC/Byb,GAAkBhhB,KAAMgP,EAAwB,KAAM0b,GACtD1qB,KAAKgnB,QAAUyD,EACfA,EAAO9D,UAAUtgB,KAAKrG,0CA4BtB,OAAOA,KAAKgnB,8CA6BZ,OAAOhnB,KAAK2mB,mDA4BZ,OAAO3mB,KAAKyhB,mrBCwFLrjB,eAvlBX,SAAAA,IAAsB,IAAA2Z,+FAAA4S,CAAA3qB,KAAA5B,GAAA,QAAA8K,EAAA3D,UAAA5D,OAANwF,EAAMiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAANlC,EAAMkC,GAAA9D,UAAA8D,GAAA,IAAA2N,mKAAA4T,CAAA5qB,MAAA+X,EAAA3Z,EAAA2c,WAAAje,OAAAke,eAAA5c,IAAA7B,KAAAmL,MAAAqQ,EAAA,CAAA/X,MAAA6I,OACT1B,KADS,OAGlB6P,EAAK6T,eAAiB,GACtB7T,EAAK8T,gBAAkB,GAJL9T,qUArCF+R,wCAwGXjiB,GAQLA,EAAUhK,OAAO0Q,OAAO,GAPL,CACfud,MAAO,MACP1oB,UAAW,KACX2oB,SAAS,EACTpB,cAAc,EACdhY,KAAM,IAE8B9K,GACxC,IAAMqC,EAASnJ,KAAKskB,uBAAuBnb,OAErC8hB,EAAgB3X,GAAY/W,KAC9ByD,KACAA,KAAKskB,uBAAuBnb,OAC5BnJ,KAAKwQ,YACL1J,EAAQ8iB,aAAezgB,EAAOf,IAAI,SAAA1L,GAAA,OAAKA,EAAEC,SAAQsG,OAASjD,KAAKkV,eAC/DpO,EAAQ8K,KACR,CACI8B,WAA8B,WAAlB5M,EAAQikB,MACpBtX,SAAU3M,EAAQkkB,UAI1B,IAAKlkB,EAAQzE,UACT,OAAO4oB,EAxBG,IA2BN5oB,EAAcyE,EAAdzE,UACAkJ,EAAuB0f,EAAvB1f,KAAMY,EAAiB8e,EAAjB9e,OAAQyH,EAASqX,EAATrX,KAChBsX,EAAa/e,EAAO/D,IAAK,SAAA/E,GAAA,OAAKA,EAAE1G,OAEhCwuB,EADgBruB,OAAOmJ,KAAK5D,GACA8Q,OAAO,SAACC,EAAKhF,GAC3C,IAAMiF,EAAM6X,EAAW9kB,QAAQgI,GAI/B,OAHa,IAATiF,GACAD,EAAI/M,KAAK,CAACgN,EAAKhR,EAAU+L,KAEtBgF,GACR,IAgCH,MA9BsB,WAAlBtM,EAAQikB,MACRI,EAAY7hB,QAAQ,SAAC8hB,GA
CjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnB7f,EAAK8f,GAAM/hB,QAAQ,SAACoJ,EAAO6Y,GACvBhgB,EAAK8f,GAAME,GAAYD,EAAM/uB,UACzB2D,EACAwS,EACAkB,EAAK2X,GACLpf,EAAOkf,QAKnB9f,EAAKjC,QAAQ,SAACoJ,EAAO6Y,GACjBJ,EAAY7hB,QAAQ,SAAC8hB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnB1Y,EAAM2Y,GAAQC,EAAM/uB,UAChB2D,EACAwS,EAAM2Y,GACNzX,EAAK2X,GACLpf,EAAOkf,QAMhBJ,kCA2BFO,GAAwD,IAA7C7T,EAA6CpS,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAlC,GAAIgI,EAA8BhI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAArB,CAAE0e,WAAW,GAC/CiC,KAAmBsF,EAAUvoB,OAC/BgjB,EAAS,CAACjmB,KAAMwrB,EAAW7T,GACzBmB,EAAerB,gBAAWwO,GAchC,OAZAjF,GACIlI,EACA9J,EACA,CAAEwc,YAAWtF,gBAAe9N,eAAgBZ,GAAaY,kBACzDT,GAGApK,EAAO0W,WACPjkB,KAAK2mB,UAAUtgB,KAAKyS,GAExBA,EAAakO,QAAUhnB,KAEhB8Y,+BAsDLtF,GACF,IAAMiY,EAAUzrB,KAAKkjB,QAAQ,CACzB6H,MAAO,MACPnZ,KAAM4B,IAGJkY,EAAe,CADND,EAAQtf,OAAO/D,IAAI,SAAA2D,GAAA,OAASA,EAAMpP,QACnBkM,OAAO4iB,EAAQlgB,MAEvCogB,EAAW,IAAI3rB,KAAK+pB,YAAY2B,EAAcD,EAAQtf,OAAQ,CAAEsR,WAAY,WAElF,OADAkO,EAASb,gBAAkBtX,EACpBmY,oCAwBAvf,EAAMtF,GACbsF,EAAOA,GAAQpM,KAAK0lB,YACpB5e,EAAUhK,OAAO0Q,OAAO,GAAI,CAAEuS,eAAgB,KAAOjZ,GAErD,IAAMqC,EAASnJ,KAAKiQ,gBAAgB9G,OAC9ByiB,EAAUziB,EAAOf,IAAI,SAAAoM,GAAA,OAAKA,EAAE+Q,kBAC5BsG,EAAYD,EAAQ,GAAGjqB,OACzBmqB,SACAC,SACAC,SAEJ,GAAI5f,IAAS7N,EAAWC,UAEpB,IADAstB,EAAiB,GACZC,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMzS,EAAM,GACZ,IAAK0S,EAAS,EAAGA,EAAS7iB,EAAOxH,OAAQqqB,IACrC1S,EAAInQ,EAAO6iB,GAAQrvB,QAAUivB,EAAQI,GAAQD,GAEjDD,EAAezlB,KAAKiT,QAErB,GAAIlN,IAAS7N,EAAWE,QAAS,CAEpC,IADAqtB,EAAiB,CAAC3iB,EAAOf,IAAI,SAAAoM,GAAA,OAAKA,EAAE7X,SAAQsG,KAAK6D,EAAQiZ,iBACpDgM,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMzS,EAAM,GACZ,IAAK0S,EAAS,EAAGA,EAAS7iB,EAAOxH,OAAQqqB,IACrC1S,EAAIjT,KAAKulB,EAAQI,GAAQD,IAE7BD,EAAezlB,KAAKiT,EAAIrW,KAAK6D,EAAQiZ,iBAEzC+L,EAAiBA,EAAe7oB,KAAK,UAClC,IAAImJ,IAAS7N,EAAWG,QAU3B,MAAM,IAAI4R,MAAJ,aAAuBlE,EAAvB,qBARN,IADA0f,EAAiB,CAAC3iB,EAAOf,IAAI,SAAAoM,GAAA,OAAKA,EAAE7X,UAC/BovB,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMzS,EAAM,GACZ,IAAK0S,EAAS,EAAGA,EAAS7iB,EAAOxH,OAAQqqB,IACrC1S
,EAAIjT,KAAKulB,EAAQI,GAAQD,IAE7BD,EAAezlB,KAAKiT,IAM5B,OAAOwS,mCAGD/f,GACN,IAAMmI,EAAYnI,EAAMpP,OACxBqD,KAAKkV,gBAAL,IAA2BhB,EAC3B,IAAMqM,EAAoBvgB,KAAKylB,mBAE/B,GAAKlF,EAAkB1U,YAAYE,EAAMpP,QAElC,CACH,IAAM4M,EAAagX,EAAkBpX,OAAOohB,UAAU,SAAA0B,GAAA,OAAaA,EAAUtvB,SAAWuX,IACxF3K,GAAc,IAAMgX,EAAkBpX,OAAOI,GAAcwC,QAH3DwU,EAAkBpX,OAAO9C,KAAK0F,GAYlC,OALAwU,EAAkBzU,iBAAmB,KACrCyU,EAAkBhU,iBAAmB,KACrCgU,EAAkBrU,eAAiB,KAEnClM,KAAKuZ,wBAAwBgL,wBACtBvkB,+CAuCQmM,EAAQ+f,EAAY3e,GAAQ,IAAA8J,EAAArX,KAC3CmM,EAAS0Y,GAAmB1Y,GAC5BoB,EAASzQ,OAAO0Q,OAAO,GAAI,CAAEyW,WAAW,EAAMkI,YAAY,GAAS5e,GAEnE,IAAM4V,EAAenjB,KAAKojB,kBACpBgJ,EAAUF,EAAW3X,MAAM,EAAG2X,EAAWvqB,OAAS,GAClD0qB,EAAaH,EAAWA,EAAWvqB,OAAS,GAElD,GAAIwhB,EAAahX,EAAOxP,QAAU4Q,EAAO4e,WACrC,MAAM,IAAI7b,MAASnE,EAAOxP,KAApB,sCAGV,IAAM2vB,EAAkBF,EAAQhkB,IAAI,SAAC2D,GACjC,IAAMwgB,EAAYpJ,EAAapX,GAC/B,IAAKwgB,EAED,MAAM,IAAIjc,MAASvE,EAAb,gCAEV,OAAOwgB,EAAUrqB,QAGf4hB,EAAQ9jB,KAAK8jB,QAEb0I,EAAK1I,EAAM7T,gBAAgB9G,OAC3BsjB,EAAiBH,EAAgBlkB,IAAI,SAAAiL,GAAA,OAAOmZ,EAAGnZ,KAEjD+F,EAAc,GACdC,EAAgB,kBAAMhC,EAAKnG,gBAEzBwb,EAAiB,GACvB7f,EAAmBiX,EAAMtT,YAAa,SAACpU,GACnC,IAAMuwB,EAAaF,EAAerkB,IAAI,SAAA2D,GAAA,OAASA,EAAM+E,aAAavF,KAAKnP,KACvEswB,EAAetwB,GAAKiwB,sIAAcM,GAAd9jB,OAAA,CAA0BzM,EAAGid,EAAeD,OAhCzB,IAAAwT,EAkC3B1P,GAAa,CAACwP,GAAiB,CAACvgB,GAAS,CAACA,EAAOxP,OAA1DoP,EAlCoC8gB,GAAAD,EAAA,MAuC3C,OAJA9I,EAAMgJ,SAAS/gB,GAEfiV,GAAkB8C,EAAO9U,EAAwB,CAAEzB,OAAQpB,EAAQhD,OAAQijB,GAAWC,GAE/EvI,oCAWAoD,GAA2D,IAA9C3Z,EAA8ChI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAArC,GAAIwnB,EAAiCxnB,UAAA,GAAjBgjB,EAAiBhjB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAJ,GACxDynB,EAAkBzf,EAAOyf,gBACzBzF,EAAsBha,EAAOia,SAC7ByF,EAAU1f,EAAO0f,QACjB3E,EFlLkB,SAACrH,GAC7B,KAAOA,EAAM+F,SACT/F,EAAQA,EAAM+F,QAElB,OAAO/F,EE8KeiM,CAAiBltB,MAC7BqnB,EAAuBiB,EAAUa,sBAEjChC,EAAa,CACfuB,aF7LuB,SAAtByE,EAAuBlM,GAChC,OAAIA,EAAM+F,SAAW/F,EAAMQ,YAAY2L,KAAK,SAAA1wB,GAAA,MAAc,UAATA,EAAE4kB,KACxC6L,EAAoBlM,EAAM+F,SAE9B/F,EEuLsBkM,CAAoBntB,MAGzCihB,MAAOqH,GAgBX,OAbAyE,GFnE0B,SAAC1F,GAA6C
,IAAvB9Z,EAAuBhI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAd,GAAI0b,EAAU1b,UAAA,GACxE8nB,SACEL,EAAkBzf,EAAOyf,gBACzBxL,EAAWjU,EAAOiU,SAClB7jB,EAAS4P,EAAOya,OAAhB,IAA0Bza,EAAOia,SAGnC6F,EADAL,EACkB3F,EAAqBQ,eAErBR,EAAqB+B,iBAG1B,OAAb5H,SACO6L,EAAgB1vB,GAEvB0vB,EAAgB1vB,GAAO,CACnBsjB,QACA1T,UEkDc+f,CAAmBjG,EAAsB9Z,EAAQvN,MACnEinB,GAAyBC,EAAaC,EAAY,CAAEE,uBAAsBG,SAAUD,GAChFzqB,OAAO0Q,OAAO,CACVyf,WACD1f,IAEHyf,GF7F6B,SAAC3F,EAAsBF,EAAYC,GACxE,IAAMgC,EAAmB/B,EAAqB+B,iBAE9C,IAAK,IAAMpB,KAAUoB,EAAkB,CACnC,IACMjB,EADYiB,EAAiBpB,GACNza,OACvBga,EAAsBH,EAAe7Z,OAAOia,SAC5C+F,GAAwBnG,EAAemB,WAAWgF,uBACpDnG,EAAemB,WAAWgF,sBAAsBpF,EAAYf,EAAe7Z,QAC/E,GAAI4a,EAAWX,WAAaD,GAAuBgG,EAAuB,CACtE,IAAMC,EAAgBrF,EAAW3G,SACjCyF,GAAyBuG,EAAerG,EAAY,CAChDE,uBACAC,mBAAmB,EACnBE,SAAUD,GACXY,KE+EHsF,CAA0BpG,EAAsBF,EAAY,CACxD5Z,SACAgb,eAIDvoB,gCAUP0tB,EAAW3gB,GACX,OAAQ2gB,GACR,InC/gBmB,cmCghBf1tB,KAAK6qB,eAAexkB,KAAK0G,GAG7B,OAAO/M,yCASE0tB,GACT,OAAQA,GACR,InC9hBmB,cmC+hBf1tB,KAAK6qB,eAAiB,GAI1B,OAAO7qB,+CAUQijB,EAAWgK,GAAS,IAAAxR,EAAAzb,KACfA,KAAK6qB,eACXvhB,QAAQ,SAAA0a,GAAA,OAAMA,EAAGznB,KAAKkf,EAAMwH,EAAWgK,iCA8CpDU,EAAkBpgB,GACnB,IAAM4V,EAAenjB,KAAKojB,kBAE1B,IAAKD,EAAawK,GACd,MAAM,IAAIrd,MAAJ,SAAmBqd,EAAnB,kBAGV,IAAMC,EAAergB,EAAO5Q,MAAWgxB,EAAlB,UAErB,GAAIxK,EAAayK,GACb,MAAM,IAAItd,MAAJ,SAAmBsd,EAAnB,mBAGV,IAb2BC,ErCljB5B,SAAgCC,EAAchhB,EAAYS,GAAQ,IAC/DY,EAA4CZ,EAA5CY,QAAS4f,EAAmCxgB,EAAnCwgB,UAAW7f,EAAwBX,EAAxBW,QAASf,EAAeI,EAAfJ,MAAOC,EAAQG,EAARH,IAD2B4gB,EAEhDF,EAAapT,SAFmCuT,EAAAC,EAAAF,EAAA,GAE9DG,EAF8DF,EAAA,GAExDG,EAFwDH,EAAA,GAIhE9f,IACDhB,EAAmB,IAAVA,KAAiBA,GAASA,EAAQghB,GAASA,EAAOhhB,EAC3DC,EAAe,IAARA,KAAeA,GAAOA,EAAMghB,GAAUA,EAAO,EAAKhhB,EAErD2gB,IACA7f,EAAUlK,KAAKqqB,KAAKrqB,KAAKsqB,IAAIlhB,EAAMD,GAAS4gB,IAGhD5f,EAAUF,EAAgBC,EAASf,EAAOC,IAG1Ce,EAAQ,GAAKggB,GACbhgB,EAAQvG,QAAQumB,GAEhBhgB,EAAQA,EAAQxM,OAAS,IAAMysB,GAC/BjgB,EAAQ9H,KAAK+nB,EAAO,GAIxB,IADA,IAAM9f,EAAe,GACZlS,EAAI,EAAGA,EAAI+R,EAAQxM,OAAS,EAAGvF,IACpCkS,EAAajI,KAAK,CACd8G,MAAOgB,EAAQ/R,GACfgR,IAAKe,EAAQ/R,EAAI,KAIz
B,IAAMmyB,EAAa,GAYnB,OAXA1hB,EAAmBC,EAAY,SAAC1Q,GAC5B,IAAMsW,EAAQob,EAAahd,aAAavF,KAAKnP,GAC7C,GAAIsW,aAAiBrF,EACjBkhB,EAAWloB,KAAKqM,OADpB,CAKA,IAAMlR,EAAQ6M,EAAgBC,EAAcoE,GAC5C6b,EAAWloB,KAAQ7E,EAAM2L,MAAzB,IAAkC3L,EAAM4L,QAGrC,CAAEmhB,aAAY3S,KAAMzN,GqCshBMqgB,CADRxuB,KAAKiQ,gBAAgBpE,YAAY8hB,GACW3tB,KAAKwQ,YAAajD,GAA3EghB,EAdmBV,EAcnBU,WAAY3S,EAdOiS,EAcPjS,KAEd6S,EAAWvR,GAAa,CAACqR,GAAa,CACxC,CACI5xB,KAAMixB,EACNxhB,KAAMjN,EAAUE,UAChB6a,QAAStb,EAAiBI,OAC1B4c,SACA,CAACgS,IAAe,GAElB9J,EAAQ9jB,KAAK8jB,QAKnB,OAJAA,EAAMgJ,SAAS2B,GAEfzN,GAAkB8C,EAAO9U,EAAoB,CAAE2e,mBAAkBpgB,SAAQqgB,gBAAgB,MAElF9J,yCA8BP,OAAO,IAAI1lB,EAHE4B,KAAK0uB,UAAUnwB,EAAWC,WACxBwB,KAAK2uB,kEAljBWphB,GAC/B,OAAOF,EAAkBK,iBAAiBH,oCAf1C,OAAOiK,YCxFA9B,GAAoDM,GAApDN,IAAKG,GAA+CG,GAA/CH,IAAKI,GAA0CD,GAA1CC,IAAKG,GAAqCJ,GAArCI,IAAKC,GAAgCL,GAAhCK,MAAOC,GAAyBN,GAAzBM,KAAMC,GAAmBP,GAAnBO,MAAYqY,GAAO5Y,GAAZQ,YCsBvDpY,GAAUywB,UAAY,CAClBC,QC4LmB,mBAAAC,EAAAxpB,UAAA5D,OAAIqtB,EAAJ5lB,MAAA2lB,GAAAE,EAAA,EAAAA,EAAAF,EAAAE,IAAID,EAAJC,GAAA1pB,UAAA0pB,GAAA,OACnB,SAAC7Z,GAAqC,IAAjC7H,EAAiChI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAxB,CAAE0e,WAAW,GACnBiL,EAAY9Z,EACZ+Z,SACEC,EAAc,GACdnL,EAAY1W,EAAO0W,UAezB,OAbA+K,EAAW1lB,QAAQ,SAAC4X,GAChBgO,EAAYhO,EAAUgO,GACtBE,EAAY/oB,KAAZqB,MAAA0nB,wHAAAC,CAAoBH,EAAUzN,cACzB0N,IACDA,EAAYD,KAIpBjL,GAAaiL,EAAUI,UAAUla,EAAIga,GACjCA,EAAYztB,OAAS,GACrBwtB,EAAUI,UAGPL,ID/MXM,IC0He,mBAAAC,EAAAlqB,UAAA5D,OAAIwF,EAAJiC,MAAAqmB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAIvoB,EAAJuoB,GAAAnqB,UAAAmqB,GAAA,OAAa,SAAAta,GAAA,OAAMA,EAAGoa,IAAH9nB,MAAA0N,EAAUjO,KDzH5C4c,OC8BkB,mBAAA7a,EAAA3D,UAAA5D,OAAIwF,EAAJiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAIlC,EAAJkC,GAAA9D,UAAA8D,GAAA,OAAa,SAAA+L,GAAA,OAAMA,EAAG2O,OAAHrc,MAAA0N,EAAajO,KD7BlDwoB,QC6DmB,mBAAAC,EAAArqB,UAAA5D,OAAIwF,EAAJiC,MAAAwmB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI1oB,EAAJ0oB,GAAAtqB,UAAAsqB,GAAA,OAAa,SAAAza,GAAA,OAAMA,EAAGua,QAAHjoB,MAAA0N,EAAcjO,KD5DpDsQ,QCoJmB,mBAAAqY,EAAAvqB,UAAA5D,OAAIwF,EAAJiC,MAAA0mB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI5oB,EAA
J4oB,GAAAxqB,UAAAwqB,GAAA,OAAa,SAAA3a,GAAA,OAAMA,EAAGqC,QAAH/P,MAAA0N,EAAcjO,KDnJpD6oB,kBEvB6B,mBAAA9mB,EAAA3D,UAAA5D,OAAIwF,EAAJiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAIlC,EAAJkC,GAAA9D,UAAA8D,GAAA,OAAa,SAAA+L,GAAA,OAAMA,EAAG4a,kBAAHtoB,MAAA0N,EAAwBjO,KFwBxEyK,KEfgB,mBAAAge,EAAArqB,UAAA5D,OAAIwF,EAAJiC,MAAAwmB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI1oB,EAAJ0oB,GAAAtqB,UAAAsqB,GAAA,OAAa,SAAAza,GAAA,OAAMA,EAAGxD,KAAHlK,MAAA0N,EAAWjO,KFgB9CsI,eACAoF,cACAob,YG/BG,SAAsBpW,EAAYC,GACrC,OAAOrK,EAAaoK,EAAYC,EAAYN,GAAkBK,EAAYC,IAAa,IH+BvFF,iBACAG,kBACAmW,c3BxBG,SAAwBrW,EAAYC,EAAYlK,GACnD,OAAO+J,GAAMC,GAAcC,EAAYC,EAAYlK,GAAWmK,GAAeF,EAAYC,EAAYlK,K2BwBrG+J,UAEJvb,GAAU+xB,MAAQA,EAClBrzB,OAAO0Q,OAAOpP,GAAWgyB,GACzBhyB,GAAU0B,kBAAoBA,EAC9B1B,GAAUG,WAAaA,EACvBH,GAAUkB,cAAgBA,EAC1BlB,GAAUiP,kBAAoBA,EAC9BjP,GAAUiyB,QAAUC,GAAID,QAETjyB","file":"datamodel.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n 
\t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = 
function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = 1);\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? DataModel.default : DataModel;\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n GEO: 'geo',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date 
instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a number if its less than 1o. This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. 
It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need 
to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (val, field) {\n Object.defineProperty(this, '_value', {\n enumerable: false,\n configurable: false,\n writable: false,\n value: val\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 
'bin'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, 
field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = field.partialField.data[i];\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = field.partialField.data[ii];\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n 
tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort 
(arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray, } from '../utils';\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @param {integer} index - The index of the data which will be sorted.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType, index) {\n let retFunc;\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'desc') {\n retFunc = (a, b) => b[index] - a[index];\n } else {\n retFunc = (a, b) => a[index] - b[index];\n }\n break;\n default:\n retFunc = (a, b) => {\n const a1 = `${a[index]}`;\n const b1 = `${b[index]}`;\n if (a1 < b1) {\n return sortType === 'desc' ? 1 : -1;\n }\n if (a1 > b1) {\n return sortType === 'desc' ? 
-1 : 1;\n }\n return 0;\n };\n }\n return retFunc;\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData(data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data before return in dataBuilder.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction sortData(dataObj, sortingDetails) {\n const { data, schema } = dataObj;\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n 
if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n sortMeta = String(sortMeta).toLowerCase() === 'desc' ? 'desc' : 'asc';\n mergeSort(data, getSortFn(fDetails.type, sortMeta, fDetails.index));\n }\n }\n\n dataObj.uids = [];\n data.forEach((value) => {\n dataObj.uids.push(value.pop());\n });\n}\n\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. 
*/\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should 
match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns 
the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n sum,\n avg,\n min,\n max,\n first,\n last,\n count,\n std\n};\n\nconst defaultReducerName = 'sum';\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].value ===\n dm2Fields[fieldName].value && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = 
value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} 
rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n 
*/\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * 
@extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return 
this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n data.push(datum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, this.format()));\n }\n });\n return data;\n }\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n 
*\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n 
}\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = 
this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case 
DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n 
*\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr (arr, options) {\n const defaultOption = {\n firstRowHeader: true,\n };\n options = Object.assign({}, defaultOption, options);\n\n let header;\n const columns = [];\n const push = columnMajor(columns);\n\n if (options.firstRowHeader) {\n // If header present then mutate the array.\n // Do in-place mutation to save space.\n header = arr.splice(0, 1)[0];\n } else {\n header = [];\n }\n\n arr.forEach(field => push(...field));\n\n return [header, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return 
JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) 
{ eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n })).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(text) {\n return text == null ? \"\"\n : reFormat.test(text += \"\") ? \"\\\"\" + text.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : text;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of 
the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr) {\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n\n arr.forEach((item) => {\n const fields = [];\n for (let key in item) {\n if (key in header) {\n insertionIndex = header[key];\n } else {\n header[key] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[key];\n }\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config 
specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, i) {\n const resp = {};\n for (let field of fields) {\n resp[field.name()] = new Value(field.partialField.data[i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n Object.keys(fields).forEach((key) => { resp[key] = new Value(fields[key], key); });\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistDerivation = (model, operation, config = {}, criteriaFn) => {\n let derivative;\n if (operation !== DM_DERIVATIVES.COMPOSE) {\n derivative = {\n op: operation,\n meta: config,\n criteria: criteriaFn\n };\n model._derivation.push(derivative);\n }\n else {\n derivative = [...criteriaFn];\n model._derivation.length = 0;\n model._derivation.push(...derivative);\n }\n};\n\nexport const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => {\n const newRowDiffSet = [];\n let lastInsertedValue = -1;\n let { mode } = config;\n let li;\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n let checker;\n if (mode === FilteringMode.INVERSE) {\n checker = index => !selectorHelperFn(index);\n } else {\n checker = index => selectorHelperFn(index);\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n li = newRowDiffSet.length - 1;\n newRowDiffSet[li] = `${newRowDiffSet[li].split('-')[0]}-${i}`;\n } else {\n newRowDiffSet.push(`${i}`);\n }\n lastInsertedValue = i;\n }\n });\n return newRowDiffSet.join(',');\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n let fns = [];\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n const dataObj = dataModel.getData();\n const schema = dataObj.schema;\n const fieldsConfig = 
dataModel.getFieldsConfig();\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = Object.values(fieldsConfig).reduce((acc, v) => {\n acc[v.def.name] = fieldsSpace[v.def.name].domain();\n return acc;\n }, {});\n\n return (fields) => {\n const include = !data.length ? false : data.some(row => schema.every((propField) => {\n if (!(propField.name in fields)) {\n return true;\n }\n const value = fields[propField.name].valueOf();\n if (filterByMeasure && propField.type === FieldType.MEASURE) {\n return value >= domain[propField.name][0] && value <= domain[propField.name][1];\n }\n\n if (propField.type !== FieldType.DIMENSION) {\n return true;\n }\n const idx = fieldsConfig[propField.name].index;\n return row[idx] === fields[propField.name].valueOf();\n }));\n return include;\n };\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n const clonedModel = model.clone(false, false);\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false,\n mode: FilteringMode.ALL\n });\n } else {\n filteredModel = model.clone(false, false).select(fields => fns.some(fn => fn(fields)), {\n mode: FilteringMode.ALL,\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const rowDiffset = selectHelper(\n cloned._rowDiffset,\n cloned.getPartialFieldspace().fields,\n selectFn,\n selectConfig,\n sourceDm\n );\n cloned._rowDiffset = rowDiffset;\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn);\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n 
projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema));\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, options);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? 
`0-${formattedData[0].length - 1}` : '';\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n type: schema[i].subtype || schema[i].type,\n index: i\n };\n }\n }\n return null;\n};\n\n\nexport const getOperationArguments = (child) => {\n const derivation = child._derivation;\n let params = [];\n let operation;\n if (derivation && derivation.length === 1) {\n operation = derivation[0].op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation[0].criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation[0].meta.actualProjField];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation[0].meta.groupByString.split(','), derivation[0].criteria];\n break;\n default:\n break;\n }\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const { operation, params } = getOperationArguments(dataModel);\n let selectionModel = propModel[0];\n let rejectionModel = propModel[1];\n if (operation && params.length) {\n selectionModel = propModel[0][operation](...params, {\n saveChild: false\n });\n rejectionModel = propModel[1][operation](...params, {\n saveChild: false\n });\n }\n return [selectionModel, rejectionModel];\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel 
=== nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n let [selectionModel, rejectionModel] = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, [selectionModel, rejectionModel], config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n if (model._parent && model._derivation.find(d => d.op !== 'group')) {\n return getRootGroupByModel(model._parent);\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n 
sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const 
filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport { persistDerivation, updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\nimport { DM_DERIVATIVES } from './constants';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} 
[options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. 
If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n\n const cloneConfig = { saveChild: config.saveChild };\n let oDm;\n\n if (config.mode === FilteringMode.ALL) {\n const selectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.NORMAL },\n cloneConfig\n );\n const rejectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.INVERSE },\n cloneConfig\n );\n oDm = [selectDm, rejectDm];\n } else {\n oDm = cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n return oDm;\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true, linkParent = true) {\n let retDataModel;\n if (linkParent === false) {\n const dataObj = this.getData({\n getAllFields: true\n });\n const data = dataObj.data;\n const schema = 
dataObj.schema;\n const jsonData = data.map((row) => {\n const rowObj = {};\n schema.forEach((field, i) => {\n rowObj[field.name] = row[i];\n });\n return rowObj;\n });\n retDataModel = new this.constructor(jsonData, schema);\n }\n else {\n retDataModel = new this.constructor(this);\n }\n\n if (saveChild) {\n this._children.push(retDataModel);\n }\n return retDataModel;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n\n let normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n\n normalizedProjField = Array.from(new Set(normalizedProjField)).map(field => field.trim());\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldDef, i) => {\n acc[fieldDef.name()] = {\n index: i,\n def: { name: fieldDef.name(), type: fieldDef.type(), subtype: fieldDef.subtype() }\n };\n return acc;\n }, {});\n return this;\n }\n\n\n 
/**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent.removeChild(this);\n this._parent = null;\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? 
this._children.splice(idx, 1) : true;\n }\n\n /**\n * Adds the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * The optional criteriaQueue is an array containing the history of transaction performed on parent\n * {@link DataModel} to get the current one.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n * @param {Array} criteriaQueue - Queue contains in-between operation meta-data.\n */\n addParent (parent, criteriaQueue = []) {\n persistDerivation(this, DM_DERIVATIVES.COMPOSE, null, criteriaQueue);\n this._parent = parent;\n parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", 
Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren() {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations() {\n return this._derivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat } from './enums';\nimport {\n persistDerivation,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } 
from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. 
Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n this._sortingDetails = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 
'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivation(\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n this._children.push(newDataModel);\n }\n newDataModel._parent = this;\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order in first level 
followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n sortedDm._sortingDetails = sortingDetails;\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current 
{@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = 
serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone();\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn);\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n /**\n * Associates a callback with an 
event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone();\n clone.addField(binField);\n\n persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null);\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return new DataModel(data, schema);\n 
}\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nDataModel.Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\nDataModel.Stats = Stats;\nObject.assign(DataModel, enums);\nDataModel.DateTimeFormatter = DateTimeFormatter;\nDataModel.DataFormat = DataFormat;\nDataModel.FilteringMode = FilteringMode;\nDataModel.InvalidAwareTypes = InvalidAwareTypes;\nDataModel.version = pkg.version;\n\nexport default DataModel;\n","\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. 
{@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. 
{@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. 
Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. 
The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let frstChild;\n const derivations = [];\n const saveChild = config.saveChild;\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!frstChild) {\n frstChild = currentDM;\n }\n });\n\n saveChild && currentDM.addParent(dm, derivations);\n if (derivations.length > 1) {\n frstChild.dispose();\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on 
calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file diff --git a/example/js/datamodel.js b/example/js/datamodel.js index 29f4631..1ad9c86 100644 --- a/example/js/datamodel.js +++ b/example/js/datamodel.js @@ -1,2 +1,2 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var a=t[r]={i:r,l:!1,exports:{}};return e[r].call(a.exports,a,a.exports,n),a.l=!0,a.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)n.d(r,a,function(t){return 
e[t]}.bind(null,a));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports={name:"datamodel",description:"Relational algebra compliant in-memory tabular data store",homepage:"https://github.com/chartshq/datamodel",version:"2.0.2",license:"MIT",main:"dist/datamodel.js",author:"Charts.com ",keywords:["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],repository:{type:"git",url:"https://github.com/chartshq/datamodel.git"},contributors:[{name:"Akash Goswami",email:"akash@charts.com"},{name:"Subhash Haldar",email:"subhash@charts.com"},{name:"Rousan Ali",email:"rousan@charts.com",url:"https://rousan.io"},{name:"Ujjal Kumar Dutta",email:"ujjal@charts.com"}],dependencies:{"d3-dsv":"^1.0.8"},devDependencies:{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0",chai:"3.5.0","cross-env":"^5.0.5",eslint:"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0",jsdoc:"3.5.5",json2yaml:"^1.1.0",karma:"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3",marked:"^0.5.0",mocha:"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0",webpack:"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},scripts:{test:"npm run lint && npm run ut",ut:"karma start karma.conf.js",utd:"karma start --single-run false --browsers Chrome karma.conf.js ",build:"webpack --mode production",start:"webpack-dev-server --config webpack.config.dev.js --mode 
development --open",lint:"eslint ./src","lint-errors":"eslint --quiet ./src",docs:"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l});var a={};n.r(a),n.d(a,"DSVArr",function(){return Le}),n.d(a,"DSVStr",function(){return ze}),n.d(a,"FlatJSON",function(){return Xe}),n.d(a,"Auto",function(){return $e});var i={};n.r(i),n.d(i,"sum",function(){return yt}),n.d(i,"avg",function(){return gt}),n.d(i,"min",function(){return bt}),n.d(i,"max",function(){return wt}),n.d(i,"first",function(){return Ot}),n.d(i,"last",function(){return _t}),n.d(i,"count",function(){return Et}),n.d(i,"sd",function(){return At});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"};function s(e){return e instanceof Date?e:new Date(e)}function p(e){return e<10?"0"+e:e}function d(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},d.TOKEN_PREFIX="%",d.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},d.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},d.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;a=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},d.formatAs=function(e,t){var n,r=s(e),a=d.findTokens(t),i=d.getTokenDefinitions(),o=String(t),u=d.TOKEN_PREFIX,c=void 0,f=void 0,l=void 
0;for(l=0,n=a.length;l=0;p--)(f=i[p].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(p=0;p0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var T=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}();function P(e,t,n){var r=n.buckets,a=n.binCount,i=n.binSize,o=n.start,u=[],c=[],f=e.domain(),l=T(f,2),s=l[0],p=l[1],d=p,h=[],v=void 0,m=void 0,y=void 0,g=void 0;if(D(t,function(t){u.push({data:e.partialField.data[t],index:t})}),!r){var b=((p+=1)-s)%(i=i||(p-s)/a);for(a||0===b||(p=p+i-b),v=s+i;v<=p;)h.push(v),v+=i;r={start:o=o||s,stops:h}}m=0===r.start?0:r.start||s,r.stops.forEach(function(e){u.filter(function(t){return t.data>=m&&t.data=r.stops[r.stops.length-1]}).forEach(function(e){c[e.index]=r.stops[r.stops.length-1]+"-"+d}),r.stops.unshift(r.start),g=new Set(r.stops),sr.stops[r.stops.length-1]&&g.add(d),g=[].concat(function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:H.CROSS,i=[],o=[],u=n||Y,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,p=c.name+"."+f.name,d=x(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=O({},e.schema());-1===d.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=O({},e.schema());-1!==d.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(e){var n=!1,p=void 
0;D(t._rowDiffset,function(t){var h=[],v={};v[l]={},v[s]={},c.fields.forEach(function(t){h.push(t.partialField.data[e]),v[l][t.name()]=t.partialField.data[e]}),f.fields.forEach(function(e){-1!==d.indexOf(e.schema().name)&&r||h.push(e.partialField.data[t]),v[s][e.name()]=e.partialField.data[t]});var m=tt(v[l]),y=tt(v[s]);if(u(m,y)){var g={};h.forEach(function(e,t){g[i[t].name]=e}),n&&H.CROSS!==a?o[p]=g:(o.push(g),n=!0,p=e)}else if((a===H.LEFTOUTER||a===H.RIGHTOUTER)&&!n){var b={},w=c.fields.length-1;h.forEach(function(e,t){b[i[t].name]=t<=w?e:null}),n=!0,p=e,o.push(b)}})}),new mt(o,i,{name:p})}function J(e,t){var n=""+e,r=""+t;return nr?1:0}function G(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:J;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function K(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function z(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function X(e,t,n,r,a){var i={schema:[],data:[],uids:[]},o=(a=Object.assign({},{addUid:!1,columnWise:!1},a)).addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=ct(r,a))&&(E(i)?G(n,function(e,t){return i(e[o.index],t[o.index])}):_(i)?function(){var e=q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return ct(r,e)});e.forEach(function(e){e.push(z(e,a,u))}),G(e,function(e,n){var r=e[2],a=n[2];return 
t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,K(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",G(n,W(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,K(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function $(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(O({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new mt(i,r,{name:l})}function Q(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=t,a=e.getPartialFieldspace().getMeasure(),i=ae.defaultReducer();return"function"==typeof t&&(i=t),Object.entries(a).forEach(function(e){var o=ie(e,1)[0];"string"==typeof t[o]&&(r[o]=ae.resolve(r[o])?ae.resolve(r[o]):i),"function"!=typeof t[o]&&(r[o]=void 0),n[o]=r[o]||ae.resolve(a[o].defAggFn())||i}),n}(e,n),o=e.getPartialFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],p=[],d={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=ie(e,2),n=t[0],r=t[1];(-1!==a.indexOf(n)||i[n])&&(p.push(O({},r.schema())),r.schema().type===f.MEASURE?s.push(n):r.schema().type===f.DIMENSION&&l.push(n))});var m=0;return D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 
0===d[t]?(d[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[d[t]][n].push(u[n].partialField.data[e])})}),h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n])})}),r?(r.__calculateFieldspace(),v=r):v=new St(h,p,{name:c}),v}function ue(e,t){var n=x(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function ce(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(O({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new St(i,r,{name:l})}function fe(e,t,n){return B(e,t,n,!1,H.LEFTOUTER)}function le(e,t,n){return B(t,e,n,!1,H.RIGHTOUTER)}var se=function(){function e(e,t){for(var n=0;nn&&(n=a)}),[t,n]}}]),t}(),je=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===Be?f=!0:r===Je&&(f=!0,e.charCodeAt(o)===Be&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3],i=void 0;t!==I?(i={op:t,meta:r,criteria:a},e._derivation.push(i)):(i=[].concat(Ze(a)),e._derivation.length=0,(n=e._derivation).push.apply(n,Ze(i)))},at=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||V,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return function(e){var t=e.getData(),n=t.schema,r=e.getFieldsConfig(),i=e.getFieldspace().fieldsObj(),o=t.data,u=Object.values(r).reduce(function(e,t){return e[t.def.name]=i[t.def.name].domain(),e},{});return 
function(e){return!!o.length&&o.some(function(t){return n.every(function(n){if(!(n.name in e))return!0;var i=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return i>=u[n.name][0]&&i<=u[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=r[n.name].index;return t[o]===e[n.name].valueOf()})})}}(e)}):[function(){return!1}];var o=void 0;r===V?o=e.clone(!1,!1).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):o=e.clone(!1,!1).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1});return o},it=function(e,t,n,r){var a=e.clone(r.saveChild),i=function(e,t,n,r){var a=[],i=-1,o=void 0,u=function(e){return n(et(t,e),e)};return r.mode===l.INVERSE&&(u=function(e){return!n(et(t,e))}),D(e,function(e){u(e)&&(-1!==i&&e===i+1?(o=a.length-1,a[o]=a[o].split("-")[0]+"-"+e):a.push(""+e),i=e)}),a.join(",")}(a._rowDiffset,a.getPartialFieldspace().fields,t,n);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),r.saveChild&&rt(a,R,{config:n},t),a},ot=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),n.saveChild&&rt(a,C,{projField:t,config:n,actualProjField:i},null),a},ut=function(e,t,n,r){r=Object.assign(Object.assign({},Ue),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var o=i(t,r),u=Qe(o,2),c=u[0],f=u[1],l=Ie(f,n,c),s=k.createNamespace(l,r.name);return e._partialFieldspace=s,e._rowDiffset=f.length&&f[0].length?"0-"+(f[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e},ct=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var 
i=ft(n,t),o=Qe(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},st=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,p=Object.values(o.mutableActions);!1!==u&&(p=p.filter(function(e){return e.config.sourceId!==c}));var d=p.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:function e(t){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];return null!==t._parent&&(n.push(t),e(t._parent,n)),n}(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(Ze(d),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,Ze(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=at(g,a,{filterByMeasure:f}),lt(g,i,y)),l.forEach(function(e){var t=at(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=void 0;if(!1===(!(arguments.length>1&&void 0!==arguments[1])||arguments[1])){var n=this.getData({getAllFields:!0}),r=n.data,a=n.schema,i=r.map(function(e){var t={};return a.forEach(function(n,r){t[n.name]=e[r]}),t});t=new this.constructor(i,a)}else t=new this.constructor(this);return e&&this._children.push(t),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 
0!==arguments[1]?arguments[1]:[];rt(this,I,null,t),this._parent=e,e._children.push(this)}}]),e}(),ht=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}(),vt=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=oe.apply(void 0,a);return n.saveChild&&(this._children.push(i),rt(i,M,{fieldsArr:e,groupByString:r,defaultReducer:ae.defaultReducer()},t)),i._parent=this,i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"addField",value:function(e){var t=e.name();this._colIdentifier+=","+t;var n=this._partialFieldspace;if(n.fieldsObj()[e.name()]){var r=n.fields.findIndex(function(e){return e.name()===t});r>=0&&(n.fields[r]=e)}else n.fields.push(e);return this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0,replaceVar:!1},r=this.getFieldsConfig(),a=t.slice(0,t.length-1),i=t[t.length-1];if(r[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in model.");var o=a.map(function(e){var t=r[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),u=this.clone(),c=u.getFieldspace().fields,f=o.map(function(e){return c[e]}),l=[];D(u._rowDiffset,function(e){var t=f.map(function(t){return t.partialField.data[e]});l[e]=i.apply(void 
0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function e(t){return t._parent?e(t._parent):t}(this),c=u._propagationNameSpace,f={groupByModel:function e(t){return t._parent&&t._derivation.find(function(e){return"group"!==e.op})?e(t._parent):t}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),st(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;st(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=this.clone(),r=t.name||e+"_binned";if(this.getFieldsConfig()[r]||!this.getFieldsConfig()[e])throw new Error("Field "+e+" already exists.");var a=P(this._partialFieldspace.fields.find(function(t){return t.name()===e}),this._rowDiffset,t),i=Ie([a.data],[{name:r,type:f.DIMENSION,subtype:u.BINNED,bins:{range:a.range,mid:a.mid}}],[r])[0];return n.addField(i),rt(n,L,{dimensionName:e,config:t,binFieldName:r},null),n}}],[{key:"Reducers",get:function(){return 
ae}}]),t}(),yt=te.sum,gt=te.avg,bt=te.min,wt=te.max,Ot=te.first,_t=te.last,Et=te.count,At=te.std,jt=n(0);mt.Operators={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0}).saveChild;return t.forEach(function(e){n=e(n),a.push.apply(a,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&r.dispose(),n}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;na.getFullYear()&&(t=""+(i-1)+r),s(t).getFullYear()},formatter:function(e){var t=s(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:p.defaultNumberParser(),formatter:function(e){return s(e).getFullYear().toString()}}}},p.getTokenFormalNames=function(){var e=p.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},p.tokenResolver=function(){var e=p.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},p.formatAs=function(e,t){var n,r=s(e),a=p.findTokens(t),i=p.getTokenDefinitions(),o=String(t),u=p.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var T=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 
0!==arguments[4]?arguments[4]:J.CROSS,i=[],o=[],u=n||K,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=C(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(n){var d=!1,h=void 0;D(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var g=ot(y[l]),b=ot(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var w={};m.forEach(function(e,t){w[i[t].name]=e}),d&&J.CROSS!==a?o[h]=w:(o.push(w),d=!0,h=n)}else if((a===J.LEFTOUTER||a===J.RIGHTOUTER)&&!d){var _={},O=c.fields.length-1;m.forEach(function(e,t){_[i[t].name]=t<=O?e:null}),d=!0,h=n,o.push(_)}})}),new At(o,i,{name:d})}function z(e,t){var n=""+e,r=""+t;return nr?1:0}function q(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:z;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function X(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function Q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function Z(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return 
e[n[a].index]}),t},r),r}function ee(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=vt(r,a))&&("function"==typeof i?q(n,function(e,t){return i(e[o.index],t[o.index])}):O(i)?function(){var e=Q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return vt(r,e)});e.forEach(function(e){e.push(Z(e,a,u))}),q(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,X(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",q(n,$(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,X(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function te(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new At(i,r,{name:l})}function ne(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=le.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=le.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=ue)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=se(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case 
f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new Mt(h,d,{name:c}),v}function pe(e,t){var n=C(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function he(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new Mt(i,r,{name:l})}function ve(e,t,n){return W(e,t,n,!1,J.LEFTOUTER)}function me(e,t,n){return W(t,e,n,!1,J.RIGHTOUTER)}var ye=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),Te=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===qe?f=!0:r===Xe&&(f=!0,e.charCodeAt(o)===qe&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3],i=void 0;t!==H?(i={op:t,meta:r,criteria:a},e._derivation.push(i)):(i=[].concat(it(a)),e._derivation.length=0,(n=e._derivation).push.apply(n,it(i)))},ft=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var 
o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new F(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},D(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},lt=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||G,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return n=(t=e).getData(),r=n.schema,i=t.getFieldsConfig(),o=t.getFieldspace().fieldsObj(),u=n.data,c=Object.values(i).reduce(function(e,t){return e[t.def.name]=o[t.def.name].domain(),e},{}),function(e){return!!u.length&&u.some(function(t){return r.every(function(n){if(!(n.name in e))return!0;var r=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return r>=c[n.name][0]&&r<=c[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=i[n.name].index;return t[o]===e[n.name].valueOf()})})};var t,n,r,i,o,u,c}):[function(){return!1}];var o=void 0;r===G?o=e.clone(!1,!1).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):o=e.clone(!1,!1).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1});return o},st=function(e,t,n,r){var a=e.clone(r.saveChild),i=ft(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),ct(a,L,{config:n},t),a},dt=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),ct(a,U,{projField:t,config:n,actualProjField:i},null),a},pt=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return 
e},ht=function(e,t,n,r){n=function(e){return e.map(function(e){return pt(e)})}(n),r=Object.assign(Object.assign({},Je),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=at(u,2),f=c[0],l=c[1],s=Be(l,n,f),d=k.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?S(t):r.dataFormat,e},vt=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=mt(n,t),o=at(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},gt=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},bt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:gt(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(it(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,it(r.excludeModels||[]))})}var 
m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=lt(g,a,{filterByMeasure:f}),yt(g,i,y)),l.forEach(function(e){var t=lt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=void 0;if(!1===(!(arguments.length>1&&void 0!==arguments[1])||arguments[1])){var n=this.getData({getAllFields:!0}),r=n.data,a=n.schema,i=r.map(function(e){var t={};return a.forEach(function(n,r){t[n.name]=e[r]}),t});t=new this.constructor(i,a)}else t=new this.constructor(this);return e&&this._children.push(t),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:[];ct(this,H,null,t),this._parent=e,e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}}]),e}(),Ot=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}(),Et=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=de.apply(void 0,a);return ct(i,V,{fieldsArr:e,groupByString:r,defaultReducer:le.defaultReducer()},t),n.saveChild&&this._children.push(i),i._parent=this,i}},{key:"sort",value:function(e){var 
t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=pt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];D(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return e}(this),c=u._propagationNameSpace,f={groupByModel:function e(t){return t._parent&&t._derivation.find(function(e){return"group"!==e.op})?e(t._parent):t}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete 
r[o]:r[o]={model:n,config:t}}(c,t,this),bt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;bt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=M(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[],o=n.saveChild;return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&a.dispose(),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;n", "keywords": [ "datamodel", "data", @@ -25,20 +24,19 @@ "contributors": [ { "name": "Akash Goswami", - "email": "akash@charts.com" + "email": "akashgoswami90s@gmail.com" }, { - "name": "Subhash Haldar", - "email": "subhash@charts.com" + "name": "Subhash Haldar" }, { "name": "Rousan Ali", - "email": "rousan@charts.com", + "email": "rousanali786@gmail.com", "url": "https://rousan.io" }, { "name": "Ujjal Kumar Dutta", - "email": "ujjal@charts.com" + "email": "duttaujjalkumar@live.com" } ], "dependencies": { From 
0fa4fb2a4f8a643ae1377921c938a1690cdbb29f Mon Sep 17 00:00:00 2001 From: Ranajit Banerjee Date: Fri, 22 Mar 2019 14:28:11 +0530 Subject: [PATCH 11/21] - Add enums for group by functions --- src/enums/group-by-functions.js | 10 ++++++++++ src/enums/index.js | 1 + src/operator/group-by-function.js | 21 +++++++++++---------- 3 files changed, 22 insertions(+), 10 deletions(-) create mode 100644 src/enums/group-by-functions.js diff --git a/src/enums/group-by-functions.js b/src/enums/group-by-functions.js new file mode 100644 index 0000000..e3d726b --- /dev/null +++ b/src/enums/group-by-functions.js @@ -0,0 +1,10 @@ +export const GROUP_BY_FUNCTIONS = { + SUM: 'sum', + AVG: 'avg', + MIN: 'min', + MAX: 'max', + FIRST: 'first', + LAST: 'last', + COUNT: 'count', + STD: 'std' +}; diff --git a/src/enums/index.js b/src/enums/index.js index 39bf314..2afa159 100644 --- a/src/enums/index.js +++ b/src/enums/index.js @@ -12,3 +12,4 @@ export { default as DimensionSubtype } from './dimension-subtype'; export { default as MeasureSubtype } from './measure-subtype'; export { default as FieldType } from './field-type'; export { default as FilteringMode } from './filtering-mode'; +export { GROUP_BY_FUNCTIONS } from './group-by-functions'; diff --git a/src/operator/group-by-function.js b/src/operator/group-by-function.js index 79498d2..4b7b397 100644 --- a/src/operator/group-by-function.js +++ b/src/operator/group-by-function.js @@ -1,7 +1,8 @@ import { isArray } from '../utils'; import InvalidAwareTypes from '../invalid-aware-types'; +import { GROUP_BY_FUNCTIONS } from '../enums'; - +const { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS; function getFilteredValues(arr) { return arr.filter(item => !(item instanceof InvalidAwareTypes)); } @@ -134,17 +135,17 @@ function std (arr) { const fnList = { - sum, - avg, - min, - max, - first, - last, - count, - std + [SUM]: sum, + [AVG]: avg, + [MIN]: min, + [MAX]: max, + [FIRST]: first, + [LAST]: last, + [COUNT]: count, + [STD]: 
std }; -const defaultReducerName = 'sum'; +const defaultReducerName = SUM; export { defaultReducerName, From ed369ce9374ef4f0700ceb391629e17af1a89ab6 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Fri, 22 Mar 2019 14:37:03 +0530 Subject: [PATCH 12/21] Merge deps branch and resolve conflicts --- example/index.html | 2 +- example/samples/example3.js | 106 +++++++++++++++++++++++++++++++++--- example/samples/example4.js | 67 ++++++----------------- src/datamodel.js | 16 ++---- src/helper.js | 13 +++++ src/index.spec.js | 104 ++++++++++++++++++++++++++++++++--- src/relation.js | 6 +- 7 files changed, 232 insertions(+), 82 deletions(-) diff --git a/example/index.html b/example/index.html index de4b91c..912a004 100644 --- a/example/index.html +++ b/example/index.html @@ -13,7 +13,7 @@ - + diff --git a/example/samples/example3.js b/example/samples/example3.js index 966126b..5ad1de8 100644 --- a/example/samples/example3.js +++ b/example/samples/example3.js @@ -7,8 +7,24 @@ d3.json('../data/cars.json', (data) => { type: 'dimension' }, { - name: 'Maker', - type: 'dimension' + name: 'birthday', + type: 'dimension', + subtype: 'temporal', + format: '%Y-%m-%d' + }, + { + name: 'roll', + type: 'measure', + defAggFn: "avg", + as: "roll2" + } +]; + +const data = [ + { + name: 'Rousan', + birthday: '1995-07-05', + roll: 2 }, { name: 'Miles_per_Gallon', @@ -36,16 +52,88 @@ d3.json('../data/cars.json', (data) => { type: 'dimension' }, { - name: 'Cylinders', - type: 'dimension' + name: 'Akash', + birthday: '1994-01-03', + roll: 120 }, { - name: 'Year', - type: 'dimension', - subtype: 'temporal', - format: '%Y-%m-%d' + name: 'Rousan', + birthday: '1995-07-06', + roll: 93 } - ]; +]; + + +const dm = new DataModel(data, schema); +const dm2 = dm.project(["name", "roll"]); +// const schema = [ +// { name: 'Name', type: 'dimension' }, +// { name: 'HorsePower', type: 'measure' }, +// { name: 'Origin', type: 'dimension' } +// ]; +// const data = [ +// { Name: 'chevrolet chevelle malibu', 
Horsepower: 130, Origin: 'USA' }, +// { Name: 'citroen ds-21 pallas', Horsepower: 115, Origin: 'Europe' }, +// { Name: 'datsun pl510', Horsepower: 88, Origin: 'Japan' }, +// { Name: 'amc rebel sst', Horsepower: 150, Origin: 'USA' }, +// ]; +// const dt = new DataModel(schema, data); + +// const dt2 = dt.select(fields => fields.Origin.value === 'USA'); + +// const selectedDm = dm.select(fields => fields.roll.value > 10 || fields.roll.value < 0); + + + +// debugger; + +// const groupedDm = dm.groupBy(["name"], { +// roll: (vals, cloneProvider, store) => { +// if (!store.clonedDm) { +// store.clonedDm = cloneProvider(); +// } +// if (!store.avgRoll) { +// store.avgRoll = store.clonedDm.groupBy([""], { roll: "avg" }).getData().data[0][0]; +// } + +// return DataModel.Stats.avg(vals) - store.avgRoll; +// } +// }); +// const calDm = dm.calculateVariable({ +// name: "abc", +// type: "measure" +// }, ["roll", (roll, i, cloneProvider, store) => { +// if (!store.clonedDm) { +// store.clonedDm = cloneProvider(); +// } +// if (!store.avgRoll) { +// store.avgRoll = store.clonedDm.groupBy([""], {roll: "avg"}).getData().data[0][0]; +// } + +// return store.avgRoll - roll; +// }]); + +// const DataModel = window.DataModel; + +// const data1 = [ +// { profit: 10, sales: 20, city: 'a' }, +// { profit: 15, sales: 25, city: 'b' }, +// ]; +// const schema1 = [ +// { name: 'profit', type: 'measure' }, +// { name: 'sales', type: 'measure' }, +// { name: 'city', type: 'dimension' }, +// ]; +// const data2 = [ +// { population: 200, city: 'a' }, +// { population: 250, city: 'b' }, +// ]; +// const schema2 = [ +// { name: 'population', type: 'measure' }, +// { name: 'city', type: 'dimension' }, +// ]; +// const dataModel1 = new DataModel(data1, schema1, { name: 'ModelA' }); +// const dataModel2 = new DataModel(data2, schema2, { name: 'ModelB' }); let rootData = new DataModel(jsonData, schema); let dm = rootData.project(["Origin", "Acceleration"]); diff --git a/example/samples/example4.js 
b/example/samples/example4.js index 74b0c63..e39d2ef 100644 --- a/example/samples/example4.js +++ b/example/samples/example4.js @@ -1,52 +1,17 @@ -/* eslint-disable */ -d3.json('./data/cars.json', (data) => { - const jsonData = data, - schema = [{ - name: 'Name', - type: 'dimension' - }, { - name: 'Miles_per_Gallon', - type: 'measure', - unit : 'cm', - scale: '1000', - numberformat: '12-3-3' - }, { - name: 'Cylinders', - type: 'dimension' - }, { - name: 'Displacement', - type: 'measure' - }, { - name: 'Horsepower', - type: 'measure' - }, { - name: 'Weight_in_lbs', - type: 'measure', - }, { - name: 'Acceleration', - type: 'measure' - }, { - name: 'Year', - type: 'dimension', - }, { - name: 'Origin', - type: 'dimension' - }]; +const data = [ + { age: 30, job: 'management', marital: 'married' }, + { age: 59, job: 'blue-collar', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' }, + { age: 57, job: 'self-employed', marital: 'married' }, + { age: 28, job: 'blue-collar', marital: 'married' }, + { age: 30, job: 'blue-collar', marital: 'single' }, +]; +const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' } +]; +const rootDm = new DataModel(data, schema); - const rootData = new window.DataModel(jsonData, schema); - - const groupedDm = rootData.groupBy(['Origin', 'Cylinders']) - const binnedDm = groupedDm.bin('Miles_per_Gallon', { binsCount: 10}) - }); - - dm.calculateVariable ({ - name: "fieldName", - type: "measure|dimension" - }, ["existingField1", "existingField2", (existingField1, existingField2) => { - return "operation_value" - }]) - -// load('../../js/cars.csv') -// .then((res) => { -// dm = new DataModel(res.split('\n').map(line => line.split(',')), {}, { name: "myDataModel", dataFormat: 'DSVArr' }); -// }); +const dm = rootDm.select(fields => fields.age.value > 30); +const sortedDm = dm.sort([['age', 'ASC']]); \ No newline at end of file diff --git a/src/datamodel.js 
b/src/datamodel.js index 1a3bd43..2121d91 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ -76,7 +76,6 @@ class DataModel extends Relation { super(...args); this._onPropagation = []; - this._sortingDetails = []; } /** @@ -309,12 +308,6 @@ class DataModel extends Relation { * @return {DataModel} Returns a new instance of DataModel with sorted data. */ sort (sortingDetails, config = { saveChild: false }) { - if (this._sortingDetails.length) { - const parent = this._parent; - this.dispose(); - return parent.sort(sortingDetails, config); - } - const rawData = this.getData({ order: 'row', sort: sortingDetails @@ -323,15 +316,16 @@ class DataModel extends Relation { const dataInCSVArr = [header].concat(rawData.data); const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' }); - sortedDm._sortingDetails = sortingDetails; - sortedDm._derivation = [...this._derivation]; persistDerivation(sortedDm, DM_DERIVATIVES.SORT, config, sortingDetails); + persistAncestorDerivation(this, sortedDm); if (config.saveChild) { - this._children.push(sortedDm); + sortedDm.setParent(this); + } else { + sortedDm.setParent(null); } - sortedDm._parent = this; + return sortedDm; } diff --git a/src/helper.js b/src/helper.js index 44914d3..637af3f 100644 --- a/src/helper.js +++ b/src/helper.js @@ -219,6 +219,18 @@ export const sanitizeUnitSchema = (unitSchema) => { export const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema)); +export const resolveFieldName = (schema, dataHeader) => { + schema.forEach((unitSchema) => { + const fieldNameAs = unitSchema.as; + if (!fieldNameAs) { return; } + + const idx = dataHeader.indexOf(unitSchema.name); + dataHeader[idx] = fieldNameAs; + unitSchema.name = fieldNameAs; + delete unitSchema.as; + }); +}; + export const updateData = (relation, data, schema, options) => { schema = sanitizeSchema(schema); options = Object.assign(Object.assign({}, defaultConfig), options); @@ -229,6 +241,7 @@ 
export const updateData = (relation, data, schema, options) => { } const [header, formattedData] = converterFn(data, options); + resolveFieldName(schema, header); const fieldArr = createFields(formattedData, schema, header); // This will create a new fieldStore with the fields diff --git a/src/index.spec.js b/src/index.spec.js index 77bec97..8038c59 100644 --- a/src/index.spec.js +++ b/src/index.spec.js @@ -45,6 +45,35 @@ describe('DataModel', () => { }); }); + describe('#getFieldsConfig', () => { + it('should return all field meta info', () => { + const schema = [ + { name: 'name', type: 'dimension' }, + { name: 'birthday', type: 'dimension', subtype: 'temporal', format: '%Y-%m-%d' } + ]; + + const data = [ + { name: 'Rousan', birthday: '1995-07-05', roll: 12 }, + { name: 'Sumant', birthday: '1996-08-04', roll: 89 }, + { name: 'Akash', birthday: '1994-01-03', roll: 33 } + ]; + const dataModel = new DataModel(data, schema); + const expected = { + name: { + index: 0, + def: { name: 'name', type: 'dimension', subtype: 'categorical' }, + }, + birthday: { + index: 1, + def: { name: 'birthday', type: 'dimension', subtype: 'temporal', format: '%Y-%m-%d' } + } + }; + + expect(dataModel.getFieldsConfig()).to.be.deep.equal(expected); + }); + }); + + describe('#clone', () => { it('should make a new copy of the current DataModel instance', () => { const data = [ @@ -122,6 +151,26 @@ describe('DataModel', () => { }); }); + context('Test for resolving schema', () => { + it('should take field alternative name in schema', () => { + const data = [ + { age: 30, job: 'unemployed', marital_status: 'married' }, + { age: 33, job: 'services', marital_status: 'married' }, + { age: 35, job: 'management', marital_status: 'single' } + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital_status', type: 'dimension', as: 'marital' }, + ]; + const dm = new DataModel(data, schema); + + 
expect(dm.getFieldspace().fieldsObj().marital_status).to.be.undefined; + expect(!!dm.getFieldspace().fieldsObj().marital).to.be.true; + }); + }); + + context('Test for a failing data format type', () => { let mockedDm = () => new DataModel([], [], { dataFormat: 'erroneous-data-type' }); @@ -828,9 +877,6 @@ describe('DataModel', () => { }; expect(sortedDm).not.to.equal(dataModel); - expect(sortedDm._sortingDetails).to.deep.equal([ - ['age', 'desc'] - ]); expect(sortedDm.getData()).to.deep.equal(expData); }); @@ -870,10 +916,6 @@ describe('DataModel', () => { ], uids: [0, 1, 2, 3, 4, 5] }; - expect(sortedDm._sortingDetails).to.deep.equal([ - ['age', 'desc'], - ['job'], - ]); expect(sortedDm.getData()).to.deep.equal(expData); }); @@ -1021,6 +1063,54 @@ describe('DataModel', () => { }; expect(sortedDm.getData()).to.deep.equal(expected); }); + + it('should store derivation criteria info', () => { + const data = [ + { age: 30, job: 'management', marital: 'married' }, + { age: 59, job: 'blue-collar', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' }, + { age: 57, job: 'self-employed', marital: 'married' }, + { age: 28, job: 'blue-collar', marital: 'married' }, + { age: 30, job: 'blue-collar', marital: 'single' }, + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' } + ]; + const rootDm = new DataModel(data, schema); + + const dm = rootDm.select(fields => fields.age.value > 30); + const sortedDm = dm.sort([['age', 'ASC']]); + expect(sortedDm.getDerivations()[0].op).to.eql(DM_DERIVATIVES.SORT); + expect(sortedDm.getAncestorDerivations()[0].op).to.eql(DM_DERIVATIVES.SELECT); + }); + + it('should control parent-child relationships on saveChild config', () => { + const data = [ + { age: 30, job: 'management', marital: 'married' }, + { age: 59, job: 'blue-collar', marital: 'married' }, + { age: 35, job: 'management', marital: 'single' }, + { age: 57, job: 
'self-employed', marital: 'married' }, + { age: 28, job: 'blue-collar', marital: 'married' }, + { age: 30, job: 'blue-collar', marital: 'single' }, + ]; + const schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' } + ]; + + let rootDm = new DataModel(data, schema); + let dm = rootDm.sort([['age', 'ASC']], { saveChild: true }); + expect(dm.getParent()).to.be.equal(rootDm); + expect(rootDm.getChildren()[0]).to.be.equal(dm); + + rootDm = new DataModel(data, schema); + dm = rootDm.sort([['age', 'ASC']], { saveChild: false }); + expect(dm.getParent()).to.be.null; + expect(rootDm.getChildren().length).to.be.equal(0); + }); }); describe('#join', () => { diff --git a/src/relation.js b/src/relation.js index 60b3a6f..3b62c3e 100644 --- a/src/relation.js +++ b/src/relation.js @@ -415,10 +415,10 @@ class Relation { } calculateFieldsConfig () { - this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldDef, i) => { - acc[fieldDef.name()] = { + this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => { + acc[fieldObj.name()] = { index: i, - def: { name: fieldDef.name(), type: fieldDef.type(), subtype: fieldDef.subtype() } + def: fieldObj.schema(), }; return acc; }, {}); From fd843f0d7d38cedf281daf85c01874916f44316c Mon Sep 17 00:00:00 2001 From: Ranajit Banerjee Date: Fri, 22 Mar 2019 14:44:49 +0530 Subject: [PATCH 13/21] - Expose datamodel derivative enums --- dist/datamodel.js | 2 +- dist/datamodel.js.map | 2 +- src/export.js | 3 ++- src/operator/group-by-function.js | 1 + 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/dist/datamodel.js b/dist/datamodel.js index ea3c2ae..e2c6163 100644 --- a/dist/datamodel.js +++ b/dist/datamodel.js @@ -1,2 +1,2 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof 
exports?exports.DataModel=t():e.DataModel=t()}(window,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var a=t[r]={i:r,l:!1,exports:{}};return e[r].call(a.exports,a,a.exports,n),a.l=!0,a.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)n.d(r,a,function(t){return e[t]}.bind(null,a));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports={name:"datamodel",description:"Relational algebra compliant in-memory tabular data store",homepage:"https://github.com/chartshq/datamodel",version:"2.1.0",license:"MIT",main:"dist/datamodel.js",author:"Charts.com ",keywords:["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],repository:{type:"git",url:"https://github.com/chartshq/datamodel.git"},contributors:[{name:"Akash Goswami",email:"akash@charts.com"},{name:"Subhash Haldar",email:"subhash@charts.com"},{name:"Rousan Ali",email:"rousan@charts.com",url:"https://rousan.io"},{name:"Ujjal Kumar 
Dutta",email:"ujjal@charts.com"}],dependencies:{"d3-dsv":"^1.0.8"},devDependencies:{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0",chai:"3.5.0","cross-env":"^5.0.5",eslint:"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0",jsdoc:"3.5.5",json2yaml:"^1.1.0",karma:"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3",marked:"^0.5.0",mocha:"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0",webpack:"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},scripts:{test:"npm run lint && npm run ut",ut:"karma start karma.conf.js",utd:"karma start --single-run false --browsers Chrome karma.conf.js ",build:"webpack --mode production",start:"webpack-dev-server --config webpack.config.dev.js --mode development --open",lint:"eslint ./src","lint-errors":"eslint --quiet ./src",docs:"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l});var a={};n.r(a),n.d(a,"DSVArr",function(){return Ge}),n.d(a,"DSVStr",function(){return tt}),n.d(a,"FlatJSON",function(){return nt}),n.d(a,"Auto",function(){return rt});var i={};n.r(i),n.d(i,"sum",function(){return jt}),n.d(i,"avg",function(){return St}),n.d(i,"min",function(){return Nt}),n.d(i,"max",function(){return kt}),n.d(i,"first",function(){return 
Ft}),n.d(i,"last",function(){return Dt}),n.d(i,"count",function(){return Tt}),n.d(i,"sd",function(){return Rt});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"};function s(e){return e instanceof Date?e:new Date(e)}function d(e){return e<10?"0"+e:e}function p(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},p.TOKEN_PREFIX="%",p.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},p.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},p.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;aa.getFullYear()&&(t=""+(i-1)+r),s(t).getFullYear()},formatter:function(e){var t=s(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:p.defaultNumberParser(),formatter:function(e){return s(e).getFullYear().toString()}}}},p.getTokenFormalNames=function(){var e=p.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},p.tokenResolver=function(){var e=p.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},p.formatAs=function(e,t){var n,r=s(e),a=p.findTokens(t),i=p.getTokenDefinitions(),o=String(t),u=p.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 
0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var T=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:J.CROSS,i=[],o=[],u=n||K,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=C(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(n){var d=!1,h=void 0;D(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var g=ot(y[l]),b=ot(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var w={};m.forEach(function(e,t){w[i[t].name]=e}),d&&J.CROSS!==a?o[h]=w:(o.push(w),d=!0,h=n)}else if((a===J.LEFTOUTER||a===J.RIGHTOUTER)&&!d){var _={},O=c.fields.length-1;m.forEach(function(e,t){_[i[t].name]=t<=O?e:null}),d=!0,h=n,o.push(_)}})}),new At(o,i,{name:d})}function z(e,t){var n=""+e,r=""+t;return nr?1:0}function q(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:z;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var 
c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function X(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function Q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function Z(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function ee(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=vt(r,a))&&("function"==typeof i?q(n,function(e,t){return i(e[o.index],t[o.index])}):O(i)?function(){var e=Q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return vt(r,e)});e.forEach(function(e){e.push(Z(e,a,u))}),q(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,X(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",q(n,$(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,X(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function te(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new At(i,r,{name:l})}function 
ne(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=le.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=le.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=ue)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=se(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new Mt(h,d,{name:c}),v}function pe(e,t){var n=C(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function he(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new Mt(i,r,{name:l})}function ve(e,t,n){return W(e,t,n,!1,J.LEFTOUTER)}function me(e,t,n){return W(t,e,n,!1,J.RIGHTOUTER)}var ye=function(){function e(e,t){for(var 
n=0;nn&&(n=a))}),[t,n]}}]),t}(),Te=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===qe?f=!0:r===Xe&&(f=!0,e.charCodeAt(o)===qe&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3],i=void 0;t!==H?(i={op:t,meta:r,criteria:a},e._derivation.push(i)):(i=[].concat(it(a)),e._derivation.length=0,(n=e._derivation).push.apply(n,it(i)))},ft=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new F(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},D(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},lt=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||G,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return n=(t=e).getData(),r=n.schema,i=t.getFieldsConfig(),o=t.getFieldspace().fieldsObj(),u=n.data,c=Object.values(i).reduce(function(e,t){return e[t.def.name]=o[t.def.name].domain(),e},{}),function(e){return!!u.length&&u.some(function(t){return r.every(function(n){if(!(n.name in e))return!0;var r=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return r>=c[n.name][0]&&r<=c[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=i[n.name].index;return t[o]===e[n.name].valueOf()})})};var t,n,r,i,o,u,c}):[function(){return!1}];var o=void 0;r===G?o=e.clone(!1,!1).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):o=e.clone(!1,!1).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1});return o},st=function(e,t,n,r){var 
a=e.clone(r.saveChild),i=ft(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),ct(a,L,{config:n},t),a},dt=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),ct(a,U,{projField:t,config:n,actualProjField:i},null),a},pt=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},ht=function(e,t,n,r){n=function(e){return e.map(function(e){return pt(e)})}(n),r=Object.assign(Object.assign({},Je),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=at(u,2),f=c[0],l=c[1],s=Be(l,n,f),d=N.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?S(t):r.dataFormat,e},vt=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=mt(n,t),o=at(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},gt=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},bt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var 
p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:gt(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(it(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,it(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=lt(g,a,{filterByMeasure:f}),yt(g,i,y)),l.forEach(function(e){var t=lt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=void 0;if(!1===(!(arguments.length>1&&void 0!==arguments[1])||arguments[1])){var n=this.getData({getAllFields:!0}),r=n.data,a=n.schema,i=r.map(function(e){var t={};return a.forEach(function(n,r){t[n.name]=e[r]}),t});t=new this.constructor(i,a)}else t=new this.constructor(this);return e&&this._children.push(t),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:[];ct(this,H,null,t),this._parent=e,e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}}]),e}(),Ot=function(){return function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var 
o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}(),Et=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=de.apply(void 0,a);return ct(i,V,{fieldsArr:e,groupByString:r,defaultReducer:le.defaultReducer()},t),n.saveChild&&this._children.push(i),i._parent=this,i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=pt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];D(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 
0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return e}(this),c=u._propagationNameSpace,f={groupByModel:function e(t){return t._parent&&t._derivation.find(function(e){return"group"!==e.op})?e(t._parent):t}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),bt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;bt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=M(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[],o=n.saveChild;return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var 
t=0,n=Array(e.length);t1&&a.dispose(),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;n",keywords:["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],repository:{type:"git",url:"https://github.com/chartshq/datamodel.git"},contributors:[{name:"Akash Goswami",email:"akash@charts.com"},{name:"Subhash Haldar",email:"subhash@charts.com"},{name:"Rousan Ali",email:"rousan@charts.com",url:"https://rousan.io"},{name:"Ujjal Kumar Dutta",email:"ujjal@charts.com"}],dependencies:{"d3-dsv":"^1.0.8"},devDependencies:{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0",chai:"3.5.0","cross-env":"^5.0.5",eslint:"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0",jsdoc:"3.5.5",json2yaml:"^1.1.0",karma:"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3",marked:"^0.5.0",mocha:"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0",webpack:"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},scripts:{test:"npm run lint && npm run ut",ut:"karma start karma.conf.js",utd:"karma start --single-run false --browsers Chrome karma.conf.js ",build:"webpack --mode production",start:"webpack-dev-server --config webpack.config.dev.js --mode development --open",lint:"eslint ./src","lint-errors":"eslint --quiet ./src",docs:"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return 
u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l}),n.d(r,"GROUP_BY_FUNCTIONS",function(){return s});var a={};n.r(a),n.d(a,"DSVArr",function(){return Qe}),n.d(a,"DSVStr",function(){return ct}),n.d(a,"FlatJSON",function(){return ft}),n.d(a,"Auto",function(){return lt});var i={};n.r(i),n.d(i,"sum",function(){return Pt}),n.d(i,"avg",function(){return It}),n.d(i,"min",function(){return Mt}),n.d(i,"max",function(){return Ct}),n.d(i,"first",function(){return xt}),n.d(i,"last",function(){return Lt}),n.d(i,"count",function(){return Ut}),n.d(i,"sd",function(){return Vt});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"},s={SUM:"sum",AVG:"avg",MIN:"min",MAX:"max",FIRST:"first",LAST:"last",COUNT:"count",STD:"std"};function d(e){return e instanceof Date?e:new Date(e)}function p(e){return e<10?"0"+e:e}function h(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},h.TOKEN_PREFIX="%",h.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},h.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},h.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;aa.getFullYear()&&(t=""+(i-1)+r),d(t).getFullYear()},formatter:function(e){var t=d(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return d(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var 
e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},h.formatAs=function(e,t){var n,r=d(e),a=h.findTokens(t),i=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var P=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:V.CROSS,i=[],o=[],u=n||J,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=B(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=w({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=w({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),R(e._rowDiffset,function(n){var d=!1,h=void 0;R(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var g=pt(y[l]),b=pt(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var 
_={};m.forEach(function(e,t){_[i[t].name]=e}),d&&V.CROSS!==a?o[h]=_:(o.push(_),d=!0,h=n)}else if((a===V.LEFTOUTER||a===V.RIGHTOUTER)&&!d){var O={},w=c.fields.length-1;m.forEach(function(e,t){O[i[t].name]=t<=w?e:null}),d=!0,h=n,o.push(O)}})}),new Rt(o,i,{name:d})}function K(e,t){var n=""+e,r=""+t;return nr?1:0}function W(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:K;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function z(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function $(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function Q(e,t,n,r,a){var i={schema:[],data:[],uids:[]},o=(a=Object.assign({},{addUid:!1,columnWise:!1},a)).addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=At(r,a))&&(A(i)?W(n,function(e,t){return i(e[o.index],t[o.index])}):E(i)?function(){var e=q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return At(r,e)});e.forEach(function(e){e.push($(e,a,u))}),W(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,z(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",W(n,X(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,z(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function Z(e,t){var 
n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){R(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(w({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new Rt(i,r,{name:l})}function ee(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function te(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=ye.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=ye.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=he)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=ge(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(w({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;R(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new Ht(h,d,{name:c}),v}function _e(e,t){var n=B(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function Oe(e,t){var 
n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){R(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(w({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new Ht(i,r,{name:l})}function we(e,t,n){return G(e,t,n,!1,V.LEFTOUTER)}function Ee(e,t,n){return G(t,e,n,!1,V.RIGHTOUTER)}var Ae=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),Le=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===nt?f=!0:r===rt&&(f=!0,e.charCodeAt(o)===nt&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3];t===U.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,dt(a))):e._derivation.push({op:t,meta:r,criteria:a})},mt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,dt(e._ancestorDerivation).concat(dt(e._derivation)))},yt=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new T(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},R(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},gt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map(function(e){return 
e.name()}).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},bt=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||Y,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return function(e){var t=e.getData(),n=t.schema,r=e.getFieldsConfig(),i=e.getFieldspace().fieldsObj(),o=t.data,u=Object.values(r).reduce(function(e,t){return e[t.def.name]=i[t.def.name].domain(),e},{});return function(e){return!!o.length&&o.some(function(t){return n.every(function(n){if(!(n.name in e))return!0;var i=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return i>=u[n.name][0]&&i<=u[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=r[n.name].index;return t[o]===e[n.name].valueOf()})})}}(e)}):[function(){return!1}];return r===Y?gt(e).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):gt(e).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1})},_t=function(e,t,n,r){var a=e.clone(r.saveChild),i=yt(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),vt(a,U.SELECT,{config:n},t),mt(e,a),a},Ot=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),vt(a,U.PROJECT,{projField:t,config:n,actualProjField:i},null),mt(e,a),a},wt=function(e){if((e=w({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},Et=function(e,t,n,r){n=function(e){return e.map(function(e){return wt(e)})}(n),r=Object.assign(Object.assign({},$e),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var 
u=i(t,r),c=st(u,2),f=c[0],l=c[1],s=qe(l,n,f),d=k.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?F(t):r.dataFormat,e},At=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=St(n,t),o=st(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},Nt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t}(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(dt(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,dt(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=bt(g,a,{filterByMeasure:f}),jt(g,i,y)),l.forEach(function(e){var t=bt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return 
e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=be.apply(void 0,a);return vt(i,U.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:ye.defaultReducer()},t),mt(this,i),n.saveChild?i.setParent(this):i.setParent(null),i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=wt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];R(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 
0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return e}(this),c=u._propagationNameSpace,f={groupByModel:function(e){for(;e._parent&&e._derivation.find(function(e){return e.op!==U.GROUPBY});)e=e._parent;return e}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),Nt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;Nt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=C(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[];return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t {\n let i;\n let l;\n\n if 
(!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 
3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: 
{\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = 
val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. 
This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if 
(amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = 
formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const 
tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && 
this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== 
OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? [] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need 
to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (val, field) {\n Object.defineProperty(this, '_value', {\n enumerable: false,\n configurable: false,\n writable: false,\n value: val\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 
'bin'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, 
field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = field.partialField.data[i];\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = field.partialField.data[ii];\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n 
tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort 
(arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray, } from '../utils';\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @param {integer} index - The index of the data which will be sorted.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType, index) {\n let retFunc;\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'desc') {\n retFunc = (a, b) => b[index] - a[index];\n } else {\n retFunc = (a, b) => a[index] - b[index];\n }\n break;\n default:\n retFunc = (a, b) => {\n const a1 = `${a[index]}`;\n const b1 = `${b[index]}`;\n if (a1 < b1) {\n return sortType === 'desc' ? 1 : -1;\n }\n if (a1 > b1) {\n return sortType === 'desc' ? 
-1 : 1;\n }\n return 0;\n };\n }\n return retFunc;\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData(data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data before return in dataBuilder.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction sortData(dataObj, sortingDetails) {\n const { data, schema } = dataObj;\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n 
if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n sortMeta = String(sortMeta).toLowerCase() === 'desc' ? 'desc' : 'asc';\n mergeSort(data, getSortFn(fDetails.type, sortMeta, fDetails.index));\n }\n }\n\n dataObj.uids = [];\n data.forEach((value) => {\n dataObj.uids.push(value.pop());\n });\n}\n\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. 
*/\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should 
match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns 
the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n sum,\n avg,\n min,\n max,\n first,\n last,\n count,\n std\n};\n\nconst defaultReducerName = 'sum';\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].value ===\n dm2Fields[fieldName].value && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = 
value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} 
rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n 
*/\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * 
@extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return 
this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n data.push(datum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, this.format()));\n }\n });\n return data;\n }\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n 
*\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n 
}\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = 
this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case 
DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n 
*\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr (arr, options) {\n const defaultOption = {\n firstRowHeader: true,\n };\n options = Object.assign({}, defaultOption, options);\n\n let header;\n const columns = [];\n const push = columnMajor(columns);\n\n if (options.firstRowHeader) {\n // If header present then mutate the array.\n // Do in-place mutation to save space.\n header = arr.splice(0, 1)[0];\n } else {\n header = [];\n }\n\n arr.forEach(field => push(...field));\n\n return [header, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return 
JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) 
{ eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n })).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(text) {\n return text == null ? \"\"\n : reFormat.test(text += \"\") ? \"\\\"\" + text.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : text;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of 
the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr) {\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n\n arr.forEach((item) => {\n const fields = [];\n for (let key in item) {\n if (key in header) {\n insertionIndex = header[key];\n } else {\n header[key] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[key];\n }\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config 
specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, i) {\n const resp = {};\n for (let field of fields) {\n resp[field.name()] = new Value(field.partialField.data[i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n Object.keys(fields).forEach((key) => { resp[key] = new Value(fields[key], key); });\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistDerivation = (model, operation, config = {}, criteriaFn) => {\n let derivative;\n if (operation !== DM_DERIVATIVES.COMPOSE) {\n derivative = {\n op: operation,\n meta: config,\n criteria: criteriaFn\n };\n model._derivation.push(derivative);\n }\n else {\n derivative = [...criteriaFn];\n model._derivation.length = 0;\n model._derivation.push(...derivative);\n }\n};\n\nexport const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => {\n const newRowDiffSet = [];\n let lastInsertedValue = -1;\n let { mode } = config;\n let li;\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n let checker;\n if (mode === FilteringMode.INVERSE) {\n checker = index => !selectorHelperFn(index);\n } else {\n checker = index => selectorHelperFn(index);\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n li = newRowDiffSet.length - 1;\n newRowDiffSet[li] = `${newRowDiffSet[li].split('-')[0]}-${i}`;\n } else {\n newRowDiffSet.push(`${i}`);\n }\n lastInsertedValue = i;\n }\n });\n return newRowDiffSet.join(',');\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n let fns = [];\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n const dataObj = dataModel.getData();\n const schema = dataObj.schema;\n const fieldsConfig = 
dataModel.getFieldsConfig();\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = Object.values(fieldsConfig).reduce((acc, v) => {\n acc[v.def.name] = fieldsSpace[v.def.name].domain();\n return acc;\n }, {});\n\n return (fields) => {\n const include = !data.length ? false : data.some(row => schema.every((propField) => {\n if (!(propField.name in fields)) {\n return true;\n }\n const value = fields[propField.name].valueOf();\n if (filterByMeasure && propField.type === FieldType.MEASURE) {\n return value >= domain[propField.name][0] && value <= domain[propField.name][1];\n }\n\n if (propField.type !== FieldType.DIMENSION) {\n return true;\n }\n const idx = fieldsConfig[propField.name].index;\n return row[idx] === fields[propField.name].valueOf();\n }));\n return include;\n };\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n const clonedModel = model.clone(false, false);\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false,\n mode: FilteringMode.ALL\n });\n } else {\n filteredModel = model.clone(false, false).select(fields => fns.some(fn => fn(fields)), {\n mode: FilteringMode.ALL,\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const rowDiffset = selectHelper(\n cloned._rowDiffset,\n cloned.getPartialFieldspace().fields,\n selectFn,\n selectConfig,\n sourceDm\n );\n cloned._rowDiffset = rowDiffset;\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn);\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n 
projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema));\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, options);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? 
`0-${formattedData[0].length - 1}` : '';\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n type: schema[i].subtype || schema[i].type,\n index: i\n };\n }\n }\n return null;\n};\n\n\nexport const getOperationArguments = (child) => {\n const derivation = child._derivation;\n let params = [];\n let operation;\n if (derivation && derivation.length === 1) {\n operation = derivation[0].op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation[0].criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation[0].meta.actualProjField];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation[0].meta.groupByString.split(','), derivation[0].criteria];\n break;\n default:\n break;\n }\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const { operation, params } = getOperationArguments(dataModel);\n let selectionModel = propModel[0];\n let rejectionModel = propModel[1];\n if (operation && params.length) {\n selectionModel = propModel[0][operation](...params, {\n saveChild: false\n });\n rejectionModel = propModel[1][operation](...params, {\n saveChild: false\n });\n }\n return [selectionModel, rejectionModel];\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel 
=== nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n let [selectionModel, rejectionModel] = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, [selectionModel, rejectionModel], config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n if (model._parent && model._derivation.find(d => d.op !== 'group')) {\n return getRootGroupByModel(model._parent);\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n 
sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const 
filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport { persistDerivation, updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\nimport { DM_DERIVATIVES } from './constants';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} 
[options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. 
If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n\n const cloneConfig = { saveChild: config.saveChild };\n let oDm;\n\n if (config.mode === FilteringMode.ALL) {\n const selectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.NORMAL },\n cloneConfig\n );\n const rejectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.INVERSE },\n cloneConfig\n );\n oDm = [selectDm, rejectDm];\n } else {\n oDm = cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n return oDm;\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true, linkParent = true) {\n let retDataModel;\n if (linkParent === false) {\n const dataObj = this.getData({\n getAllFields: true\n });\n const data = dataObj.data;\n const schema = 
dataObj.schema;\n const jsonData = data.map((row) => {\n const rowObj = {};\n schema.forEach((field, i) => {\n rowObj[field.name] = row[i];\n });\n return rowObj;\n });\n retDataModel = new this.constructor(jsonData, schema);\n }\n else {\n retDataModel = new this.constructor(this);\n }\n\n if (saveChild) {\n this._children.push(retDataModel);\n }\n return retDataModel;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n\n let normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n\n normalizedProjField = Array.from(new Set(normalizedProjField)).map(field => field.trim());\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldDef, i) => {\n acc[fieldDef.name()] = {\n index: i,\n def: { name: fieldDef.name(), type: fieldDef.type(), subtype: fieldDef.subtype() }\n };\n return acc;\n }, {});\n return this;\n }\n\n\n 
/**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent.removeChild(this);\n this._parent = null;\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? 
this._children.splice(idx, 1) : true;\n }\n\n /**\n * Adds the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * The optional criteriaQueue is an array containing the history of transaction performed on parent\n * {@link DataModel} to get the current one.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n * @param {Array} criteriaQueue - Queue contains in-between operation meta-data.\n */\n addParent (parent, criteriaQueue = []) {\n persistDerivation(this, DM_DERIVATIVES.COMPOSE, null, criteriaQueue);\n this._parent = parent;\n parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", 
Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren() {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations() {\n return this._derivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat } from './enums';\nimport {\n persistDerivation,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } 
from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. 
Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n this._sortingDetails = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 
'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivation(\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n this._children.push(newDataModel);\n }\n newDataModel._parent = this;\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order in first level 
followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n sortedDm._sortingDetails = sortingDetails;\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current 
{@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = 
serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone();\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn);\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n /**\n * Associates a callback with an 
event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone();\n clone.addField(binField);\n\n persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null);\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return new DataModel(data, schema);\n 
}\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nDataModel.Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\nDataModel.Stats = Stats;\nObject.assign(DataModel, enums);\nDataModel.DateTimeFormatter = DateTimeFormatter;\nDataModel.DataFormat = DataFormat;\nDataModel.FilteringMode = FilteringMode;\nDataModel.InvalidAwareTypes = InvalidAwareTypes;\nDataModel.version = pkg.version;\n\nexport default DataModel;\n","\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. 
{@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. 
{@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. 
Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. 
The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let frstChild;\n const derivations = [];\n const saveChild = config.saveChild;\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!frstChild) {\n frstChild = currentDM;\n }\n });\n\n saveChild && currentDM.addParent(dm, derivations);\n if (derivations.length > 1) {\n frstChild.dispose();\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on 
calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file +{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./src/index.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/enums/group-by-functions.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/value.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/cross
-product.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataModel/./src/operator/union.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/converter/dsv-arr.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/converter/dsv-str.js","webpack://DataModel/./src/converter/flat-json.js","webpack://DataModel/./src/converter/auto-resolver.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/export.js","webpack://DataModel/./src/operator/pure-operators.js","webpack://DataModel/./s
rc/operator/natural-join.js"],"names":["root","factory","exports","module","define","amd","window","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","object","property","prototype","hasOwnProperty","p","s","DataModel","default","data_format","FLAT_JSON","DSV_STR","DSV_ARR","AUTO","dimension_subtype","CATEGORICAL","TEMPORAL","GEO","BINNED","measure_subtype","CONTINUOUS","field_type","MEASURE","DIMENSION","filtering_mode","NORMAL","INVERSE","ALL","GROUP_BY_FUNCTIONS","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","convertToNativeDate","date","Date","pad","DateTimeFormatter","format","this","dtParams","undefined","nativeDate","RegExp","escape","text","replace","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","val","parsedVal","isFinite","parseInt","defaultRangeParser","range","nVal","toLowerCase","length","getTokenDefinitions","daysDef","short","long","monthsDef","H","index","extract","parser","formatter","getHours","toString","hours","P","M","getMinutes","S","getSeconds","K","getMilliseconds","a","join","day","getDay","A","e","getDate","b","month","getMonth","B","y","result","substring","presentDate","presentYear","Math","trunc","getFullYear","year","Y","getTokenFormalNames","definitions","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","arg","targetParam","arguments","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","keys","occurrence","forwardChar","indexOf","push","token","formatAs","nDate","formattedStr","String","formattedVal","parse","dateTimeStamp","options","
extractTokenValue","dtParamSeq","noBreak","dtParamArr","args","resolverKey","resolverParams","resolverFn","param","resolvedVal","splice","apply","checkIfOnlyYear","unshift","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","map","obj","occurrenceLength","extractValues","match","shift","getNativeDate","Number","Function","concat","_toConsumableArray","len","column_major","store","_len","fields","Array","_key","forEach","fieldIndex","from","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","extend2","obj1","obj2","skipUndef","_typeof","merge","tgtArr","srcArr","item","srcVal","tgtVal","str","cRef","isArray","isCallable","getUniqueId","getTime","round","random","isArrEqual","arr1","arr2","formatNumber","helper_detectDataFormat","data","isString","isObject","field_store","createNamespace","fieldArr","dataId","fieldsObj","_cachedFieldsObj","field","getMeasure","measureFields","_cachedMeasure","schema","type","getDimension","dimensionFields","_cachedDimension","src_value","Value","_classCallCheck","configurable","writable","_value","rowDiffsetIterator","rowDiffset","callback","split","diffStr","diffStsArr","start","end","InvalidAwareTypes","invalid_aware_types_classCallCheck","config","assign","_invalidAwareValsMap","invalidAwareVals","NULL","NA","NIL","invalid","nil","null","invalid_aware_types","generateBuckets","binSize","buckets","next","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","getCommonSchema","fs1","fs2","retArr","fs1Arr","defaultFilterFn","crossProduct","dm1","dm2","filterFn","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","getFieldspace","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","Error","tmpSchema","_rowDiffset","rowAdded","rowPosition","ii","t
uple","userArg","partialField","dm1Fields","prepareJoinData","dm2Fields","detachedRoot","tupleObj","cellVal","iii","datamodel","defSortFn","a1","b1","mergeSort","arr","sortFn","merge_sort_sort","lo","hi","mid","mainArr","auxArr","merge_sort_merge","getSortFn","dataType","sortType","retFunc","groupData","hashMap","Map","groupedData","datum","fieldVal","has","set","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","label","reduce","acc","idx","dataBuilder","fieldStore","colIdentifier","sortingDetails","retObj","uids","addUid","columnWise","reqSorting","tmpDataArr","colName","insertInd","dataObj","fieldName","sortMeta","fDetails","fieldInSchema","sortingFn","slice","f","data_builder_toConsumableArray","pop","sortData","tmpData","difference_difference","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","_colIdentifier","sort","prepareDataHelper","dm","addData","hashData","schemaName","getFilteredValues","filter","sum","filteredNumber","curr","avg","totalSum","isNaN","fnList","_defineProperty","_fnList","filteredValues","min","group_by_function_toConsumableArray","max","sqrt","mean","num","pow","variance","defaultReducerName","reducer_store_ReducerStore","ReducerStore","_this","reducer_store_classCallCheck","entries","reducer","_this2","__unregister","delete","reducer_store","group_by_groupBy","dataModel","reducers","existingDataModel","sFieldArr","dimensions","_ref","group_by_slicedToArray","getFieldArr","reducerObj","measures","defReducer","defaultReducer","measureName","defAggFn","reducerFn","resolve","getReducerObj","fieldStoreObj","dbName","dimensionArr","measureArr","newDataModel","_ref3","_ref4","rowCount","hash","_","cachedStore","cloneProvider","row","__calculateFieldspace","src_export","naturalJoinFilter","commonSchemaArr","retainTuple","union_union","leftOuterJoin","dataModel1","dataModel2","rightOuterJoin","fields_field","Field","field_classCallCheck","subtype","description","displayName","dimension","_cachedDomai
n","calculateDataDomain","categorical","Set","domain","add","temporal","Temporal","temporal_classCallCheck","temporal_possibleConstructorReturn","__proto__","getPrototypeOf","_cachedMinDiff","sortedData","arrLn","minDiff","POSITIVE_INFINITY","prevDatum","nextDatum","processedCount","_this3","binned","binsArr","bins","measure","unit","numberFormat","continuous","NEGATIVE_INFINITY","field_parser","categorical_parser","isInvalid","getInvalidType","trim","temporal_parser","TemporalParser","temporal_parser_classCallCheck","temporal_parser_possibleConstructorReturn","_dtf","binned_parser","matched","parseFloat","continuous_parser","partial_field","PartialField","partial_field_classCallCheck","_sanitize","createFields","dataColumn","headers","headersObj","header","createUnitField","default_config","dataFormat","dsv_arr","firstRowHeader","columns","dsv_arr_toConsumableArray","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","JSON","stringify","src_dsv","delimiter","reFormat","DELIMITER","charCodeAt","parseRows","rows","N","I","eof","eol","j","formatRow","formatValue","test","convert","customConverter","columnSet","column","inferColumns","formatRows","csv","tsv","dsv_str","fieldSeparator","dsv","flat_json","insertionIndex","auto_resolver","converters","FlatJSON","DSVStr","DSVArr","resp","helper_updateFields","partialFieldspace","fieldStoreName","_ref2","helper_slicedToArray","collID","partialFieldMap","newFields","coll","createUnitFieldFromPartial","helper_persistDerivation","model","operation","_model$_derivation","criteriaFn","_derivation","src_helper_toConsumableArray","op","meta","criteria","persistAncestorDerivation","sourceDm","newDm","_newDm$_ancestorDeriv","_ancestorDerivation","helper_selectHelper","selectFn","newRowDiffSet","lastInsertedValue","li","selectorHelperFn","_iteratorNormalCompletion","_didIteratorError","_iteratorError","_step","_iterator","iterator","done","err","return","prepareSelectionData","checker","cloneWithAllFields","clonedDm","clone","ge
tPartialFieldspace","calculateFieldsConfig","helper_filterPropagationModel","propModels","filterByMeasure","fns","propModel","getData","fieldsConfig","getFieldsConfig","fieldsSpace","values","v","def","some","every","propField","valueOf","select","fn","saveChild","helper_cloneWithSelect","selectConfig","cloneConfig","cloned","helper_cloneWithProject","projField","allFields","projectionSet","actualProjField","helper_sanitizeUnitSchema","unitSchema","helper_updateData","relation","sanitizeSchema","converterFn","converter_namespaceObject","_converterFn","_converterFn2","formattedData","nameSpace","_partialFieldspace","_dataFormat","applyExistingOperationOnModel","derivations","getDerivations","selectionModel","rejectionModel","derivation","_selectionModel","_rejectionModel","_getDerivationArgumen","params","groupByString","helper_getDerivationArguments","propagateIdentifiers","propModelInf","nonTraversingModel","excludeModels","handlePropagation","_children","child","_applyExistingOperati","_applyExistingOperati2","propagateToAllDataModels","identifiers","rootModels","propagationInf","propagationNameSpace","propagateToSource","propagationSourceId","sourceId","propagateInterpolatedValues","criterias","persistent","actionCriterias","mutableActions","filteredCriteria","entry","action","sourceActionCriterias","actionInf","actionConf","applyOnSource","models","path","_parent","getPathToRootModel","rootModel","propConfig","sourceIdentifiers","rootGroupByModel","groupByModel","inf","propagationModel","filteredModel","getFilteredModel","reverse","src_relation","Relation","relation_classCallCheck","source","_fieldStoreName","updateData","_propagationNameSpace","immutableActions","_fieldspace","joinWith","unionWith","differenceWith","defConfig","oDm","constructor","setParent","fieldConfig","normalizedProjField","relation_toConsumableArray","search","_fieldConfig","fieldDef","removeChild","findIndex","sibling","parent","datamodel_classCallCheck","datamodel_possibleConstructorRetu
rn","_onPropagation","_sortingDetails","order","withUid","getAllFields","dataGenerated","fieldNames","fmtFieldIdx","elem","fIdx","fmtFn","datumIdx","fieldsArr","groupBy","rawData","dataInCSVArr","sortedDm","colData","rowsCount","serializedData","rowIdx","colIdx","fieldinst","dependency","replaceVar","depVars","retrieveFn","depFieldIndices","fieldSpec","fs","suppliedFields","computedValues","fieldsData","datamodel_toConsumableArray","_createFields","datamodel_slicedToArray","addField","addToNameSpace","isMutableAction","payload","getRootDataModel","find","helper_getRootGroupByModel","sourceNamespace","addToPropNamespace","filterImmutableAction","criteriaModel","propagateImmutableActions","eventName","measureFieldName","binFieldName","_createBinnedFieldDat","measureField","binsCount","_measureField$domain","_measureField$domain2","_slicedToArray","dMin","dMax","ceil","abs","binnedData","createBinnedFieldData","binField","serialize","getSchema","stats_sum","stats_avg","stats_min","stats_max","stats_first","first","stats_last","last","stats_count","count","sd","std","Operators","compose","_len5","operations","_key5","currentDM","firstChild","compose_toConsumableArray","dispose","bin","_len3","_key3","project","_len2","_key2","_len4","_key4","calculateVariable","difference","naturalJoin","fullOuterJoin","union","Stats","stats_namespaceObject","enums_namespaceObject","DataFormat","FilteringMode","version","package_0","__webpack_exports__"],"mappings":"CAAA,SAAAA,EAAAC,GACA,iBAAAC,SAAA,iBAAAC,OACAA,OAAAD,QAAAD,IACA,mBAAAG,eAAAC,IACAD,OAAA,eAAAH,GACA,iBAAAC,QACAA,QAAA,UAAAD,IAEAD,EAAA,UAAAC,IARA,CASCK,OAAA,WACD,mBCTA,IAAAC,KAGA,SAAAC,EAAAC,GAGA,GAAAF,EAAAE,GACA,OAAAF,EAAAE,GAAAP,QAGA,IAAAC,EAAAI,EAAAE,IACAC,EAAAD,EACAE,GAAA,EACAT,YAUA,OANAU,EAAAH,GAAAI,KAAAV,EAAAD,QAAAC,IAAAD,QAAAM,GAGAL,EAAAQ,GAAA,EAGAR,EAAAD,QA0DA,OArDAM,EAAAM,EAAAF,EAGAJ,EAAAO,EAAAR,EAGAC,EAAAQ,EAAA,SAAAd,EAAAe,EAAAC,GACAV,EAAAW,EAAAjB,EAAAe,IACAG,OAAAC,eAAAnB,EAAAe,GAA0CK,YAAA,EAAAC,IAAAL,KAK1CV,EAAAgB,
EAAA,SAAAtB,GACA,oBAAAuB,eAAAC,aACAN,OAAAC,eAAAnB,EAAAuB,OAAAC,aAAwDC,MAAA,WAExDP,OAAAC,eAAAnB,EAAA,cAAiDyB,OAAA,KAQjDnB,EAAAoB,EAAA,SAAAD,EAAAE,GAEA,GADA,EAAAA,IAAAF,EAAAnB,EAAAmB,IACA,EAAAE,EAAA,OAAAF,EACA,KAAAE,GAAA,iBAAAF,QAAAG,WAAA,OAAAH,EACA,IAAAI,EAAAX,OAAAY,OAAA,MAGA,GAFAxB,EAAAgB,EAAAO,GACAX,OAAAC,eAAAU,EAAA,WAAyCT,YAAA,EAAAK,UACzC,EAAAE,GAAA,iBAAAF,EAAA,QAAAM,KAAAN,EAAAnB,EAAAQ,EAAAe,EAAAE,EAAA,SAAAA,GAAgH,OAAAN,EAAAM,IAAqBC,KAAA,KAAAD,IACrI,OAAAF,GAIAvB,EAAA2B,EAAA,SAAAhC,GACA,IAAAe,EAAAf,KAAA2B,WACA,WAA2B,OAAA3B,EAAA,SAC3B,WAAiC,OAAAA,GAEjC,OADAK,EAAAQ,EAAAE,EAAA,IAAAA,GACAA,GAIAV,EAAAW,EAAA,SAAAiB,EAAAC,GAAsD,OAAAjB,OAAAkB,UAAAC,eAAA1B,KAAAuB,EAAAC,IAGtD7B,EAAAgC,EAAA,GAIAhC,IAAAiC,EAAA,25DClFA,IAAMC,EAAYlC,EAAQ,GAE1BL,EAAOD,QAAUwC,EAAUC,QAAUD,EAAUC,QAAUD,qxBCKzD,IAOeE,GANXC,UAAW,WACXC,QAAS,SACTC,QAAS,SACTC,KAAM,QCEKC,GANXC,YAAa,cACbC,SAAU,WACVC,IAAK,MACLC,OAAQ,UCAGC,GAHXC,WAAY,cCKDC,GAJXC,QAAS,UACTC,UAAW,aCGAC,GALXC,OAAQ,SACRC,QAAS,UACTC,IAAK,OCTIC,GACTC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,MAAO,QACPC,KAAM,OACNC,MAAO,QACPC,IAAK,OCFT,SAASC,EAAqBC,GAC1B,OAAIA,aAAgBC,KACTD,EAGJ,IAAIC,KAAKD,GASpB,SAASE,EAAKxC,GACV,OAAQA,EAAI,GAAL,IAAgBA,EAAOA,EA8BP,SAASyC,EAAmBC,GACnDC,KAAKD,OAASA,EACdC,KAAKC,cAAWC,EAChBF,KAAKG,gBAAaD,EAftBE,OAAOC,OAAS,SAAUC,GACtB,OAAOA,EAAKC,QAAQ,2BAA4B,SAkBpDT,EAAkBU,aAAe,IAIjCV,EAAkBW,yBACdC,KAAM,EACNC,MAAO,EACPC,IAAK,EACLC,KAAM,EACNC,OAAQ,EACRC,OAAQ,EACRC,YAAa,GAUjBlB,EAAkBmB,oBAAsB,SAAUC,GAC9C,OAAO,SAAUC,GACb,IAAIC,EACJ,OAAIC,SAASD,EAAYE,SAASH,EAAK,KAC5BC,EAGJF,IAYfpB,EAAkByB,mBAAqB,SAAUC,EAAON,GACpD,OAAO,SAACC,GACJ,IACItF,EADAD,SAGJ,IAAKuF,EAAO,OAAOD,EAEnB,IAAMO,EAAON,EAAIO,cAEjB,IAAK9F,EAAI,EAAGC,EAAI2F,EAAMG,OAAQ/F,EAAIC,EAAGD,IACjC,GAAI4F,EAAM5F,GAAG8F,gBAAkBD,EAC3B,OAAO7F,EAIf,YAAUsE,IAANtE,EACOsF,EAEJ,OAqBfpB,EAAkB8B,oBAAsB,WACpC,IAAMC,GACFC,OACI,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,MACI,SACA,SACA,UACA,YACA,WACA,SACA,aAGFC,GACFF,OACI,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,MACI,UACA,WACA,QACA,QACA,MACA
,OACA,OACA,SACA,YACA,UACA,WACA,aAsPR,OAjPIE,GAEI9F,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAGP,OAFUzB,EAAoByB,GAErBmB,WAAWC,aAG5B1G,GAEIM,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GACP,IACMqB,EADI9C,EAAoByB,GACdmB,WAAa,GAE7B,OAAkB,IAAVE,EAAc,GAAKA,GAAOD,aAG1C7E,GAEIvB,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCG,GAEItG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCI,GAEIvG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACfwB,gBAKvBC,GAEIzG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACZ0B,gBAK1BC,GAEI3G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACjB4B,kBAEHR,aAGlBS,GAEI7G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQC,MAAMmB,KAAK,KAA9B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQC,OACrDO,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQC,MAAMoB,GAAMX,aAGpCa,GAEIjH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQE,KAAKkB,KAAK,KAA7B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQE,MACrDM,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQE,KAAKmB,GAAMX,aAGnCc,GAEIlH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GAChBmC,UAEHf,aAGnBrG,GAEIC,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GAChBmC,aAKtBC,GAEIpH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUF,MAAMmB,KAAK,KAAhC,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUF,OACvDO,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUF,MAAM0B,GAAQjB,aAGxCmB,GAEIvH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUD,KAAKkB,KAAK,KAA/B,KACbb,OAAQtC,EAAkByB,mBAAmBS,E
AAUD,MACvDM,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUD,KAAKyB,GAAQjB,aAGvCvG,GAEIG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OALD,SAKSjB,GAAO,OAAOrB,EAAkBmB,qBAAlBnB,CAAwCqB,GAAO,GACrEkB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACdsC,WAEG,KAG3BE,GAEIxH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OALD,SAKSjB,GACJ,IAAIyC,SACJ,GAAIzC,EAAK,CACL,IAAMtF,EAAIsF,EAAIQ,OACdR,EAAMA,EAAI0C,UAAUhI,EAAI,EAAGA,GAE/B,IAAIuF,EAAYtB,EAAkBmB,qBAAlBnB,CAAwCqB,GACpD2C,EAAc,IAAIlE,KAClBmE,EAAcC,KAAKC,MAAOH,EAAYI,cAAiB,KAO3D,OAHIxE,EAFJkE,KAAYG,EAAc3C,GAEM8C,cAAgBJ,EAAYI,gBACxDN,MAAYG,EAAc,GAAI3C,GAE3B1B,EAAoBkE,GAAQM,eAEvC7B,UAtBD,SAsBYlB,GACP,IACIgD,EADMzE,EAAoByB,GACjB+C,cAAc3B,WACvB1G,SAOJ,OALIsI,IACAtI,EAAIsI,EAAKxC,OACTwC,EAAOA,EAAKN,UAAUhI,EAAI,EAAGA,IAG1BsI,IAGfC,GAEIjI,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACf+C,cAAc3B,eAgB7CzC,EAAkBuE,oBAAsB,WACpC,IAAMC,EAAcxE,EAAkB8B,sBAEtC,OACIf,KAAMyD,EAAYrC,EAClBsC,QAASD,EAAYzI,EACrB2I,eAAgBF,EAAY5G,EAC5B+G,eAAgBH,EAAY7B,EAC5B3B,OAAQwD,EAAY5B,EACpB3B,OAAQuD,EAAY1B,EACpB8B,UAAWJ,EAAYtB,EACvB2B,SAAUL,EAAYlB,EACtBwB,aAAcN,EAAYjB,EAC1BwB,4BAA6BP,EAAYpI,EACzC4I,YAAaR,EAAYf,EACzBwB,WAAYT,EAAYZ,EACxBsB,cAAeV,EAAYtI,EAC3BiJ,WAAYX,EAAYX,EACxBuB,UAAWZ,EAAYF,IAW/BtE,EAAkBqF,cAAgB,WAC9B,IAAMb,EAAcxE,EAAkB8B,sBAChCwD,EAAkB,WAMpB,IALA,IAAIxJ,EAAI,EACJyJ,SACAC,SACEzJ,EAAI0J,UAAK5D,OAER/F,EAAIC,EAAGD,IACVyJ,oBAAWzJ,OAAXsE,EAAAqF,UAAW3J,IACX2J,UAAA5D,QAAS/F,OAATsE,EAAAqF,UAAS3J,MACL0J,EAAcD,GAItB,OAAKC,EAEEA,EAAY,GAAGlD,OAAOkD,EAAY,IAFd,MAK/B,OACI5E,MAAO4D,EAAYX,EAAGW,EAAYF,EAC9BgB,GAEJzE,OAAQ2D,EAAYf,EAAGe,EAAYZ,EAAGY,EAAYtI,EAC9CoJ,GAEJxE,KAAM0D,EAAYtB,EAAGsB,EAAYlB,EAAGkB,EAAYjB,EAAGiB,EAAYpI,EAC3DkJ,GAEJvE,MAAOyD,EAAYrC,EAAGqC,EAAYzI,EAAGyI,EAAY5G,EAAG4G,EAAY7B,EAC5D,SAAU+C,EAAcC,EAAcC,EAAWC,GAC7C,IAAIL,SACAM,SACAC,SACA1E,SAcJ,OAZIsE,IAAiBG,EAAUF,GAAaC,IACJ,OAAhCC,EAAO,GAAGxD,OAAOwD,EAAO,MACxBC,GAAO,GAGXP,EAAcG,GAEdH,EADOG,GAGOD,EAGbF,GAELnE,EAAMmE,EAAY,GAAGlD,OAAOkD
,EAAY,IACpCO,IACA1E,GAAO,IAEJA,GANoB,OASnCL,QAASwD,EAAY5B,EACjB0C,GAEJrE,QAASuD,EAAY1B,EACjBwC,KAUZtF,EAAkBgG,WAAa,SAAU/F,GAQrC,IAPA,IAAMgG,EAAcjG,EAAkBU,aAChC8D,EAAcxE,EAAkB8B,sBAChCoE,EAAgB1J,OAAO2J,KAAK3B,GAC5B4B,KACFtK,SACAuK,UAEIvK,EAAImE,EAAOqG,QAAQL,EAAanK,EAAI,KAAO,GAC/CuK,EAAcpG,EAAOnE,EAAI,IACmB,IAAxCoK,EAAcI,QAAQD,IAE1BD,EAAWG,MACPnE,MAAOtG,EACP0K,MAAOH,IAIf,OAAOD,GASXpG,EAAkByG,SAAW,SAAU5G,EAAMI,GACzC,IAQIlE,EARE2K,EAAQ9G,EAAoBC,GAC5BuG,EAAapG,EAAkBgG,WAAW/F,GAC1CuE,EAAcxE,EAAkB8B,sBAClC6E,EAAeC,OAAO3G,GACpBgG,EAAcjG,EAAkBU,aAClC8F,SACAK,SACA/K,SAGJ,IAAKA,EAAI,EAAGC,EAAIqK,EAAWvE,OAAQ/F,EAAIC,EAAGD,IAEtC+K,EAAerC,EADfgC,EAAQJ,EAAWtK,GAAG0K,OACYjE,UAAUmE,GAC5CC,EAAeA,EAAalG,QAAQ,IAAIH,OAAO2F,EAAcO,EAAO,KAAMK,GAG9E,OAAOF,GAQX3G,EAAkBtC,UAAUoJ,MAAQ,SAAUC,EAAeC,GACzD,IAAM3B,EAAgBrF,EAAkBqF,gBAClClF,EAAWD,KAAK+G,kBAAkBF,GAClCG,EAAalH,EAAkBW,wBAC/BwG,EAAUH,GAAWA,EAAQG,QAC7BC,KACAC,KACFC,SACAC,SACAC,SACAnG,SACAvF,SACA2L,SACAC,SACA3L,SACA+H,KAEJ,IAAKwD,KAAejC,EAChB,MAAQ1H,eAAe1B,KAAKoJ,EAAeiC,GAA3C,CAMA,IAJAD,EAAKxF,OAAS,EAEd2F,GADAD,EAAiBlC,EAAciC,IACHK,OAAOJ,EAAe1F,OAAS,EAAG,GAAG,GAE5D/F,EAAI,EAAGC,EAAIwL,EAAe1F,OAAQ/F,EAAIC,EAAGD,SAI9BsE,KAFZiB,EAAMlB,GADNsH,EAAQF,EAAezL,IACFO,OAGjBgL,EAAKd,KAAK,MAEVc,EAAKd,MAAMkB,EAAOpG,IAM1B,SAAqBjB,KAFrBsH,EAAcF,EAAWI,MAAM1H,KAAMmH,KAEa,OAAhBK,KAA0BP,EACxD,MAGJC,EAAWF,EAAWI,IAAgBI,EAU1C,OAPIN,EAAWvF,QAAU3B,KAAK2H,gBAAgBT,EAAWvF,QAErDiC,EAAOgE,QAAQV,EAAW,GAAI,EAAG,GAEjCtD,EAAOgE,QAAPF,MAAA9D,EAAkBsD,GAGftD,GAQX9D,EAAkBtC,UAAUuJ,kBAAoB,SAAUF,GACtD,IAYIhL,EAZEkE,EAASC,KAAKD,OACduE,EAAcxE,EAAkB8B,sBAChCmE,EAAcjG,EAAkBU,aAChC0F,EAAapG,EAAkBgG,WAAW/F,GAC1C8H,KAEFC,SACAC,SACAC,SACAC,SACAC,SAGAtM,SAEJsM,EAAcxB,OAAO3G,GAErB,IAAMoI,EAAWjC,EAAWkC,IAAI,SAAAC,GAAA,OAAOA,EAAI/B,QACrCgC,EAAmBpC,EAAWvE,OACpC,IAAK/F,EAAI0M,EAAmB,EAAG1M,GAAK,EAAGA,KACnCoM,EAAW9B,EAAWtK,GAAGsG,OAEV,IAAMgG,EAAYvG,OAAS,QAKdzB,IAAxB4H,IACAA,EAAsBI,EAAYvG,QAGtCsG,EAAaC,EAAYrE,UAAUmE,EAAW,EAAGF,GACjDI,EAAcA,EAAYrE,UAAU,EAAGmE,EAAW,GAC9C5H,OAAOC,OAAO4H,GACdC,EAAYr
E,UAAUiE,EAAqBI,EAAYvG,QAE3DmG,EAAsBE,GAblBF,EAAsBE,EAgB9B,IAAKpM,EAAI,EAAGA,EAAI0M,EAAkB1M,IAC9BmM,EAAS7B,EAAWtK,GACpBsM,EAAcA,EAAY3H,QAAQwF,EAAcgC,EAAOzB,MAAOhC,EAAYyD,EAAOzB,OAAOnE,WAG5F,IAAMoG,EAAgB1B,EAAc2B,MAAM,IAAIpI,OAAO8H,QAGrD,IAFAK,EAAcE,QAET7M,EAAI,EAAGC,EAAIsM,EAASxG,OAAQ/F,EAAIC,EAAGD,IACpCiM,EAASM,EAASvM,IAAM2M,EAAc3M,GAE1C,OAAOiM,GAQX/H,EAAkBtC,UAAUkL,cAAgB,SAAU7B,GAClD,IAAIlH,EAAO,KACX,GAAIgJ,OAAOtH,SAASwF,GAChBlH,EAAO,IAAIC,KAAKiH,QACb,IAAK7G,KAAKD,QAAUH,KAAKgH,MAAMC,GAClClH,EAAO,IAAIC,KAAKiH,OAEf,CACD,IAAM5G,EAAWD,KAAKC,SAAWD,KAAK4G,MAAMC,GACxC5G,EAAS0B,SACT3B,KAAKG,WAAL,IAAAyI,SAAApL,UAAAJ,KAAAsK,MAAsB9H,MAAtB,MAAAiJ,6HAAAC,CAA8B7I,MAC9BN,EAAOK,KAAKG,YAGpB,OAAOR,GAGXG,EAAkBtC,UAAUmK,gBAAkB,SAASoB,GACnD,OAAe,IAARA,GAAa/I,KAAKD,OAAOyI,MAAM,QAAQ7G,QASlD7B,EAAkBtC,UAAU+I,SAAW,SAAUxG,EAAQ8G,GACrD,IAAI1G,SAQJ,OANI0G,EACA1G,EAAaH,KAAKG,WAAaH,KAAK0I,cAAc7B,IACzC1G,EAAaH,KAAKG,cAC3BA,EAAaH,KAAK0I,cAAc7B,IAG7B/G,EAAkByG,SAASpG,EAAYJ,ICruBnC,IAAAiJ,EAAA,SAACC,GACZ,IAAIrN,EAAI,EACR,OAAO,WAAe,QAAAsN,EAAA3D,UAAA5D,OAAXwH,EAAWC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAXF,EAAWE,GAAA9D,UAAA8D,GAClBF,EAAOG,QAAQ,SAACnI,EAAKoI,GACXN,EAAMM,aAAuBH,QAC/BH,EAAMM,GAAcH,MAAMI,MAAO7H,OAAQ/F,KAE7CqN,EAAMM,GAAYlD,KAAKlF,KAE3BvF,kNCdF6N,EAAe,SACfC,EAAgBpN,OAAOkB,UAAU+E,SACjCoH,EAAc,kBACdC,EAAa,iBAEnB,SAASC,EAAexB,EAAKyB,GAIzB,IAHA,IAAIlO,EAAIkO,EAAUnI,OACdoI,GAAU,EAEPnO,GAAG,CACN,GAAIyM,IAAQyB,EAAUlO,GAElB,OADAmO,EAASnO,EAGbA,GAAK,EAGT,OAAOmO,EA2GX,SAASC,EAASC,EAAMC,EAAMC,GAE1B,YAAI,IAAOF,EAAP,YAAAG,EAAOH,MAASR,SAAgB,IAAOS,EAAP,YAAAE,EAAOF,MAAST,EACzC,WAGP,IAAOS,EAAP,YAAAE,EAAOF,MAAST,GAAyB,OAATS,EACzBD,SAGP,IAAOA,EAAP,YAAAG,EAAOH,MAASR,IAChBQ,EAAOC,aAAgBd,aAnH/B,SAASiB,EAAMJ,EAAMC,EAAMC,EAAWG,EAAQC,GAC1C,IAAIC,EACAC,EACAC,EACAC,EACAC,EAcJ,GATKL,GAKDD,EAAOjE,KAAK4D,GACZM,EAAOlE,KAAK6D,KALZI,GAAUL,GACVM,GAAUL,IAOVA,aAAgBd,MAChB,IAAKoB,EAAO,EAAGA,EAAON,EAAKvI,OAAQ6I,GAAQ,EAAG,CAC1C,IACIC,EAASR,EAAKO,GACdE,EAASR,EAAKM,GAElB,MAAOnH,GACH,eAGA,IAAOqH,EAAP,YAAAN,EAAOM,MAAWjB,EACZU,QAAw
BjK,IAAXwK,IACfT,EAAKO,GAAQE,IAIF,OAAXD,SAAmB,IAAOA,EAAP,YAAAL,EAAOK,MAAWhB,IACrCgB,EAASR,EAAKO,GAAQE,aAAkBtB,cAG9B,KADdwB,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,SAMrD,IAAKC,KAAQN,EAAM,CACf,IACIO,EAASR,EAAKO,GACdE,EAASR,EAAKM,GAElB,MAAOnH,GACH,SAGJ,GAAe,OAAXqH,SAAmB,IAAOA,EAAP,YAAAN,EAAOM,MAAWjB,GAKrCkB,EAAMjB,EAAc3N,KAAK2O,MACbf,GACO,OAAXc,SAAmB,IAAOA,EAAP,YAAAL,EAAOK,MAAWhB,IACrCgB,EAASR,EAAKO,QAGJ,KADdI,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,IAGxCI,IAAQf,GACE,OAAXa,GAAqBA,aAAkBrB,QACvCqB,EAASR,EAAKO,QAGJ,KADdI,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,IAI7CN,EAAKO,GAAQE,MAGhB,CACD,GAAIP,QAAwBjK,IAAXwK,EACb,SAEJT,EAAKO,GAAQE,GAIzB,OAAOT,EAiBPI,CAAMJ,EAAMC,EAAMC,GACXF,GCnIJ,SAASY,EAAS1J,GACrB,OAAOiI,MAAMyB,QAAQ1J,GA6BlB,SAAS2J,EAAY3J,GACxB,MAAsB,mBAARA,EAaX,IAAM4J,EAAc,wBAAY,IAAInL,MAAOoL,UAAYhH,KAAKiH,MAAsB,IAAhBjH,KAAKkH,WASvE,SAASC,EAAWC,EAAMC,GAC7B,IAAKR,EAAQO,KAAUP,EAAQQ,GAC3B,OAAOD,IAASC,EAGpB,GAAID,EAAKzJ,SAAW0J,EAAK1J,OACrB,OAAO,EAGX,IAAK,IAAI/F,EAAI,EAAGA,EAAIwP,EAAKzJ,OAAQ/F,IAC7B,GAAIwP,EAAKxP,KAAOyP,EAAKzP,GACjB,OAAO,EAIf,OAAO,EASJ,SAAS0P,EAAanK,GACzB,OAAOA,EASJ,IAAMoK,EAAmB,SAACC,GAC7B,OApEG,SAAmBrK,GACtB,MAAsB,iBAARA,EAmEVsK,CAASD,GACF1N,EAAWE,QACX6M,EAAQW,IAASX,EAAQW,EAAK,IAC9B1N,EAAWG,QACX4M,EAAQW,KAA0B,IAAhBA,EAAK7J,QAlF/B,SAAmBR,GACtB,OAAOA,IAAQ7E,OAAO6E,GAiF4BuK,CAASF,EAAK,KACrD1N,EAAWC,UAEf,MChDI4N,GAnDXH,QAEAI,gBAHe,SAGEC,EAAU1P,GACvB,IAAM2P,EAAS3P,GAAQ4O,IA4CvB,OA1CA/K,KAAKwL,KAAKM,IACN3P,KAAM2P,EACN3C,OAAQ0C,EAERE,UAJgB,WAKZ,IAAIA,EAAY/L,KAAKgM,iBAQrB,OANKD,IACDA,EAAY/L,KAAKgM,oBACjBhM,KAAKmJ,OAAOG,QAAQ,SAAC2C,GACjBF,EAAUE,EAAM9P,QAAU8P,KAG3BF,GAEXG,WAfgB,WAgBZ,IAAIC,EAAgBnM,KAAKoM,eAUzB,OARKD,IACDA,EAAgBnM,KAAKoM,kBACrBpM,KAAKmJ,OAAOG,QAAQ,SAAC2C,GACbA,EAAMI,SAASC,OAAS5N,EAAUC,UAClCwN,EAAcF,EAAM9P,QAAU8P,MAInCE,GAEXI,aA5BgB,WA6BZ,IAAIC,EAAkBxM,KAAKyM,iBAU3B,OARKzM,KAAKyM,mBACND,EAAkBxM,KAAKyM,oBACvBzM,KAAK
mJ,OAAOG,QAAQ,SAAC2C,GACbA,EAAMI,SAASC,OAAS5N,EAAUE,YAClC4N,EAAgBP,EAAM9P,QAAU8P,MAIrCO,IAGRxM,KAAKwL,KAAKM,8PCKVY,aA1CX,SAAAC,EAAaxL,EAAK8K,gGAAOW,CAAA5M,KAAA2M,GACrBrQ,OAAOC,eAAeyD,KAAM,UACxBxD,YAAY,EACZqQ,cAAc,EACdC,UAAU,EACVjQ,MAAOsE,IAGXnB,KAAKiM,MAAQA,+CAoBb,OAAOvF,OAAO1G,KAAKnD,yCAUnB,OAAOmD,KAAKnD,oCArBZ,OAAOmD,KAAK+M,gBCxBb,SAASC,EAAoBC,EAAYC,GACxCD,EAAWtL,OAAS,GACDsL,EAAWE,MAAM,KACzB7D,QAAQ,SAAC8D,GAChB,IAAMC,EAAaD,EAAQD,MAAM,KAC3BG,GAAUD,EAAW,GACrBE,IAAQF,EAAW,IAAMA,EAAW,IAC1C,GAAIE,GAAOD,EACP,IAAK,IAAI1R,EAAI0R,EAAO1R,GAAK2R,EAAK3R,GAAK,EAC/BsR,EAAStR,kQCVvB4R,aAqBF,SAAAA,EAAa3Q,gGAAO4Q,CAAAzN,KAAAwN,GAChBxN,KAAK+M,OAASlQ,0DAdO6Q,GACrB,OAAKA,EAGEpR,OAAOqR,OAAOH,EAAkBI,qBAAsBF,GAFlDF,EAAkBI,4DAsB7B,OAAO5N,KAAK+M,0CAUZ,OAAOrG,OAAO1G,KAAK+M,4CAGN5L,GACb,OAAQA,aAAeqM,KAAwBA,EAAkBK,mBAAmB1M,0CAGlEA,GAClB,OAAOA,aAAeqM,EAAoBrM,EAAMqM,EAAkBK,mBAAmB1M,YAO7FqM,EAAkBM,KAAO,IAAIN,EAAkB,QAC/CA,EAAkBO,GAAK,IAAIP,EAAkB,MAC7CA,EAAkBQ,IAAM,IAAIR,EAAkB,OAO9CA,EAAkBI,sBACdK,QAAST,EAAkBO,GAC3BG,IAAKV,EAAkBQ,IACvBG,KAAMX,EAAkBM,KACxB5N,UAAWsN,EAAkBO,IAGlB,IAAAK,EAAA,qaC5ETC,EAAkB,SAACC,EAAShB,EAAOC,GAIrC,IAHA,IAAMgB,KACFC,EAAOlB,EAEJkB,EAAOjB,GACVgB,EAAQlI,KAAKmI,GACbA,GAAQF,EAIZ,OAFAC,EAAQlI,KAAKmI,GAEND,GAGLE,EAAkB,SAACC,EAAc7R,GAOnC,IANA,IAAI8R,EAAU,EACVC,EAAWF,EAAa/M,OAAS,EACjCkN,SACArN,SAGGmN,GAAWC,GAAU,CAIxB,GAAI/R,IAFJ2E,EAAQkN,EADRG,EAASF,EAAU3K,KAAK8K,OAAOF,EAAWD,GAAW,KAGlCrB,OAASzQ,EAAQ2E,EAAM+L,IACtC,OAAO/L,EACA3E,GAAS2E,EAAM+L,IACtBoB,EAAUE,EAAS,EACZhS,EAAQ2E,EAAM8L,QACrBsB,EAAWC,EAAS,GAI5B,OAAO,MChCJ,IAUME,GACTC,OAAQ,SACRC,QAAS,UACTC,QAAS,QACTC,QAAS,UACTC,QAAS,qBACTC,IAAK,OAGIC,GACTC,MAAO,QACPC,UAAW,YACXC,WAAY,aACZC,QAAS,UACTC,UAAW,aAGFC,EACJ,MC2BF,MCnDA,SAASC,EAAiBC,EAAKC,GAClC,IAAMC,KACAC,KASN,OARAH,EAAI3G,OAAOG,QAAQ,SAAC2C,GAChBgE,EAAO5J,KAAK4F,EAAMI,SAASlQ,QAE/B4T,EAAI5G,OAAOG,QAAQ,SAAC2C,IAC6B,IAAzCgE,EAAO7J,QAAQ6F,EAAMI,SAASlQ,OAC9B6T,EAAO3J,KAAK4F,EAAMI,SAASlQ,QAG5B6T,ECRX,SAASE,IAAoB,OAAO,EAY7B,SAASC,EAAcC,EAAKC,EAAKC,GAA+D,IAArDC,EAAqDhL,UAAA5D,OAAA,QAA
AzB,IAAAqF,UAAA,IAAAA,UAAA,GAAxBiL,EAAwBjL,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAb+J,EAAMC,MACtFlD,KACAb,KACAiF,EAAqBH,GAAYJ,EACjCQ,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBE,EAAoBH,EAAcvU,KAClC2U,EAAoBF,EAAczU,KAClCA,EAAUuU,EAAcvU,KAAxB,IAAgCyU,EAAczU,KAC9C4U,EAAmBlB,EAAgBa,EAAeE,GAExD,GAAIC,IAAsBC,EACtB,MAAM,IAAIE,MAAM,8CA+EpB,OA5EAN,EAAcvH,OAAOG,QAAQ,SAAC2C,GAC1B,IAAMgF,EAAYjH,KAAYiC,EAAMI,WACc,IAA9C0E,EAAiB3K,QAAQ6K,EAAU9U,OAAiBoU,IACpDU,EAAU9U,KAAUuU,EAAcvU,KAAlC,IAA0C8U,EAAU9U,MAExDkQ,EAAOhG,KAAK4K,KAEhBL,EAAczH,OAAOG,QAAQ,SAAC2C,GAC1B,IAAMgF,EAAYjH,KAAYiC,EAAMI,WACc,IAA9C0E,EAAiB3K,QAAQ6K,EAAU9U,MAC9BoU,IACDU,EAAU9U,KAAUyU,EAAczU,KAAlC,IAA0C8U,EAAU9U,KACpDkQ,EAAOhG,KAAK4K,IAGhB5E,EAAOhG,KAAK4K,KAKpBjE,EAAmBoD,EAAIc,YAAa,SAACtV,GACjC,IAAIuV,GAAW,EACXC,SACJpE,EAAmBqD,EAAIa,YAAa,SAACG,GACjC,IAAMC,KACAC,KACNA,EAAQV,MACRU,EAAQT,MACRJ,EAAcvH,OAAOG,QAAQ,SAAC2C,GAC1BqF,EAAMjL,KAAK4F,EAAMuF,aAAahG,KAAK5P,IACnC2V,EAAQV,GAAmB5E,EAAM9P,QAAU8P,EAAMuF,aAAahG,KAAK5P,KAEvEgV,EAAczH,OAAOG,QAAQ,SAAC2C,IAC+B,IAAnD8E,EAAiB3K,QAAQ6F,EAAMI,SAASlQ,OAAgBoU,GAC1De,EAAMjL,KAAK4F,EAAMuF,aAAahG,KAAK6F,IAEvCE,EAAQT,GAAmB7E,EAAM9P,QAAU8P,EAAMuF,aAAahG,KAAK6F,KAGvE,IAIMI,EAAYC,GAAgBH,EAAQV,IACpCc,EAAYD,GAAgBH,EAAQT,IAC1C,GAAIL,EAAmBgB,EAAWE,EALb,kBAAMvB,EAAIwB,gBACV,kBAAMvB,EAAIuB,oBAI4D,CACvF,IAAMC,KACNP,EAAMhI,QAAQ,SAACwI,EAASC,GACpBF,EAASxF,EAAO0F,GAAK5V,MAAQ2V,IAE7BX,GAAY7B,EAAMC,QAAUiB,EAC5BhF,EAAK4F,GAAeS,GAGpBrG,EAAKnF,KAAKwL,GACVV,GAAW,EACXC,EAAcxV,QAEf,IAAK4U,IAAalB,EAAME,WAAagB,IAAalB,EAAMG,cAAgB0B,EAAU,CACrF,IAAMU,KACF9I,EAAM2H,EAAcvH,OAAOxH,OAAS,EACxC2P,EAAMhI,QAAQ,SAACwI,EAASC,GAEhBF,EAASxF,EAAO0F,GAAK5V,MADrB4V,GAAOhJ,EACsB+I,EAGA,OAGrCX,GAAW,EACXC,EAAcxV,EACd4P,EAAKnF,KAAKwL,QAKf,IAAIG,GAAUxG,EAAMa,GAAUlQ,SC3GzC,SAAS8V,EAAWjP,EAAGO,GACnB,IAAM2O,KAAQlP,EACRmP,KAAQ5O,EACd,OAAI2O,EAAKC,GACG,EAERD,EAAKC,EACE,EAEJ,EAqEJ,SAASC,EAAWC,GAAyB,IAApBC,EAAoB/M,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAX0M,EAIrC,OAHII,EAAI1Q,OAAS,GArBrB,SAAS4Q,EAAMF,EAAKG,EAAIC,EAAIH,GACxB,GAAIG,
IAAOD,EAAM,OAAOH,EAExB,IAAMK,EAAMF,EAAKxO,KAAK8K,OAAO2D,EAAKD,GAAM,GAKxC,OAJAD,EAAKF,EAAKG,EAAIE,EAAKJ,GACnBC,EAAKF,EAAKK,EAAM,EAAGD,EAAIH,GAzC3B,SAAgBD,EAAKG,EAAIE,EAAKD,EAAIH,GAG9B,IAFA,IAAMK,EAAUN,EACVO,KACGhX,EAAI4W,EAAI5W,GAAK6W,EAAI7W,GAAK,EAC3BgX,EAAOhX,GAAK+W,EAAQ/W,GAKxB,IAHA,IAAIoH,EAAIwP,EACJjP,EAAImP,EAAM,EAEL9W,EAAI4W,EAAI5W,GAAK6W,EAAI7W,GAAK,EACvBoH,EAAI0P,GACJC,EAAQ/W,GAAKgX,EAAOrP,GACpBA,GAAK,GACEA,EAAIkP,GACXE,EAAQ/W,GAAKgX,EAAO5P,GACpBA,GAAK,GACEsP,EAAOM,EAAO5P,GAAI4P,EAAOrP,KAAO,GACvCoP,EAAQ/W,GAAKgX,EAAO5P,GACpBA,GAAK,IAEL2P,EAAQ/W,GAAKgX,EAAOrP,GACpBA,GAAK,GAqBbsP,CAAMR,EAAKG,EAAIE,EAAKD,EAAIH,GAEjBD,EAcHE,CAAKF,EAAK,EAAGA,EAAI1Q,OAAS,EAAG2Q,GAE1BD,0HC3EX,SAASS,EAAWC,EAAUC,EAAU9Q,GACpC,IAAI+Q,SACJ,OAAQF,GACR,KAAKvU,EAAeC,WACpB,KAAKN,EAAiBE,SAEd4U,EADa,SAAbD,EACU,SAAChQ,EAAGO,GAAJ,OAAUA,EAAErB,GAASc,EAAEd,IAEvB,SAACc,EAAGO,GAAJ,OAAUP,EAAEd,GAASqB,EAAErB,IAErC,MACJ,QACI+Q,EAAU,SAACjQ,EAAGO,GACV,IAAM2O,KAAQlP,EAAEd,GACViQ,KAAQ5O,EAAErB,GAChB,OAAIgQ,EAAKC,EACe,SAAba,EAAsB,GAAK,EAElCd,EAAKC,EACe,SAAba,GAAuB,EAAI,EAE/B,GAGf,OAAOC,EAUX,SAASC,EAAU1H,EAAMjC,GACrB,IAAM4J,EAAU,IAAIC,IACdC,KAYN,OAVA7H,EAAKlC,QAAQ,SAACgK,GACV,IAAMC,EAAWD,EAAM/J,GACnB4J,EAAQK,IAAID,GACZF,EAAYF,EAAQ1W,IAAI8W,IAAW,GAAGlN,KAAKiN,IAE3CD,EAAYhN,MAAMkN,GAAWD,KAC7BH,EAAQM,IAAIF,EAAUF,EAAY1R,OAAS,MAI5C0R,EAYX,SAASK,EAAmBC,EAAcC,EAAcC,GACpD,IAAMxO,GACFyO,MAAOH,EAAa,IAQxB,OALAC,EAAaG,OAAO,SAACC,EAAKxF,EAAMyF,GAE5B,OADAD,EAAIxF,GAAQmF,EAAa,GAAGvL,IAAI,SAAAkL,GAAA,OAASA,EAAMO,EAAmBI,GAAK/R,SAChE8R,GACR3O,GAEIA,EA0EJ,SAAS6O,EAAaC,EAAYlH,EAAYmH,EAAeC,EAAgBvN,GAChF,IAMMwN,GACFjI,UACAb,QACA+I,SAEEC,GAPN1N,EAAUxK,OAAOqR,WAHb6G,QAAQ,EACRC,YAAY,GAEwB3N,IAOjB0N,OACjBE,EAAaL,GAAkBA,EAAe1S,OAAS,EAEvDgT,KAiDN,GA/CgBP,EAAcjH,MAAM,KAE5B7D,QAAQ,SAACsL,GACb,IAAK,IAAIhZ,EAAI,EAAGA,EAAIuY,EAAWxS,OAAQ/F,GAAK,EACxC,GAAIuY,EAAWvY,GAAGO,SAAWyY,EAAS,CAClCD,EAAWtO,KAAK8N,EAAWvY,IAC3B,SAMZ+Y,EAAWrL,QAAQ,SAAC2C,GAEhBqI,EAAOjI,OAAOhG,KAAK4F,EAAMI,YAGzBmI,GACAF,EAAOjI,OAAOhG,MACVlK,KAAM,MACNmQ,KAAM,eAIdU,EAAmBC,E
AAY,SAACrR,GAC5B0Y,EAAO9I,KAAKnF,SACZ,IAAMwO,EAAYP,EAAO9I,KAAK7J,OAAS,EAEvCgT,EAAWrL,QAAQ,SAAC2C,EAAOoF,GACvBiD,EAAO9I,KAAKqJ,GAAWxD,EAFf,GAE6BpF,EAAMuF,aAAahG,KAAK5P,KAE7D4Y,IACAF,EAAO9I,KAAKqJ,GAAWF,EAAWhT,QAAU/F,GAGhD0Y,EAAOC,KAAKlO,KAAKzK,GAIb8Y,GAAcJ,EAAO9I,KAAKqJ,GAAWxO,KAAKzK,KAI9C8Y,GA7HR,SAAkBI,EAAST,GAOvB,IAPuC,IAC/B7I,EAAiBsJ,EAAjBtJ,KAAMa,EAAWyI,EAAXzI,OACV0I,SACAC,SACAC,SACArZ,EAAIyY,EAAe1S,OAAS,EAEzB/F,GAAK,EAAGA,IACXmZ,EAAYV,EAAezY,GAAG,GAC9BoZ,EAAWX,EAAezY,GAAG,IAC7BqZ,EAAWC,GAAc7I,EAAQ0I,MAO7BjK,EAAWkK,GAEX5C,EAAU5G,EAAM,SAACxI,EAAGO,GAAJ,OAAUyR,EAAShS,EAAEiS,EAAS/S,OAAQqB,EAAE0R,EAAS/S,UAC1D2I,EAAQmK,GAAW,WAC1B,IAAM3B,EAAcH,EAAU1H,EAAMyJ,EAAS/S,OACvCiT,EAAYH,EAASA,EAASrT,OAAS,GACvCiS,EAAeoB,EAASI,MAAM,EAAGJ,EAASrT,OAAS,GACnDkS,EAAqBD,EAAaxL,IAAI,SAAAiN,GAAA,OAAKH,GAAc7I,EAAQgJ,KAEvEhC,EAAY/J,QAAQ,SAACqK,GACjBA,EAAatN,KAAKqN,EAAmBC,EAAcC,EAAcC,MAGrEzB,EAAUiB,EAAa,SAACrQ,EAAGO,GACvB,IAAMvH,EAAIgH,EAAE,GACN3F,EAAIkG,EAAE,GACZ,OAAO4R,EAAUnZ,EAAGqB,KAIxBmO,EAAK7J,OAAS,EACd0R,EAAY/J,QAAQ,SAACgK,GACjB9H,EAAKnF,KAALqB,MAAA8D,EAAA8J,EAAahC,EAAM,OAnBG,IAsB1B0B,EAA8C,SAAnCtO,OAAOsO,GAAUtT,cAA2B,OAAS,MAChE0Q,EAAU5G,EAAMsH,EAAUmC,EAAS3I,KAAM0I,EAAUC,EAAS/S,UAIpE4S,EAAQP,QACR/I,EAAKlC,QAAQ,SAACzM,GACViY,EAAQP,KAAKlO,KAAKxJ,EAAM0Y,SA6ExBC,CAASlB,EAAQD,GAGjBvN,EAAQ2N,WAAY,CACpB,IAAMgB,EAAUrM,mBAAAkM,EAASlM,MAAMkL,EAAOjI,OAAO1K,UAASyG,IAAI,sBAC1DkM,EAAO9I,KAAKlC,QAAQ,SAACgI,GACjBA,EAAMhI,QAAQ,SAACkC,EAAM5P,GACjB6Z,EAAQ7Z,GAAGyK,KAAKmF,OAGxB8I,EAAO9I,KAAOiK,EAGlB,OAAOnB,EC1NJ,SAASoB,EAAYtF,EAAKC,GAC7B,IAAMsF,KACAtJ,KACAuJ,KACApK,KACAkF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBkF,EAAwBnF,EAAc3E,YACtC+J,EAAwBlF,EAAc7E,YACtC5P,EAAUuU,EAAcvU,KAAxB,UAAsCyU,EAAczU,KAG1D,IAAKgP,EAAWiF,EAAI2F,eAAe5I,MAAM,KAAK6I,OAAQ3F,EAAI0F,eAAe5I,MAAM,KAAK6I,QAChF,OAAO,KAiBX,SAASC,EAAkBC,EAAInK,EAAWoK,GACtCnJ,EAAmBkJ,EAAGhF,YAAa,SAACtV,GAChC,IAAM0V,KACF8E,EAAW,GACfR,EAActM,QAAQ,SAAC+M,GACnB,IAAMxZ,EAAQkP,EAAUsK,GAAY7E,aAAahG,KAAK5P,GACtDwa,OAAgBvZ,EAChByU,EAAM+E,GAAcxZ,IAEnB8Y,EAAUS,KACPD,
GAAW3K,EAAKnF,KAAKiL,GACzBqE,EAAUS,IAAY,KASlC,OAjCChG,EAAI2F,eAAe5I,MAAM,KAAM7D,QAAQ,SAACyL,GACrC,IAAM9I,EAAQ4J,EAAsBd,GACpC1I,EAAOhG,KAAK2D,KAAYiC,EAAMI,WAC9BuJ,EAAcvP,KAAK4F,EAAMI,SAASlQ,QA2BtC8Z,EAAkB5F,EAAKyF,GAAuB,GAC9CG,EAAkB7F,EAAKyF,GAAuB,GAEvC,IAAI7D,GAAUxG,EAAMa,GAAUlQ,+PC5DjC+C,GAAgDD,EAAhDC,IAAKC,GAA2CF,EAA3CE,IAAKG,GAAsCL,EAAtCK,MAAOC,GAA+BN,EAA/BM,KAAMC,GAAyBP,EAAzBO,MAAOC,GAAkBR,EAAlBQ,IAAKL,GAAaH,EAAbG,IAAKC,GAAQJ,EAARI,IAEhD,SAASiX,GAAkBjE,GACvB,OAAOA,EAAIkE,OAAO,SAAA/L,GAAA,QAAUA,aAAgB4D,KAShD,SAASoI,GAAKnE,GACV,GAAIxH,EAAQwH,MAAUA,EAAI,aAAcjJ,OAAQ,CAC5C,IAAMqN,EAAiBH,GAAkBjE,GAIzC,OAHiBoE,EAAe9U,OACZ8U,EAAe1C,OAAO,SAACC,EAAK0C,GAAN,OAAe1C,EAAM0C,GAAM,GAC/CtI,EAAkBN,KAG5C,OAAOM,EAAkBN,KAU7B,SAAS6I,GAAKtE,GACV,GAAIxH,EAAQwH,MAAUA,EAAI,aAAcjJ,OAAQ,CAC5C,IAAMwN,EAAWJ,GAAInE,GACftJ,EAAMsJ,EAAI1Q,QAAU,EAC1B,OAAQgH,OAAOkO,MAAMD,IAAaA,aAAoBxI,EAC7CA,EAAkBN,KAAO8I,EAAW7N,EAEjD,OAAOqF,EAAkBN,KAgG7B,IAAMgJ,YACD5X,GAAMsX,IADLO,GAAAC,EAED7X,GAAMwX,IAFLI,GAAAC,EAGD5X,GAzFL,SAAciT,GACV,GAAIxH,EAAQwH,MAAUA,EAAI,aAAcjJ,OAAQ,CAE5C,IAAM6N,EAAiBX,GAAkBjE,GAEzC,OAAQ4E,EAAetV,OAAUqC,KAAKkT,IAALxP,MAAA1D,KAAAmT,GAAYF,IAAkB7I,EAAkBN,KAErF,OAAOM,EAAkBN,OA+EvBiJ,GAAAC,EAID3X,GAzEL,SAAcgT,GACV,GAAIxH,EAAQwH,MAAUA,EAAI,aAAcjJ,OAAQ,CAE5C,IAAM6N,EAAiBX,GAAkBjE,GAEzC,OAAQ4E,EAAetV,OAAUqC,KAAKoT,IAAL1P,MAAA1D,KAAAmT,GAAYF,IAAkB7I,EAAkBN,KAErF,OAAOM,EAAkBN,OA8DvBiJ,GAAAC,EAKD1X,GAzDL,SAAgB+S,GACZ,OAAOA,EAAI,KAmDT0E,GAAAC,EAMDzX,GA/CL,SAAe8S,GACX,OAAOA,EAAIA,EAAI1Q,OAAS,KAwCtBoV,GAAAC,EAODxX,GArCL,SAAgB6S,GACZ,OAAIxH,EAAQwH,GACDA,EAAI1Q,OAERyM,EAAkBN,OA0BvBiJ,GAAAC,EAQDvX,GAbL,SAAc4S,GACV,OAAOrO,KAAKqT,KAbhB,SAAmBhF,GACf,IAAIiF,EAAOX,GAAItE,GACf,OAAOsE,GAAItE,EAAIjK,IAAI,SAAAmP,GAAA,OAAAvT,KAAAwT,IAAQD,EAAMD,EAAS,MAWzBG,CAASpF,MAIxB2E,GAWAU,GAAqBxY,6PC1IrByY,cACF,SAAAC,IAAe,IAAAC,EAAA7X,kGAAA8X,CAAA9X,KAAA4X,GACX5X,KAAKiJ,MAAQ,IAAImK,IACjBpT,KAAKiJ,MAAMwK,IAAI,aAAc+C,IAE7Bla,OAAOyb,QAAQjB,IAAQxN,QAAQ,SAACnM,GAC5B0a,EAAK5O,MAAMwK,IAAItW,EAAI,GAAIA,EAAI,0DAc/B,IAAKoI,UAAO5D,OACR,OAA
O3B,KAAKiJ,MAAMxM,IAAI,cAG1B,IAAIub,0CAEJ,GAAuB,mBAAZA,EACPhY,KAAKiJ,MAAMwK,IAAI,aAAcuE,OAC1B,CAEH,GADAA,EAAUtR,OAAOsR,IAC6B,IAA1C1b,OAAO2J,KAAK6Q,IAAQ1Q,QAAQ4R,GAG5B,MAAM,IAAIhH,MAAJ,WAAqBgH,EAArB,0BAFNhY,KAAKiJ,MAAMwK,IAAI,aAAcqD,GAAOkB,IAK5C,OAAOhY,sCAmCD7D,EAAM6b,GAAS,IAAAC,EAAAjY,KACrB,GAAuB,mBAAZgY,EACP,MAAM,IAAIhH,MAAM,gCAMpB,OAHA7U,EAAOuK,OAAOvK,GACd6D,KAAKiJ,MAAMwK,IAAItX,EAAM6b,GAEd,WAAQC,EAAKC,aAAa/b,yCAGvBA,GACN6D,KAAKiJ,MAAMuK,IAAIrX,IACf6D,KAAKiJ,MAAMkP,OAAOhc,mCAIjBA,GACL,OAAIA,aAAgByM,SACTzM,EAEJ6D,KAAKiJ,MAAMxM,IAAIN,YAgBfic,GAZO,WAClB,IAAInP,EAAQ,KAQZ,OALkB,OAAVA,IACAA,EAAQ,IAAI0O,IAET1O,EAPO,uaCrCtB,SAASoP,GAASC,EAAWzM,EAAU0M,EAAUC,GAC7C,IAAMC,EAxDV,SAAsBH,EAAWzM,GAC7B,IAAMmE,KAEA0I,EADaJ,EAAU3H,gBACCpE,eAY9B,OAVAjQ,OAAOyb,QAAQW,GAAYpP,QAAQ,SAAAqP,GAAW,IAATxb,EAASyb,GAAAD,EAAA,MACtC9M,GAAYA,EAASlK,QACU,IAA3BkK,EAASzF,QAAQjJ,IACjB6S,EAAO3J,KAAKlJ,GAGhB6S,EAAO3J,KAAKlJ,KAIb6S,EAyCW6I,CAAYP,EAAWzM,GACnCiN,EAhCV,SAAwBR,GAA0B,IAAfC,EAAehT,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MACxC+O,KAEAyE,EADaT,EAAU3H,gBACDzE,aACtB8M,EAAaZ,GAAaa,iBAchC,OAZA3c,OAAO2J,KAAK8S,GAAUzP,QAAQ,SAAC4P,GACU,iBAA1BX,EAASW,KAChBX,EAASW,GAAeH,EAASG,GAAaC,YAElD,IAAMC,EAAYhB,GAAaiB,QAAQd,EAASW,IAC5CE,EACA9E,EAAO4E,GAAeE,GAEtB9E,EAAO4E,GAAeF,EACtBT,EAASW,GAAexB,MAGzBpD,EAcYgF,CAAchB,EAAWC,GACtCpE,EAAamE,EAAU3H,gBACvB4I,EAAgBpF,EAAWpI,YAC3ByN,EAASrF,EAAWhY,KACpBsd,KACAC,KACArN,KACA8G,KACA3H,KACFmO,SAGJrd,OAAOyb,QAAQwB,GAAejQ,QAAQ,SAAAsQ,GAAkB,IAAAC,EAAAjB,GAAAgB,EAAA,GAAhBzc,EAAgB0c,EAAA,GAAXhd,EAAWgd,EAAA,GACpD,IAAgC,IAA5BpB,EAAUrS,QAAQjJ,IAAe2b,EAAW3b,GAG5C,OAFAkP,EAAOhG,KAAK2D,KAAYnN,EAAMwP,WAEtBxP,EAAMwP,SAASC,MACvB,KAAK5N,EAAUC,QACX+a,EAAWrT,KAAKlJ,GAChB,MACJ,QACA,KAAKuB,EAAUE,UACX6a,EAAapT,KAAKlJ,MAK9B,IAAI2c,EAAW,EACf9M,EAAmBsL,EAAUpH,YAAa,SAACtV,GACvC,IAAIme,EAAO,GACXN,EAAanQ,QAAQ,SAAC0Q,GAClBD,EAAUA,EAAV,IAAkBR,EAAcS,GAAGxI,aAAahG,KAAK5P,UAEnCsE,IAAlBiT,EAAQ4G,IACR5G,EAAQ4G,GAAQD,EAChBtO,EAAKnF,SACLoT,EAAanQ,QAAQ,SAAC0Q,GAClBxO,EAAKsO,GAAUE,GAAKT,EAAcS,GAAGxI,aAAahG,KAAK5
P,KAE3D8d,EAAWpQ,QAAQ,SAAC0Q,GAChBxO,EAAKsO,GAAUE,IAAMT,EAAcS,GAAGxI,aAAahG,KAAK5P,MAE5Dke,GAAY,GAEZJ,EAAWpQ,QAAQ,SAAC0Q,GAChBxO,EAAK2H,EAAQ4G,IAAOC,GAAG3T,KAAKkT,EAAcS,GAAGxI,aAAahG,KAAK5P,QAM3E,IAAIqe,KACAC,EAAgB,kBAAM5B,EAAU1G,gBAcpC,OAbApG,EAAKlC,QAAQ,SAAC6Q,GACV,IAAM7I,EAAQ6I,EACdT,EAAWpQ,QAAQ,SAAC0Q,GAChB1I,EAAM0I,GAAKlB,EAAWkB,GAAGG,EAAIH,GAAIE,EAAeD,OAGpDzB,GACAA,EAAkB4B,wBAClBT,EAAenB,GAGfmB,EAAe,IAAIU,GAAU7O,EAAMa,GAAUlQ,KAAMqd,IAEhDG,EC9HJ,SAASW,GAAmBlK,EAAKC,GACpC,IAIMkK,EAAkB1K,EAJFO,EAAIO,gBACJN,EAAIM,iBAK1B,OAAO,SAACc,EAAWE,GACf,IAAI6I,GAAc,EASlB,OARAD,EAAgBjR,QAAQ,SAACyL,GAGjByF,IAFA/I,EAAUsD,GAAWlY,QACrB8U,EAAUoD,GAAWlY,QAAS2d,KAM/BA,GCjBR,SAASC,GAAOrK,EAAKC,GACxB,IAAMsF,KACAtJ,KACAuJ,KACApK,KACAkF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBkF,EAAwBnF,EAAc3E,YACtC+J,EAAwBlF,EAAc7E,YACtC5P,EAAUuU,EAAcvU,KAAxB,UAAsCyU,EAAczU,KAG1D,IAAKgP,EAAWiF,EAAI2F,eAAe5I,MAAM,KAAK6I,OAAQ3F,EAAI0F,eAAe5I,MAAM,KAAK6I,QAChF,OAAO,KAgBX,SAASC,EAAmBC,EAAInK,GAC5BiB,EAAmBkJ,EAAGhF,YAAa,SAACtV,GAChC,IAAM0V,KACF8E,EAAW,GACfR,EAActM,QAAQ,SAAC+M,GACnB,IAAMxZ,EAAQkP,EAAUsK,GAAY7E,aAAahG,KAAK5P,GACtDwa,OAAgBvZ,EAChByU,EAAM+E,GAAcxZ,IAEnB8Y,EAAUS,KACX5K,EAAKnF,KAAKiL,GACVqE,EAAUS,IAAY,KASlC,OAhCChG,EAAI2F,eAAe5I,MAAM,KAAM7D,QAAQ,SAACyL,GACrC,IAAM9I,EAAQ4J,EAAsBd,GACpC1I,EAAOhG,KAAK2D,KAAYiC,EAAMI,WAC9BuJ,EAAcvP,KAAK4F,EAAMI,SAASlQ,QA0BtC8Z,EAAkB7F,EAAKyF,GACvBI,EAAkB5F,EAAKyF,GAEhB,IAAIuE,GAAU7O,EAAMa,GAAUlQ,SCvDlC,SAASue,GAAeC,EAAYC,EAAYtK,GACnD,OAAOH,EAAawK,EAAYC,EAAYtK,GAAU,EAAOhB,EAAME,WAGhE,SAASqL,GAAgBF,EAAYC,EAAYtK,GACpD,OAAOH,EAAayK,EAAYD,EAAYrK,GAAU,EAAOhB,EAAMG,0QCWlDqL,cAQjB,SAAAC,EAAavJ,EAAcvE,gGAAY+N,CAAAhb,KAAA+a,GACnC/a,KAAKwR,aAAeA,EACpBxR,KAAKiN,WAAaA,8CAUlB,MAAM,IAAI+D,MAAM,wDAUhB,OAAOhR,KAAKwR,aAAanF,sCAUzB,OAAOrM,KAAKwR,aAAarV,oCAUzB,OAAO6D,KAAKwR,aAAanF,OAAOC,uCAUhC,OAAOtM,KAAKwR,aAAanF,OAAO4O,8CAUhC,OAAOjb,KAAKwR,aAAanF,OAAO6O,kDAUhC,OAAOlb,KAAKwR,aAAanF,OAAO8O,aAAenb,KAAKwR,aAAanF,OAAOlQ,oCASpE,IAAA0b,EAAA7X,KACEwL,KAIN,OAHAwB,EAAmBhN,KAAKiN,WAAY,SAACrR,GACj
C4P,EAAKnF,KAAKwR,EAAKrG,aAAahG,KAAK5P,MAE9B4P,0CAUP,MAAM,IAAIwF,MAAM,0RCpHHoK,irBAAkBN,yCAY/B,OAHK9a,KAAKqb,gBACNrb,KAAKqb,cAAgBrb,KAAKsb,uBAEvBtb,KAAKqb,4DAUZ,MAAM,IAAIrK,MAAM,+DAWhB,OAAOhR,KAAKwL,0QChCC+P,irBAAoBH,0CASjC,OAAOjd,EAAiBC,0DAUL,IAAA6Z,EAAAjY,KACb+Z,EAAO,IAAIyB,IACXC,KAUN,OAPAzO,EAAmBhN,KAAKiN,WAAY,SAACrR,GACjC,IAAM0X,EAAQ2E,EAAKzG,aAAahG,KAAK5P,GAChCme,EAAKvG,IAAIF,KACVyG,EAAK2B,IAAIpI,GACTmI,EAAOpV,KAAKiN,MAGbmI,qQC7BME,eAQjB,SAAAC,EAAapK,EAAcvE,gGAAY4O,CAAA7b,KAAA4b,GAAA,IAAA/D,mKAAAiE,CAAA9b,MAAA4b,EAAAG,WAAAzf,OAAA0f,eAAAJ,IAAA7f,KAAAiE,KAC7BwR,EAAcvE,IADe,OAGnC4K,EAAKoE,eAAiB,KAHapE,qUARLuD,sDAqBX,IAAAnD,EAAAjY,KACb+Z,EAAO,IAAIyB,IACXC,KAYN,OARAzO,EAAmBhN,KAAKiN,WAAY,SAACrR,GACjC,IAAM0X,EAAQ2E,EAAKzG,aAAahG,KAAK5P,GAChCme,EAAKvG,IAAIF,KACVyG,EAAK2B,IAAIpI,GACTmI,EAAOpV,KAAKiN,MAIbmI,yDAWP,GAAIzb,KAAKic,eACL,OAAOjc,KAAKic,eAUhB,IAPA,IAAMC,EAAalc,KAAKwL,OAAO+K,OAAO,SAAA/L,GAAA,QAAUA,aAAgB4D,KAAoB4H,KAAK,SAAChT,EAAGO,GAAJ,OAAUP,EAAIO,IACjG4Y,EAAQD,EAAWva,OACrBya,EAAUzT,OAAO0T,kBACjBC,SACAC,SACAC,EAAiB,EAEZ5gB,EAAI,EAAGA,EAAIugB,EAAOvgB,IACvB0gB,EAAYJ,EAAWtgB,EAAI,IAC3B2gB,EAAYL,EAAWtgB,MAEL0gB,IAIlBF,EAAUpY,KAAKkT,IAAIkF,EAASG,EAAYL,EAAWtgB,EAAI,IACvD4gB,KAQJ,OALKA,IACDJ,EAAU,MAEdpc,KAAKic,eAAiBG,EAEfpc,KAAKic,gDAUZ,OAAOjc,KAAKwR,aAAanF,OAAOtM,+CAUnB,IAAA0c,EAAAzc,KACPwL,KASN,OARAwB,EAAmBhN,KAAKiN,WAAY,SAACrR,GACjC,IAAM0X,EAAQmJ,EAAKjL,aAAahG,KAAK5P,GACjC0X,aAAiBlF,EACjB5C,EAAKnF,KAAKiN,GAEV9H,EAAKnF,KAAKvG,EAAkByG,SAAS+M,EAAOmJ,EAAK1c,aAGlDyL,qQC3GMkR,irBAAetB,sDAS5B,IAAMuB,EAAU3c,KAAKwR,aAAanF,OAAOuQ,KACzC,OAAQD,EAAQ,GAAIA,EAAQA,EAAQhb,OAAS,mCAU7C,OAAO3B,KAAKwR,aAAanF,OAAOuQ,wQClBnBC,irBAAgB/B,yCAY7B,OAHK9a,KAAKqb,gBACNrb,KAAKqb,cAAgBrb,KAAKsb,uBAEvBtb,KAAKqb,6CAUZ,OAAOrb,KAAKwR,aAAanF,OAAOyQ,wCAUhC,OAAO9c,KAAKwR,aAAanF,OAAO8M,UAAYzB,0CAShC,IACJqF,EAAiB/c,KAAKwR,aAAanF,OAAnC0Q,aACR,OAAOA,aAAwBnU,SAAWmU,EAAezR,gDAUzD,MAAM,IAAI0F,MAAM,+DAWhB,OAAOhR,KAAKwL,0QC/DCwR,irBAAmBH,0CAShC,OAAOre,EAAeC,yDAUH,IAAAwZ,EAAAjY,KACfkX,EAAMvO,OAAO0T,kBACbjF,EAAMzO,
OAAOsU,kBAiBjB,OAdAjQ,EAAmBhN,KAAKiN,WAAY,SAACrR,GACjC,IAAM0X,EAAQ2E,EAAKzG,aAAahG,KAAK5P,GACjC0X,aAAiBlF,IAIjBkF,EAAQ4D,IACRA,EAAM5D,GAENA,EAAQ8D,IACRA,EAAM9D,OAIN4D,EAAKE,sQC5CA8F,4KAQb,MAAM,IAAIlM,MAAM,0RCJHmM,irBAA0BD,sCAQpC/b,GAQH,OALKiN,EAAkBgP,UAAUjc,GAGpBiN,EAAkBiP,eAAelc,GAFjCuF,OAAOvF,GAAKmc,0QCXZC,eAOjB,SAAAC,EAAanR,gGAAQoR,CAAAzd,KAAAwd,GAAA,IAAA3F,mKAAA6F,CAAA1d,MAAAwd,EAAAzB,WAAAzf,OAAA0f,eAAAwB,IAAAzhB,KAAAiE,OAAA,OAEjB6X,EAAKxL,OAASA,EACdwL,EAAK8F,KAAO,IAAI7d,EAAkB+X,EAAKxL,OAAOtM,QAH7B8X,qUAPmBqF,sCAoBjC/b,GACH,IAAIyC,SAEJ,GAAKwK,EAAkBgP,UAAUjc,GAI7ByC,EAASwK,EAAkBiP,eAAelc,OAJP,CACnC,IAAIhB,EAAaH,KAAK2d,KAAKjV,cAAcvH,GACzCyC,EAASzD,EAAaA,EAAW6K,UAAYoD,EAAkBL,GAInE,OAAOnK,qQC9BMga,irBAAqBV,sCAQ/B/b,GAEHA,EAAMuF,OAAOvF,GACb,IAAIyC,SAEJ,GAAKwK,EAAkBgP,UAAUjc,GAK7ByC,EAASwK,EAAkBiP,eAAelc,OALP,CACnC,IAAI0c,EAAU1c,EAAIqH,MALR,2DAMV5E,EAASia,EAAalV,OAAOmV,WAAWD,EAAQ,IAAvC,IAA8ClV,OAAOmV,WAAWD,EAAQ,IAC9DzP,EAAkBL,GAIzC,OAAOnK,qQCpBMma,irBAAyBb,sCAQnC/b,GACH,IAAIyC,SAEJ,GAAKwK,EAAkBgP,UAAUjc,GAI7ByC,EAASwK,EAAkBiP,eAAelc,OAJP,CACnC,IAAIC,EAAY0c,WAAW3c,EAAK,IAChCyC,EAAS+E,OAAOkO,MAAMzV,GAAagN,EAAkBL,GAAK3M,EAI9D,OAAOwC,qQCnBMoa,cAUjB,SAAAC,EAAa9hB,EAAMqP,EAAMa,EAAQjK,gGAAQ8b,CAAAle,KAAAie,GACrCje,KAAK7D,KAAOA,EACZ6D,KAAKqM,OAASA,EACdrM,KAAKoC,OAASA,EACdpC,KAAKwL,KAAOxL,KAAKme,UAAU3S,gDAUpBA,GAAM,IAAAqM,EAAA7X,KACb,OAAOwL,EAAKpD,IAAI,SAAAkL,GAAA,OAASuE,EAAKzV,OAAOwE,MAAM0M,cCiE5C,SAAS8K,GAAaC,EAAYhS,EAAQiS,GAC7C,IAAMC,KAUN,OARMD,GAAWA,EAAQ3c,SACrB2c,EAAUjS,EAAOjE,IAAI,SAAAoC,GAAA,OAAQA,EAAKrO,QAGtCmiB,EAAQhV,QAAQ,SAACkV,EAAQ5iB,GACrB2iB,EAAWC,GAAU5iB,IAGlByQ,EAAOjE,IAAI,SAAAoC,GAAA,OAzFtB,SAAyBgB,EAAMa,GAC3Bb,EAAOA,MACP,IAAIgG,SAEJ,OAAQnF,EAAOC,MACf,KAAK5N,EAAUC,QACX,OAAQ0N,EAAO4O,SACf,KAAKzc,EAAeC,WAGpB,QAEI,OADA+S,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAI0R,IACxD,IAAIf,GAAWxL,EAAf,MAAkChG,EAAK7J,OAAS,IAE/D,KAAKjD,EAAUE,UACX,OAAQyN,EAAO4O,SACf,KAAK9c,EAAiBC,YAElB,OADAoT,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAI8Q,IACxD,IAAI5B,GAAY/J,EAAhB,MAAm
ChG,EAAK7J,OAAS,IAC5D,KAAKxD,EAAiBE,SAElB,OADAmT,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAIkR,GAAelR,IACvE,IAAIsP,GAASnK,EAAb,MAAgChG,EAAK7J,OAAS,IACzD,KAAKxD,EAAiBI,OAElB,OADAiT,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAIuR,IACxD,IAAIlB,GAAOlL,EAAX,MAA8BhG,EAAK7J,OAAS,IACvD,QAEI,OADA6P,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAI8Q,IACxD,IAAI5B,GAAY/J,EAAhB,MAAmChG,EAAK7J,OAAS,IAEhE,QAEI,OADA6P,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAI8Q,IACxD,IAAI5B,GAAY/J,EAAhB,MAAmChG,EAAK7J,OAAS,KA0DlC8c,CAAgBJ,EAAWE,EAAW/T,EAAKrO,OAAQqO,KC3GlE,IAAAkU,IACXC,WAAY7gB,EAAWI,MCuCZ,IAAA0gB,GAvBf,SAAiBvM,EAAKvL,GAIlBA,EAAUxK,OAAOqR,WAFbkR,gBAAgB,GAEuB/X,GAE3C,IAAI0X,SACEM,KACAzY,EAAO2C,EAAY8V,GAYzB,OAPIN,EAHA1X,EAAQ+X,eAGCxM,EAAI5K,OAAO,EAAG,GAAG,MAK9B4K,EAAI/I,QAAQ,SAAA2C,GAAA,OAAS5F,qIAAA0Y,CAAQ9S,OAErBuS,EAAQM,ICvChBE,MACAC,MACAC,GAAQ,GACRC,GAAU,GACVC,GAAS,GAEb,SAASC,GAAgBP,GACvB,OAAO,IAAIlW,SAAS,IAAK,WAAakW,EAAQ1W,IAAI,SAASjM,EAAMP,GAC/D,OAAO0jB,KAAKC,UAAUpjB,GAAQ,OAASP,EAAI,MAC1CqH,KAAK,KAAO,KA0BF,IAAAuc,GAAA,SAASC,GACtB,IAAIC,EAAW,IAAItf,OAAO,KAAQqf,EAAY,SAC1CE,EAAYF,EAAUG,WAAW,GAWrC,SAASC,EAAUvf,EAAM+U,GACvB,IAIIvY,EAJAgjB,KACAC,EAAIzf,EAAKqB,OACTqe,EAAI,EACJ3iB,EAAI,EAEJ4iB,EAAMF,GAAK,EACXG,GAAM,EAMV,SAAS5Z,IACP,GAAI2Z,EAAK,OAAOhB,GAChB,GAAIiB,EAAK,OAAOA,GAAM,EAAOlB,GAG7B,IAAIpjB,EAAUK,EAAPkkB,EAAIH,EACX,GAAI1f,EAAKsf,WAAWO,KAAOjB,GAAO,CAChC,KAAOc,IAAMD,GAAKzf,EAAKsf,WAAWI,KAAOd,IAAS5e,EAAKsf,aAAaI,KAAOd,KAI3E,OAHKtjB,EAAIokB,IAAMD,EAAGE,GAAM,GACdhkB,EAAIqE,EAAKsf,WAAWI,QAAUb,GAASe,GAAM,EAC9CjkB,IAAMmjB,KAAUc,GAAM,EAAU5f,EAAKsf,WAAWI,KAAOb,MAAWa,GACpE1f,EAAK8U,MAAM+K,EAAI,EAAGvkB,EAAI,GAAG2E,QAAQ,MAAO,KAIjD,KAAOyf,EAAID,GAAG,CACZ,IAAK9jB,EAAIqE,EAAKsf,WAAWhkB,EAAIokB,QAAUb,GAASe,GAAM,OACjD,GAAIjkB,IAAMmjB,GAAUc,GAAM,EAAU5f,EAAKsf,WAAWI,KAAOb,MAAWa,OACtE,GAAI/jB,IAAM0jB,EAAW,SAC1B,OAAOrf,EAAK8U,MAAM+K,EAAGvkB,GAIvB,OAAOqkB,GAAM,EAAM3f,EAAK8U,MAAM+K,EAAGJ,GAGnC,IA7BIzf,EAAKsf,WAAWG,EAAI,KAAOZ,MAAWY,EACtCzf,EAAKsf,WAAWG,EAAI,KAAOX,MAAUW,GA4BjCjjB,EAAIwJ,OAA
a2Y,IAAK,CAE5B,IADA,IAAI9E,KACGrd,IAAMkiB,IAAOliB,IAAMmiB,IAAK9E,EAAI9T,KAAKvJ,GAAIA,EAAIwJ,IAC5C+O,GAA4B,OAAtB8E,EAAM9E,EAAE8E,EAAK9c,OACvByiB,EAAKzZ,KAAK8T,GAGZ,OAAO2F,EAgBT,SAASM,EAAUjG,GACjB,OAAOA,EAAI/R,IAAIiY,GAAapd,KAAKwc,GAGnC,SAASY,EAAY/f,GACnB,OAAe,MAARA,EAAe,GAChBof,EAASY,KAAKhgB,GAAQ,IAAM,IAAOA,EAAKC,QAAQ,KAAM,MAAU,IAChED,EAGR,OACEsG,MAlFF,SAAetG,EAAM+U,GACnB,IAAIkL,EAASzB,EAASgB,EAAOD,EAAUvf,EAAM,SAAS6Z,EAAKve,GACzD,GAAI2kB,EAAS,OAAOA,EAAQpG,EAAKve,EAAI,GACrCkjB,EAAU3E,EAAKoG,EAAUlL,EA9B/B,SAAyByJ,EAASzJ,GAChC,IAAI/X,EAAS+hB,GAAgBP,GAC7B,OAAO,SAAS3E,EAAKve,GACnB,OAAOyZ,EAAE/X,EAAO6c,GAAMve,EAAGkjB,IA2BM0B,CAAgBrG,EAAK9E,GAAKgK,GAAgBlF,KAGzE,OADA2F,EAAKhB,QAAUA,MACRgB,GA6EPD,UAAWA,EACX9f,OA1BF,SAAgB+f,EAAMhB,GAEpB,OADe,MAAXA,IAAiBA,EA9EzB,SAAsBgB,GACpB,IAAIW,EAAYnkB,OAAOY,OAAO,MAC1B4hB,KAUJ,OARAgB,EAAKxW,QAAQ,SAAS6Q,GACpB,IAAK,IAAIuG,KAAUvG,EACXuG,KAAUD,GACd3B,EAAQzY,KAAKoa,EAAUC,GAAUA,KAKhC5B,EAkE0B6B,CAAab,KACpChB,EAAQ1W,IAAIiY,GAAapd,KAAKwc,IAAY5W,OAAOiX,EAAK1X,IAAI,SAAS+R,GACzE,OAAO2E,EAAQ1W,IAAI,SAASsY,GAC1B,OAAOL,EAAYlG,EAAIuG,MACtBzd,KAAKwc,MACNxc,KAAK,OAqBT2d,WAlBF,SAAoBd,GAClB,OAAOA,EAAK1X,IAAIgY,GAAWnd,KAAK,SCzGhC4d,GAAMrB,GAAI,KCAVsB,IDEkBD,GAAIja,MACAia,GAAIhB,UACPgB,GAAI9gB,OACA8gB,GAAID,WCLrBpB,GAAI,OAEQsB,GAAIla,MACAka,GAAIjB,UACPiB,GAAI/gB,OACA+gB,GAAIF,WC4BhB,IAAAG,GAXf,SAAiBpW,EAAK7D,GAKlBA,EAAUxK,OAAOqR,WAHbkR,gBAAgB,EAChBmC,eAAgB,KAEuBla,GAE3C,IAAMma,EAAMzB,GAAM1Y,EAAQka,gBAC1B,OAAOpC,GAAOqC,EAAIpB,UAAUlV,GAAM7D,ICoBvB,IAAAoa,GAxBf,SAAmB7O,GACf,IAAMmM,KACF5iB,EAAI,EACJulB,SACErC,KACAzY,EAAO2C,EAAY8V,GAgBzB,OAdAzM,EAAI/I,QAAQ,SAACkB,GACT,IAAMrB,KACN,IAAK,IAAIhM,KAAOqN,EACRrN,KAAOqhB,EACP2C,EAAiB3C,EAAOrhB,IAExBqhB,EAAOrhB,GAAOvB,IACdulB,EAAiBvlB,EAAI,GAEzBuN,EAAOgY,GAAkB3W,EAAKrN,GAElCkJ,eAAQ8C,MAGJ7M,OAAO2J,KAAKuY,GAASM,IC1BlB,IAAAsC,GAXf,SAAe5V,EAAM1E,GACjB,IAAMua,GAAeC,SAAAJ,GAAUK,OAAAR,GAAQS,OAAA5C,IACjCD,EAAapT,EAAiBC,GAEpC,IAAKmT,EACD,MAAM,IAAI3N,MAAM,mCAGpB,OAAOqQ,EAAW1C,GAAYnT,EAAM1E,iiBCGjC,SAAS4K,GAAiBvI,GAC7B,IAAMsY,KAEN,OAD
AnlB,OAAO2J,KAAKkD,GAAQG,QAAQ,SAACnM,GAAUskB,EAAKtkB,GAAO,IAAIuP,EAAMvD,EAAOhM,GAAMA,KACnEskB,EAGJ,IAAMC,GAAe,SAAA/I,EAA8BgJ,EAAmBC,GAAmB,IAAAC,EAAAC,GAAAnJ,EAAA,GAAlE1L,EAAkE4U,EAAA,GAAtDzN,EAAsDyN,EAAA,GACxFE,EAAS3N,EAAczS,OAASyS,EAAcjH,MAAM,QACpD6U,EAAkBL,EAAkB5V,YACpCkW,EAAYF,EAAO3Z,IAAI,SAAA8Z,GAAA,OT+BxB,SAAoC1Q,EAAcvE,GAAY,IACzDZ,EAAWmF,EAAXnF,OAER,OAAQA,EAAOC,MACf,KAAK5N,EAAUC,QACX,OAAQ0N,EAAO4O,SACf,KAAKzc,EAAeC,WAEpB,QACI,OAAO,IAAIue,GAAWxL,EAAcvE,GAE5C,KAAKvO,EAAUE,UACX,OAAQyN,EAAO4O,SACf,KAAK9c,EAAiBC,YAClB,OAAO,IAAImd,GAAY/J,EAAcvE,GACzC,KAAK9O,EAAiBE,SAClB,OAAO,IAAIsd,GAASnK,EAAcvE,GACtC,KAAK9O,EAAiBI,OAClB,OAAO,IAAIme,GAAOlL,EAAcvE,GACpC,QACI,OAAO,IAAIsO,GAAY/J,EAAcvE,GAE7C,QACI,OAAO,IAAIsO,GAAY/J,EAAcvE,IStDNkV,CAA2BH,EAAgBE,GAAM1Q,aAAcvE,KAClG,OAAOtB,EAAWC,gBAAgBqW,EAAWL,IAGpCQ,GAAoB,SAACC,EAAOC,GAAuC,IAClCC,EADM7U,EAA4BnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAAfid,EAAejd,UAAA,GACxE+c,IAAcvT,EAAeI,SAC7BkT,EAAMI,YAAY9gB,OAAS,GAC3B4gB,EAAAF,EAAMI,aAAYpc,KAAlBqB,MAAA6a,EAAAG,GAA0BF,KAE1BH,EAAMI,YAAYpc,MACdsc,GAAIL,EACJM,KAAMlV,EACNmV,SAAUL,KAKTM,GAA4B,SAACC,EAAUC,GAAU,IAAAC,GAC1DA,EAAAD,EAAME,qBAAoB7c,KAA1BqB,MAAAub,EAAAP,GAAkCK,EAASG,qBAA3Cra,OAAA6Z,GAAmEK,EAASN,gBAGnEU,GAAe,SAAClW,EAAY9D,EAAQia,EAAU1V,EAAQqV,GAC/D,IAAMM,KACFC,GAAqB,EACnBvmB,EAAS2Q,EAAT3Q,KACFwmB,SACAtJ,KACAC,EAAgB,kBAAM6I,EAASnR,gBAC7B4R,EAAmB,SAAAthB,GAAA,OAASkhB,EA7CtC,SAA+Bja,EAAQvN,GACnC,IAAM6lB,KADgCgC,GAAA,EAAAC,GAAA,EAAAC,OAAAzjB,EAAA,IAEtC,QAAA0jB,EAAAC,EAAkB1a,EAAlBxM,OAAAmnB,cAAAL,GAAAG,EAAAC,EAAArV,QAAAuV,MAAAN,GAAA,EAA0B,KAAjBxX,EAAiB2X,EAAA/mB,MACtB4kB,EAAKxV,EAAM9P,QAAU,IAAIuQ,EAAMT,EAAMuF,aAAahG,KAAK5P,GAAIqQ,IAHzB,MAAA+X,GAAAN,GAAA,EAAAC,EAAAK,EAAA,aAAAP,GAAAI,EAAAI,QAAAJ,EAAAI,SAAA,WAAAP,EAAA,MAAAC,GAKtC,OAAOlC,EAyCHyC,CAAqB/a,EAAQjH,GAC7BA,EACAgY,EACAD,IAGAkK,SAkBJ,OAhBIA,EADApnB,IAAS8B,EAAcE,QACb,SAAAmD,GAAA,OAAUshB,EAAiBthB,IAE3B,SAAAA,GAAA,OAASshB,EAAiBthB,IAGxC8K,EAAmBC,EAAY,SAACrR,GACxBuoB,EAAQvoB,MACmB,IAAvB0nB,GAA4B1nB,IAAO0nB,EAAoB,GACvDC,EAAKF,EAAc1hB
,OAAS,EAC5B0hB,EAAcE,GAASF,EAAcE,GAAIpW,MAAM,KAAK,GAApD,IAA0DvR,GAE1DynB,EAAchd,KAAd,GAAsBzK,GAE1B0nB,EAAoB1nB,KAGrBynB,EAAcpgB,KAAK,MAGjBmhB,GAAqB,SAAC/B,GAC/B,IAAMgC,EAAWhC,EAAMiC,OAAM,GACvB3C,EAAoBU,EAAMkC,uBAShC,OARAF,EAAStO,eAAiB4L,EAAkBxY,OAAOf,IAAI,SAAAiN,GAAA,OAAKA,EAAElZ,SAAQ8G,KAAK,KAG3E0e,EAAkB3V,iBAAmB,KACrC2V,EAAkBlV,iBAAmB,KACrCkV,EAAkBvV,eAAiB,KACnCiY,EAASjK,wBAAwBoK,wBAE1BH,GAGEI,GAAyB,SAACpC,EAAOqC,GAA4B,IAAhBhX,EAAgBnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAChE+c,EAAY5U,EAAO4U,WAAa1S,EAChC+U,EAAkBjX,EAAOiX,kBAAmB,EAC9CC,KAIAA,EAHCF,EAAW/iB,OAGN+iB,EAAWtc,IAAI,SAAAyc,GAAA,OAAc,SAACvM,GAChC,IAAMxD,EAAUwD,EAAUwM,UACpBzY,EAASyI,EAAQzI,OACjB0Y,EAAezM,EAAU0M,kBACzBC,EAAc3M,EAAU3H,gBAAgB5E,YACxCP,EAAOsJ,EAAQtJ,KACfiQ,EAASnf,OAAO4oB,OAAOH,GAAchR,OAAO,SAACC,EAAKmR,GAEpD,OADAnR,EAAImR,EAAEC,IAAIjpB,MAAQ8oB,EAAYE,EAAEC,IAAIjpB,MAAMsf,SACnCzH,OAGX,OAAO,SAAC7K,GAgBJ,QAfiBqC,EAAK7J,QAAiB6J,EAAK6Z,KAAK,SAAAlL,GAAA,OAAO9N,EAAOiZ,MAAM,SAACC,GAClE,KAAMA,EAAUppB,QAAQgN,GACpB,OAAO,EAEX,IAAMtM,EAAQsM,EAAOoc,EAAUppB,MAAMqpB,UACrC,GAAIb,GAAmBY,EAAUjZ,OAAS5N,EAAUC,QAChD,OAAO9B,GAAS4e,EAAO8J,EAAUppB,MAAM,IAAMU,GAAS4e,EAAO8J,EAAUppB,MAAM,GAGjF,GAAIopB,EAAUjZ,OAAS5N,EAAUE,UAC7B,OAAO,EAEX,IAAMqV,EAAM8Q,EAAaQ,EAAUppB,MAAM+F,MACzC,OAAOiY,EAAIlG,KAAS9K,EAAOoc,EAAUppB,MAAMqpB,eAzBpB,CA6BhCX,MA/BI,kBAAM,IA+CjB,OAZIvC,IAAc1S,EACEwU,GAAmB/B,GAAOoD,OAAO,SAAAtc,GAAA,OAAUyb,EAAIU,MAAM,SAAAI,GAAA,OAAMA,EAAGvc,OAC1Ewc,WAAW,EACX5oB,KAAM8B,EAAcG,MAGRolB,GAAmB/B,GAAOoD,OAAO,SAAAtc,GAAA,OAAUyb,EAAIS,KAAK,SAAAK,GAAA,OAAMA,EAAGvc,OACzEpM,KAAM8B,EAAcG,IACpB2mB,WAAW,KAOVC,GAAkB,SAAC7C,EAAUK,EAAUyC,EAAcC,GAC9D,IAAMC,EAAShD,EAASuB,MAAMwB,EAAYH,WACpC1Y,EAAakW,GACf4C,EAAO7U,YACP6U,EAAOxB,uBAAuBpb,OAC9Bia,EACAyC,EACA9C,GAQJ,OANAgD,EAAO7U,YAAcjE,EACrB8Y,EAAO3L,wBAAwBoK,wBAE/BpC,GAAkB2D,EAAQhX,EAAeC,QAAUtB,OAAQmY,GAAgBzC,GAC3EN,GAA0BC,EAAUgD,GAE7BA,GAGEC,GAAmB,SAACjD,EAAUkD,EAAWvY,EAAQwY,GAC1D,IAAMH,EAAShD,EAASuB,MAAM5W,EAAOiY,WACjCQ,EAAgBF,EAiBpB,OAhBIvY,EAAO3Q,OAAS8B,EAAcE,UAC9BonB,EAAgBD,EAAU3P
,OAAO,SAAAxB,GAAA,OAA+C,IAAlCkR,EAAU7f,QAAQ2O,MAIpEgR,EAAOhQ,eAAiBoQ,EAAcljB,KAAK,KAC3C8iB,EAAO3L,wBAAwBoK,wBAE/BpC,GACI2D,EACAhX,EAAeE,SACbgX,YAAWvY,SAAQ0Y,gBAAiBD,GACtC,MAEJrD,GAA0BC,EAAUgD,GAE7BA,GAGEM,GAAqB,SAACC,GAO/B,IALAA,EAAatc,KAAYsc,IACTha,OACZga,EAAWha,KAAO5N,EAAUE,YAG3B0nB,EAAWrL,QACZ,OAAQqL,EAAWha,MACnB,KAAK5N,EAAUC,QACX2nB,EAAWrL,QAAUzc,EAAeC,WACpC,MACJ,QACA,KAAKC,EAAUE,UACX0nB,EAAWrL,QAAU9c,EAAiBC,YAK9C,OAAOkoB,GAKEC,GAAa,SAACC,EAAUhb,EAAMa,EAAQvF,GAC/CuF,EAH0B,SAAAA,GAAA,OAAUA,EAAOjE,IAAI,SAAAke,GAAA,OAAcD,GAAmBC,KAGvEG,CAAepa,GACxBvF,EAAUxK,OAAOqR,OAAOrR,OAAOqR,UAAW+Q,IAAgB5X,GAC1D,IAAM4f,EAAcC,EAAU7f,EAAQ6X,YAEtC,IAAM+H,GAAsC,mBAAhBA,EACxB,MAAM,IAAI1V,MAAJ,mCAA6ClK,EAAQ6X,WAArD,WANiD,IAAAiI,EAS3BF,EAAYlb,EAAM1E,GATS+f,EAAA/E,GAAA8E,EAAA,GASpDpI,EAToDqI,EAAA,GAS5CC,EAT4CD,EAAA,GAUrDhb,EAAWuS,GAAa0I,EAAeza,EAAQmS,GAG/CuI,EAAYpb,EAAWC,gBAAgBC,EAAU/E,EAAQ3K,MAM/D,OALAqqB,EAASQ,mBAAqBD,EAE9BP,EAAStV,YAAc4V,EAAcnlB,QAAUmlB,EAAc,GAAGnlB,OAAzC,MAAuDmlB,EAAc,GAAGnlB,OAAS,GAAM,GAC9G6kB,EAASzQ,eAAkB1J,EAAOjE,IAAI,SAAA4R,GAAA,OAAKA,EAAE7d,OAAO8G,OACpDujB,EAASS,YAAcngB,EAAQ6X,aAAe7gB,EAAWI,KAAOqN,EAAiBC,GAAQ1E,EAAQ6X,WAC1F6H,GAGEtR,GAAgB,SAAC7I,EAAQJ,GAGlC,IAFA,IAAIrQ,EAAI,EAEDA,EAAIyQ,EAAO1K,SAAU/F,EACxB,GAAIqQ,IAAUI,EAAOzQ,GAAGO,KACpB,OACImQ,KAAMD,EAAOzQ,GAAGqf,SAAW5O,EAAOzQ,GAAG0Q,KACrCpK,MAAOtG,GAInB,OAAO,MA6BLsrB,GAAgC,SAACrC,EAAWvM,GAC9C,IAAM6O,EAAc7O,EAAU8O,iBAC1BC,EAAiBxC,EAAU,GAC3ByC,EAAiBzC,EAAU,GAkB/B,OAhBAsC,EAAY7d,QAAQ,SAACie,GACjB,GAAKA,EAAL,CADgC,IAMjBC,EAAAC,EANiBC,EA9BF,SAACH,GACnC,IAAII,KACArF,SAEJ,OADAA,EAAYiF,EAAW5E,IAEvB,KAAK5T,EAAeC,OAChB2Y,GAAUJ,EAAW1E,UACrB,MACJ,KAAK9T,EAAeE,QAChB0Y,GAAUJ,EAAW3E,KAAKwD,iBAC1B,MACJ,KAAKrX,EAAeG,QAChBoT,EAAY,UACZqF,GAAUJ,EAAW3E,KAAKgF,cAAcza,MAAM,KAAMoa,EAAW1E,UAC/D,MACJ,QACIP,EAAY,KAGhB,OACIA,YACAqF,UAc8BE,CAAuBN,GAA7CjF,EALwBoF,EAKxBpF,UAAWqF,EALaD,EAKbC,OACnB,GAAIrF,EACA+E,GAAiBG,EAAAH,GAAe/E,GAAf5a,MAAA8f,EAAA9E,GAA6BiF,GAA7B9e,SACb8c,WAAW,MAEf2B,GAAiBG,EAAAH,GAAehF,GAAf5a,MAAA+f,EAAA/E,GAA6BiF,GAA7B9e
,SACb8c,WAAW,UAKf0B,EAAgBC,IAWtBQ,GAAuB,SAAvBA,EAAwBxP,EAAWuM,GAA8C,IAAnCnX,EAAmCnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAAtBwiB,EAAsBxiB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAC7EyiB,EAAqBD,EAAaC,mBAClCC,EAAgBF,EAAaE,kBAE/B3P,IAAc0P,MAIAC,EAActmB,SAA+C,IAAtCsmB,EAAc7hB,QAAQkS,KAElDA,EAAU4P,kBAAkBrD,EAAWnX,GAEnC4K,EAAU6P,UAClB7e,QAAQ,SAAC8e,GAAU,IAAAC,EACenB,GAA8BrC,EAAWuD,GADxDE,EAAAxG,GAAAuG,EAAA,GACnBhB,EADmBiB,EAAA,GACHhB,EADGgB,EAAA,GAExBR,EAAqBM,GAAQf,EAAgBC,GAAiB5Z,EAAQqa,OA0BjEQ,GAA2B,SAACC,EAAaC,EAAYC,EAAgBhb,GAC9E,IAAImV,SACAgC,SACI8D,EAA4CD,EAA5CC,qBAAsBC,EAAsBF,EAAtBE,kBACxBC,EAAsBH,EAAeI,SACrCC,EAA8Brb,EAAOqb,4BAMvCC,KAEJ,GAAoB,OAAhBR,IAA8C,IAAtB9a,EAAOub,WAC/BD,IACInG,kBAED,KAAAjJ,EACCsP,EAAkB5sB,OAAO4oB,OAAOyD,EAAqBQ,iBAC/B,IAAtBP,IACAM,EAAkBA,EAAgB3S,OAAO,SAAAra,GAAA,OAAKA,EAAEwR,OAAOob,WAAaD,KAGxE,IAAMO,EAAmBF,EAAgB3S,OAjB5B,SAAC8S,GAEd,OADe3b,EAAO4C,UAAa,kBAAM,IAC3B+Y,EAAO3b,KAeqCtF,IAAI,SAAAkhB,GAAA,OAAUA,EAAO5b,OAAOmV,WAEhFoF,KAEN,IAA0B,IAAtBW,EAA6B,CAC7B,IAAMW,EAAwBjtB,OAAO4oB,OAAOyD,EAAqBQ,gBAEjEI,EAAsBjgB,QAAQ,SAACkgB,GAC3B,IAAMC,EAAaD,EAAU9b,QACI,IAA7B+b,EAAWC,eAA2BD,EAAWH,SAAW5b,EAAO4b,QAC/DG,EAAWX,WAAaD,IAC5BZ,EAAc5hB,KAAKmjB,EAAUnH,QAC7BQ,EAAW0G,EAAsBhT,OAAO,SAAAra,GAAA,OAAKA,IAAMstB,IAAWphB,IAAI,SAAAlM,GAAA,OAAKA,EAAEwR,OAAOmV,YACvElhB,QAAUqnB,EAAU3iB,MACzBwc,WACA8G,OAAQH,EAAUnH,MAClBuH,KA/CU,SAACvH,GAC/B,IADoD,IAAduH,EAAcrkB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAC7C8c,EAAMwH,SACTD,EAAKvjB,KAAKgc,GACVA,EAAQA,EAAMwH,QAElB,OAAOD,EA0CmBE,CAAmBN,EAAUnH,YAOnDQ,GAAWjJ,MAAG/Q,OAAHnB,MAAAkS,KAAA/Q,OAAA6Z,GAAiB0G,IAAkBZ,KAAcjS,OAAO,SAAAra,GAAA,OAAW,OAANA,IACxE8sB,EAAU3iB,MACNwc,WACAoF,wBAAmBA,EAAnBvF,GAAqChV,EAAOua,sBAIpD,IAAM8B,EAAYtB,EAAWpG,MAEvB2H,EAAa1tB,OAAOqR,QACtBsc,kBAAmBzB,EACnBK,uBACDnb,GAEGwc,EAAmBzB,EAAW0B,aAChCpB,GAA+BmB,IAC/BrF,EAAYJ,GAAuByF,EAAkBrH,GACjD8B,gBAAiBoE,IAErBjB,GAAqBoC,EAAkBrF,EAAWmF,IAGtDhB,EAAU1f,QAAQ,SAAC8gB,GACf,IAAMC,EAAmB5F,GAAuBsF,EAAWK,EAAIvH,UACzD+G,EAAOQ,EAAIR,KAEjB,GAAIA,EAAM,CACN,IAAMU,EA1HO,
SAACzF,EAAW+E,GACjC,IAAK,IAAIhuB,EAAI,EAAGmN,EAAM6gB,EAAKjoB,OAAQ/F,EAAImN,EAAKnN,IAAK,CAC7C,IAAMymB,EAAQuH,EAAKhuB,GACnBipB,EAAYqC,GAA8BrC,EAAWxC,GAEzD,OAAOwC,EAqHuB0F,CAAiBF,EAAkBT,EAAKY,WAC9DJ,EAAIT,OAAOzB,kBAAkBoC,EAAeN,QAE5ClC,GAAqBiC,EAAWM,EAAkBL,GAC9C/B,cAAemC,EAAInC,cACnBD,mBAAoBe,GAA+BmB,iQCqKpDO,cA3jBX,SAAAC,iGAAwBC,CAAA3qB,KAAA0qB,GACpB,IAAIE,SAEJ5qB,KAAK6pB,QAAU,KACf7pB,KAAKyiB,eACLziB,KAAKkjB,uBACLljB,KAAKmoB,aANe,QAAAjf,EAAA3D,UAAA5D,OAARgmB,EAAQve,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAARse,EAAQte,GAAA9D,UAAA8D,GAQE,IAAlBse,EAAOhmB,SAAkBipB,EAASjD,EAAO,cAAe+C,GAExD1qB,KAAK+V,eAAiB6U,EAAO7U,eAC7B/V,KAAKkR,YAAc0Z,EAAO1Z,YAC1BlR,KAAKinB,YAAc2D,EAAO3D,YAC1BjnB,KAAK6pB,QAAUe,EACf5qB,KAAKgnB,mBAAqBhnB,KAAK6pB,QAAQ7C,mBACvChnB,KAAK6qB,gBAAkB9f,IACvB/K,KAAKoa,wBAAwBoK,0BAE7B+B,GAAUuE,cAAC9qB,MAAX6I,OAAoB8e,IACpB3nB,KAAK6qB,gBAAkB7qB,KAAKgnB,mBAAmB7qB,KAC/C6D,KAAKoa,wBAAwBoK,wBAC7BxkB,KAAK+qB,uBACD5B,kBACA6B,qEA0BR,OAAOhrB,KAAK2Q,gBAAgBxH,OAAOf,IAAI,SAAAlM,GAAA,OAAKA,EAAEmQ,6CAY9C,OAAOrM,KAAK6qB,wDAIZ,OAAO7qB,KAAKirB,4DAMZ,OAFAjrB,KAAKirB,YAAcvJ,IAAc1hB,KAAKkR,YAAalR,KAAK+V,gBACnD/V,KAAKukB,uBAAwBvkB,KAAK6qB,iBAChC7qB,oDAIP,OAAOA,KAAKgnB,gDAiCVkE,EAAU5a,GACZ,OAAOH,EAAanQ,KAAMkrB,EAAU5a,uCAuB3B4a,GACT,OAAO/a,EAAanQ,KAAMkrB,EAAU5Q,GAAkBta,KAAMkrB,IAAW,iCAqBpEC,GACH,OAAO1Q,GAAMza,KAAMmrB,sCAoBXC,GACR,OAAO1V,EAAW1V,KAAMorB,kCAkDpBhI,EAAU1V,GACd,IAAM2d,GACFtuB,KAAM8B,EAAcC,OACpB6mB,WAAW,GAITG,GAAgBH,WAFtBjY,EAASpR,OAAOqR,UAAW0d,EAAW3d,IAEEiY,WACpC2F,SAEA5d,EAAO3Q,OAAS8B,EAAcG,IAa9BssB,GAZiB1F,GACb5lB,KACAojB,GACErmB,KAAM8B,EAAcC,QACtBgnB,GAEaF,GACb5lB,KACAojB,GACErmB,KAAM8B,EAAcE,SACtB+mB,IAIJwF,EAAM1F,GACF5lB,KACAojB,EACA1V,EACAoY,GAIR,OAAOwF,oCAsBP,OAAQtrB,KAAKkR,YAAYvP,SAAW3B,KAAK+V,eAAepU,uCAUnC,IAAlBgkB,IAAkBpgB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,KAAAA,UAAA,GACf8e,EAAW,IAAIrkB,KAAKurB,YAAYvrB,MAMtC,OALI2lB,EACAtB,EAASmH,UAAUxrB,MAEnBqkB,EAASmH,UAAU,MAEhBnH,kCA8CF4B,EAAWvY,GAChB,IAAM2d,GACFtuB,KAAM8B,EAAcC,OACpB6mB,WAAW,GAEfjY,EAASpR,OAAOqR,UAAW0d,EAAW3d,GACtC,IAAM+d
,EAAczrB,KAAKglB,kBACnBkB,EAAY5pB,OAAO2J,KAAKwlB,GACtB1uB,EAAS2Q,EAAT3Q,KAEJ2uB,EAAsBzF,EAAUlS,OAAO,SAACC,EAAK/H,GAM7C,MAL+B,WAA3BA,EAAMsf,YAAYpvB,KAClB6X,EAAI3N,KAAJqB,MAAAsM,wHAAA2X,CAAYzF,EAAU3P,OAAO,SAAAxB,GAAA,OAA0C,IAA7BA,EAAU6W,OAAO3f,OACpDA,KAASwf,GAChBzX,EAAI3N,KAAK4F,GAEN+H,OAGX0X,EAAsBtiB,MAAMI,KAAK,IAAIgS,IAAIkQ,IAAsBtjB,IAAI,SAAA6D,GAAA,OAASA,EAAMqR,SAClF,IAAIhF,SAEAvb,IAAS8B,EAAcG,IASvBsZ,GARsB0N,GAAiBhmB,KAAM0rB,GACzC3uB,KAAM8B,EAAcC,OACpB6mB,UAAWjY,EAAOiY,WACnBO,GACkBF,GAAiBhmB,KAAM0rB,GACxC3uB,KAAM8B,EAAcE,QACpB4mB,UAAWjY,EAAOiY,WACnBO,IAIH5N,EADsB0N,GAAiBhmB,KAAM0rB,EAAqBhe,EAAQwY,GAI9E,OAAO5N,4CAIP,OAAOtY,KAAK6rB,6DAWZ,OAPA7rB,KAAK6rB,aAAe7rB,KAAKirB,YAAY9hB,OAAO4K,OAAO,SAACC,EAAK8X,EAAUlwB,GAK/D,OAJAoY,EAAI8X,EAAS3vB,SACT+F,MAAOtG,EACPwpB,KAAOjpB,KAAM2vB,EAAS3vB,OAAQmQ,KAAMwf,EAASxf,OAAQ2O,QAAS6Q,EAAS7Q,YAEpEjH,OAEJhU,uCAWPA,KAAK6pB,SAAW7pB,KAAK6pB,QAAQkC,YAAY/rB,MACzCA,KAAK6pB,QAAU,KACf7pB,KAAKmoB,UAAU7e,QAAQ,SAAC8e,GACpBA,EAAMyB,QAAU,OAEpB7pB,KAAKmoB,iDA6BIC,GACT,IAAInU,EAAMjU,KAAKmoB,UAAU6D,UAAU,SAAAC,GAAA,OAAWA,IAAY7D,KACjD,IAATnU,GAAajU,KAAKmoB,UAAU1gB,OAAOwM,EAAK,qCAQjCiY,GACPlsB,KAAK6pB,SAAW7pB,KAAK6pB,QAAQkC,YAAY/rB,MACzCA,KAAK6pB,QAAUqC,EACfA,GAAUA,EAAO/D,UAAU9hB,KAAKrG,0CA4BhC,OAAOA,KAAK6pB,8CA6BZ,OAAO7pB,KAAKmoB,mDA4BZ,OAAOnoB,KAAKyiB,6DA4BZ,OAAOziB,KAAKkjB,2rBCkFLlR,eA3lBX,SAAApU,IAAsB,IAAA+a,+FAAAwT,CAAAnsB,KAAApC,GAAA,QAAAsL,EAAA3D,UAAA5D,OAANwF,EAAMiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAANlC,EAAMkC,GAAA9D,UAAA8D,GAAA,IAAAwO,mKAAAuU,CAAApsB,MAAA2Y,EAAA/a,EAAAme,WAAAzf,OAAA0f,eAAApe,IAAA7B,KAAA2L,MAAAiR,GAAA3Y,MAAA6I,OACT1B,KADS,OAGlB0Q,EAAKwU,kBACLxU,EAAKyU,mBAJazU,qUArCF4S,wCAwGX3jB,GAQLA,EAAUxK,OAAOqR,WANb4e,MAAO,MACPlqB,UAAW,KACXmqB,SAAS,EACTC,cAAc,EACdzW,SAEoClP,GACxC,IAAMqC,EAASnJ,KAAKukB,uBAAuBpb,OAErCujB,EAAgBxY,EAAYnY,KAC9BiE,KACAA,KAAKukB,uBAAuBpb,OAC5BnJ,KAAKkR,YACLpK,EAAQ2lB,aAAetjB,EAAOf,IAAI,SAAAlM,GAAA,OAAKA,EAAEC,SAAQ8G,OAASjD,KAAK+V,eAC/DjP,EAAQkP,MAEJvB,WAA8B,WAAlB3N,EAAQylB,MACpB/X,SAAU1N,EAAQ0lB,UAI1B,IAAK1lB,EAA
QzE,UACT,OAAOqqB,EAxBG,IA2BNrqB,EAAcyE,EAAdzE,UACAmJ,EAAuBkhB,EAAvBlhB,KAAMa,EAAiBqgB,EAAjBrgB,OAAQkI,EAASmY,EAATnY,KAChBoY,EAAatgB,EAAOjE,IAAK,SAAA/E,GAAA,OAAKA,EAAElH,OAEhCywB,EADgBtwB,OAAO2J,KAAK5D,GACA0R,OAAO,SAACC,EAAKxF,GAC3C,IAAMyF,EAAM0Y,EAAWvmB,QAAQoI,GAI/B,OAHa,IAATyF,GACAD,EAAI3N,MAAM4N,EAAK5R,EAAUmM,KAEtBwF,OAiCX,MA9BsB,WAAlBlN,EAAQylB,MACRK,EAAYtjB,QAAQ,SAACujB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBrhB,EAAKshB,GAAMxjB,QAAQ,SAACgK,EAAO0Z,GACvBxhB,EAAKshB,GAAME,GAAYD,EAAMhxB,UACzBmE,EACAoT,EACAiB,EAAKyY,GACL3gB,EAAOygB,QAKnBthB,EAAKlC,QAAQ,SAACgK,EAAO0Z,GACjBJ,EAAYtjB,QAAQ,SAACujB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBvZ,EAAMwZ,GAAQC,EAAMhxB,UAChBmE,EACAoT,EAAMwZ,GACNvY,EAAKyY,GACL3gB,EAAOygB,QAMhBJ,kCA2BFO,GAAwD,IAA7C1U,EAA6ChT,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAA9BmI,EAA8BnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,IAAnBogB,WAAW,GAC/CiC,KAAmBqF,EAAUhqB,OAC/B0kB,GAAU3nB,KAAMitB,EAAW1U,GACzBoB,EAAetB,GAAA6U,aAAWvF,GAgBhC,OAdAvF,GACIzI,EACA5K,EAAeG,SACb+d,YAAWrF,gBAAe3O,eAAgBb,GAAaa,kBACzDV,GAEJuK,GAA0B9iB,KAAM2Z,GAE5BjM,EAAOiY,UACPhM,EAAa6R,UAAUxrB,MAEvB2Z,EAAa6R,UAAU,MAGpB7R,+BAsDLtF,GACF,IAAM8Y,EAAUntB,KAAK8kB,SACjByH,MAAO,MACPvW,KAAM3B,IAGJ+Y,GADSD,EAAQ9gB,OAAOjE,IAAI,SAAA6D,GAAA,OAASA,EAAM9P,QACnB0M,OAAOskB,EAAQ3hB,MAEvC6hB,EAAW,IAAIrtB,KAAKurB,YAAY6B,EAAcD,EAAQ9gB,QAAUsS,WAAY,WAElF,OADA0O,EAASf,gBAAkBjY,EACpBgZ,oCAwBA/gB,EAAMxF,GACbwF,EAAOA,GAAQtM,KAAKinB,YACpBngB,EAAUxK,OAAOqR,WAAaqT,eAAgB,KAAOla,GAErD,IAAMqC,EAASnJ,KAAK2Q,gBAAgBxH,OAC9BmkB,EAAUnkB,EAAOf,IAAI,SAAAiN,GAAA,OAAKA,EAAEyR,kBAC5ByG,EAAYD,EAAQ,GAAG3rB,OACzB6rB,SACAC,SACAC,SAEJ,GAAIphB,IAASxO,EAAWC,UAEpB,IADAyvB,KACKC,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMtT,KACN,IAAKuT,EAAS,EAAGA,EAASvkB,EAAOxH,OAAQ+rB,IACrCvT,EAAIhR,EAAOukB,GAAQvxB,QAAUmxB,EAAQI,GAAQD,GAEjDD,EAAennB,KAAK8T,QAErB,GAAI7N,IAASxO,EAAWE,QAAS,CAEpC,IADAwvB,GAAkBrkB,EAAOf,IAAI,SAAAiN,GAAA,OAAKA,EAAElZ,SAAQ8G,KAAK6D,EAAQka,iBACpDyM,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMtT,KACN,IAAKuT,EAAS,EAAGA,EAASvkB,EAA
OxH,OAAQ+rB,IACrCvT,EAAI9T,KAAKinB,EAAQI,GAAQD,IAE7BD,EAAennB,KAAK8T,EAAIlX,KAAK6D,EAAQka,iBAEzCwM,EAAiBA,EAAevqB,KAAK,UAClC,IAAIqJ,IAASxO,EAAWG,QAU3B,MAAM,IAAI+S,MAAJ,aAAuB1E,EAAvB,qBARN,IADAkhB,GAAkBrkB,EAAOf,IAAI,SAAAiN,GAAA,OAAKA,EAAElZ,UAC/BsxB,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMtT,KACN,IAAKuT,EAAS,EAAGA,EAASvkB,EAAOxH,OAAQ+rB,IACrCvT,EAAI9T,KAAKinB,EAAQI,GAAQD,IAE7BD,EAAennB,KAAK8T,IAM5B,OAAOqT,mCAGDvhB,GACN,IAAM8I,EAAY9I,EAAM9P,OACxB6D,KAAK+V,gBAAL,IAA2BhB,EAC3B,IAAM4M,EAAoB3hB,KAAKgnB,mBAE/B,GAAKrF,EAAkB5V,YAAYE,EAAM9P,QAElC,CACH,IAAMoN,EAAaoY,EAAkBxY,OAAO6iB,UAAU,SAAA2B,GAAA,OAAaA,EAAUxxB,SAAW4Y,IACxFxL,GAAc,IAAMoY,EAAkBxY,OAAOI,GAAc0C,QAH3D0V,EAAkBxY,OAAO9C,KAAK4F,GAYlC,OALA0V,EAAkB3V,iBAAmB,KACrC2V,EAAkBlV,iBAAmB,KACrCkV,EAAkBvV,eAAiB,KAEnCpM,KAAKoa,wBAAwBoK,wBACtBxkB,+CAuCQqM,EAAQuhB,EAAYlgB,GAAQ,IAAAuK,EAAAjY,KAC3CqM,EAASga,GAAmBha,GAC5BqB,EAASpR,OAAOqR,WAAagY,WAAW,EAAMkI,YAAY,GAASngB,GAEnE,IAAMqX,EAAe/kB,KAAKglB,kBACpB8I,EAAUF,EAAWxY,MAAM,EAAGwY,EAAWjsB,OAAS,GAClDosB,EAAaH,EAAWA,EAAWjsB,OAAS,GAElD,GAAIojB,EAAa1Y,EAAOlQ,QAAUuR,EAAOmgB,WACrC,MAAM,IAAI7c,MAAS3E,EAAOlQ,KAApB,sCAGV,IAAM6xB,EAAkBF,EAAQ1lB,IAAI,SAAC6D,GACjC,IAAMgiB,EAAYlJ,EAAa9Y,GAC/B,IAAKgiB,EAED,MAAM,IAAIjd,MAAS/E,EAAb,gCAEV,OAAOgiB,EAAU/rB,QAGfoiB,EAAQtkB,KAAKskB,MAAM5W,EAAOiY,WAE1BuI,EAAK5J,EAAM3T,gBAAgBxH,OAC3BglB,EAAiBH,EAAgB5lB,IAAI,SAAA6L,GAAA,OAAOia,EAAGja,KAEjDgG,KACAC,EAAgB,kBAAMjC,EAAKrG,gBAEzBwc,KACNphB,EAAmBsX,EAAMpT,YAAa,SAACtV,GACnC,IAAMyyB,EAAaF,EAAe/lB,IAAI,SAAA6D,GAAA,OAASA,EAAMuF,aAAahG,KAAK5P,KACvEwyB,EAAexyB,GAAKmyB,qIAAAO,CAAcD,GAAdxlB,QAA0BjN,EAAGse,EAAeD,OAhCzB,IAAAsU,EAkC3BnQ,IAAcgQ,IAAkB/hB,IAAUA,EAAOlQ,OAA1D8P,EAlCoCuiB,GAAAD,EAAA,MAwC3C,OALAjK,EAAMmK,SAASxiB,GAEfmW,GAAkBkC,EAAOvV,EAAeK,SAAW1B,OAAQrB,EAAQlD,OAAQ2kB,GAAWC,GACtFjL,GAA0B9iB,KAAMskB,GAEzBA,oCAWAkE,GAA2D,IAA9C9a,EAA8CnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAAjCmpB,EAAiCnpB,UAAA,GAAjBykB,EAAiBzkB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAC5DopB,EAAkBjhB,EAAOihB,gBACzB9F,EAAsBnb,EAAOob,SAC7B8F,
EAAUlhB,EAAOkhB,QACjB7E,EFjKkB,SAAC1H,GAC7B,KAAOA,EAAMwH,SACTxH,EAAQA,EAAMwH,QAElB,OAAOxH,EE6JewM,CAAiB7uB,MAC7B2oB,EAAuBoB,EAAUgB,sBAEjCtC,GACF0B,aF5KuB,SAAC9H,GAChC,KAAOA,EAAMwH,SAAWxH,EAAMI,YAAYqM,KAAK,SAAA5yB,GAAA,OAAKA,EAAEymB,KAAO5T,EAAeG,WACxEmT,EAAQA,EAAMwH,QAElB,OAAOxH,EEsKsB0M,CAAoB/uB,MAGzCqiB,MAAO0H,GAgBX,OAbA2E,GFlD0B,SAAC/F,GAA6C,IAAvBjb,EAAuBnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAAV8c,EAAU9c,UAAA,GACxEypB,SACEL,EAAkBjhB,EAAOihB,gBACzB9L,EAAWnV,EAAOmV,SAClB1lB,EAASuQ,EAAO4b,OAAhB,IAA0B5b,EAAOob,SAGnCkG,EADAL,EACkBhG,EAAqBQ,eAErBR,EAAqBqC,iBAG1B,OAAbnI,SACOmM,EAAgB7xB,GAEvB6xB,EAAgB7xB,IACZklB,QACA3U,UEiCcuhB,CAAmBtG,EAAsBjb,EAAQ1N,MACnEuoB,GAAyBC,EAAaC,GAAcE,uBAAsBG,SAAUD,GAChFvsB,OAAOqR,QACHihB,WACDlhB,IAEHihB,GF5E6B,SAAChG,EAAsBF,EAAYC,GACxE,IAAMsC,EAAmBrC,EAAqBqC,iBAE9C,IAAK,IAAM1B,KAAU0B,EAAkB,CACnC,IACMvB,EADYuB,EAAiB1B,GACN5b,OACvBmb,EAAsBH,EAAehb,OAAOob,SAC5CoG,GAAwBxG,EAAesB,WAAWkF,uBACpDxG,EAAesB,WAAWkF,sBAAsBzF,EAAYf,EAAehb,QAC/E,GAAI+b,EAAWX,WAAaD,GAAuBqG,EAAuB,CACtE,IAAMC,EAAgB1F,EAAW5G,SACjC0F,GAAyB4G,EAAe1G,GACpCE,uBACAC,mBAAmB,EACnBE,SAAUD,GACXY,KE8DH2F,CAA0BzG,EAAsBF,GAC5C/a,SACAsc,eAIDhqB,gCAUPqvB,EAAWniB,GACX,OAAQmiB,GACR,IrCnhBmB,cqCohBfrvB,KAAKqsB,eAAehmB,KAAK6G,GAG7B,OAAOlN,yCASEqvB,GACT,OAAQA,GACR,IrCliBmB,cqCmiBfrvB,KAAKqsB,kBAIT,OAAOrsB,+CAUQ6kB,EAAW+J,GAAS,IAAAnS,EAAAzc,KACfA,KAAKqsB,eACX/iB,QAAQ,SAAAoc,GAAA,OAAMA,EAAG3pB,KAAK0gB,EAAMoI,EAAW+J,iCA8CpDU,EAAkB5hB,GACnB,IAAMqX,EAAe/kB,KAAKglB,kBAE1B,IAAKD,EAAauK,GACd,MAAM,IAAIte,MAAJ,SAAmBse,EAAnB,kBAGV,IAAMC,EAAe7hB,EAAOvR,MAAWmzB,EAAlB,UAErB,GAAIvK,EAAawK,GACb,MAAM,IAAIve,MAAJ,SAAmBue,EAAnB,mBAGV,IAb2BC,EtCtjB5B,SAAgCC,EAAcxiB,EAAYS,GAAQ,IAC/Da,EAA4Cb,EAA5Ca,QAASmhB,EAAmChiB,EAAnCgiB,UAAWphB,EAAwBZ,EAAxBY,QAAShB,EAAeI,EAAfJ,MAAOC,EAAQG,EAARH,IAD2BoiB,EAEhDF,EAAahU,SAFmCmU,EAAAC,EAAAF,EAAA,GAE9DG,EAF8DF,EAAA,GAExDG,EAFwDH,EAAA,GAIhErhB,IACDjB,EAAmB,IAAVA,KAAiBA,GAASA,EAAQwiB,GAASA,EAAOxiB,EAC3DC,EAAe,IAARA,KAAeA,GAAOA,EAAMwiB,GAAUA,EAAO,EAAKxiB,EAErDmiB,IACAphB,EAAUtK,KAA
KgsB,KAAKhsB,KAAKisB,IAAI1iB,EAAMD,GAASoiB,IAGhDnhB,EAAUF,EAAgBC,EAAShB,EAAOC,IAG1CgB,EAAQ,GAAKuhB,GACbvhB,EAAQ3G,QAAQkoB,GAEhBvhB,EAAQA,EAAQ5M,OAAS,IAAMouB,GAC/BxhB,EAAQlI,KAAK0pB,EAAO,GAIxB,IADA,IAAMrhB,KACG9S,EAAI,EAAGA,EAAI2S,EAAQ5M,OAAS,EAAG/F,IACpC8S,EAAarI,MACTiH,MAAOiB,EAAQ3S,GACf2R,IAAKgB,EAAQ3S,EAAI,KAIzB,IAAMs0B,KAYN,OAXAljB,EAAmBC,EAAY,SAACrR,GAC5B,IAAM0X,EAAQmc,EAAaje,aAAahG,KAAK5P,GAC7C,GAAI0X,aAAiBlF,EACjB8hB,EAAW7pB,KAAKiN,OADpB,CAKA,IAAM9R,EAAQiN,EAAgBC,EAAc4E,GAC5C4c,EAAW7pB,KAAQ7E,EAAM8L,MAAzB,IAAkC9L,EAAM+L,SAGnC2iB,aAAYtT,KAAMrO,GsC0hBM4hB,CADRnwB,KAAK2Q,gBAAgB5E,YAAYujB,GACWtvB,KAAKkR,YAAaxD,GAA3EwiB,EAdmBV,EAcnBU,WAAYtT,EAdO4S,EAcP5S,KAEdwT,EAAWhS,IAAc8R,KAEvB/zB,KAAMozB,EACNjjB,KAAM5N,EAAUE,UAChBqc,QAAS9c,EAAiBI,OAC1Bqe,UACC2S,IAAe,GAElBjL,EAAQtkB,KAAKskB,MAAM5W,EAAOiY,WAMhC,OALArB,EAAMmK,SAAS2B,GAEfhO,GAAkBkC,EAAOvV,EAAeM,KAAOigB,mBAAkB5hB,SAAQ6hB,gBAAgB,MACzFzM,GAA0B9iB,KAAMskB,GAEzBA,yCA8BP,OAAO,IAAI1mB,EAHEoC,KAAKqwB,UAAUvyB,EAAWC,WACxBiC,KAAKswB,kEAtjBW5iB,GAC/B,OAAOU,EAAkBP,iBAAiBH,oCAf1C,OAAO0K,YCzFAmY,GAAoDzZ,GAApDN,IAAKga,GAA+C1Z,GAA/CH,IAAK8Z,GAA0C3Z,GAA1CI,IAAKwZ,GAAqC5Z,GAArCM,IAAKuZ,GAAgC7Z,GAAhC8Z,MAAOC,GAAyB/Z,GAAzBga,KAAMC,GAAmBja,GAAnBka,MAAYC,GAAOna,GAAZoa,YCuBvDlf,GAAUmf,WACNC,QtC6LmB,mBAAAC,EAAA9rB,UAAA5D,OAAI2vB,EAAJloB,MAAAioB,GAAAE,EAAA,EAAAA,EAAAF,EAAAE,IAAID,EAAJC,GAAAhsB,UAAAgsB,GAAA,OACnB,SAACrb,GAAqC,IAAjCxI,EAAiCnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,IAAtBogB,WAAW,GACnB6L,EAAYtb,EACZub,SACEtK,KAyBN,OAvBAmK,EAAWhoB,QAAQ,SAACgZ,GAChBkP,EAAYlP,EAAUkP,GACtBrK,EAAY9gB,KAAZqB,MAAAyf,wHAAAuK,CAAoBF,EAAU/O,cACzBgP,IACDA,EAAaD,KAIjBC,GAAcA,IAAeD,GAC7BC,EAAWE,UAGfvP,GAAkBoP,EAAWziB,EAAeI,QAAS,KAAMgY,GAE3DqK,EAAUtO,uBACVJ,GAA0B5M,EAAIsb,GAE1B9jB,EAAOiY,UACP6L,EAAUhG,UAAUtV,GAEpBsb,EAAUhG,UAAU,MAGjBgG,IsCzNXI,ItC2He,mBAAAC,EAAAtsB,UAAA5D,OAAIwF,EAAJiC,MAAAyoB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI3qB,EAAJ2qB,GAAAvsB,UAAAusB,GAAA,OAAa,SAAA5b,GAAA,OAAMA,EAAG0b,IAAHlqB,MAAAwO,EAAU/O,KsC1H5Cse,OtC+BkB,mBAAAvc,EAAA3D,UAAA5D,OAAIw
F,EAAJiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAIlC,EAAJkC,GAAA9D,UAAA8D,GAAA,OAAa,SAAA6M,GAAA,OAAMA,EAAGuP,OAAH/d,MAAAwO,EAAa/O,KsC9BlD4qB,QtC8DmB,mBAAAC,EAAAzsB,UAAA5D,OAAIwF,EAAJiC,MAAA4oB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI9qB,EAAJ8qB,GAAA1sB,UAAA0sB,GAAA,OAAa,SAAA/b,GAAA,OAAMA,EAAG6b,QAAHrqB,MAAAwO,EAAc/O,KsC7DpD+lB,QtCqJmB,mBAAAgF,EAAA3sB,UAAA5D,OAAIwF,EAAJiC,MAAA8oB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAIhrB,EAAJgrB,GAAA5sB,UAAA4sB,GAAA,OAAa,SAAAjc,GAAA,OAAMA,EAAGgX,QAAHxlB,MAAAwO,EAAc/O,KsCpJpDirB,kBCxB6B,mBAAAlpB,EAAA3D,UAAA5D,OAAIwF,EAAJiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAIlC,EAAJkC,GAAA9D,UAAA8D,GAAA,OAAa,SAAA6M,GAAA,OAAMA,EAAGkc,kBAAH1qB,MAAAwO,EAAwB/O,KDyBxE6O,KChBgB,mBAAAgc,EAAAzsB,UAAA5D,OAAIwF,EAAJiC,MAAA4oB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI9qB,EAAJ8qB,GAAA1sB,UAAA0sB,GAAA,OAAa,SAAA/b,GAAA,OAAMA,EAAGF,KAAHtO,MAAAwO,EAAW/O,KDiB9CgJ,eACAkiB,WAAA3c,EACA4c,YEhCG,SAAsB3X,EAAYC,GACrC,OAAOzK,EAAawK,EAAYC,EAAYN,GAAkBK,EAAYC,IAAa,IFgCvFF,iBACAG,kBACA0X,c3BzBG,SAAwB5X,EAAYC,EAAYtK,GACnD,OAAOmK,GAAMC,GAAcC,EAAYC,EAAYtK,GAAWuK,GAAeF,EAAYC,EAAYtK,K2ByBrGkiB,MAAA/X,IAEJzI,GAAUygB,MAAQC,EAClBp2B,OAAOqR,OAAOqE,GAAW2gB,GAAS5jB,mBAClCiD,GAAUlS,kBAAoBA,EAC9BkS,GAAU4gB,WAAa90B,EACvBkU,GAAU6gB,cAAgBh0B,EAC1BmT,GAAUxE,kBAAoBY,EAC9B4D,GAAU8gB,QAAUC,GAAID,QAET,IAAAzY,GAAA2Y,EAAA","file":"datamodel.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn 
installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; 
}.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = 1);\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? DataModel.default : DataModel;\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n GEO: 'geo',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default 
FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","export const GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a number if its less than 1o. This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. 
For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. 
If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need 
to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (val, field) {\n Object.defineProperty(this, '_value', {\n enumerable: false,\n configurable: false,\n writable: false,\n value: val\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import { persistDerivation, persistAncestorDerivation } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. 
Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... 
}\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const 
binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. 
result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n persistDerivation(currentDM, DM_DERIVATIVES.COMPOSE, null, derivations);\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistAncestorDerivation(dm, currentDM);\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n 
fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, 
field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = field.partialField.data[i];\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = field.partialField.data[ii];\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n 
tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort 
(arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray, } from '../utils';\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @param {integer} index - The index of the data which will be sorted.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType, index) {\n let retFunc;\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'desc') {\n retFunc = (a, b) => b[index] - a[index];\n } else {\n retFunc = (a, b) => a[index] - b[index];\n }\n break;\n default:\n retFunc = (a, b) => {\n const a1 = `${a[index]}`;\n const b1 = `${b[index]}`;\n if (a1 < b1) {\n return sortType === 'desc' ? 1 : -1;\n }\n if (a1 > b1) {\n return sortType === 'desc' ? 
-1 : 1;\n }\n return 0;\n };\n }\n return retFunc;\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData(data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data before return in dataBuilder.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction sortData(dataObj, sortingDetails) {\n const { data, schema } = dataObj;\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n 
if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n sortMeta = String(sortMeta).toLowerCase() === 'desc' ? 'desc' : 'asc';\n mergeSort(data, getSortFn(fDetails.type, sortMeta, fDetails.index));\n }\n }\n\n dataObj.uids = [];\n data.forEach((value) => {\n dataObj.uids.push(value.pop());\n });\n}\n\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. 
*/\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should 
match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : 
InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].value ===\n dm2Fields[fieldName].value && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = 
value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} 
rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n 
*/\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * 
@extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return 
this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n data.push(datum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, this.format()));\n }\n });\n return data;\n }\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n 
*\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n 
}\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = 
this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case 
DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n 
*\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr (arr, options) {\n const defaultOption = {\n firstRowHeader: true,\n };\n options = Object.assign({}, defaultOption, options);\n\n let header;\n const columns = [];\n const push = columnMajor(columns);\n\n if (options.firstRowHeader) {\n // If header present then mutate the array.\n // Do in-place mutation to save space.\n header = arr.splice(0, 1)[0];\n } else {\n header = [];\n }\n\n arr.forEach(field => push(...field));\n\n return [header, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return 
JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) 
{ eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n })).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(text) {\n return text == null ? \"\"\n : reFormat.test(text += \"\") ? \"\\\"\" + text.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : text;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of 
the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr) {\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n\n arr.forEach((item) => {\n const fields = [];\n for (let key in item) {\n if (key in header) {\n insertionIndex = header[key];\n } else {\n header[key] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[key];\n }\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config 
specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, i) {\n const resp = {};\n for (let field of fields) {\n resp[field.name()] = new Value(field.partialField.data[i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n Object.keys(fields).forEach((key) => { resp[key] = new Value(fields[key], key); });\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\n\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => {\n const newRowDiffSet = [];\n let lastInsertedValue = -1;\n let { mode } = config;\n let li;\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n let checker;\n if (mode === FilteringMode.INVERSE) {\n checker = index => !selectorHelperFn(index);\n } else {\n checker = index => selectorHelperFn(index);\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n li = newRowDiffSet.length - 1;\n newRowDiffSet[li] = `${newRowDiffSet[li].split('-')[0]}-${i}`;\n } else {\n newRowDiffSet.push(`${i}`);\n }\n lastInsertedValue = i;\n }\n });\n return newRowDiffSet.join(',');\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n 
partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n let fns = [];\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n const dataObj = dataModel.getData();\n const schema = dataObj.schema;\n const fieldsConfig = dataModel.getFieldsConfig();\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = Object.values(fieldsConfig).reduce((acc, v) => {\n acc[v.def.name] = fieldsSpace[v.def.name].domain();\n return acc;\n }, {});\n\n return (fields) => {\n const include = !data.length ? false : data.some(row => schema.every((propField) => {\n if (!(propField.name in fields)) {\n return true;\n }\n const value = fields[propField.name].valueOf();\n if (filterByMeasure && propField.type === FieldType.MEASURE) {\n return value >= domain[propField.name][0] && value <= domain[propField.name][1];\n }\n\n if (propField.type !== FieldType.DIMENSION) {\n return true;\n }\n const idx = fieldsConfig[propField.name].index;\n return row[idx] === fields[propField.name].valueOf();\n }));\n return include;\n };\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = cloneWithAllFields(model).select(fields => fns.every(fn => fn(fields)), {\n saveChild: false,\n mode: FilteringMode.ALL\n });\n } else {\n filteredModel = cloneWithAllFields(model).select(fields => fns.some(fn => fn(fields)), {\n mode: FilteringMode.ALL,\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n const cloned = 
sourceDm.clone(cloneConfig.saveChild);\n const rowDiffset = selectHelper(\n cloned._rowDiffset,\n cloned.getPartialFieldspace().fields,\n selectFn,\n selectConfig,\n sourceDm\n );\n cloned._rowDiffset = rowDiffset;\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn);\n persistAncestorDerivation(sourceDm, cloned);\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n persistAncestorDerivation(sourceDm, cloned);\n\n return cloned;\n};\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema));\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = 
converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, options);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n type: schema[i].subtype || schema[i].type,\n index: i\n };\n }\n }\n return null;\n};\n\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel[0];\n let rejectionModel = propModel[1];\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n 
selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n rejectionModel = rejectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return [selectionModel, rejectionModel];\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n let [selectionModel, rejectionModel] = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, [selectionModel, rejectionModel], config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || 
(() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = 
getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport { updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of 
basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 
'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n\n const cloneConfig = { saveChild: config.saveChild };\n let oDm;\n\n if (config.mode === FilteringMode.ALL) {\n const selectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.NORMAL },\n cloneConfig\n );\n const rejectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.INVERSE },\n cloneConfig\n );\n oDm = [selectDm, rejectDm];\n } else {\n oDm = cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n return oDm;\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * 
{@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n\n let normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n\n normalizedProjField = Array.from(new Set(normalizedProjField)).map(field => field.trim());\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldDef, i) => {\n acc[fieldDef.name()] = {\n index: i,\n def: { name: fieldDef.name(), type: fieldDef.type(), subtype: fieldDef.subtype() }\n };\n return acc;\n }, {});\n return this;\n }\n\n\n 
/**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? 
this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = 
dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n 
getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat } from './enums';\nimport {\n persistDerivation,\n persistAncestorDerivation,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. 
DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n this._sortingDetails = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. 
All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. 
The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivation(\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n persistAncestorDerivation(this, newDataModel);\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is 
sorted by `Origin` field in `DESC` order in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n sortedDm._sortingDetails = sortingDetails;\n return sortedDm;\n }\n\n /**\n * 
Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n 
serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn);\n persistAncestorDerivation(this, clone);\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n 
return this;\n }\n\n /**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null);\n persistAncestorDerivation(this, clone);\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = 
this.getSchema();\n\n return new DataModel(data, schema);\n }\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DM_DERIVATIVES } from './constants';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nDataModel.Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\nDataModel.Stats = Stats;\nObject.assign(DataModel, enums, { DM_DERIVATIVES });\nDataModel.DateTimeFormatter = DateTimeFormatter;\nDataModel.DataFormat = DataFormat;\nDataModel.FilteringMode = FilteringMode;\nDataModel.InvalidAwareTypes = InvalidAwareTypes;\nDataModel.version = pkg.version;\n\nexport default DataModel;\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { 
naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file diff --git a/src/export.js b/src/export.js index 34135e9..02fea04 100644 --- a/src/export.js +++ b/src/export.js @@ -17,6 +17,7 @@ import { } from './operator'; import * as Stats from './stats'; import * as enums from './enums'; +import { DM_DERIVATIVES } from './constants'; import { DateTimeFormatter } from './utils'; import { DataFormat, FilteringMode } from './constants'; import InvalidAwareTypes from './invalid-aware-types'; @@ -39,7 +40,7 @@ DataModel.Operators = { union }; DataModel.Stats = Stats; -Object.assign(DataModel, enums); +Object.assign(DataModel, enums, { DM_DERIVATIVES }); DataModel.DateTimeFormatter = DateTimeFormatter; DataModel.DataFormat = DataFormat; DataModel.FilteringMode = FilteringMode; diff --git a/src/operator/group-by-function.js b/src/operator/group-by-function.js index 4b7b397..5d5f1cf 100644 --- a/src/operator/group-by-function.js +++ b/src/operator/group-by-function.js @@ -3,6 +3,7 @@ import InvalidAwareTypes from '../invalid-aware-types'; import { GROUP_BY_FUNCTIONS } from '../enums'; const { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS; + function getFilteredValues(arr) { return arr.filter(item => !(item instanceof InvalidAwareTypes)); } From 9c42eeb8d29056db26a85cec5022582dabc2081d Mon Sep 17 00:00:00 2001 From: Ranajit Banerjee Date: Fri, 22 Mar 2019 14:49:46 +0530 Subject: [PATCH 14/21] - Fix lint error --- src/export.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/export.js b/src/export.js index 02fea04..625b162 100644 --- a/src/export.js +++ b/src/export.js @@ -17,9 +17,8 @@ import { } from './operator'; import * as Stats from './stats'; import * as enums from './enums'; -import { DM_DERIVATIVES } from './constants'; 
import { DateTimeFormatter } from './utils'; -import { DataFormat, FilteringMode } from './constants'; +import { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants'; import InvalidAwareTypes from './invalid-aware-types'; import pkg from '../package.json'; From 08b3a0519ccbf737a8f3d37d75d16f705eb8d0ac Mon Sep 17 00:00:00 2001 From: Ranajit Banerjee Date: Fri, 22 Mar 2019 15:18:25 +0530 Subject: [PATCH 15/21] - Export as default --- src/enums/group-by-functions.js | 10 +++++++++- src/enums/index.js | 2 +- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/enums/group-by-functions.js b/src/enums/group-by-functions.js index e3d726b..672b828 100644 --- a/src/enums/group-by-functions.js +++ b/src/enums/group-by-functions.js @@ -1,4 +1,10 @@ -export const GROUP_BY_FUNCTIONS = { +/** + * Group by function names + * + * @readonly + * @enum {string} + */ +const GROUP_BY_FUNCTIONS = { SUM: 'sum', AVG: 'avg', MIN: 'min', @@ -8,3 +14,5 @@ export const GROUP_BY_FUNCTIONS = { COUNT: 'count', STD: 'std' }; + +export default GROUP_BY_FUNCTIONS; diff --git a/src/enums/index.js b/src/enums/index.js index 2afa159..1e76f6b 100644 --- a/src/enums/index.js +++ b/src/enums/index.js @@ -12,4 +12,4 @@ export { default as DimensionSubtype } from './dimension-subtype'; export { default as MeasureSubtype } from './measure-subtype'; export { default as FieldType } from './field-type'; export { default as FilteringMode } from './filtering-mode'; -export { GROUP_BY_FUNCTIONS } from './group-by-functions'; +export { default as GROUP_BY_FUNCTIONS } from './group-by-functions'; From af859cc660072eab96196f7a58f1a6930cfadfc2 Mon Sep 17 00:00:00 2001 From: Ranajit Banerjee Date: Fri, 22 Mar 2019 16:04:12 +0530 Subject: [PATCH 16/21] - Refactor code --- dist/datamodel.js | 2 +- dist/datamodel.js.map | 2 +- src/export.js | 21 +++++++++++++-------- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/dist/datamodel.js b/dist/datamodel.js index e2c6163..e97fd45 100644 
--- a/dist/datamodel.js +++ b/dist/datamodel.js @@ -1,2 +1,2 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var a=t[r]={i:r,l:!1,exports:{}};return e[r].call(a.exports,a,a.exports,n),a.l=!0,a.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)n.d(r,a,function(t){return e[t]}.bind(null,a));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports={name:"datamodel",description:"Relational algebra compliant in-memory tabular data store",homepage:"https://github.com/chartshq/datamodel",version:"2.1.0",license:"MIT",main:"dist/datamodel.js",author:"Charts.com ",keywords:["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],repository:{type:"git",url:"https://github.com/chartshq/datamodel.git"},contributors:[{name:"Akash Goswami",email:"akash@charts.com"},{name:"Subhash Haldar",email:"subhash@charts.com"},{name:"Rousan Ali",email:"rousan@charts.com",url:"https://rousan.io"},{name:"Ujjal Kumar 
Dutta",email:"ujjal@charts.com"}],dependencies:{"d3-dsv":"^1.0.8"},devDependencies:{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0",chai:"3.5.0","cross-env":"^5.0.5",eslint:"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0",jsdoc:"3.5.5",json2yaml:"^1.1.0",karma:"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3",marked:"^0.5.0",mocha:"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0",webpack:"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},scripts:{test:"npm run lint && npm run ut",ut:"karma start karma.conf.js",utd:"karma start --single-run false --browsers Chrome karma.conf.js ",build:"webpack --mode production",start:"webpack-dev-server --config webpack.config.dev.js --mode development --open",lint:"eslint ./src","lint-errors":"eslint --quiet ./src",docs:"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l}),n.d(r,"GROUP_BY_FUNCTIONS",function(){return s});var a={};n.r(a),n.d(a,"DSVArr",function(){return Qe}),n.d(a,"DSVStr",function(){return ct}),n.d(a,"FlatJSON",function(){return ft}),n.d(a,"Auto",function(){return lt});var i={};n.r(i),n.d(i,"sum",function(){return Pt}),n.d(i,"avg",function(){return It}),n.d(i,"min",function(){return Mt}),n.d(i,"max",function(){return 
Ct}),n.d(i,"first",function(){return xt}),n.d(i,"last",function(){return Lt}),n.d(i,"count",function(){return Ut}),n.d(i,"sd",function(){return Vt});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"},s={SUM:"sum",AVG:"avg",MIN:"min",MAX:"max",FIRST:"first",LAST:"last",COUNT:"count",STD:"std"};function d(e){return e instanceof Date?e:new Date(e)}function p(e){return e<10?"0"+e:e}function h(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},h.TOKEN_PREFIX="%",h.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},h.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},h.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;aa.getFullYear()&&(t=""+(i-1)+r),d(t).getFullYear()},formatter:function(e){var t=d(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return d(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},h.formatAs=function(e,t){var 
n,r=d(e),a=h.findTokens(t),i=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var P=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:V.CROSS,i=[],o=[],u=n||J,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=B(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=w({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=w({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),R(e._rowDiffset,function(n){var d=!1,h=void 0;R(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var g=pt(y[l]),b=pt(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var _={};m.forEach(function(e,t){_[i[t].name]=e}),d&&V.CROSS!==a?o[h]=_:(o.push(_),d=!0,h=n)}else if((a===V.LEFTOUTER||a===V.RIGHTOUTER)&&!d){var O={},w=c.fields.length-1;m.forEach(function(e,t){O[i[t].name]=t<=w?e:null}),d=!0,h=n,o.push(O)}})}),new Rt(o,i,{name:d})}function K(e,t){var n=""+e,r=""+t;return nr?1:0}function W(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:K;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return 
e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function z(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function $(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function Q(e,t,n,r,a){var i={schema:[],data:[],uids:[]},o=(a=Object.assign({},{addUid:!1,columnWise:!1},a)).addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=At(r,a))&&(A(i)?W(n,function(e,t){return i(e[o.index],t[o.index])}):E(i)?function(){var e=q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return At(r,e)});e.forEach(function(e){e.push($(e,a,u))}),W(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,z(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",W(n,X(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,z(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function Z(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){R(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var 
t=c[e];r.push(w({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new Rt(i,r,{name:l})}function ee(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function te(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=ye.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=ye.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=he)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=ge(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(w({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;R(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new Ht(h,d,{name:c}),v}function _e(e,t){var n=B(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function Oe(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){R(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var 
t=c[e];r.push(w({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new Ht(i,r,{name:l})}function we(e,t,n){return G(e,t,n,!1,V.LEFTOUTER)}function Ee(e,t,n){return G(t,e,n,!1,V.RIGHTOUTER)}var Ae=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),Le=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===nt?f=!0:r===rt&&(f=!0,e.charCodeAt(o)===nt&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3];t===U.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,dt(a))):e._derivation.push({op:t,meta:r,criteria:a})},mt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,dt(e._ancestorDerivation).concat(dt(e._derivation)))},yt=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new T(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},R(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},gt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map(function(e){return e.name()}).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},bt=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||Y,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return function(e){var t=e.getData(),n=t.schema,r=e.getFieldsConfig(),i=e.getFieldspace().fieldsObj(),o=t.data,u=Object.values(r).reduce(function(e,t){return e[t.def.name]=i[t.def.name].domain(),e},{});return function(e){return!!o.length&&o.some(function(t){return 
n.every(function(n){if(!(n.name in e))return!0;var i=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return i>=u[n.name][0]&&i<=u[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=r[n.name].index;return t[o]===e[n.name].valueOf()})})}}(e)}):[function(){return!1}];return r===Y?gt(e).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):gt(e).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1})},_t=function(e,t,n,r){var a=e.clone(r.saveChild),i=yt(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),vt(a,U.SELECT,{config:n},t),mt(e,a),a},Ot=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),vt(a,U.PROJECT,{projField:t,config:n,actualProjField:i},null),mt(e,a),a},wt=function(e){if((e=w({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},Et=function(e,t,n,r){n=function(e){return e.map(function(e){return wt(e)})}(n),r=Object.assign(Object.assign({},$e),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=st(u,2),f=c[0],l=c[1],s=qe(l,n,f),d=k.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?F(t):r.dataFormat,e},At=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var 
i=St(n,t),o=st(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},Nt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t}(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(dt(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,dt(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=bt(g,a,{filterByMeasure:f}),jt(g,i,y)),l.forEach(function(e){var t=bt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=be.apply(void 0,a);return 
vt(i,U.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:ye.defaultReducer()},t),mt(this,i),n.saveChild?i.setParent(this):i.setParent(null),i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=wt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];R(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return e}(this),c=u._propagationNameSpace,f={groupByModel:function(e){for(;e._parent&&e._derivation.find(function(e){return e.op!==U.GROUPBY});)e=e._parent;return e}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 
0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),Nt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;Nt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=C(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[];return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t",keywords:["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],repository:{type:"git",url:"https://github.com/chartshq/datamodel.git"},contributors:[{name:"Akash Goswami",email:"akash@charts.com"},{name:"Subhash Haldar",email:"subhash@charts.com"},{name:"Rousan Ali",email:"rousan@charts.com",url:"https://rousan.io"},{name:"Ujjal Kumar 
Dutta",email:"ujjal@charts.com"}],dependencies:{"d3-dsv":"^1.0.8"},devDependencies:{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0",chai:"3.5.0","cross-env":"^5.0.5",eslint:"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0",jsdoc:"3.5.5",json2yaml:"^1.1.0",karma:"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3",marked:"^0.5.0",mocha:"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0",webpack:"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},scripts:{test:"npm run lint && npm run ut",ut:"karma start karma.conf.js",utd:"karma start --single-run false --browsers Chrome karma.conf.js ",build:"webpack --mode production",start:"webpack-dev-server --config webpack.config.dev.js --mode development --open",lint:"eslint ./src","lint-errors":"eslint --quiet ./src",docs:"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l}),n.d(r,"GROUP_BY_FUNCTIONS",function(){return s});var a={};n.r(a),n.d(a,"DSVArr",function(){return Qe}),n.d(a,"DSVStr",function(){return ct}),n.d(a,"FlatJSON",function(){return ft}),n.d(a,"Auto",function(){return lt});var i={};n.r(i),n.d(i,"sum",function(){return Pt}),n.d(i,"avg",function(){return It}),n.d(i,"min",function(){return Mt}),n.d(i,"max",function(){return 
Ct}),n.d(i,"first",function(){return xt}),n.d(i,"last",function(){return Lt}),n.d(i,"count",function(){return Ut}),n.d(i,"sd",function(){return Vt});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"},s={SUM:"sum",AVG:"avg",MIN:"min",MAX:"max",FIRST:"first",LAST:"last",COUNT:"count",STD:"std"};function d(e){return e instanceof Date?e:new Date(e)}function p(e){return e<10?"0"+e:e}function h(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},h.TOKEN_PREFIX="%",h.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},h.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},h.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;aa.getFullYear()&&(t=""+(i-1)+r),d(t).getFullYear()},formatter:function(e){var t=d(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return d(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},h.formatAs=function(e,t){var 
n,r=d(e),a=h.findTokens(t),i=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var P=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:V.CROSS,i=[],o=[],u=n||J,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=B(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=w({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=w({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),R(e._rowDiffset,function(n){var d=!1,h=void 0;R(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var g=pt(y[l]),b=pt(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var _={};m.forEach(function(e,t){_[i[t].name]=e}),d&&V.CROSS!==a?o[h]=_:(o.push(_),d=!0,h=n)}else if((a===V.LEFTOUTER||a===V.RIGHTOUTER)&&!d){var O={},w=c.fields.length-1;m.forEach(function(e,t){O[i[t].name]=t<=w?e:null}),d=!0,h=n,o.push(O)}})}),new Rt(o,i,{name:d})}function K(e,t){var n=""+e,r=""+t;return nr?1:0}function W(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:K;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return 
e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function z(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function $(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function Q(e,t,n,r,a){var i={schema:[],data:[],uids:[]},o=(a=Object.assign({},{addUid:!1,columnWise:!1},a)).addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=At(r,a))&&(A(i)?W(n,function(e,t){return i(e[o.index],t[o.index])}):E(i)?function(){var e=q(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return At(r,e)});e.forEach(function(e){e.push($(e,a,u))}),W(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,z(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",W(n,X(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,z(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function Z(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){R(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var 
t=c[e];r.push(w({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new Rt(i,r,{name:l})}function ee(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function te(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=ye.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=ye.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=he)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=ge(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(w({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;R(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new Bt(h,d,{name:c}),v}function _e(e,t){var n=B(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function Oe(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){R(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var 
t=c[e];r.push(w({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new Bt(i,r,{name:l})}function we(e,t,n){return G(e,t,n,!1,V.LEFTOUTER)}function Ee(e,t,n){return G(t,e,n,!1,V.RIGHTOUTER)}var Ae=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),Le=function(){function e(e,t){for(var n=0;n=i?c=!0:(r=e.charCodeAt(o++))===nt?f=!0:r===rt&&(f=!0,e.charCodeAt(o)===nt&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3];t===U.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,dt(a))):e._derivation.push({op:t,meta:r,criteria:a})},mt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,dt(e._ancestorDerivation).concat(dt(e._derivation)))},yt=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new T(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},R(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},gt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map(function(e){return e.name()}).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},bt=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||Y,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return function(e){var t=e.getData(),n=t.schema,r=e.getFieldsConfig(),i=e.getFieldspace().fieldsObj(),o=t.data,u=Object.values(r).reduce(function(e,t){return e[t.def.name]=i[t.def.name].domain(),e},{});return function(e){return!!o.length&&o.some(function(t){return 
n.every(function(n){if(!(n.name in e))return!0;var i=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return i>=u[n.name][0]&&i<=u[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=r[n.name].index;return t[o]===e[n.name].valueOf()})})}}(e)}):[function(){return!1}];return r===Y?gt(e).select(function(e){return i.every(function(t){return t(e)})},{saveChild:!1,mode:l.ALL}):gt(e).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1})},_t=function(e,t,n,r){var a=e.clone(r.saveChild),i=yt(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),vt(a,U.SELECT,{config:n},t),mt(e,a),a},Ot=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),vt(a,U.PROJECT,{projField:t,config:n,actualProjField:i},null),mt(e,a),a},wt=function(e){if((e=w({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},Et=function(e,t,n,r){n=function(e){return e.map(function(e){return wt(e)})}(n),r=Object.assign(Object.assign({},$e),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=st(u,2),f=c[0],l=c[1],s=qe(l,n,f),d=k.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?F(t):r.dataFormat,e},At=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var 
i=St(n,t),o=st(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},Nt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t}(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(dt(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,dt(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=bt(g,a,{filterByMeasure:f}),jt(g,i,y)),l.forEach(function(e){var t=bt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=be.apply(void 0,a);return 
vt(i,U.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:ye.defaultReducer()},t),mt(this,i),n.saveChild?i.setParent(this):i.setParent(null),i}},{key:"sort",value:function(e){var t=this.getData({order:"row",sort:e}),n=[t.schema.map(function(e){return e.name})].concat(t.data),r=new this.constructor(n,t.schema,{dataFormat:"DSVArr"});return r._sortingDetails=e,r}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=wt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];R(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return e}(this),c=u._propagationNameSpace,f={groupByModel:function(e){for(;e._parent&&e._derivation.find(function(e){return e.op!==U.GROUPBY});)e=e._parent;return e}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 
0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),Nt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;Nt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=C(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[];return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. 
This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 
12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d 
= convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n 
result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need 
/**
 * Wrapper class around the primitive value of a field cell. The wrapped value
 * is immutable; the instance coerces back to the primitive via valueOf/toString.
 */
class Value {

    /**
     * Creates a new Value instance.
     *
     * @param {*} val - The primitive value taken from the field cell.
     * @param {string | Field} field - The field the value belongs to.
     */
    constructor (val, field) {
        // Lock the raw value down: hidden from enumeration and read-only.
        Object.defineProperty(this, '_value', {
            enumerable: false,
            configurable: false,
            writable: false,
            value: val
        });

        this.field = field;
    }

    /**
     * The wrapped primitive value.
     *
     * @return {*} The current value.
     */
    get value () {
        return this._value;
    }

    /**
     * Human readable form of the wrapped value.
     *
     * @override
     * @return {string} Stringified value.
     */
    toString () {
        return String(this.value);
    }

    /**
     * Primitive form of the wrapped value, used by implicit coercion.
     *
     * @override
     * @return {*} The wrapped value.
     */
    valueOf () {
        return this.value;
    }
}

export default Value;
/**
 * Iterates over every row index encoded in a row diffset string and invokes
 * the callback with each index.
 *
 * A diffset is a comma separated list of single indices or inclusive ranges,
 * e.g. '0-4,6,10-13'.
 *
 * @param {string} rowDiffset - The row diffset string.
 * @param {Function} callback - Called once per decoded row index.
 */
export function rowDiffsetIterator (rowDiffset, callback) {
    if (rowDiffset.length === 0) {
        return;
    }
    for (const segment of rowDiffset.split(',')) {
        const bounds = segment.split('-');
        const from = +bounds[0];
        // A bare index has no upper bound; fall back to the lower bound.
        const to = +(bounds[1] || bounds[0]);
        if (to >= from) {
            for (let idx = from; idx <= to; idx += 1) {
                callback(idx);
            }
        }
    }
}
/**
 * Builds the list of bucket boundary stops from start (inclusive) towards end,
 * stepping by binSize. One extra stop past the loop is always appended so the
 * final bucket covers the end value.
 *
 * @param {number} binSize - Width of each bucket.
 * @param {number} start - First stop.
 * @param {number} end - Upper limit driving the loop.
 * @return {Array.<number>} The boundary stops.
 */
const generateBuckets = (binSize, start, end) => {
    const stops = [];
    let cursor = start;

    while (cursor < end) {
        stops.push(cursor);
        cursor += binSize;
    }
    stops.push(cursor);

    return stops;
};

/**
 * Locates the half-open range [start, end) containing the value, using binary
 * search over the sorted range list.
 *
 * @param {Array.<Object>} bucketRanges - Sorted array of {start, end} ranges.
 * @param {number} value - The value to place.
 * @return {Object|null} The matching range, or null when no range contains it.
 */
const findBucketRange = (bucketRanges, value) => {
    let lo = 0;
    let hi = bucketRanges.length - 1;

    while (lo <= hi) {
        const mid = lo + Math.floor((hi - lo) / 2);
        const candidate = bucketRanges[mid];

        if (value >= candidate.start && value < candidate.end) {
            return candidate;
        }
        if (value >= candidate.end) {
            lo = mid + 1;
        } else {
            // value < candidate.start
            hi = mid - 1;
        }
    }

    return null;
};
/**
 * Event name used when data propagation is fired.
 */
export const PROPAGATION = 'propagation';

/**
 * Name of the synthetic unique row id column in DataModel.
 */
export const ROW_ID = '__id__';

/**
 * Identifiers for the derivative operations a DataModel can record.
 */
export const DM_DERIVATIVES = {
    SELECT: 'select',
    PROJECT: 'project',
    GROUPBY: 'group',
    COMPOSE: 'compose',
    CAL_VAR: 'calculatedVariable',
    BIN: 'bin'
};

/**
 * Join types supported by the join operators.
 */
export const JOINS = {
    CROSS: 'cross',
    LEFTOUTER: 'leftOuter',
    RIGHTOUTER: 'rightOuter',
    NATURAL: 'natural',
    FULLOUTER: 'fullOuter'
};

/**
 * Names of the supported logical combinators.
 */
export const LOGICAL_OPERATORS = {
    AND: 'and',
    OR: 'or'
};
/**
 * Functional (composable) form of the selection operator. Row filtering:
 * takes a predicate called per row; rows pass into the selection or rejection
 * set, and `config.mode` (FilteringMode) decides which set the resultant
 * DataModel reflects. `FilteringMode.ALL` is only valid on the chained version.
 *
 * @public
 * @namespace DataModel
 * @module Operators
 *
 * @param {Function} selectFn - Predicate called per row: `(row, i) => boolean`.
 * @param {Object} [config] - Optional config, `config.mode` defaults to NORMAL.
 * @return {Function} Function expecting the DataModel instance to operate on.
 */
export const select = (...params) => dataModel => dataModel.select(...params);

/**
 * Functional (composable) form of the projection operator. Column filtering:
 * field names listed go into the selection set, the rest into the rejection
 * set; `config.mode` decides which set the resultant DataModel reflects.
 * `FilteringMode.ALL` is only valid on the chained version.
 *
 * @public
 * @namespace DataModel
 * @module Operators
 *
 * @param {Array} projField - Column names (strings or regular expressions).
 * @param {Object} [config] - Optional config, `config.mode` defaults to NORMAL.
 * @return {Function} Function expecting the DataModel instance to operate on.
 */
export const project = (...params) => dataModel => dataModel.project(...params);

/**
 * Functional (composable) form of the binning operator. Adds a new field with
 * the binned value of a measure without aggregating rows. Bins can be
 * configured via custom buckets, a bin count, or a bin size.
 *
 * @public
 * @namespace DataModel
 * @module Operators
 *
 * @param {string} name - Name of the measure to bin.
 * @param {Object} config - Bin configuration (buckets/binCount/binSize, name).
 * @return {Function} Function expecting the DataModel instance to operate on.
 */
export const bin = (...params) => dataModel => dataModel.bin(...params);

/**
 * Functional (composable) form of `groupBy`. Projects on the given dimensions
 * and reduces duplicate tuples with the supplied (or schema-default) reducers.
 *
 * @public
 *
 * @param {Array} fieldsArr - Names of the grouping dimensions.
 * @param {Object} [reducers={}] - Map of variable name to reducer name.
 * @return {Function} Function expecting the DataModel instance to operate on.
 */
export const groupBy = (...params) => dataModel => dataModel.groupBy(...params);
/**
 * Composes several operators (select/project/groupBy/bin/compose) into one
 * named operation. Operations run serially: the output DataModel of one is
 * the input of the next (pipe semantics).
 *
 * @public
 * @namespace DataModel
 * @module Operators
 *
 * @param {...Function} operations - Prepared operator functions, each taking
 *        and returning a DataModel instance.
 * @return {Function} Function taking the source DataModel and an optional
 *         config ({saveChild: boolean}) controlling whether the result is
 *         attached to the source as a child.
 */
export const compose = (...operations) =>
    (dm, config = { saveChild: true }) => {
        let currentDM = dm;
        let firstChild;
        const derivations = [];

        operations.forEach((operation) => {
            currentDM = operation(currentDM);
            // Accumulate every intermediate derivation so the final composed
            // DataModel records the full lineage of the pipeline.
            derivations.push(...currentDM._derivation);
            if (!firstChild) {
                firstChild = currentDM;
            }
        });

        // Drop the first intermediate DataModel; the composed result replaces
        // the whole chain. NOTE(review): only the first intermediate is
        // disposed here — confirm later intermediates are cleaned up elsewhere.
        if (firstChild && firstChild !== currentDM) {
            firstChild.dispose();
        }

        // Record the whole pipeline as a single COMPOSE derivation.
        persistDerivation(currentDM, DM_DERIVATIVES.COMPOSE, null, derivations);
        // reset all ancestorDerivation saved in-between compose
        currentDM._ancestorDerivation = [];
        persistAncestorDerivation(dm, currentDM);

        // saveChild decides whether the composed DataModel stays linked to its
        // source (parent) or is detached.
        if (config.saveChild) {
            currentDM.setParent(dm);
        } else {
            currentDM.setParent(null);
        }

        return currentDM;
    };
/**
 * Default filter for crossProduct: keep every tuple pair.
 *
 * @return {boolean} Always true.
 */
function defaultFilterFn() { return true; }

/**
 * Cross product (and join) between two DataModel instances. Builds the joined
 * schema and data, then constructs a new DataModel from them.
 *
 * @param {DataModel} dm1 - The left DataModel instance.
 * @param {DataModel} dm2 - The right DataModel instance.
 * @param {Function} filterFn - Predicate deciding whether a tuple pair joins.
 * @param {boolean} [replaceCommonSchema=false] - When true, common-named
 *        columns are merged instead of being prefixed per side.
 * @param {string} [jointype=JOINS.CROSS] - One of the JOINS constants.
 * @return {DataModel} The DataModel produced by the operation.
 */
export function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {
    const schema = [];
    const data = [];
    const applicableFilterFn = filterFn || defaultFilterFn;
    const dm1FieldStore = dm1.getFieldspace();
    const dm2FieldStore = dm2.getFieldspace();
    const dm1FieldStoreName = dm1FieldStore.name;
    const dm2FieldStoreName = dm2FieldStore.name;
    const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;
    const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);

    // Identical aliases would make the per-side userArg buckets collide.
    if (dm1FieldStoreName === dm2FieldStoreName) {
        throw new Error('DataModels must have different alias names');
    }
    // Here prepare the schema: common column names are namespaced with the
    // owning store's alias unless replaceCommonSchema collapses them.
    dm1FieldStore.fields.forEach((field) => {
        const tmpSchema = extend2({}, field.schema());
        if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {
            tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;
        }
        schema.push(tmpSchema);
    });
    dm2FieldStore.fields.forEach((field) => {
        const tmpSchema = extend2({}, field.schema());
        if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {
            if (!replaceCommonSchema) {
                tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;
                schema.push(tmpSchema);
            }
        } else {
            schema.push(tmpSchema);
        }
    });

    // Here prepare Data: iterate every row pair from both diffsets.
    rowDiffsetIterator(dm1._rowDiffset, (i) => {
        let rowAdded = false;
        let rowPosition;
        rowDiffsetIterator(dm2._rowDiffset, (ii) => {
            const tuple = [];
            const userArg = {};
            userArg[dm1FieldStoreName] = {};
            userArg[dm2FieldStoreName] = {};
            dm1FieldStore.fields.forEach((field) => {
                tuple.push(field.partialField.data[i]);
                userArg[dm1FieldStoreName][field.name()] = field.partialField.data[i];
            });
            dm2FieldStore.fields.forEach((field) => {
                // When common columns are merged, the dm2 copy is skipped in
                // the tuple but still exposed to the filter via userArg.
                if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {
                    tuple.push(field.partialField.data[ii]);
                }
                userArg[dm2FieldStoreName][field.name()] = field.partialField.data[ii];
            });

            let cachedStore = {};
            let cloneProvider1 = () => dm1.detachedRoot();
            let cloneProvider2 = () => dm2.detachedRoot();

            const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);
            const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);
            if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {
                const tupleObj = {};
                tuple.forEach((cellVal, iii) => {
                    tupleObj[schema[iii].name] = cellVal;
                });
                // For non-cross joins a left row contributes at most one
                // result row; a later match overwrites the earlier placeholder.
                if (rowAdded && JOINS.CROSS !== jointype) {
                    data[rowPosition] = tupleObj;
                }
                else {
                    data.push(tupleObj);
                    rowAdded = true;
                    rowPosition = i;
                }
            } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {
                // Outer join: emit the left tuple padded with nulls when no
                // right row has matched yet.
                const tupleObj = {};
                let len = dm1FieldStore.fields.length - 1;
                tuple.forEach((cellVal, iii) => {
                    if (iii <= len) {
                        tupleObj[schema[iii].name] = cellVal;
                    }
                    else {
                        tupleObj[schema[iii].name] = null;
                    }
                });
                rowAdded = true;
                rowPosition = i;
                data.push(tupleObj);
            }
        });
    });

    return new DataModel(data, schema, { name });
}

/**
 * Default comparator: lexicographic comparison of stringified values.
 *
 * @param {*} a - The first value.
 * @param {*} b - The second value.
 * @return {number} -1, 0 or 1.
 */
function defSortFn (a, b) {
    const a1 = `${a}`;
    const b1 = `${b}`;
    if (a1 < b1) {
        return -1;
    }
    if (a1 > b1) {
        return 1;
    }
    return 0;
}

/**
 * Merge step of merge sort: merges the two sorted halves arr[lo..mid] and
 * arr[mid+1..hi] in place, using an auxiliary copy.
 *
 * @param {Array} arr - The target array.
 * @param {number} lo - Start index of the first half.
 * @param {number} mid - End index of the first half.
 * @param {number} hi - End index of the second half.
 * @param {Function} sortFn - The comparator.
 */
function merge (arr, lo, mid, hi, sortFn) {
    const mainArr = arr;
    const auxArr = [];
    for (let i = lo; i <= hi; i += 1) {
        auxArr[i] = mainArr[i];
    }
    let a = lo;
    let b = mid + 1;

    for (let i = lo; i <= hi; i += 1) {
        if (a > mid) {
            // First half exhausted; take from the second.
            mainArr[i] = auxArr[b];
            b += 1;
        } else if (b > hi) {
            // Second half exhausted; take from the first.
            mainArr[i] = auxArr[a];
            a += 1;
        } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {
            // <= keeps the sort stable: ties prefer the first half.
            mainArr[i] = auxArr[a];
            a += 1;
        } else {
            mainArr[i] = auxArr[b];
            b += 1;
        }
    }
}

/**
 * Recursive half of merge sort over arr[lo..hi].
 *
 * @param {Array} arr - The target array.
 * @param {number} lo - Start index of the span.
 * @param {number} hi - End index of the span.
 * @param {Function} sortFn - The comparator.
 * @return {Array} The same array, sorted over the span.
 */
function sort (arr, lo, hi, sortFn) {
    if (hi === lo) { return arr; }

    const mid = lo + Math.floor((hi - lo) / 2);
    sort(arr, lo, mid, sortFn);
    sort(arr, mid + 1, hi, sortFn);
    merge(arr, lo, mid, hi, sortFn);

    return arr;
}

/**
 * Stable merge sort, used because the built-in sort's stability cannot be
 * relied upon across engines (at the time this was written).
 *
 * @param {Array} arr - The array to sort in place.
 * @param {Function} [sortFn=defSortFn] - The comparator.
 * @return {Array} The input array, sorted.
 */
export function mergeSort (arr, sortFn = defSortFn) {
    if (arr.length > 1) {
        sort(arr, 0, arr.length - 1, sortFn);
    }
    return arr;
}
/**
 * Groups rows by the value at the given field index, preserving first-seen
 * order of the group keys.
 *
 * @param {Array} data - The input rows.
 * @param {number} fieldIndex - Index of the grouping field within each row.
 * @return {Array} Array of [groupKey, rowsInGroup] pairs.
 */
function groupData(data, fieldIndex) {
    const hashMap = new Map();
    const groupedData = [];

    data.forEach((datum) => {
        const fieldVal = datum[fieldIndex];
        if (hashMap.has(fieldVal)) {
            groupedData[hashMap.get(fieldVal)][1].push(datum);
        } else {
            groupedData.push([fieldVal, [datum]]);
            hashMap.set(fieldVal, groupedData.length - 1);
        }
    });

    return groupedData;
}

/**
 * Builds the argument object handed to a by-other-fields sorting function:
 * the group label plus, per target field, the list of that field's values
 * within the group.
 *
 * @param {Array} groupedDatum - A [groupKey, rows] pair from groupData.
 * @param {Array} targetFields - Names of the sorting fields.
 * @param {Array} targetFieldDetails - Matching schema details (with .index).
 * @return {Object} The sorting-function argument.
 */
function createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) {
    const arg = {
        label: groupedDatum[0]
    };

    targetFields.reduce((acc, next, idx) => {
        acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);
        return acc;
    }, arg);

    return arg;
}

/**
 * Sorts dataObj.data in place according to sortingDetails, then rebuilds
 * dataObj.uids from the trailing original-index column each row carries
 * (that column is consumed by value.pop() at the end).
 *
 * Sorting details are applied from last to first so the first detail has the
 * highest precedence (stable sort preserves earlier passes).
 *
 * Each detail's second element selects the strategy:
 * - a function: used directly as comparator on the field's values;
 * - an array: "sort by other fields" — rows are grouped by this field and the
 *   groups ordered by the user function receiving per-group field values;
 * - otherwise: 'asc'/'desc' string handled by type-aware getSortFn.
 *
 * @param {Object} dataObj - Object with .data (rows) and .schema.
 * @param {Array} sortingDetails - Array of [fieldName, sortMeta] configs.
 */
function sortData(dataObj, sortingDetails) {
    const { data, schema } = dataObj;
    let fieldName;
    let sortMeta;
    let fDetails;
    let i = sortingDetails.length - 1;

    for (; i >= 0; i--) {
        fieldName = sortingDetails[i][0];
        sortMeta = sortingDetails[i][1];
        fDetails = fieldInSchema(schema, fieldName);

        if (!fDetails) {
            // Unknown field: skip this sorting detail entirely.
            // eslint-disable-next-line no-continue
            continue;
        }

        if (isCallable(sortMeta)) {
            // Custom comparator over the raw field values.
            // eslint-disable-next-line no-loop-func
            mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));
        } else if (isArray(sortMeta)) {
            // Sort by other fields: last array entry is the user function,
            // preceding entries are the field names it receives.
            const groupedData = groupData(data, fDetails.index);
            const sortingFn = sortMeta[sortMeta.length - 1];
            const targetFields = sortMeta.slice(0, sortMeta.length - 1);
            const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));

            groupedData.forEach((groupedDatum) => {
                groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));
            });

            mergeSort(groupedData, (a, b) => {
                const m = a[2];
                const n = b[2];
                return sortingFn(m, n);
            });

            // Empty the array, then refill it with rows in group order.
            data.length = 0;
            groupedData.forEach((datum) => {
                data.push(...datum[1]);
            });
        } else {
            // Normalize anything that is not exactly 'desc' to 'asc'.
            sortMeta = String(sortMeta).toLowerCase() === 'desc' ? 'desc' : 'asc';
            mergeSort(data, getSortFn(fDetails.type, sortMeta, fDetails.index));
        }
    }

    // Recover the post-sort uid ordering from the appended index column.
    dataObj.uids = [];
    data.forEach((value) => {
        dataObj.uids.push(value.pop());
    });
}
/**
 * Builds the actual data array for a DataModel view.
 *
 * @param {Array} fieldStore - An array of fields.
 * @param {string} rowDiffset - Rows to include, e.g. '0-2,4,6'.
 * @param {string} colIdentifier - Comma separated column names, e.g.
 *        'date,sales,profit'.
 * @param {Object} sortingDetails - Sorting configuration for the instance.
 * @param {Object} options - Output options: addUid adds an identifier column,
 *        columnWise transposes the result.
 * @return {Object} Object with schema, data (row- or column-wise) and uids.
 */
export function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {
    const defOptions = {
        addUid: false,
        columnWise: false
    };
    options = Object.assign({}, defOptions, options);

    const retObj = {
        schema: [],
        data: [],
        uids: []
    };
    const addUid = options.addUid;
    const reqSorting = sortingDetails && sortingDetails.length > 0;
    // It stores the fields according to the colIdentifier argument
    const tmpDataArr = [];
    // Stores the fields according to the colIdentifier argument
    const colIArr = colIdentifier.split(',');

    // Resolve each requested column name to its field, keeping request order.
    colIArr.forEach((colName) => {
        for (let i = 0; i < fieldStore.length; i += 1) {
            if (fieldStore[i].name() === colName) {
                tmpDataArr.push(fieldStore[i]);
                break;
            }
        }
    });

    // Inserts the schema to the schema object
    tmpDataArr.forEach((field) => {
        /** @todo Need to use extend2 here otherwise user can overwrite the schema. */
        retObj.schema.push(field.schema());
    });

    if (addUid) {
        retObj.schema.push({
            name: 'uid',
            type: 'identifier'
        });
    }

    rowDiffsetIterator(rowDiffset, (i) => {
        retObj.data.push([]);
        const insertInd = retObj.data.length - 1;
        let start = 0;
        tmpDataArr.forEach((field, ii) => {
            retObj.data[insertInd][ii + start] = field.partialField.data[i];
        });
        if (addUid) {
            retObj.data[insertInd][tmpDataArr.length] = i;
        }
        // Creates an array of unique identifiers for each row
        retObj.uids.push(i);

        // If sorting needed then there is the need to expose the index
        // mapping from the old index to its new index
        if (reqSorting) { retObj.data[insertInd].push(i); }
    });

    // Handles the sort functionality
    if (reqSorting) {
        sortData(retObj, sortingDetails);
    }

    // Transpose rows to per-column arrays when columnWise output is requested.
    if (options.columnWise) {
        const tmpData = Array(...Array(retObj.schema.length)).map(() => []);
        retObj.data.forEach((tuple) => {
            tuple.forEach((data, i) => {
                tmpData[i].push(data);
            });
        });
        retObj.data = tmpData;
    }

    return retObj;
}

/**
 * Performs the union operation between two dm instances: emits the rows of
 * dm1 whose tuples do not also occur in dm2, de-duplicated by a string hash
 * of the tuple values.
 *
 * @todo Fix the conflicts between union and difference terminology here.
 *
 * @param {DataModel} dm1 - The first dm instance.
 * @param {DataModel} dm2 - The second dm instance.
 * @return {DataModel|null} The resulting dm, or null when the column sets of
 *         the two inputs do not match.
 */
export function difference (dm1, dm2) {
    const hashTable = {};
    const schema = [];
    const schemaNameArr = [];
    const data = [];
    const dm1FieldStore = dm1.getFieldspace();
    const dm2FieldStore = dm2.getFieldspace();
    const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();
    const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();
    const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;

    // For union the columns should match otherwise return a clone of the dm1
    if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {
        return null;
    }

    // Prepare the schema
    (dm1._colIdentifier.split(',')).forEach((fieldName) => {
        const field = dm1FieldStoreFieldObj[fieldName];
        schema.push(extend2({}, field.schema()));
        schemaNameArr.push(field.schema().name);
    });

    /**
     * The helper function to create the data.
     *
     * @param {DataModel} dm - The dm instance whose rows are visited.
     * @param {Object} fieldsObj - The fieldStore object format.
     * @param {boolean} addData - If true the tuple is also added to the data.
     */
    function prepareDataHelper(dm, fieldsObj, addData) {
        rowDiffsetIterator(dm._rowDiffset, (i) => {
            const tuple = {};
            let hashData = '';
            schemaNameArr.forEach((schemaName) => {
                const value = fieldsObj[schemaName].partialField.data[i];
                hashData += `-${value}`;
                tuple[schemaName] = value;
            });
            // First visit of a hash marks it; dm2 is visited first with
            // addData=false so its tuples only block, never emit.
            if (!hashTable[hashData]) {
                if (addData) { data.push(tuple); }
                hashTable[hashData] = true;
            }
        });
    }

    // Prepare the data
    prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);
    prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);

    return new DataModel(data, schema, { name });
}

/**
 * Drops invalid (InvalidAwareTypes) entries from the array.
 *
 * @param {Array} arr - The input array.
 * @return {Array} The array without invalid entries.
 */
function getFilteredValues(arr) {
    return arr.filter(item => !(item instanceof InvalidAwareTypes));
}

/**
 * Reducer: sum of all valid values.
 *
 * @public
 * @param {Array} arr - The input array.
 * @return {number|InvalidAwareTypes} The sum, or NULL when nothing is valid.
 */
function sum (arr) {
    if (isArray(arr) && !(arr[0] instanceof Array)) {
        const filteredNumber = getFilteredValues(arr);
        const totalSum = filteredNumber.length ?
            filteredNumber.reduce((acc, curr) => acc + curr, 0)
            : InvalidAwareTypes.NULL;
        return totalSum;
    }
    return InvalidAwareTypes.NULL;
}

/**
 * Reducer: arithmetic mean.
 *
 * NOTE(review): the denominator is arr.length, which still counts invalid
 * entries that sum() filtered out — confirm this averaging over all rows is
 * intended.
 *
 * @public
 * @param {Array} arr - The input array.
 * @return {number|InvalidAwareTypes} The mean, or NULL when not computable.
 */
function avg (arr) {
    if (isArray(arr) && !(arr[0] instanceof Array)) {
        const totalSum = sum(arr);
        const len = arr.length || 1;
        return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?
            InvalidAwareTypes.NULL : totalSum / len;
    }
    return InvalidAwareTypes.NULL;
}

/**
 * Reducer: minimum of the valid values.
 *
 * @public
 * @param {Array} arr - The input array.
 * @return {number|InvalidAwareTypes} The minimum, or NULL when none is valid.
 */
function min (arr) {
    if (isArray(arr) && !(arr[0] instanceof Array)) {
        // Filter out undefined, null and NaN values
        const filteredValues = getFilteredValues(arr);

        return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;
    }
    return InvalidAwareTypes.NULL;
}

/**
 * Reducer: maximum of the valid values.
 *
 * @public
 * @param {Array} arr - The input array.
 * @return {number|InvalidAwareTypes} The maximum, or NULL when none is valid.
 */
function max (arr) {
    if (isArray(arr) && !(arr[0] instanceof Array)) {
        // Filter out undefined, null and NaN values
        const filteredValues = getFilteredValues(arr);

        return (filteredValues.length) ? Math.max(...filteredValues) : InvalidAwareTypes.NULL;
    }
    return InvalidAwareTypes.NULL;
}

/**
 * Reducer: first value of the array (invalid entries are not skipped).
 *
 * @public
 * @param {Array} arr - The input array.
 * @return {*} The first value.
 */
function first (arr) {
    return arr[0];
}

/**
 * Reducer: last value of the array (invalid entries are not skipped).
 *
 * @public
 * @param {Array} arr - The input array.
 * @return {*} The last value.
 */
function last (arr) {
    return arr[arr.length - 1];
}

/**
 * Reducer: number of entries, including invalid ones.
 *
 * @public
 * @param {Array} arr - The input array.
 * @return {number|InvalidAwareTypes} The length, or NULL for non-arrays.
 */
function count (arr) {
    if (isArray(arr)) {
        return arr.length;
    }
    return InvalidAwareTypes.NULL;
}

/**
 * Population variance of the input array, built on avg().
 *
 * @param {Array} arr - The input array.
 * @return {number} The variance.
 */
function variance (arr) {
    let mean = avg(arr);
    return avg(arr.map(num => (num - mean) ** 2));
}

/**
 * Reducer: standard deviation (square root of the variance).
 *
 * @public
 * @param {Array} arr - The input array.
 * @return {number} The standard deviation.
 */
function std (arr) {
    return Math.sqrt(variance(arr));
}


// Registry mapping reducer names to implementations.
const fnList = {
    [SUM]: sum,
    [AVG]: avg,
    [MIN]: min,
    [MAX]: max,
    [FIRST]: first,
    [LAST]: last,
    [COUNT]: count,
    [STD]: std
};

// Reducer applied when a measure's schema names none.
const defaultReducerName = SUM;

export {
    defaultReducerName,
    sum as defReducer,
    fnList,
};
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].value ===\n dm2Fields[fieldName].value && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = 
value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} 
rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n 
*/\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * 
@extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return 
this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n data.push(datum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, this.format()));\n }\n });\n return data;\n }\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n 
*\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n 
}\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = 
this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case 
DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n 
*\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr (arr, options) {\n const defaultOption = {\n firstRowHeader: true,\n };\n options = Object.assign({}, defaultOption, options);\n\n let header;\n const columns = [];\n const push = columnMajor(columns);\n\n if (options.firstRowHeader) {\n // If header present then mutate the array.\n // Do in-place mutation to save space.\n header = arr.splice(0, 1)[0];\n } else {\n header = [];\n }\n\n arr.forEach(field => push(...field));\n\n return [header, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return 
JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) 
{ eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n })).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(text) {\n return text == null ? \"\"\n : reFormat.test(text += \"\") ? \"\\\"\" + text.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : text;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of 
the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr) {\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n\n arr.forEach((item) => {\n const fields = [];\n for (let key in item) {\n if (key in header) {\n insertionIndex = header[key];\n } else {\n header[key] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[key];\n }\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config 
specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, i) {\n const resp = {};\n for (let field of fields) {\n resp[field.name()] = new Value(field.partialField.data[i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n Object.keys(fields).forEach((key) => { resp[key] = new Value(fields[key], key); });\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\n\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => {\n const newRowDiffSet = [];\n let lastInsertedValue = -1;\n let { mode } = config;\n let li;\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n let checker;\n if (mode === FilteringMode.INVERSE) {\n checker = index => !selectorHelperFn(index);\n } else {\n checker = index => selectorHelperFn(index);\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n li = newRowDiffSet.length - 1;\n newRowDiffSet[li] = `${newRowDiffSet[li].split('-')[0]}-${i}`;\n } else {\n newRowDiffSet.push(`${i}`);\n }\n lastInsertedValue = i;\n }\n });\n return newRowDiffSet.join(',');\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n 
partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n let fns = [];\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n const dataObj = dataModel.getData();\n const schema = dataObj.schema;\n const fieldsConfig = dataModel.getFieldsConfig();\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = Object.values(fieldsConfig).reduce((acc, v) => {\n acc[v.def.name] = fieldsSpace[v.def.name].domain();\n return acc;\n }, {});\n\n return (fields) => {\n const include = !data.length ? false : data.some(row => schema.every((propField) => {\n if (!(propField.name in fields)) {\n return true;\n }\n const value = fields[propField.name].valueOf();\n if (filterByMeasure && propField.type === FieldType.MEASURE) {\n return value >= domain[propField.name][0] && value <= domain[propField.name][1];\n }\n\n if (propField.type !== FieldType.DIMENSION) {\n return true;\n }\n const idx = fieldsConfig[propField.name].index;\n return row[idx] === fields[propField.name].valueOf();\n }));\n return include;\n };\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = cloneWithAllFields(model).select(fields => fns.every(fn => fn(fields)), {\n saveChild: false,\n mode: FilteringMode.ALL\n });\n } else {\n filteredModel = cloneWithAllFields(model).select(fields => fns.some(fn => fn(fields)), {\n mode: FilteringMode.ALL,\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n const cloned = 
sourceDm.clone(cloneConfig.saveChild);\n const rowDiffset = selectHelper(\n cloned._rowDiffset,\n cloned.getPartialFieldspace().fields,\n selectFn,\n selectConfig,\n sourceDm\n );\n cloned._rowDiffset = rowDiffset;\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn);\n persistAncestorDerivation(sourceDm, cloned);\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n persistAncestorDerivation(sourceDm, cloned);\n\n return cloned;\n};\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema));\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = 
converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, options);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n type: schema[i].subtype || schema[i].type,\n index: i\n };\n }\n }\n return null;\n};\n\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel[0];\n let rejectionModel = propModel[1];\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n 
selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n rejectionModel = rejectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return [selectionModel, rejectionModel];\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n let [selectionModel, rejectionModel] = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, [selectionModel, rejectionModel], config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || 
(() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = 
getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport { updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of 
basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 
'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n\n const cloneConfig = { saveChild: config.saveChild };\n let oDm;\n\n if (config.mode === FilteringMode.ALL) {\n const selectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.NORMAL },\n cloneConfig\n );\n const rejectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.INVERSE },\n cloneConfig\n );\n oDm = [selectDm, rejectDm];\n } else {\n oDm = cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n return oDm;\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * 
{@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n\n let normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n\n normalizedProjField = Array.from(new Set(normalizedProjField)).map(field => field.trim());\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldDef, i) => {\n acc[fieldDef.name()] = {\n index: i,\n def: { name: fieldDef.name(), type: fieldDef.type(), subtype: fieldDef.subtype() }\n };\n return acc;\n }, {});\n return this;\n }\n\n\n 
/**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? 
this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = 
dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n 
getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat } from './enums';\nimport {\n persistDerivation,\n persistAncestorDerivation,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. 
DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n this._sortingDetails = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. 
All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. 
The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivation(\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n persistAncestorDerivation(this, newDataModel);\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is 
sorted by `Origin` field in `DESC` order in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n sortedDm._sortingDetails = sortingDetails;\n return sortedDm;\n }\n\n /**\n * 
Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n 
serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn);\n persistAncestorDerivation(this, clone);\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n 
return this;\n }\n\n /**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null);\n persistAncestorDerivation(this, clone);\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = 
this.getSchema();\n\n return new DataModel(data, schema);\n }\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DM_DERIVATIVES } from './constants';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nDataModel.Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\nDataModel.Stats = Stats;\nObject.assign(DataModel, enums, { DM_DERIVATIVES });\nDataModel.DateTimeFormatter = DateTimeFormatter;\nDataModel.DataFormat = DataFormat;\nDataModel.FilteringMode = FilteringMode;\nDataModel.InvalidAwareTypes = InvalidAwareTypes;\nDataModel.version = pkg.version;\n\nexport default DataModel;\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { 
naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file +{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./src/index.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/enums/group-by-functions.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/value.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataModel/./src/operator/union.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fiel
ds/categorical/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/converter/dsv-arr.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/converter/dsv-str.js","webpack://DataModel/./src/converter/flat-json.js","webpack://DataModel/./src/converter/auto-resolver.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/export.js","webpack://DataModel/./src/operator/pure-operators.js","webpack://DataModel/./src/operator/natural-join.js"],"names":["root","factory","exports","module","define","amd","window","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","object","property","prototype","hasOwnProperty","p","s","DataModel","default","data_format","FLAT_JSON","DSV_STR","DSV_ARR","AUTO","dimension_subtype","CATEGORICAL","TEMPORAL","GEO","BINNED","measure_subtype","CONTINUOUS","field_type","MEASURE","DIMENSION","filtering_mode","NORMAL",
"INVERSE","ALL","group_by_functions","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","convertToNativeDate","date","Date","pad","DateTimeFormatter","format","this","dtParams","undefined","nativeDate","RegExp","escape","text","replace","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","val","parsedVal","isFinite","parseInt","defaultRangeParser","range","nVal","toLowerCase","length","getTokenDefinitions","daysDef","short","long","monthsDef","H","index","extract","parser","formatter","getHours","toString","hours","P","M","getMinutes","S","getSeconds","K","getMilliseconds","a","join","day","getDay","A","e","getDate","b","month","getMonth","B","y","result","substring","presentDate","presentYear","Math","trunc","getFullYear","year","Y","getTokenFormalNames","definitions","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","arg","targetParam","arguments","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","keys","occurrence","forwardChar","indexOf","push","token","formatAs","nDate","formattedStr","String","formattedVal","parse","dateTimeStamp","options","extractTokenValue","dtParamSeq","noBreak","dtParamArr","args","resolverKey","resolverParams","resolverFn","param","resolvedVal","splice","apply","checkIfOnlyYear","unshift","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","map","obj","occurrenceLength","extractValues","match","shift","getNativeDate","Number","Function","concat","_toConsumableArray","len","column_major","store","_len","fields","Array","_key","forEach","fieldIndex","from","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","extend2","obj1","obj2","skipUndef","_typeof","m
erge","tgtArr","srcArr","item","srcVal","tgtVal","str","cRef","isArray","isCallable","getUniqueId","getTime","round","random","isArrEqual","arr1","arr2","formatNumber","helper_detectDataFormat","data","isString","isObject","field_store","createNamespace","fieldArr","dataId","fieldsObj","_cachedFieldsObj","field","getMeasure","measureFields","_cachedMeasure","schema","type","getDimension","dimensionFields","_cachedDimension","src_value","Value","_classCallCheck","configurable","writable","_value","rowDiffsetIterator","rowDiffset","callback","split","diffStr","diffStsArr","start","end","InvalidAwareTypes","invalid_aware_types_classCallCheck","config","assign","_invalidAwareValsMap","invalidAwareVals","NULL","NA","NIL","invalid","nil","null","invalid_aware_types","generateBuckets","binSize","buckets","next","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","getCommonSchema","fs1","fs2","retArr","fs1Arr","defaultFilterFn","crossProduct","dm1","dm2","filterFn","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","getFieldspace","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","Error","tmpSchema","_rowDiffset","rowAdded","rowPosition","ii","tuple","userArg","partialField","dm1Fields","prepareJoinData","dm2Fields","detachedRoot","tupleObj","cellVal","iii","datamodel","defSortFn","a1","b1","mergeSort","arr","sortFn","merge_sort_sort","lo","hi","mid","mainArr","auxArr","merge_sort_merge","getSortFn","dataType","sortType","retFunc","groupData","hashMap","Map","groupedData","datum","fieldVal","has","set","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","label","reduce","acc","idx","dataBuilder","fieldStore","colIdentifier","sortingDetails","retObj","uids","addUid","columnWise","reqSorting","tmpDataArr","colName","insertInd","dataObj","fiel
dName","sortMeta","fDetails","fieldInSchema","sortingFn","slice","f","data_builder_toConsumableArray","pop","sortData","tmpData","difference_difference","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","_colIdentifier","sort","prepareDataHelper","dm","addData","hashData","schemaName","getFilteredValues","filter","sum","filteredNumber","curr","avg","totalSum","isNaN","fnList","_defineProperty","_fnList","filteredValues","min","group_by_function_toConsumableArray","max","sqrt","mean","num","pow","variance","defaultReducerName","reducer_store_ReducerStore","ReducerStore","_this","reducer_store_classCallCheck","entries","reducer","_this2","__unregister","delete","reducer_store","group_by_groupBy","dataModel","reducers","existingDataModel","sFieldArr","dimensions","_ref","group_by_slicedToArray","getFieldArr","reducerObj","measures","defReducer","defaultReducer","measureName","defAggFn","reducerFn","resolve","getReducerObj","fieldStoreObj","dbName","dimensionArr","measureArr","newDataModel","_ref3","_ref4","rowCount","hash","_","cachedStore","cloneProvider","row","__calculateFieldspace","src_export","naturalJoinFilter","commonSchemaArr","retainTuple","union_union","leftOuterJoin","dataModel1","dataModel2","rightOuterJoin","fields_field","Field","field_classCallCheck","subtype","description","displayName","dimension","_cachedDomain","calculateDataDomain","categorical","Set","domain","add","temporal","Temporal","temporal_classCallCheck","temporal_possibleConstructorReturn","__proto__","getPrototypeOf","_cachedMinDiff","sortedData","arrLn","minDiff","POSITIVE_INFINITY","prevDatum","nextDatum","processedCount","_this3","binned","binsArr","bins","measure","unit","numberFormat","continuous","NEGATIVE_INFINITY","field_parser","categorical_parser","isInvalid","getInvalidType","trim","temporal_parser","TemporalParser","temporal_parser_classCallCheck","temporal_parser_possibleConstructorReturn","_dtf","binned_parser","matched","parseFloat","continuous_pa
rser","partial_field","PartialField","partial_field_classCallCheck","_sanitize","createFields","dataColumn","headers","headersObj","header","createUnitField","default_config","dataFormat","dsv_arr","firstRowHeader","columns","dsv_arr_toConsumableArray","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","JSON","stringify","src_dsv","delimiter","reFormat","DELIMITER","charCodeAt","parseRows","rows","N","I","eof","eol","j","formatRow","formatValue","test","convert","customConverter","columnSet","column","inferColumns","formatRows","csv","tsv","dsv_str","fieldSeparator","dsv","flat_json","insertionIndex","auto_resolver","converters","FlatJSON","DSVStr","DSVArr","resp","helper_updateFields","partialFieldspace","fieldStoreName","_ref2","helper_slicedToArray","collID","partialFieldMap","newFields","coll","createUnitFieldFromPartial","helper_persistDerivation","model","operation","_model$_derivation","criteriaFn","_derivation","src_helper_toConsumableArray","op","meta","criteria","persistAncestorDerivation","sourceDm","newDm","_newDm$_ancestorDeriv","_ancestorDerivation","helper_selectHelper","selectFn","newRowDiffSet","lastInsertedValue","li","selectorHelperFn","_iteratorNormalCompletion","_didIteratorError","_iteratorError","_step","_iterator","iterator","done","err","return","prepareSelectionData","checker","cloneWithAllFields","clonedDm","clone","getPartialFieldspace","calculateFieldsConfig","helper_filterPropagationModel","propModels","filterByMeasure","fns","propModel","getData","fieldsConfig","getFieldsConfig","fieldsSpace","values","v","def","some","every","propField","valueOf","select","fn","saveChild","helper_cloneWithSelect","selectConfig","cloneConfig","cloned","helper_cloneWithProject","projField","allFields","projectionSet","actualProjField","helper_sanitizeUnitSchema","unitSchema","helper_updateData","relation","sanitizeSchema","converterFn","converter_namespaceObject","_converterFn","_converterFn2","formattedData","nameSpace","_partialFieldspace","_dat
aFormat","applyExistingOperationOnModel","derivations","getDerivations","selectionModel","rejectionModel","derivation","_selectionModel","_rejectionModel","_getDerivationArgumen","params","groupByString","helper_getDerivationArguments","propagateIdentifiers","propModelInf","nonTraversingModel","excludeModels","handlePropagation","_children","child","_applyExistingOperati","_applyExistingOperati2","propagateToAllDataModels","identifiers","rootModels","propagationInf","propagationNameSpace","propagateToSource","propagationSourceId","sourceId","propagateInterpolatedValues","criterias","persistent","actionCriterias","mutableActions","filteredCriteria","entry","action","sourceActionCriterias","actionInf","actionConf","applyOnSource","models","path","_parent","getPathToRootModel","rootModel","propConfig","sourceIdentifiers","rootGroupByModel","groupByModel","inf","propagationModel","filteredModel","getFilteredModel","reverse","src_relation","Relation","relation_classCallCheck","source","_fieldStoreName","updateData","_propagationNameSpace","immutableActions","_fieldspace","joinWith","unionWith","differenceWith","defConfig","oDm","constructor","setParent","fieldConfig","normalizedProjField","relation_toConsumableArray","search","_fieldConfig","fieldDef","removeChild","findIndex","sibling","parent","datamodel_classCallCheck","datamodel_possibleConstructorReturn","_onPropagation","_sortingDetails","order","withUid","getAllFields","dataGenerated","fieldNames","fmtFieldIdx","elem","fIdx","fmtFn","datumIdx","fieldsArr","groupBy","rawData","dataInCSVArr","sortedDm","colData","rowsCount","serializedData","rowIdx","colIdx","fieldinst","dependency","replaceVar","depVars","retrieveFn","depFieldIndices","fieldSpec","fs","suppliedFields","computedValues","fieldsData","datamodel_toConsumableArray","_createFields","datamodel_slicedToArray","addField","addToNameSpace","isMutableAction","payload","getRootDataModel","find","helper_getRootGroupByModel","sourceNamespace","addToPropNamespace"
,"filterImmutableAction","criteriaModel","propagateImmutableActions","eventName","measureFieldName","binFieldName","_createBinnedFieldDat","measureField","binsCount","_measureField$domain","_measureField$domain2","_slicedToArray","dMin","dMax","ceil","abs","binnedData","createBinnedFieldData","binField","serialize","getSchema","stats_sum","stats_avg","stats_min","stats_max","stats_first","first","stats_last","last","stats_count","count","sd","std","Operators","compose","_len5","operations","_key5","currentDM","firstChild","compose_toConsumableArray","dispose","bin","_len3","_key3","project","_len2","_key2","_len4","_key4","calculateVariable","difference","naturalJoin","fullOuterJoin","union","version","Stats","stats_namespaceObject","DataFormat","FilteringMode","enums_namespaceObject","__webpack_exports__"],"mappings":"CAAA,SAAAA,EAAAC,GACA,iBAAAC,SAAA,iBAAAC,OACAA,OAAAD,QAAAD,IACA,mBAAAG,eAAAC,IACAD,OAAA,eAAAH,GACA,iBAAAC,QACAA,QAAA,UAAAD,IAEAD,EAAA,UAAAC,IARA,CASCK,OAAA,WACD,mBCTA,IAAAC,KAGA,SAAAC,EAAAC,GAGA,GAAAF,EAAAE,GACA,OAAAF,EAAAE,GAAAP,QAGA,IAAAC,EAAAI,EAAAE,IACAC,EAAAD,EACAE,GAAA,EACAT,YAUA,OANAU,EAAAH,GAAAI,KAAAV,EAAAD,QAAAC,IAAAD,QAAAM,GAGAL,EAAAQ,GAAA,EAGAR,EAAAD,QA0DA,OArDAM,EAAAM,EAAAF,EAGAJ,EAAAO,EAAAR,EAGAC,EAAAQ,EAAA,SAAAd,EAAAe,EAAAC,GACAV,EAAAW,EAAAjB,EAAAe,IACAG,OAAAC,eAAAnB,EAAAe,GAA0CK,YAAA,EAAAC,IAAAL,KAK1CV,EAAAgB,EAAA,SAAAtB,GACA,oBAAAuB,eAAAC,aACAN,OAAAC,eAAAnB,EAAAuB,OAAAC,aAAwDC,MAAA,WAExDP,OAAAC,eAAAnB,EAAA,cAAiDyB,OAAA,KAQjDnB,EAAAoB,EAAA,SAAAD,EAAAE,GAEA,GADA,EAAAA,IAAAF,EAAAnB,EAAAmB,IACA,EAAAE,EAAA,OAAAF,EACA,KAAAE,GAAA,iBAAAF,QAAAG,WAAA,OAAAH,EACA,IAAAI,EAAAX,OAAAY,OAAA,MAGA,GAFAxB,EAAAgB,EAAAO,GACAX,OAAAC,eAAAU,EAAA,WAAyCT,YAAA,EAAAK,UACzC,EAAAE,GAAA,iBAAAF,EAAA,QAAAM,KAAAN,EAAAnB,EAAAQ,EAAAe,EAAAE,EAAA,SAAAA,GAAgH,OAAAN,EAAAM,IAAqBC,KAAA,KAAAD,IACrI,OAAAF,GAIAvB,EAAA2B,EAAA,SAAAhC,GACA,IAAAe,EAAAf,KAAA2B,WACA,WAA2B,OAAA3B,EAAA,SAC3B,WAAiC,OAAAA,GAEjC,OADAK,EAAAQ,EAAAE,EAAA,IAAAA,GACAA,GAIAV,EAAAW,EAAA,SAAAiB,EAAAC,GAAsD,OAAAjB,OA
AAkB,UAAAC,eAAA1B,KAAAuB,EAAAC,IAGtD7B,EAAAgC,EAAA,GAIAhC,IAAAiC,EAAA,25DClFA,IAAMC,EAAYlC,EAAQ,GAE1BL,EAAOD,QAAUwC,EAAUC,QAAUD,EAAUC,QAAUD,qxBCKzD,IAOeE,GANXC,UAAW,WACXC,QAAS,SACTC,QAAS,SACTC,KAAM,QCEKC,GANXC,YAAa,cACbC,SAAU,WACVC,IAAK,MACLC,OAAQ,UCAGC,GAHXC,WAAY,cCKDC,GAJXC,QAAS,UACTC,UAAW,aCGAC,GALXC,OAAQ,SACRC,QAAS,UACTC,IAAK,OCQMC,GAVXC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,MAAO,QACPC,KAAM,OACNC,MAAO,QACPC,IAAK,OCRT,SAASC,EAAqBC,GAC1B,OAAIA,aAAgBC,KACTD,EAGJ,IAAIC,KAAKD,GASpB,SAASE,EAAKxC,GACV,OAAQA,EAAI,GAAL,IAAgBA,EAAOA,EA8BP,SAASyC,EAAmBC,GACnDC,KAAKD,OAASA,EACdC,KAAKC,cAAWC,EAChBF,KAAKG,gBAAaD,EAftBE,OAAOC,OAAS,SAAUC,GACtB,OAAOA,EAAKC,QAAQ,2BAA4B,SAkBpDT,EAAkBU,aAAe,IAIjCV,EAAkBW,yBACdC,KAAM,EACNC,MAAO,EACPC,IAAK,EACLC,KAAM,EACNC,OAAQ,EACRC,OAAQ,EACRC,YAAa,GAUjBlB,EAAkBmB,oBAAsB,SAAUC,GAC9C,OAAO,SAAUC,GACb,IAAIC,EACJ,OAAIC,SAASD,EAAYE,SAASH,EAAK,KAC5BC,EAGJF,IAYfpB,EAAkByB,mBAAqB,SAAUC,EAAON,GACpD,OAAO,SAACC,GACJ,IACItF,EADAD,SAGJ,IAAKuF,EAAO,OAAOD,EAEnB,IAAMO,EAAON,EAAIO,cAEjB,IAAK9F,EAAI,EAAGC,EAAI2F,EAAMG,OAAQ/F,EAAIC,EAAGD,IACjC,GAAI4F,EAAM5F,GAAG8F,gBAAkBD,EAC3B,OAAO7F,EAIf,YAAUsE,IAANtE,EACOsF,EAEJ,OAqBfpB,EAAkB8B,oBAAsB,WACpC,IAAMC,GACFC,OACI,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,MACI,SACA,SACA,UACA,YACA,WACA,SACA,aAGFC,GACFF,OACI,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,MACI,UACA,WACA,QACA,QACA,MACA,OACA,OACA,SACA,YACA,UACA,WACA,aAsPR,OAjPIE,GAEI9F,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAGP,OAFUzB,EAAoByB,GAErBmB,WAAWC,aAG5B1G,GAEIM,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GACP,IACMqB,EADI9C,EAAoByB,GACdmB,WAAa,GAE7B,OAAkB,IAAVE,EAAc,GAAKA,GAAOD,aAG1C7E,GAEIvB,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCG,GAEItG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,
WAEA,GAAK,KAAO,OAGpCI,GAEIvG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACfwB,gBAKvBC,GAEIzG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACZ0B,gBAK1BC,GAEI3G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACjB4B,kBAEHR,aAGlBS,GAEI7G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQC,MAAMmB,KAAK,KAA9B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQC,OACrDO,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQC,MAAMoB,GAAMX,aAGpCa,GAEIjH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQE,KAAKkB,KAAK,KAA7B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQE,MACrDM,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQE,KAAKmB,GAAMX,aAGnCc,GAEIlH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GAChBmC,UAEHf,aAGnBrG,GAEIC,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GAChBmC,aAKtBC,GAEIpH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUF,MAAMmB,KAAK,KAAhC,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUF,OACvDO,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUF,MAAM0B,GAAQjB,aAGxCmB,GAEIvH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUD,KAAKkB,KAAK,KAA/B,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUD,MACvDM,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUD,KAAKyB,GAAQjB,aAGvCvG,GAEIG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OALD,SAKSjB,GAAO,OAAOrB,EAAkBmB,qBAAlBnB,CAAwCqB,GAAO,GACrEkB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACdsC,WAEG,KAG3BE,GAEIxH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OALD,SAKSjB,GACJ,IAAIyC,SACJ,GAAIzC,EAAK,CACL,IAAMtF,EAAIsF,EAAIQ,OACdR,EAAMA,EAAI0C,UAAUhI,EAAI,EAAGA,GAE/B,IAAIuF,EAAYtB,EAAkBmB,qBAAlBnB,CAAwCqB,GACpD2C,EAAc,IAAIlE,KAClBmE,EAAcC,KAAKC,MAAOH,EAAYI,cAAiB,KAO3D,OAHIxE,EAFJkE,KAAYG,EAAc3C,GAEM8C,cAAgBJ,EAAYI,gBACxDN,MAAYG,EAAc,GAAI3C,GAE3B1B,EAAoBkE,GAAQM,eAEvC7B,UAt
BD,SAsBYlB,GACP,IACIgD,EADMzE,EAAoByB,GACjB+C,cAAc3B,WACvB1G,SAOJ,OALIsI,IACAtI,EAAIsI,EAAKxC,OACTwC,EAAOA,EAAKN,UAAUhI,EAAI,EAAGA,IAG1BsI,IAGfC,GAEIjI,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACf+C,cAAc3B,eAgB7CzC,EAAkBuE,oBAAsB,WACpC,IAAMC,EAAcxE,EAAkB8B,sBAEtC,OACIf,KAAMyD,EAAYrC,EAClBsC,QAASD,EAAYzI,EACrB2I,eAAgBF,EAAY5G,EAC5B+G,eAAgBH,EAAY7B,EAC5B3B,OAAQwD,EAAY5B,EACpB3B,OAAQuD,EAAY1B,EACpB8B,UAAWJ,EAAYtB,EACvB2B,SAAUL,EAAYlB,EACtBwB,aAAcN,EAAYjB,EAC1BwB,4BAA6BP,EAAYpI,EACzC4I,YAAaR,EAAYf,EACzBwB,WAAYT,EAAYZ,EACxBsB,cAAeV,EAAYtI,EAC3BiJ,WAAYX,EAAYX,EACxBuB,UAAWZ,EAAYF,IAW/BtE,EAAkBqF,cAAgB,WAC9B,IAAMb,EAAcxE,EAAkB8B,sBAChCwD,EAAkB,WAMpB,IALA,IAAIxJ,EAAI,EACJyJ,SACAC,SACEzJ,EAAI0J,UAAK5D,OAER/F,EAAIC,EAAGD,IACVyJ,oBAAWzJ,OAAXsE,EAAAqF,UAAW3J,IACX2J,UAAA5D,QAAS/F,OAATsE,EAAAqF,UAAS3J,MACL0J,EAAcD,GAItB,OAAKC,EAEEA,EAAY,GAAGlD,OAAOkD,EAAY,IAFd,MAK/B,OACI5E,MAAO4D,EAAYX,EAAGW,EAAYF,EAC9BgB,GAEJzE,OAAQ2D,EAAYf,EAAGe,EAAYZ,EAAGY,EAAYtI,EAC9CoJ,GAEJxE,KAAM0D,EAAYtB,EAAGsB,EAAYlB,EAAGkB,EAAYjB,EAAGiB,EAAYpI,EAC3DkJ,GAEJvE,MAAOyD,EAAYrC,EAAGqC,EAAYzI,EAAGyI,EAAY5G,EAAG4G,EAAY7B,EAC5D,SAAU+C,EAAcC,EAAcC,EAAWC,GAC7C,IAAIL,SACAM,SACAC,SACA1E,SAcJ,OAZIsE,IAAiBG,EAAUF,GAAaC,IACJ,OAAhCC,EAAO,GAAGxD,OAAOwD,EAAO,MACxBC,GAAO,GAGXP,EAAcG,GAEdH,EADOG,GAGOD,EAGbF,GAELnE,EAAMmE,EAAY,GAAGlD,OAAOkD,EAAY,IACpCO,IACA1E,GAAO,IAEJA,GANoB,OASnCL,QAASwD,EAAY5B,EACjB0C,GAEJrE,QAASuD,EAAY1B,EACjBwC,KAUZtF,EAAkBgG,WAAa,SAAU/F,GAQrC,IAPA,IAAMgG,EAAcjG,EAAkBU,aAChC8D,EAAcxE,EAAkB8B,sBAChCoE,EAAgB1J,OAAO2J,KAAK3B,GAC5B4B,KACFtK,SACAuK,UAEIvK,EAAImE,EAAOqG,QAAQL,EAAanK,EAAI,KAAO,GAC/CuK,EAAcpG,EAAOnE,EAAI,IACmB,IAAxCoK,EAAcI,QAAQD,IAE1BD,EAAWG,MACPnE,MAAOtG,EACP0K,MAAOH,IAIf,OAAOD,GASXpG,EAAkByG,SAAW,SAAU5G,EAAMI,GACzC,IAQIlE,EARE2K,EAAQ9G,EAAoBC,GAC5BuG,EAAapG,EAAkBgG,WAAW/F,GAC1CuE,EAAcxE,EAAkB8B,sBAClC6E,EAAeC,OAAO3G,GACpBgG,EAAcjG,EAAkBU,aAClC8F,SACAK,SACA/K,SAGJ,IAAKA,EAAI,EAAGC,EAAIqK,EAAWvE,OAAQ/F,EAAIC,EAAGD,IAEtC+K,EAAerC,EADfgC
,EAAQJ,EAAWtK,GAAG0K,OACYjE,UAAUmE,GAC5CC,EAAeA,EAAalG,QAAQ,IAAIH,OAAO2F,EAAcO,EAAO,KAAMK,GAG9E,OAAOF,GAQX3G,EAAkBtC,UAAUoJ,MAAQ,SAAUC,EAAeC,GACzD,IAAM3B,EAAgBrF,EAAkBqF,gBAClClF,EAAWD,KAAK+G,kBAAkBF,GAClCG,EAAalH,EAAkBW,wBAC/BwG,EAAUH,GAAWA,EAAQG,QAC7BC,KACAC,KACFC,SACAC,SACAC,SACAnG,SACAvF,SACA2L,SACAC,SACA3L,SACA+H,KAEJ,IAAKwD,KAAejC,EAChB,MAAQ1H,eAAe1B,KAAKoJ,EAAeiC,GAA3C,CAMA,IAJAD,EAAKxF,OAAS,EAEd2F,GADAD,EAAiBlC,EAAciC,IACHK,OAAOJ,EAAe1F,OAAS,EAAG,GAAG,GAE5D/F,EAAI,EAAGC,EAAIwL,EAAe1F,OAAQ/F,EAAIC,EAAGD,SAI9BsE,KAFZiB,EAAMlB,GADNsH,EAAQF,EAAezL,IACFO,OAGjBgL,EAAKd,KAAK,MAEVc,EAAKd,MAAMkB,EAAOpG,IAM1B,SAAqBjB,KAFrBsH,EAAcF,EAAWI,MAAM1H,KAAMmH,KAEa,OAAhBK,KAA0BP,EACxD,MAGJC,EAAWF,EAAWI,IAAgBI,EAU1C,OAPIN,EAAWvF,QAAU3B,KAAK2H,gBAAgBT,EAAWvF,QAErDiC,EAAOgE,QAAQV,EAAW,GAAI,EAAG,GAEjCtD,EAAOgE,QAAPF,MAAA9D,EAAkBsD,GAGftD,GAQX9D,EAAkBtC,UAAUuJ,kBAAoB,SAAUF,GACtD,IAYIhL,EAZEkE,EAASC,KAAKD,OACduE,EAAcxE,EAAkB8B,sBAChCmE,EAAcjG,EAAkBU,aAChC0F,EAAapG,EAAkBgG,WAAW/F,GAC1C8H,KAEFC,SACAC,SACAC,SACAC,SACAC,SAGAtM,SAEJsM,EAAcxB,OAAO3G,GAErB,IAAMoI,EAAWjC,EAAWkC,IAAI,SAAAC,GAAA,OAAOA,EAAI/B,QACrCgC,EAAmBpC,EAAWvE,OACpC,IAAK/F,EAAI0M,EAAmB,EAAG1M,GAAK,EAAGA,KACnCoM,EAAW9B,EAAWtK,GAAGsG,OAEV,IAAMgG,EAAYvG,OAAS,QAKdzB,IAAxB4H,IACAA,EAAsBI,EAAYvG,QAGtCsG,EAAaC,EAAYrE,UAAUmE,EAAW,EAAGF,GACjDI,EAAcA,EAAYrE,UAAU,EAAGmE,EAAW,GAC9C5H,OAAOC,OAAO4H,GACdC,EAAYrE,UAAUiE,EAAqBI,EAAYvG,QAE3DmG,EAAsBE,GAblBF,EAAsBE,EAgB9B,IAAKpM,EAAI,EAAGA,EAAI0M,EAAkB1M,IAC9BmM,EAAS7B,EAAWtK,GACpBsM,EAAcA,EAAY3H,QAAQwF,EAAcgC,EAAOzB,MAAOhC,EAAYyD,EAAOzB,OAAOnE,WAG5F,IAAMoG,EAAgB1B,EAAc2B,MAAM,IAAIpI,OAAO8H,QAGrD,IAFAK,EAAcE,QAET7M,EAAI,EAAGC,EAAIsM,EAASxG,OAAQ/F,EAAIC,EAAGD,IACpCiM,EAASM,EAASvM,IAAM2M,EAAc3M,GAE1C,OAAOiM,GAQX/H,EAAkBtC,UAAUkL,cAAgB,SAAU7B,GAClD,IAAIlH,EAAO,KACX,GAAIgJ,OAAOtH,SAASwF,GAChBlH,EAAO,IAAIC,KAAKiH,QACb,IAAK7G,KAAKD,QAAUH,KAAKgH,MAAMC,GAClClH,EAAO,IAAIC,KAAKiH,OAEf,CACD,IAAM5G,EAAWD,KAAKC,SAAWD,KAAK4G,MAAMC,GACxC5G,EAAS0B,SACT3B,KAAKG,WAAL,IAAAyI,SAAApL,UAAAJ,KAAAsK,MAAsB9H,MAAtB,M
AAAiJ,6HAAAC,CAA8B7I,MAC9BN,EAAOK,KAAKG,YAGpB,OAAOR,GAGXG,EAAkBtC,UAAUmK,gBAAkB,SAASoB,GACnD,OAAe,IAARA,GAAa/I,KAAKD,OAAOyI,MAAM,QAAQ7G,QASlD7B,EAAkBtC,UAAU+I,SAAW,SAAUxG,EAAQ8G,GACrD,IAAI1G,SAQJ,OANI0G,EACA1G,EAAaH,KAAKG,WAAaH,KAAK0I,cAAc7B,IACzC1G,EAAaH,KAAKG,cAC3BA,EAAaH,KAAK0I,cAAc7B,IAG7B/G,EAAkByG,SAASpG,EAAYJ,ICruBnC,IAAAiJ,EAAA,SAACC,GACZ,IAAIrN,EAAI,EACR,OAAO,WAAe,QAAAsN,EAAA3D,UAAA5D,OAAXwH,EAAWC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAXF,EAAWE,GAAA9D,UAAA8D,GAClBF,EAAOG,QAAQ,SAACnI,EAAKoI,GACXN,EAAMM,aAAuBH,QAC/BH,EAAMM,GAAcH,MAAMI,MAAO7H,OAAQ/F,KAE7CqN,EAAMM,GAAYlD,KAAKlF,KAE3BvF,kNCdF6N,EAAe,SACfC,EAAgBpN,OAAOkB,UAAU+E,SACjCoH,EAAc,kBACdC,EAAa,iBAEnB,SAASC,EAAexB,EAAKyB,GAIzB,IAHA,IAAIlO,EAAIkO,EAAUnI,OACdoI,GAAU,EAEPnO,GAAG,CACN,GAAIyM,IAAQyB,EAAUlO,GAElB,OADAmO,EAASnO,EAGbA,GAAK,EAGT,OAAOmO,EA2GX,SAASC,EAASC,EAAMC,EAAMC,GAE1B,YAAI,IAAOF,EAAP,YAAAG,EAAOH,MAASR,SAAgB,IAAOS,EAAP,YAAAE,EAAOF,MAAST,EACzC,WAGP,IAAOS,EAAP,YAAAE,EAAOF,MAAST,GAAyB,OAATS,EACzBD,SAGP,IAAOA,EAAP,YAAAG,EAAOH,MAASR,IAChBQ,EAAOC,aAAgBd,aAnH/B,SAASiB,EAAMJ,EAAMC,EAAMC,EAAWG,EAAQC,GAC1C,IAAIC,EACAC,EACAC,EACAC,EACAC,EAcJ,GATKL,GAKDD,EAAOjE,KAAK4D,GACZM,EAAOlE,KAAK6D,KALZI,GAAUL,GACVM,GAAUL,IAOVA,aAAgBd,MAChB,IAAKoB,EAAO,EAAGA,EAAON,EAAKvI,OAAQ6I,GAAQ,EAAG,CAC1C,IACIC,EAASR,EAAKO,GACdE,EAASR,EAAKM,GAElB,MAAOnH,GACH,eAGA,IAAOqH,EAAP,YAAAN,EAAOM,MAAWjB,EACZU,QAAwBjK,IAAXwK,IACfT,EAAKO,GAAQE,IAIF,OAAXD,SAAmB,IAAOA,EAAP,YAAAL,EAAOK,MAAWhB,IACrCgB,EAASR,EAAKO,GAAQE,aAAkBtB,cAG9B,KADdwB,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,SAMrD,IAAKC,KAAQN,EAAM,CACf,IACIO,EAASR,EAAKO,GACdE,EAASR,EAAKM,GAElB,MAAOnH,GACH,SAGJ,GAAe,OAAXqH,SAAmB,IAAOA,EAAP,YAAAN,EAAOM,MAAWjB,GAKrCkB,EAAMjB,EAAc3N,KAAK2O,MACbf,GACO,OAAXc,SAAmB,IAAOA,EAAP,YAAAL,EAAOK,MAAWhB,IACrCgB,EAASR,EAAKO,QAGJ,KADdI,EAAOf,EAAea,EAAQH,IAE1BE,EAASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,IAGxCI,IAAQf,GACE,OAAXa,GAAqBA,aAAkBrB,QACvCqB,EAASR,EAAKO,QAGJ,KADdI,EAAOf,EAAea,EAAQH,IAE1BE,E
AASR,EAAKO,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQP,EAAWG,EAAQC,IAI7CN,EAAKO,GAAQE,MAGhB,CACD,GAAIP,QAAwBjK,IAAXwK,EACb,SAEJT,EAAKO,GAAQE,GAIzB,OAAOT,EAiBPI,CAAMJ,EAAMC,EAAMC,GACXF,GCnIJ,SAASY,EAAS1J,GACrB,OAAOiI,MAAMyB,QAAQ1J,GA6BlB,SAAS2J,EAAY3J,GACxB,MAAsB,mBAARA,EAaX,IAAM4J,EAAc,wBAAY,IAAInL,MAAOoL,UAAYhH,KAAKiH,MAAsB,IAAhBjH,KAAKkH,WASvE,SAASC,EAAWC,EAAMC,GAC7B,IAAKR,EAAQO,KAAUP,EAAQQ,GAC3B,OAAOD,IAASC,EAGpB,GAAID,EAAKzJ,SAAW0J,EAAK1J,OACrB,OAAO,EAGX,IAAK,IAAI/F,EAAI,EAAGA,EAAIwP,EAAKzJ,OAAQ/F,IAC7B,GAAIwP,EAAKxP,KAAOyP,EAAKzP,GACjB,OAAO,EAIf,OAAO,EASJ,SAAS0P,EAAanK,GACzB,OAAOA,EASJ,IAAMoK,EAAmB,SAACC,GAC7B,OApEG,SAAmBrK,GACtB,MAAsB,iBAARA,EAmEVsK,CAASD,GACF1N,EAAWE,QACX6M,EAAQW,IAASX,EAAQW,EAAK,IAC9B1N,EAAWG,QACX4M,EAAQW,KAA0B,IAAhBA,EAAK7J,QAlF/B,SAAmBR,GACtB,OAAOA,IAAQ7E,OAAO6E,GAiF4BuK,CAASF,EAAK,KACrD1N,EAAWC,UAEf,MChDI4N,GAnDXH,QAEAI,gBAHe,SAGEC,EAAU1P,GACvB,IAAM2P,EAAS3P,GAAQ4O,IA4CvB,OA1CA/K,KAAKwL,KAAKM,IACN3P,KAAM2P,EACN3C,OAAQ0C,EAERE,UAJgB,WAKZ,IAAIA,EAAY/L,KAAKgM,iBAQrB,OANKD,IACDA,EAAY/L,KAAKgM,oBACjBhM,KAAKmJ,OAAOG,QAAQ,SAAC2C,GACjBF,EAAUE,EAAM9P,QAAU8P,KAG3BF,GAEXG,WAfgB,WAgBZ,IAAIC,EAAgBnM,KAAKoM,eAUzB,OARKD,IACDA,EAAgBnM,KAAKoM,kBACrBpM,KAAKmJ,OAAOG,QAAQ,SAAC2C,GACbA,EAAMI,SAASC,OAAS5N,EAAUC,UAClCwN,EAAcF,EAAM9P,QAAU8P,MAInCE,GAEXI,aA5BgB,WA6BZ,IAAIC,EAAkBxM,KAAKyM,iBAU3B,OARKzM,KAAKyM,mBACND,EAAkBxM,KAAKyM,oBACvBzM,KAAKmJ,OAAOG,QAAQ,SAAC2C,GACbA,EAAMI,SAASC,OAAS5N,EAAUE,YAClC4N,EAAgBP,EAAM9P,QAAU8P,MAIrCO,IAGRxM,KAAKwL,KAAKM,8PCKVY,aA1CX,SAAAC,EAAaxL,EAAK8K,gGAAOW,CAAA5M,KAAA2M,GACrBrQ,OAAOC,eAAeyD,KAAM,UACxBxD,YAAY,EACZqQ,cAAc,EACdC,UAAU,EACVjQ,MAAOsE,IAGXnB,KAAKiM,MAAQA,+CAoBb,OAAOvF,OAAO1G,KAAKnD,yCAUnB,OAAOmD,KAAKnD,oCArBZ,OAAOmD,KAAK+M,gBCxBb,SAASC,EAAoBC,EAAYC,GACxCD,EAAWtL,OAAS,GACDsL,EAAWE,MAAM,KACzB7D,QAAQ,SAAC8D,GAChB,IAAMC,EAAaD,EAAQD,MAAM,KAC3BG,GAAUD,EAAW,GACrBE,IAAQF,EAAW,IAAMA,EAAW,IAC1C,GAAIE,GAAOD,EACP,IAAK,IAAI1R,EAAI0R,EAAO1R,GAAK2R,EAAK3R,GAAK,EAC/BsR,EAAStR,kQCVvB4R,aAqBF,SAAAA,EAAa3Q,gGAAO4Q,CAAAzN,KAAAwN,GAChBxN,KAAK+M,OAAS
lQ,0DAdO6Q,GACrB,OAAKA,EAGEpR,OAAOqR,OAAOH,EAAkBI,qBAAsBF,GAFlDF,EAAkBI,4DAsB7B,OAAO5N,KAAK+M,0CAUZ,OAAOrG,OAAO1G,KAAK+M,4CAGN5L,GACb,OAAQA,aAAeqM,KAAwBA,EAAkBK,mBAAmB1M,0CAGlEA,GAClB,OAAOA,aAAeqM,EAAoBrM,EAAMqM,EAAkBK,mBAAmB1M,YAO7FqM,EAAkBM,KAAO,IAAIN,EAAkB,QAC/CA,EAAkBO,GAAK,IAAIP,EAAkB,MAC7CA,EAAkBQ,IAAM,IAAIR,EAAkB,OAO9CA,EAAkBI,sBACdK,QAAST,EAAkBO,GAC3BG,IAAKV,EAAkBQ,IACvBG,KAAMX,EAAkBM,KACxB5N,UAAWsN,EAAkBO,IAGlB,IAAAK,EAAA,qaC5ETC,EAAkB,SAACC,EAAShB,EAAOC,GAIrC,IAHA,IAAMgB,KACFC,EAAOlB,EAEJkB,EAAOjB,GACVgB,EAAQlI,KAAKmI,GACbA,GAAQF,EAIZ,OAFAC,EAAQlI,KAAKmI,GAEND,GAGLE,EAAkB,SAACC,EAAc7R,GAOnC,IANA,IAAI8R,EAAU,EACVC,EAAWF,EAAa/M,OAAS,EACjCkN,SACArN,SAGGmN,GAAWC,GAAU,CAIxB,GAAI/R,IAFJ2E,EAAQkN,EADRG,EAASF,EAAU3K,KAAK8K,OAAOF,EAAWD,GAAW,KAGlCrB,OAASzQ,EAAQ2E,EAAM+L,IACtC,OAAO/L,EACA3E,GAAS2E,EAAM+L,IACtBoB,EAAUE,EAAS,EACZhS,EAAQ2E,EAAM8L,QACrBsB,EAAWC,EAAS,GAI5B,OAAO,MChCJ,IAUME,GACTC,OAAQ,SACRC,QAAS,UACTC,QAAS,QACTC,QAAS,UACTC,QAAS,qBACTC,IAAK,OAGIC,GACTC,MAAO,QACPC,UAAW,YACXC,WAAY,aACZC,QAAS,UACTC,UAAW,aAGFC,EACJ,MC2BF,MCnDA,SAASC,EAAiBC,EAAKC,GAClC,IAAMC,KACAC,KASN,OARAH,EAAI3G,OAAOG,QAAQ,SAAC2C,GAChBgE,EAAO5J,KAAK4F,EAAMI,SAASlQ,QAE/B4T,EAAI5G,OAAOG,QAAQ,SAAC2C,IAC6B,IAAzCgE,EAAO7J,QAAQ6F,EAAMI,SAASlQ,OAC9B6T,EAAO3J,KAAK4F,EAAMI,SAASlQ,QAG5B6T,ECRX,SAASE,IAAoB,OAAO,EAY7B,SAASC,EAAcC,EAAKC,EAAKC,GAA+D,IAArDC,EAAqDhL,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,IAAAA,UAAA,GAAxBiL,EAAwBjL,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAb+J,EAAMC,MACtFlD,KACAb,KACAiF,EAAqBH,GAAYJ,EACjCQ,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBE,EAAoBH,EAAcvU,KAClC2U,EAAoBF,EAAczU,KAClCA,EAAUuU,EAAcvU,KAAxB,IAAgCyU,EAAczU,KAC9C4U,EAAmBlB,EAAgBa,EAAeE,GAExD,GAAIC,IAAsBC,EACtB,MAAM,IAAIE,MAAM,8CA+EpB,OA5EAN,EAAcvH,OAAOG,QAAQ,SAAC2C,GAC1B,IAAMgF,EAAYjH,KAAYiC,EAAMI,WACc,IAA9C0E,EAAiB3K,QAAQ6K,EAAU9U,OAAiBoU,IACpDU,EAAU9U,KAAUuU,EAAcvU,KAAlC,IAA0C8U,EAAU9U,MAExDkQ,EAAOhG,KAAK4K,KAEhBL,EAAczH,OAAOG,QAAQ,SAAC2C,GAC1B,IAAMgF,EAAYjH,KAAYiC,EAAMI,WACc,IAA9C0E,EAAiB3K,QAAQ6K,EAAU9U,MAC9BoU,IACDU,EAAU9U,KA
AUyU,EAAczU,KAAlC,IAA0C8U,EAAU9U,KACpDkQ,EAAOhG,KAAK4K,IAGhB5E,EAAOhG,KAAK4K,KAKpBjE,EAAmBoD,EAAIc,YAAa,SAACtV,GACjC,IAAIuV,GAAW,EACXC,SACJpE,EAAmBqD,EAAIa,YAAa,SAACG,GACjC,IAAMC,KACAC,KACNA,EAAQV,MACRU,EAAQT,MACRJ,EAAcvH,OAAOG,QAAQ,SAAC2C,GAC1BqF,EAAMjL,KAAK4F,EAAMuF,aAAahG,KAAK5P,IACnC2V,EAAQV,GAAmB5E,EAAM9P,QAAU8P,EAAMuF,aAAahG,KAAK5P,KAEvEgV,EAAczH,OAAOG,QAAQ,SAAC2C,IAC+B,IAAnD8E,EAAiB3K,QAAQ6F,EAAMI,SAASlQ,OAAgBoU,GAC1De,EAAMjL,KAAK4F,EAAMuF,aAAahG,KAAK6F,IAEvCE,EAAQT,GAAmB7E,EAAM9P,QAAU8P,EAAMuF,aAAahG,KAAK6F,KAGvE,IAIMI,EAAYC,GAAgBH,EAAQV,IACpCc,EAAYD,GAAgBH,EAAQT,IAC1C,GAAIL,EAAmBgB,EAAWE,EALb,kBAAMvB,EAAIwB,gBACV,kBAAMvB,EAAIuB,oBAI4D,CACvF,IAAMC,KACNP,EAAMhI,QAAQ,SAACwI,EAASC,GACpBF,EAASxF,EAAO0F,GAAK5V,MAAQ2V,IAE7BX,GAAY7B,EAAMC,QAAUiB,EAC5BhF,EAAK4F,GAAeS,GAGpBrG,EAAKnF,KAAKwL,GACVV,GAAW,EACXC,EAAcxV,QAEf,IAAK4U,IAAalB,EAAME,WAAagB,IAAalB,EAAMG,cAAgB0B,EAAU,CACrF,IAAMU,KACF9I,EAAM2H,EAAcvH,OAAOxH,OAAS,EACxC2P,EAAMhI,QAAQ,SAACwI,EAASC,GAEhBF,EAASxF,EAAO0F,GAAK5V,MADrB4V,GAAOhJ,EACsB+I,EAGA,OAGrCX,GAAW,EACXC,EAAcxV,EACd4P,EAAKnF,KAAKwL,QAKf,IAAIG,GAAUxG,EAAMa,GAAUlQ,SC3GzC,SAAS8V,EAAWjP,EAAGO,GACnB,IAAM2O,KAAQlP,EACRmP,KAAQ5O,EACd,OAAI2O,EAAKC,GACG,EAERD,EAAKC,EACE,EAEJ,EAqEJ,SAASC,EAAWC,GAAyB,IAApBC,EAAoB/M,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,GAAX0M,EAIrC,OAHII,EAAI1Q,OAAS,GArBrB,SAAS4Q,EAAMF,EAAKG,EAAIC,EAAIH,GACxB,GAAIG,IAAOD,EAAM,OAAOH,EAExB,IAAMK,EAAMF,EAAKxO,KAAK8K,OAAO2D,EAAKD,GAAM,GAKxC,OAJAD,EAAKF,EAAKG,EAAIE,EAAKJ,GACnBC,EAAKF,EAAKK,EAAM,EAAGD,EAAIH,GAzC3B,SAAgBD,EAAKG,EAAIE,EAAKD,EAAIH,GAG9B,IAFA,IAAMK,EAAUN,EACVO,KACGhX,EAAI4W,EAAI5W,GAAK6W,EAAI7W,GAAK,EAC3BgX,EAAOhX,GAAK+W,EAAQ/W,GAKxB,IAHA,IAAIoH,EAAIwP,EACJjP,EAAImP,EAAM,EAEL9W,EAAI4W,EAAI5W,GAAK6W,EAAI7W,GAAK,EACvBoH,EAAI0P,GACJC,EAAQ/W,GAAKgX,EAAOrP,GACpBA,GAAK,GACEA,EAAIkP,GACXE,EAAQ/W,GAAKgX,EAAO5P,GACpBA,GAAK,GACEsP,EAAOM,EAAO5P,GAAI4P,EAAOrP,KAAO,GACvCoP,EAAQ/W,GAAKgX,EAAO5P,GACpBA,GAAK,IAEL2P,EAAQ/W,GAAKgX,EAAOrP,GACpBA,GAAK,GAqBbsP,CAAMR,EAAKG,EAAIE,EAAKD,EAAIH,GAEjBD,EAcHE,CAA
KF,EAAK,EAAGA,EAAI1Q,OAAS,EAAG2Q,GAE1BD,0HC3EX,SAASS,EAAWC,EAAUC,EAAU9Q,GACpC,IAAI+Q,SACJ,OAAQF,GACR,KAAKvU,EAAeC,WACpB,KAAKN,EAAiBE,SAEd4U,EADa,SAAbD,EACU,SAAChQ,EAAGO,GAAJ,OAAUA,EAAErB,GAASc,EAAEd,IAEvB,SAACc,EAAGO,GAAJ,OAAUP,EAAEd,GAASqB,EAAErB,IAErC,MACJ,QACI+Q,EAAU,SAACjQ,EAAGO,GACV,IAAM2O,KAAQlP,EAAEd,GACViQ,KAAQ5O,EAAErB,GAChB,OAAIgQ,EAAKC,EACe,SAAba,EAAsB,GAAK,EAElCd,EAAKC,EACe,SAAba,GAAuB,EAAI,EAE/B,GAGf,OAAOC,EAUX,SAASC,EAAU1H,EAAMjC,GACrB,IAAM4J,EAAU,IAAIC,IACdC,KAYN,OAVA7H,EAAKlC,QAAQ,SAACgK,GACV,IAAMC,EAAWD,EAAM/J,GACnB4J,EAAQK,IAAID,GACZF,EAAYF,EAAQ1W,IAAI8W,IAAW,GAAGlN,KAAKiN,IAE3CD,EAAYhN,MAAMkN,GAAWD,KAC7BH,EAAQM,IAAIF,EAAUF,EAAY1R,OAAS,MAI5C0R,EAYX,SAASK,EAAmBC,EAAcC,EAAcC,GACpD,IAAMxO,GACFyO,MAAOH,EAAa,IAQxB,OALAC,EAAaG,OAAO,SAACC,EAAKxF,EAAMyF,GAE5B,OADAD,EAAIxF,GAAQmF,EAAa,GAAGvL,IAAI,SAAAkL,GAAA,OAASA,EAAMO,EAAmBI,GAAK/R,SAChE8R,GACR3O,GAEIA,EA0EJ,SAAS6O,EAAaC,EAAYlH,EAAYmH,EAAeC,EAAgBvN,GAChF,IAMMwN,GACFjI,UACAb,QACA+I,SAEEC,GAPN1N,EAAUxK,OAAOqR,WAHb6G,QAAQ,EACRC,YAAY,GAEwB3N,IAOjB0N,OACjBE,EAAaL,GAAkBA,EAAe1S,OAAS,EAEvDgT,KAiDN,GA/CgBP,EAAcjH,MAAM,KAE5B7D,QAAQ,SAACsL,GACb,IAAK,IAAIhZ,EAAI,EAAGA,EAAIuY,EAAWxS,OAAQ/F,GAAK,EACxC,GAAIuY,EAAWvY,GAAGO,SAAWyY,EAAS,CAClCD,EAAWtO,KAAK8N,EAAWvY,IAC3B,SAMZ+Y,EAAWrL,QAAQ,SAAC2C,GAEhBqI,EAAOjI,OAAOhG,KAAK4F,EAAMI,YAGzBmI,GACAF,EAAOjI,OAAOhG,MACVlK,KAAM,MACNmQ,KAAM,eAIdU,EAAmBC,EAAY,SAACrR,GAC5B0Y,EAAO9I,KAAKnF,SACZ,IAAMwO,EAAYP,EAAO9I,KAAK7J,OAAS,EAEvCgT,EAAWrL,QAAQ,SAAC2C,EAAOoF,GACvBiD,EAAO9I,KAAKqJ,GAAWxD,EAFf,GAE6BpF,EAAMuF,aAAahG,KAAK5P,KAE7D4Y,IACAF,EAAO9I,KAAKqJ,GAAWF,EAAWhT,QAAU/F,GAGhD0Y,EAAOC,KAAKlO,KAAKzK,GAIb8Y,GAAcJ,EAAO9I,KAAKqJ,GAAWxO,KAAKzK,KAI9C8Y,GA7HR,SAAkBI,EAAST,GAOvB,IAPuC,IAC/B7I,EAAiBsJ,EAAjBtJ,KAAMa,EAAWyI,EAAXzI,OACV0I,SACAC,SACAC,SACArZ,EAAIyY,EAAe1S,OAAS,EAEzB/F,GAAK,EAAGA,IACXmZ,EAAYV,EAAezY,GAAG,GAC9BoZ,EAAWX,EAAezY,GAAG,IAC7BqZ,EAAWC,GAAc7I,EAAQ0I,MAO7BjK,EAAWkK,GAEX5C,EAAU5G,EAAM,SAACxI,EAAGO,GAAJ,OAAUyR,EAAShS,EAAEiS,EAAS/S,OAAQqB,EAAE0R,EAAS/S,UAC1D2I,EAAQmK,GAAW,WAC1B,IA
AM3B,EAAcH,EAAU1H,EAAMyJ,EAAS/S,OACvCiT,EAAYH,EAASA,EAASrT,OAAS,GACvCiS,EAAeoB,EAASI,MAAM,EAAGJ,EAASrT,OAAS,GACnDkS,EAAqBD,EAAaxL,IAAI,SAAAiN,GAAA,OAAKH,GAAc7I,EAAQgJ,KAEvEhC,EAAY/J,QAAQ,SAACqK,GACjBA,EAAatN,KAAKqN,EAAmBC,EAAcC,EAAcC,MAGrEzB,EAAUiB,EAAa,SAACrQ,EAAGO,GACvB,IAAMvH,EAAIgH,EAAE,GACN3F,EAAIkG,EAAE,GACZ,OAAO4R,EAAUnZ,EAAGqB,KAIxBmO,EAAK7J,OAAS,EACd0R,EAAY/J,QAAQ,SAACgK,GACjB9H,EAAKnF,KAALqB,MAAA8D,EAAA8J,EAAahC,EAAM,OAnBG,IAsB1B0B,EAA8C,SAAnCtO,OAAOsO,GAAUtT,cAA2B,OAAS,MAChE0Q,EAAU5G,EAAMsH,EAAUmC,EAAS3I,KAAM0I,EAAUC,EAAS/S,UAIpE4S,EAAQP,QACR/I,EAAKlC,QAAQ,SAACzM,GACViY,EAAQP,KAAKlO,KAAKxJ,EAAM0Y,SA6ExBC,CAASlB,EAAQD,GAGjBvN,EAAQ2N,WAAY,CACpB,IAAMgB,EAAUrM,mBAAAkM,EAASlM,MAAMkL,EAAOjI,OAAO1K,UAASyG,IAAI,sBAC1DkM,EAAO9I,KAAKlC,QAAQ,SAACgI,GACjBA,EAAMhI,QAAQ,SAACkC,EAAM5P,GACjB6Z,EAAQ7Z,GAAGyK,KAAKmF,OAGxB8I,EAAO9I,KAAOiK,EAGlB,OAAOnB,EC1NJ,SAASoB,EAAYtF,EAAKC,GAC7B,IAAMsF,KACAtJ,KACAuJ,KACApK,KACAkF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBkF,EAAwBnF,EAAc3E,YACtC+J,EAAwBlF,EAAc7E,YACtC5P,EAAUuU,EAAcvU,KAAxB,UAAsCyU,EAAczU,KAG1D,IAAKgP,EAAWiF,EAAI2F,eAAe5I,MAAM,KAAK6I,OAAQ3F,EAAI0F,eAAe5I,MAAM,KAAK6I,QAChF,OAAO,KAiBX,SAASC,EAAkBC,EAAInK,EAAWoK,GACtCnJ,EAAmBkJ,EAAGhF,YAAa,SAACtV,GAChC,IAAM0V,KACF8E,EAAW,GACfR,EAActM,QAAQ,SAAC+M,GACnB,IAAMxZ,EAAQkP,EAAUsK,GAAY7E,aAAahG,KAAK5P,GACtDwa,OAAgBvZ,EAChByU,EAAM+E,GAAcxZ,IAEnB8Y,EAAUS,KACPD,GAAW3K,EAAKnF,KAAKiL,GACzBqE,EAAUS,IAAY,KASlC,OAjCChG,EAAI2F,eAAe5I,MAAM,KAAM7D,QAAQ,SAACyL,GACrC,IAAM9I,EAAQ4J,EAAsBd,GACpC1I,EAAOhG,KAAK2D,KAAYiC,EAAMI,WAC9BuJ,EAAcvP,KAAK4F,EAAMI,SAASlQ,QA2BtC8Z,EAAkB5F,EAAKyF,GAAuB,GAC9CG,EAAkB7F,EAAKyF,GAAuB,GAEvC,IAAI7D,GAAUxG,EAAMa,GAAUlQ,+PC5DjC+C,GAAgDD,EAAhDC,IAAKC,GAA2CF,EAA3CE,IAAKG,GAAsCL,EAAtCK,MAAOC,GAA+BN,EAA/BM,KAAMC,GAAyBP,EAAzBO,MAAOC,GAAkBR,EAAlBQ,IAAKL,GAAaH,EAAbG,IAAKC,GAAQJ,EAARI,IAEhD,SAASiX,GAAkBjE,GACvB,OAAOA,EAAIkE,OAAO,SAAA/L,GAAA,QAAUA,aAAgB4D,KAShD,SAASoI,GAAKnE,GACV,GAAIxH,EAAQwH,MAAUA,EAAI,aAAcjJ,OAAQ,CAC5C,IAAMqN,EAAiBH,GAAkBjE,GAIzC,OAHiBoE,EAAe9U,OACZ8U,EAAe1C,OAA
O,SAACC,EAAK0C,GAAN,OAAe1C,EAAM0C,GAAM,GAC/CtI,EAAkBN,KAG5C,OAAOM,EAAkBN,KAU7B,SAAS6I,GAAKtE,GACV,GAAIxH,EAAQwH,MAAUA,EAAI,aAAcjJ,OAAQ,CAC5C,IAAMwN,EAAWJ,GAAInE,GACftJ,EAAMsJ,EAAI1Q,QAAU,EAC1B,OAAQgH,OAAOkO,MAAMD,IAAaA,aAAoBxI,EAC7CA,EAAkBN,KAAO8I,EAAW7N,EAEjD,OAAOqF,EAAkBN,KAgG7B,IAAMgJ,YACD5X,GAAMsX,IADLO,GAAAC,EAED7X,GAAMwX,IAFLI,GAAAC,EAGD5X,GAzFL,SAAciT,GACV,GAAIxH,EAAQwH,MAAUA,EAAI,aAAcjJ,OAAQ,CAE5C,IAAM6N,EAAiBX,GAAkBjE,GAEzC,OAAQ4E,EAAetV,OAAUqC,KAAKkT,IAALxP,MAAA1D,KAAAmT,GAAYF,IAAkB7I,EAAkBN,KAErF,OAAOM,EAAkBN,OA+EvBiJ,GAAAC,EAID3X,GAzEL,SAAcgT,GACV,GAAIxH,EAAQwH,MAAUA,EAAI,aAAcjJ,OAAQ,CAE5C,IAAM6N,EAAiBX,GAAkBjE,GAEzC,OAAQ4E,EAAetV,OAAUqC,KAAKoT,IAAL1P,MAAA1D,KAAAmT,GAAYF,IAAkB7I,EAAkBN,KAErF,OAAOM,EAAkBN,OA8DvBiJ,GAAAC,EAKD1X,GAzDL,SAAgB+S,GACZ,OAAOA,EAAI,KAmDT0E,GAAAC,EAMDzX,GA/CL,SAAe8S,GACX,OAAOA,EAAIA,EAAI1Q,OAAS,KAwCtBoV,GAAAC,EAODxX,GArCL,SAAgB6S,GACZ,OAAIxH,EAAQwH,GACDA,EAAI1Q,OAERyM,EAAkBN,OA0BvBiJ,GAAAC,EAQDvX,GAbL,SAAc4S,GACV,OAAOrO,KAAKqT,KAbhB,SAAmBhF,GACf,IAAIiF,EAAOX,GAAItE,GACf,OAAOsE,GAAItE,EAAIjK,IAAI,SAAAmP,GAAA,OAAAvT,KAAAwT,IAAQD,EAAMD,EAAS,MAWzBG,CAASpF,MAIxB2E,GAWAU,GAAqBxY,6PC1IrByY,cACF,SAAAC,IAAe,IAAAC,EAAA7X,kGAAA8X,CAAA9X,KAAA4X,GACX5X,KAAKiJ,MAAQ,IAAImK,IACjBpT,KAAKiJ,MAAMwK,IAAI,aAAc+C,IAE7Bla,OAAOyb,QAAQjB,IAAQxN,QAAQ,SAACnM,GAC5B0a,EAAK5O,MAAMwK,IAAItW,EAAI,GAAIA,EAAI,0DAc/B,IAAKoI,UAAO5D,OACR,OAAO3B,KAAKiJ,MAAMxM,IAAI,cAG1B,IAAIub,0CAEJ,GAAuB,mBAAZA,EACPhY,KAAKiJ,MAAMwK,IAAI,aAAcuE,OAC1B,CAEH,GADAA,EAAUtR,OAAOsR,IAC6B,IAA1C1b,OAAO2J,KAAK6Q,IAAQ1Q,QAAQ4R,GAG5B,MAAM,IAAIhH,MAAJ,WAAqBgH,EAArB,0BAFNhY,KAAKiJ,MAAMwK,IAAI,aAAcqD,GAAOkB,IAK5C,OAAOhY,sCAmCD7D,EAAM6b,GAAS,IAAAC,EAAAjY,KACrB,GAAuB,mBAAZgY,EACP,MAAM,IAAIhH,MAAM,gCAMpB,OAHA7U,EAAOuK,OAAOvK,GACd6D,KAAKiJ,MAAMwK,IAAItX,EAAM6b,GAEd,WAAQC,EAAKC,aAAa/b,yCAGvBA,GACN6D,KAAKiJ,MAAMuK,IAAIrX,IACf6D,KAAKiJ,MAAMkP,OAAOhc,mCAIjBA,GACL,OAAIA,aAAgByM,SACTzM,EAEJ6D,KAAKiJ,MAAMxM,IAAIN,YAgBfic,GAZO,WAClB,IAAInP,EAAQ,KAQZ,OALkB,OAAVA,IACAA,EAAQ,IAAI0O,IAET1O,EAPO,uaCrCtB,SAASoP,GAASC,
EAAWzM,EAAU0M,EAAUC,GAC7C,IAAMC,EAxDV,SAAsBH,EAAWzM,GAC7B,IAAMmE,KAEA0I,EADaJ,EAAU3H,gBACCpE,eAY9B,OAVAjQ,OAAOyb,QAAQW,GAAYpP,QAAQ,SAAAqP,GAAW,IAATxb,EAASyb,GAAAD,EAAA,MACtC9M,GAAYA,EAASlK,QACU,IAA3BkK,EAASzF,QAAQjJ,IACjB6S,EAAO3J,KAAKlJ,GAGhB6S,EAAO3J,KAAKlJ,KAIb6S,EAyCW6I,CAAYP,EAAWzM,GACnCiN,EAhCV,SAAwBR,GAA0B,IAAfC,EAAehT,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MACxC+O,KAEAyE,EADaT,EAAU3H,gBACDzE,aACtB8M,EAAaZ,GAAaa,iBAchC,OAZA3c,OAAO2J,KAAK8S,GAAUzP,QAAQ,SAAC4P,GACU,iBAA1BX,EAASW,KAChBX,EAASW,GAAeH,EAASG,GAAaC,YAElD,IAAMC,EAAYhB,GAAaiB,QAAQd,EAASW,IAC5CE,EACA9E,EAAO4E,GAAeE,GAEtB9E,EAAO4E,GAAeF,EACtBT,EAASW,GAAexB,MAGzBpD,EAcYgF,CAAchB,EAAWC,GACtCpE,EAAamE,EAAU3H,gBACvB4I,EAAgBpF,EAAWpI,YAC3ByN,EAASrF,EAAWhY,KACpBsd,KACAC,KACArN,KACA8G,KACA3H,KACFmO,SAGJrd,OAAOyb,QAAQwB,GAAejQ,QAAQ,SAAAsQ,GAAkB,IAAAC,EAAAjB,GAAAgB,EAAA,GAAhBzc,EAAgB0c,EAAA,GAAXhd,EAAWgd,EAAA,GACpD,IAAgC,IAA5BpB,EAAUrS,QAAQjJ,IAAe2b,EAAW3b,GAG5C,OAFAkP,EAAOhG,KAAK2D,KAAYnN,EAAMwP,WAEtBxP,EAAMwP,SAASC,MACvB,KAAK5N,EAAUC,QACX+a,EAAWrT,KAAKlJ,GAChB,MACJ,QACA,KAAKuB,EAAUE,UACX6a,EAAapT,KAAKlJ,MAK9B,IAAI2c,EAAW,EACf9M,EAAmBsL,EAAUpH,YAAa,SAACtV,GACvC,IAAIme,EAAO,GACXN,EAAanQ,QAAQ,SAAC0Q,GAClBD,EAAUA,EAAV,IAAkBR,EAAcS,GAAGxI,aAAahG,KAAK5P,UAEnCsE,IAAlBiT,EAAQ4G,IACR5G,EAAQ4G,GAAQD,EAChBtO,EAAKnF,SACLoT,EAAanQ,QAAQ,SAAC0Q,GAClBxO,EAAKsO,GAAUE,GAAKT,EAAcS,GAAGxI,aAAahG,KAAK5P,KAE3D8d,EAAWpQ,QAAQ,SAAC0Q,GAChBxO,EAAKsO,GAAUE,IAAMT,EAAcS,GAAGxI,aAAahG,KAAK5P,MAE5Dke,GAAY,GAEZJ,EAAWpQ,QAAQ,SAAC0Q,GAChBxO,EAAK2H,EAAQ4G,IAAOC,GAAG3T,KAAKkT,EAAcS,GAAGxI,aAAahG,KAAK5P,QAM3E,IAAIqe,KACAC,EAAgB,kBAAM5B,EAAU1G,gBAcpC,OAbApG,EAAKlC,QAAQ,SAAC6Q,GACV,IAAM7I,EAAQ6I,EACdT,EAAWpQ,QAAQ,SAAC0Q,GAChB1I,EAAM0I,GAAKlB,EAAWkB,GAAGG,EAAIH,GAAIE,EAAeD,OAGpDzB,GACAA,EAAkB4B,wBAClBT,EAAenB,GAGfmB,EAAe,IAAIU,GAAU7O,EAAMa,GAAUlQ,KAAMqd,IAEhDG,EC9HJ,SAASW,GAAmBlK,EAAKC,GACpC,IAIMkK,EAAkB1K,EAJFO,EAAIO,gBACJN,EAAIM,iBAK1B,OAAO,SAACc,EAAWE,GACf,IAAI6I,GAAc,EASlB,OARAD,EAAgBjR,QAAQ,SAACyL,GAGjByF,IAFA/I,EAAUsD,GAAWlY,QACrB8U,EAAUoD,G
AAWlY,QAAS2d,KAM/BA,GCjBR,SAASC,GAAOrK,EAAKC,GACxB,IAAMsF,KACAtJ,KACAuJ,KACApK,KACAkF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBkF,EAAwBnF,EAAc3E,YACtC+J,EAAwBlF,EAAc7E,YACtC5P,EAAUuU,EAAcvU,KAAxB,UAAsCyU,EAAczU,KAG1D,IAAKgP,EAAWiF,EAAI2F,eAAe5I,MAAM,KAAK6I,OAAQ3F,EAAI0F,eAAe5I,MAAM,KAAK6I,QAChF,OAAO,KAgBX,SAASC,EAAmBC,EAAInK,GAC5BiB,EAAmBkJ,EAAGhF,YAAa,SAACtV,GAChC,IAAM0V,KACF8E,EAAW,GACfR,EAActM,QAAQ,SAAC+M,GACnB,IAAMxZ,EAAQkP,EAAUsK,GAAY7E,aAAahG,KAAK5P,GACtDwa,OAAgBvZ,EAChByU,EAAM+E,GAAcxZ,IAEnB8Y,EAAUS,KACX5K,EAAKnF,KAAKiL,GACVqE,EAAUS,IAAY,KASlC,OAhCChG,EAAI2F,eAAe5I,MAAM,KAAM7D,QAAQ,SAACyL,GACrC,IAAM9I,EAAQ4J,EAAsBd,GACpC1I,EAAOhG,KAAK2D,KAAYiC,EAAMI,WAC9BuJ,EAAcvP,KAAK4F,EAAMI,SAASlQ,QA0BtC8Z,EAAkB7F,EAAKyF,GACvBI,EAAkB5F,EAAKyF,GAEhB,IAAIuE,GAAU7O,EAAMa,GAAUlQ,SCvDlC,SAASue,GAAeC,EAAYC,EAAYtK,GACnD,OAAOH,EAAawK,EAAYC,EAAYtK,GAAU,EAAOhB,EAAME,WAGhE,SAASqL,GAAgBF,EAAYC,EAAYtK,GACpD,OAAOH,EAAayK,EAAYD,EAAYrK,GAAU,EAAOhB,EAAMG,0QCWlDqL,cAQjB,SAAAC,EAAavJ,EAAcvE,gGAAY+N,CAAAhb,KAAA+a,GACnC/a,KAAKwR,aAAeA,EACpBxR,KAAKiN,WAAaA,8CAUlB,MAAM,IAAI+D,MAAM,wDAUhB,OAAOhR,KAAKwR,aAAanF,sCAUzB,OAAOrM,KAAKwR,aAAarV,oCAUzB,OAAO6D,KAAKwR,aAAanF,OAAOC,uCAUhC,OAAOtM,KAAKwR,aAAanF,OAAO4O,8CAUhC,OAAOjb,KAAKwR,aAAanF,OAAO6O,kDAUhC,OAAOlb,KAAKwR,aAAanF,OAAO8O,aAAenb,KAAKwR,aAAanF,OAAOlQ,oCASpE,IAAA0b,EAAA7X,KACEwL,KAIN,OAHAwB,EAAmBhN,KAAKiN,WAAY,SAACrR,GACjC4P,EAAKnF,KAAKwR,EAAKrG,aAAahG,KAAK5P,MAE9B4P,0CAUP,MAAM,IAAIwF,MAAM,0RCpHHoK,irBAAkBN,yCAY/B,OAHK9a,KAAKqb,gBACNrb,KAAKqb,cAAgBrb,KAAKsb,uBAEvBtb,KAAKqb,4DAUZ,MAAM,IAAIrK,MAAM,+DAWhB,OAAOhR,KAAKwL,0QChCC+P,irBAAoBH,0CASjC,OAAOjd,EAAiBC,0DAUL,IAAA6Z,EAAAjY,KACb+Z,EAAO,IAAIyB,IACXC,KAUN,OAPAzO,EAAmBhN,KAAKiN,WAAY,SAACrR,GACjC,IAAM0X,EAAQ2E,EAAKzG,aAAahG,KAAK5P,GAChCme,EAAKvG,IAAIF,KACVyG,EAAK2B,IAAIpI,GACTmI,EAAOpV,KAAKiN,MAGbmI,qQC7BME,eAQjB,SAAAC,EAAapK,EAAcvE,gGAAY4O,CAAA7b,KAAA4b,GAAA,IAAA/D,mKAAAiE,CAAA9b,MAAA4b,EAAAG,WAAAzf,OAAA0f,eAAAJ,IAAA7f,KAAAiE,KAC7BwR,EAAcvE,IADe,OAGnC4K,EAAKoE,eAAiB,KAHapE,qUARLuD,sDAqBX,IAAAnD,EAAAjY
,KACb+Z,EAAO,IAAIyB,IACXC,KAYN,OARAzO,EAAmBhN,KAAKiN,WAAY,SAACrR,GACjC,IAAM0X,EAAQ2E,EAAKzG,aAAahG,KAAK5P,GAChCme,EAAKvG,IAAIF,KACVyG,EAAK2B,IAAIpI,GACTmI,EAAOpV,KAAKiN,MAIbmI,yDAWP,GAAIzb,KAAKic,eACL,OAAOjc,KAAKic,eAUhB,IAPA,IAAMC,EAAalc,KAAKwL,OAAO+K,OAAO,SAAA/L,GAAA,QAAUA,aAAgB4D,KAAoB4H,KAAK,SAAChT,EAAGO,GAAJ,OAAUP,EAAIO,IACjG4Y,EAAQD,EAAWva,OACrBya,EAAUzT,OAAO0T,kBACjBC,SACAC,SACAC,EAAiB,EAEZ5gB,EAAI,EAAGA,EAAIugB,EAAOvgB,IACvB0gB,EAAYJ,EAAWtgB,EAAI,IAC3B2gB,EAAYL,EAAWtgB,MAEL0gB,IAIlBF,EAAUpY,KAAKkT,IAAIkF,EAASG,EAAYL,EAAWtgB,EAAI,IACvD4gB,KAQJ,OALKA,IACDJ,EAAU,MAEdpc,KAAKic,eAAiBG,EAEfpc,KAAKic,gDAUZ,OAAOjc,KAAKwR,aAAanF,OAAOtM,+CAUnB,IAAA0c,EAAAzc,KACPwL,KASN,OARAwB,EAAmBhN,KAAKiN,WAAY,SAACrR,GACjC,IAAM0X,EAAQmJ,EAAKjL,aAAahG,KAAK5P,GACjC0X,aAAiBlF,EACjB5C,EAAKnF,KAAKiN,GAEV9H,EAAKnF,KAAKvG,EAAkByG,SAAS+M,EAAOmJ,EAAK1c,aAGlDyL,qQC3GMkR,irBAAetB,sDAS5B,IAAMuB,EAAU3c,KAAKwR,aAAanF,OAAOuQ,KACzC,OAAQD,EAAQ,GAAIA,EAAQA,EAAQhb,OAAS,mCAU7C,OAAO3B,KAAKwR,aAAanF,OAAOuQ,wQClBnBC,irBAAgB/B,yCAY7B,OAHK9a,KAAKqb,gBACNrb,KAAKqb,cAAgBrb,KAAKsb,uBAEvBtb,KAAKqb,6CAUZ,OAAOrb,KAAKwR,aAAanF,OAAOyQ,wCAUhC,OAAO9c,KAAKwR,aAAanF,OAAO8M,UAAYzB,0CAShC,IACJqF,EAAiB/c,KAAKwR,aAAanF,OAAnC0Q,aACR,OAAOA,aAAwBnU,SAAWmU,EAAezR,gDAUzD,MAAM,IAAI0F,MAAM,+DAWhB,OAAOhR,KAAKwL,0QC/DCwR,irBAAmBH,0CAShC,OAAOre,EAAeC,yDAUH,IAAAwZ,EAAAjY,KACfkX,EAAMvO,OAAO0T,kBACbjF,EAAMzO,OAAOsU,kBAiBjB,OAdAjQ,EAAmBhN,KAAKiN,WAAY,SAACrR,GACjC,IAAM0X,EAAQ2E,EAAKzG,aAAahG,KAAK5P,GACjC0X,aAAiBlF,IAIjBkF,EAAQ4D,IACRA,EAAM5D,GAENA,EAAQ8D,IACRA,EAAM9D,OAIN4D,EAAKE,sQC5CA8F,4KAQb,MAAM,IAAIlM,MAAM,0RCJHmM,irBAA0BD,sCAQpC/b,GAQH,OALKiN,EAAkBgP,UAAUjc,GAGpBiN,EAAkBiP,eAAelc,GAFjCuF,OAAOvF,GAAKmc,0QCXZC,eAOjB,SAAAC,EAAanR,gGAAQoR,CAAAzd,KAAAwd,GAAA,IAAA3F,mKAAA6F,CAAA1d,MAAAwd,EAAAzB,WAAAzf,OAAA0f,eAAAwB,IAAAzhB,KAAAiE,OAAA,OAEjB6X,EAAKxL,OAASA,EACdwL,EAAK8F,KAAO,IAAI7d,EAAkB+X,EAAKxL,OAAOtM,QAH7B8X,qUAPmBqF,sCAoBjC/b,GACH,IAAIyC,SAEJ,GAAKwK,EAAkBgP,UAAUjc,GAI7ByC,EAASwK,EAAkBiP,eAAelc,OAJP,CACnC,IAAIhB,EAAaH,KAAK2d,KAAKjV,cAA
cvH,GACzCyC,EAASzD,EAAaA,EAAW6K,UAAYoD,EAAkBL,GAInE,OAAOnK,qQC9BMga,irBAAqBV,sCAQ/B/b,GAEHA,EAAMuF,OAAOvF,GACb,IAAIyC,SAEJ,GAAKwK,EAAkBgP,UAAUjc,GAK7ByC,EAASwK,EAAkBiP,eAAelc,OALP,CACnC,IAAI0c,EAAU1c,EAAIqH,MALR,2DAMV5E,EAASia,EAAalV,OAAOmV,WAAWD,EAAQ,IAAvC,IAA8ClV,OAAOmV,WAAWD,EAAQ,IAC9DzP,EAAkBL,GAIzC,OAAOnK,qQCpBMma,irBAAyBb,sCAQnC/b,GACH,IAAIyC,SAEJ,GAAKwK,EAAkBgP,UAAUjc,GAI7ByC,EAASwK,EAAkBiP,eAAelc,OAJP,CACnC,IAAIC,EAAY0c,WAAW3c,EAAK,IAChCyC,EAAS+E,OAAOkO,MAAMzV,GAAagN,EAAkBL,GAAK3M,EAI9D,OAAOwC,qQCnBMoa,cAUjB,SAAAC,EAAa9hB,EAAMqP,EAAMa,EAAQjK,gGAAQ8b,CAAAle,KAAAie,GACrCje,KAAK7D,KAAOA,EACZ6D,KAAKqM,OAASA,EACdrM,KAAKoC,OAASA,EACdpC,KAAKwL,KAAOxL,KAAKme,UAAU3S,gDAUpBA,GAAM,IAAAqM,EAAA7X,KACb,OAAOwL,EAAKpD,IAAI,SAAAkL,GAAA,OAASuE,EAAKzV,OAAOwE,MAAM0M,cCiE5C,SAAS8K,GAAaC,EAAYhS,EAAQiS,GAC7C,IAAMC,KAUN,OARMD,GAAWA,EAAQ3c,SACrB2c,EAAUjS,EAAOjE,IAAI,SAAAoC,GAAA,OAAQA,EAAKrO,QAGtCmiB,EAAQhV,QAAQ,SAACkV,EAAQ5iB,GACrB2iB,EAAWC,GAAU5iB,IAGlByQ,EAAOjE,IAAI,SAAAoC,GAAA,OAzFtB,SAAyBgB,EAAMa,GAC3Bb,EAAOA,MACP,IAAIgG,SAEJ,OAAQnF,EAAOC,MACf,KAAK5N,EAAUC,QACX,OAAQ0N,EAAO4O,SACf,KAAKzc,EAAeC,WAGpB,QAEI,OADA+S,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAI0R,IACxD,IAAIf,GAAWxL,EAAf,MAAkChG,EAAK7J,OAAS,IAE/D,KAAKjD,EAAUE,UACX,OAAQyN,EAAO4O,SACf,KAAK9c,EAAiBC,YAElB,OADAoT,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAI8Q,IACxD,IAAI5B,GAAY/J,EAAhB,MAAmChG,EAAK7J,OAAS,IAC5D,KAAKxD,EAAiBE,SAElB,OADAmT,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAIkR,GAAelR,IACvE,IAAIsP,GAASnK,EAAb,MAAgChG,EAAK7J,OAAS,IACzD,KAAKxD,EAAiBI,OAElB,OADAiT,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAIuR,IACxD,IAAIlB,GAAOlL,EAAX,MAA8BhG,EAAK7J,OAAS,IACvD,QAEI,OADA6P,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAI8Q,IACxD,IAAI5B,GAAY/J,EAAhB,MAAmChG,EAAK7J,OAAS,IAEhE,QAEI,OADA6P,EAAe,IAAIwM,GAAa3R,EAAOlQ,KAAMqP,EAAMa,EAAQ,IAAI8Q,IACxD,IAAI5B,GAAY/J,EAAhB,MAAmChG,EAAK7J,OAAS,KA0DlC8c,CAAgBJ,EAAWE,EAAW/T,EAAKrO,OAAQqO,KC3GlE,IAAAkU,IACXC,WAAY7gB,EAAWI,MCuCZ,IAAA0gB,GAvBf,SAAiBvM,EAAKvL,GAIlBA,EAAUxK,OA
AOqR,WAFbkR,gBAAgB,GAEuB/X,GAE3C,IAAI0X,SACEM,KACAzY,EAAO2C,EAAY8V,GAYzB,OAPIN,EAHA1X,EAAQ+X,eAGCxM,EAAI5K,OAAO,EAAG,GAAG,MAK9B4K,EAAI/I,QAAQ,SAAA2C,GAAA,OAAS5F,qIAAA0Y,CAAQ9S,OAErBuS,EAAQM,ICvChBE,MACAC,MACAC,GAAQ,GACRC,GAAU,GACVC,GAAS,GAEb,SAASC,GAAgBP,GACvB,OAAO,IAAIlW,SAAS,IAAK,WAAakW,EAAQ1W,IAAI,SAASjM,EAAMP,GAC/D,OAAO0jB,KAAKC,UAAUpjB,GAAQ,OAASP,EAAI,MAC1CqH,KAAK,KAAO,KA0BF,IAAAuc,GAAA,SAASC,GACtB,IAAIC,EAAW,IAAItf,OAAO,KAAQqf,EAAY,SAC1CE,EAAYF,EAAUG,WAAW,GAWrC,SAASC,EAAUvf,EAAM+U,GACvB,IAIIvY,EAJAgjB,KACAC,EAAIzf,EAAKqB,OACTqe,EAAI,EACJ3iB,EAAI,EAEJ4iB,EAAMF,GAAK,EACXG,GAAM,EAMV,SAAS5Z,IACP,GAAI2Z,EAAK,OAAOhB,GAChB,GAAIiB,EAAK,OAAOA,GAAM,EAAOlB,GAG7B,IAAIpjB,EAAUK,EAAPkkB,EAAIH,EACX,GAAI1f,EAAKsf,WAAWO,KAAOjB,GAAO,CAChC,KAAOc,IAAMD,GAAKzf,EAAKsf,WAAWI,KAAOd,IAAS5e,EAAKsf,aAAaI,KAAOd,KAI3E,OAHKtjB,EAAIokB,IAAMD,EAAGE,GAAM,GACdhkB,EAAIqE,EAAKsf,WAAWI,QAAUb,GAASe,GAAM,EAC9CjkB,IAAMmjB,KAAUc,GAAM,EAAU5f,EAAKsf,WAAWI,KAAOb,MAAWa,GACpE1f,EAAK8U,MAAM+K,EAAI,EAAGvkB,EAAI,GAAG2E,QAAQ,MAAO,KAIjD,KAAOyf,EAAID,GAAG,CACZ,IAAK9jB,EAAIqE,EAAKsf,WAAWhkB,EAAIokB,QAAUb,GAASe,GAAM,OACjD,GAAIjkB,IAAMmjB,GAAUc,GAAM,EAAU5f,EAAKsf,WAAWI,KAAOb,MAAWa,OACtE,GAAI/jB,IAAM0jB,EAAW,SAC1B,OAAOrf,EAAK8U,MAAM+K,EAAGvkB,GAIvB,OAAOqkB,GAAM,EAAM3f,EAAK8U,MAAM+K,EAAGJ,GAGnC,IA7BIzf,EAAKsf,WAAWG,EAAI,KAAOZ,MAAWY,EACtCzf,EAAKsf,WAAWG,EAAI,KAAOX,MAAUW,GA4BjCjjB,EAAIwJ,OAAa2Y,IAAK,CAE5B,IADA,IAAI9E,KACGrd,IAAMkiB,IAAOliB,IAAMmiB,IAAK9E,EAAI9T,KAAKvJ,GAAIA,EAAIwJ,IAC5C+O,GAA4B,OAAtB8E,EAAM9E,EAAE8E,EAAK9c,OACvByiB,EAAKzZ,KAAK8T,GAGZ,OAAO2F,EAgBT,SAASM,EAAUjG,GACjB,OAAOA,EAAI/R,IAAIiY,GAAapd,KAAKwc,GAGnC,SAASY,EAAY/f,GACnB,OAAe,MAARA,EAAe,GAChBof,EAASY,KAAKhgB,GAAQ,IAAM,IAAOA,EAAKC,QAAQ,KAAM,MAAU,IAChED,EAGR,OACEsG,MAlFF,SAAetG,EAAM+U,GACnB,IAAIkL,EAASzB,EAASgB,EAAOD,EAAUvf,EAAM,SAAS6Z,EAAKve,GACzD,GAAI2kB,EAAS,OAAOA,EAAQpG,EAAKve,EAAI,GACrCkjB,EAAU3E,EAAKoG,EAAUlL,EA9B/B,SAAyByJ,EAASzJ,GAChC,IAAI/X,EAAS+hB,GAAgBP,GAC7B,OAAO,SAAS3E,EAAKve,GACnB,OAAOyZ,EAAE/X,EAAO6c,GAAMve,EAAGkjB,IA2BM0B,CAAgBrG,EAAK9
E,GAAKgK,GAAgBlF,KAGzE,OADA2F,EAAKhB,QAAUA,MACRgB,GA6EPD,UAAWA,EACX9f,OA1BF,SAAgB+f,EAAMhB,GAEpB,OADe,MAAXA,IAAiBA,EA9EzB,SAAsBgB,GACpB,IAAIW,EAAYnkB,OAAOY,OAAO,MAC1B4hB,KAUJ,OARAgB,EAAKxW,QAAQ,SAAS6Q,GACpB,IAAK,IAAIuG,KAAUvG,EACXuG,KAAUD,GACd3B,EAAQzY,KAAKoa,EAAUC,GAAUA,KAKhC5B,EAkE0B6B,CAAab,KACpChB,EAAQ1W,IAAIiY,GAAapd,KAAKwc,IAAY5W,OAAOiX,EAAK1X,IAAI,SAAS+R,GACzE,OAAO2E,EAAQ1W,IAAI,SAASsY,GAC1B,OAAOL,EAAYlG,EAAIuG,MACtBzd,KAAKwc,MACNxc,KAAK,OAqBT2d,WAlBF,SAAoBd,GAClB,OAAOA,EAAK1X,IAAIgY,GAAWnd,KAAK,SCzGhC4d,GAAMrB,GAAI,KCAVsB,IDEkBD,GAAIja,MACAia,GAAIhB,UACPgB,GAAI9gB,OACA8gB,GAAID,WCLrBpB,GAAI,OAEQsB,GAAIla,MACAka,GAAIjB,UACPiB,GAAI/gB,OACA+gB,GAAIF,WC4BhB,IAAAG,GAXf,SAAiBpW,EAAK7D,GAKlBA,EAAUxK,OAAOqR,WAHbkR,gBAAgB,EAChBmC,eAAgB,KAEuBla,GAE3C,IAAMma,EAAMzB,GAAM1Y,EAAQka,gBAC1B,OAAOpC,GAAOqC,EAAIpB,UAAUlV,GAAM7D,ICoBvB,IAAAoa,GAxBf,SAAmB7O,GACf,IAAMmM,KACF5iB,EAAI,EACJulB,SACErC,KACAzY,EAAO2C,EAAY8V,GAgBzB,OAdAzM,EAAI/I,QAAQ,SAACkB,GACT,IAAMrB,KACN,IAAK,IAAIhM,KAAOqN,EACRrN,KAAOqhB,EACP2C,EAAiB3C,EAAOrhB,IAExBqhB,EAAOrhB,GAAOvB,IACdulB,EAAiBvlB,EAAI,GAEzBuN,EAAOgY,GAAkB3W,EAAKrN,GAElCkJ,eAAQ8C,MAGJ7M,OAAO2J,KAAKuY,GAASM,IC1BlB,IAAAsC,GAXf,SAAe5V,EAAM1E,GACjB,IAAMua,GAAeC,SAAAJ,GAAUK,OAAAR,GAAQS,OAAA5C,IACjCD,EAAapT,EAAiBC,GAEpC,IAAKmT,EACD,MAAM,IAAI3N,MAAM,mCAGpB,OAAOqQ,EAAW1C,GAAYnT,EAAM1E,iiBCGjC,SAAS4K,GAAiBvI,GAC7B,IAAMsY,KAEN,OADAnlB,OAAO2J,KAAKkD,GAAQG,QAAQ,SAACnM,GAAUskB,EAAKtkB,GAAO,IAAIuP,EAAMvD,EAAOhM,GAAMA,KACnEskB,EAGJ,IAAMC,GAAe,SAAA/I,EAA8BgJ,EAAmBC,GAAmB,IAAAC,EAAAC,GAAAnJ,EAAA,GAAlE1L,EAAkE4U,EAAA,GAAtDzN,EAAsDyN,EAAA,GACxFE,EAAS3N,EAAczS,OAASyS,EAAcjH,MAAM,QACpD6U,EAAkBL,EAAkB5V,YACpCkW,EAAYF,EAAO3Z,IAAI,SAAA8Z,GAAA,OT+BxB,SAAoC1Q,EAAcvE,GAAY,IACzDZ,EAAWmF,EAAXnF,OAER,OAAQA,EAAOC,MACf,KAAK5N,EAAUC,QACX,OAAQ0N,EAAO4O,SACf,KAAKzc,EAAeC,WAEpB,QACI,OAAO,IAAIue,GAAWxL,EAAcvE,GAE5C,KAAKvO,EAAUE,UACX,OAAQyN,EAAO4O,SACf,KAAK9c,EAAiBC,YAClB,OAAO,IAAImd,GAAY/J,EAAcvE,GACzC,KAAK9O,EAAiBE,SAClB,OAAO,IAAIsd,GAASnK,EAAcvE,GACtC,KAAK9O,EAAiBI,OAClB,OAAO,IAAIm
e,GAAOlL,EAAcvE,GACpC,QACI,OAAO,IAAIsO,GAAY/J,EAAcvE,GAE7C,QACI,OAAO,IAAIsO,GAAY/J,EAAcvE,IStDNkV,CAA2BH,EAAgBE,GAAM1Q,aAAcvE,KAClG,OAAOtB,EAAWC,gBAAgBqW,EAAWL,IAGpCQ,GAAoB,SAACC,EAAOC,GAAuC,IAClCC,EADM7U,EAA4BnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAAfid,EAAejd,UAAA,GACxE+c,IAAcvT,EAAeI,SAC7BkT,EAAMI,YAAY9gB,OAAS,GAC3B4gB,EAAAF,EAAMI,aAAYpc,KAAlBqB,MAAA6a,EAAAG,GAA0BF,KAE1BH,EAAMI,YAAYpc,MACdsc,GAAIL,EACJM,KAAMlV,EACNmV,SAAUL,KAKTM,GAA4B,SAACC,EAAUC,GAAU,IAAAC,GAC1DA,EAAAD,EAAME,qBAAoB7c,KAA1BqB,MAAAub,EAAAP,GAAkCK,EAASG,qBAA3Cra,OAAA6Z,GAAmEK,EAASN,gBAGnEU,GAAe,SAAClW,EAAY9D,EAAQia,EAAU1V,EAAQqV,GAC/D,IAAMM,KACFC,GAAqB,EACnBvmB,EAAS2Q,EAAT3Q,KACFwmB,SACAtJ,KACAC,EAAgB,kBAAM6I,EAASnR,gBAC7B4R,EAAmB,SAAAthB,GAAA,OAASkhB,EA7CtC,SAA+Bja,EAAQvN,GACnC,IAAM6lB,KADgCgC,GAAA,EAAAC,GAAA,EAAAC,OAAAzjB,EAAA,IAEtC,QAAA0jB,EAAAC,EAAkB1a,EAAlBxM,OAAAmnB,cAAAL,GAAAG,EAAAC,EAAArV,QAAAuV,MAAAN,GAAA,EAA0B,KAAjBxX,EAAiB2X,EAAA/mB,MACtB4kB,EAAKxV,EAAM9P,QAAU,IAAIuQ,EAAMT,EAAMuF,aAAahG,KAAK5P,GAAIqQ,IAHzB,MAAA+X,GAAAN,GAAA,EAAAC,EAAAK,EAAA,aAAAP,GAAAI,EAAAI,QAAAJ,EAAAI,SAAA,WAAAP,EAAA,MAAAC,GAKtC,OAAOlC,EAyCHyC,CAAqB/a,EAAQjH,GAC7BA,EACAgY,EACAD,IAGAkK,SAkBJ,OAhBIA,EADApnB,IAAS8B,EAAcE,QACb,SAAAmD,GAAA,OAAUshB,EAAiBthB,IAE3B,SAAAA,GAAA,OAASshB,EAAiBthB,IAGxC8K,EAAmBC,EAAY,SAACrR,GACxBuoB,EAAQvoB,MACmB,IAAvB0nB,GAA4B1nB,IAAO0nB,EAAoB,GACvDC,EAAKF,EAAc1hB,OAAS,EAC5B0hB,EAAcE,GAASF,EAAcE,GAAIpW,MAAM,KAAK,GAApD,IAA0DvR,GAE1DynB,EAAchd,KAAd,GAAsBzK,GAE1B0nB,EAAoB1nB,KAGrBynB,EAAcpgB,KAAK,MAGjBmhB,GAAqB,SAAC/B,GAC/B,IAAMgC,EAAWhC,EAAMiC,OAAM,GACvB3C,EAAoBU,EAAMkC,uBAShC,OARAF,EAAStO,eAAiB4L,EAAkBxY,OAAOf,IAAI,SAAAiN,GAAA,OAAKA,EAAElZ,SAAQ8G,KAAK,KAG3E0e,EAAkB3V,iBAAmB,KACrC2V,EAAkBlV,iBAAmB,KACrCkV,EAAkBvV,eAAiB,KACnCiY,EAASjK,wBAAwBoK,wBAE1BH,GAGEI,GAAyB,SAACpC,EAAOqC,GAA4B,IAAhBhX,EAAgBnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAChE+c,EAAY5U,EAAO4U,WAAa1S,EAChC+U,EAAkBjX,EAAOiX,kBAAmB,EAC9CC,KAIAA,EAHCF,EAAW/iB,OAGN+iB,EAAWtc,IAAI,SAAAyc,GAAA,OAAc,SAACvM,GAChC,IAAMxD,EAAUwD,EAAUwM
,UACpBzY,EAASyI,EAAQzI,OACjB0Y,EAAezM,EAAU0M,kBACzBC,EAAc3M,EAAU3H,gBAAgB5E,YACxCP,EAAOsJ,EAAQtJ,KACfiQ,EAASnf,OAAO4oB,OAAOH,GAAchR,OAAO,SAACC,EAAKmR,GAEpD,OADAnR,EAAImR,EAAEC,IAAIjpB,MAAQ8oB,EAAYE,EAAEC,IAAIjpB,MAAMsf,SACnCzH,OAGX,OAAO,SAAC7K,GAgBJ,QAfiBqC,EAAK7J,QAAiB6J,EAAK6Z,KAAK,SAAAlL,GAAA,OAAO9N,EAAOiZ,MAAM,SAACC,GAClE,KAAMA,EAAUppB,QAAQgN,GACpB,OAAO,EAEX,IAAMtM,EAAQsM,EAAOoc,EAAUppB,MAAMqpB,UACrC,GAAIb,GAAmBY,EAAUjZ,OAAS5N,EAAUC,QAChD,OAAO9B,GAAS4e,EAAO8J,EAAUppB,MAAM,IAAMU,GAAS4e,EAAO8J,EAAUppB,MAAM,GAGjF,GAAIopB,EAAUjZ,OAAS5N,EAAUE,UAC7B,OAAO,EAEX,IAAMqV,EAAM8Q,EAAaQ,EAAUppB,MAAM+F,MACzC,OAAOiY,EAAIlG,KAAS9K,EAAOoc,EAAUppB,MAAMqpB,eAzBpB,CA6BhCX,MA/BI,kBAAM,IA+CjB,OAZIvC,IAAc1S,EACEwU,GAAmB/B,GAAOoD,OAAO,SAAAtc,GAAA,OAAUyb,EAAIU,MAAM,SAAAI,GAAA,OAAMA,EAAGvc,OAC1Ewc,WAAW,EACX5oB,KAAM8B,EAAcG,MAGRolB,GAAmB/B,GAAOoD,OAAO,SAAAtc,GAAA,OAAUyb,EAAIS,KAAK,SAAAK,GAAA,OAAMA,EAAGvc,OACzEpM,KAAM8B,EAAcG,IACpB2mB,WAAW,KAOVC,GAAkB,SAAC7C,EAAUK,EAAUyC,EAAcC,GAC9D,IAAMC,EAAShD,EAASuB,MAAMwB,EAAYH,WACpC1Y,EAAakW,GACf4C,EAAO7U,YACP6U,EAAOxB,uBAAuBpb,OAC9Bia,EACAyC,EACA9C,GAQJ,OANAgD,EAAO7U,YAAcjE,EACrB8Y,EAAO3L,wBAAwBoK,wBAE/BpC,GAAkB2D,EAAQhX,EAAeC,QAAUtB,OAAQmY,GAAgBzC,GAC3EN,GAA0BC,EAAUgD,GAE7BA,GAGEC,GAAmB,SAACjD,EAAUkD,EAAWvY,EAAQwY,GAC1D,IAAMH,EAAShD,EAASuB,MAAM5W,EAAOiY,WACjCQ,EAAgBF,EAiBpB,OAhBIvY,EAAO3Q,OAAS8B,EAAcE,UAC9BonB,EAAgBD,EAAU3P,OAAO,SAAAxB,GAAA,OAA+C,IAAlCkR,EAAU7f,QAAQ2O,MAIpEgR,EAAOhQ,eAAiBoQ,EAAcljB,KAAK,KAC3C8iB,EAAO3L,wBAAwBoK,wBAE/BpC,GACI2D,EACAhX,EAAeE,SACbgX,YAAWvY,SAAQ0Y,gBAAiBD,GACtC,MAEJrD,GAA0BC,EAAUgD,GAE7BA,GAGEM,GAAqB,SAACC,GAO/B,IALAA,EAAatc,KAAYsc,IACTha,OACZga,EAAWha,KAAO5N,EAAUE,YAG3B0nB,EAAWrL,QACZ,OAAQqL,EAAWha,MACnB,KAAK5N,EAAUC,QACX2nB,EAAWrL,QAAUzc,EAAeC,WACpC,MACJ,QACA,KAAKC,EAAUE,UACX0nB,EAAWrL,QAAU9c,EAAiBC,YAK9C,OAAOkoB,GAKEC,GAAa,SAACC,EAAUhb,EAAMa,EAAQvF,GAC/CuF,EAH0B,SAAAA,GAAA,OAAUA,EAAOjE,IAAI,SAAAke,GAAA,OAAcD,GAAmBC,KAGvEG,CAAepa,GACxBvF,EAAUxK,OAAOqR,OAAOrR,OAAOqR,UAAW+Q,IAAgB5X,GAC1D,IAAM4f,EAAcC,EAAU7f,EAAQ6X,YAEtC,
IAAM+H,GAAsC,mBAAhBA,EACxB,MAAM,IAAI1V,MAAJ,mCAA6ClK,EAAQ6X,WAArD,WANiD,IAAAiI,EAS3BF,EAAYlb,EAAM1E,GATS+f,EAAA/E,GAAA8E,EAAA,GASpDpI,EAToDqI,EAAA,GAS5CC,EAT4CD,EAAA,GAUrDhb,EAAWuS,GAAa0I,EAAeza,EAAQmS,GAG/CuI,EAAYpb,EAAWC,gBAAgBC,EAAU/E,EAAQ3K,MAM/D,OALAqqB,EAASQ,mBAAqBD,EAE9BP,EAAStV,YAAc4V,EAAcnlB,QAAUmlB,EAAc,GAAGnlB,OAAzC,MAAuDmlB,EAAc,GAAGnlB,OAAS,GAAM,GAC9G6kB,EAASzQ,eAAkB1J,EAAOjE,IAAI,SAAA4R,GAAA,OAAKA,EAAE7d,OAAO8G,OACpDujB,EAASS,YAAcngB,EAAQ6X,aAAe7gB,EAAWI,KAAOqN,EAAiBC,GAAQ1E,EAAQ6X,WAC1F6H,GAGEtR,GAAgB,SAAC7I,EAAQJ,GAGlC,IAFA,IAAIrQ,EAAI,EAEDA,EAAIyQ,EAAO1K,SAAU/F,EACxB,GAAIqQ,IAAUI,EAAOzQ,GAAGO,KACpB,OACImQ,KAAMD,EAAOzQ,GAAGqf,SAAW5O,EAAOzQ,GAAG0Q,KACrCpK,MAAOtG,GAInB,OAAO,MA6BLsrB,GAAgC,SAACrC,EAAWvM,GAC9C,IAAM6O,EAAc7O,EAAU8O,iBAC1BC,EAAiBxC,EAAU,GAC3ByC,EAAiBzC,EAAU,GAkB/B,OAhBAsC,EAAY7d,QAAQ,SAACie,GACjB,GAAKA,EAAL,CADgC,IAMjBC,EAAAC,EANiBC,EA9BF,SAACH,GACnC,IAAII,KACArF,SAEJ,OADAA,EAAYiF,EAAW5E,IAEvB,KAAK5T,EAAeC,OAChB2Y,GAAUJ,EAAW1E,UACrB,MACJ,KAAK9T,EAAeE,QAChB0Y,GAAUJ,EAAW3E,KAAKwD,iBAC1B,MACJ,KAAKrX,EAAeG,QAChBoT,EAAY,UACZqF,GAAUJ,EAAW3E,KAAKgF,cAAcza,MAAM,KAAMoa,EAAW1E,UAC/D,MACJ,QACIP,EAAY,KAGhB,OACIA,YACAqF,UAc8BE,CAAuBN,GAA7CjF,EALwBoF,EAKxBpF,UAAWqF,EALaD,EAKbC,OACnB,GAAIrF,EACA+E,GAAiBG,EAAAH,GAAe/E,GAAf5a,MAAA8f,EAAA9E,GAA6BiF,GAA7B9e,SACb8c,WAAW,MAEf2B,GAAiBG,EAAAH,GAAehF,GAAf5a,MAAA+f,EAAA/E,GAA6BiF,GAA7B9e,SACb8c,WAAW,UAKf0B,EAAgBC,IAWtBQ,GAAuB,SAAvBA,EAAwBxP,EAAWuM,GAA8C,IAAnCnX,EAAmCnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAAtBwiB,EAAsBxiB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAC7EyiB,EAAqBD,EAAaC,mBAClCC,EAAgBF,EAAaE,kBAE/B3P,IAAc0P,MAIAC,EAActmB,SAA+C,IAAtCsmB,EAAc7hB,QAAQkS,KAElDA,EAAU4P,kBAAkBrD,EAAWnX,GAEnC4K,EAAU6P,UAClB7e,QAAQ,SAAC8e,GAAU,IAAAC,EACenB,GAA8BrC,EAAWuD,GADxDE,EAAAxG,GAAAuG,EAAA,GACnBhB,EADmBiB,EAAA,GACHhB,EADGgB,EAAA,GAExBR,EAAqBM,GAAQf,EAAgBC,GAAiB5Z,EAAQqa,OA0BjEQ,GAA2B,SAACC,EAAaC,EAAYC,EAAgBhb,GAC9E,IAAImV,SACAgC,SACI8D,EAA4CD,EAA5CC,qBAAsBC,EAAsBF,EAAtBE,kBACxBC,EAAsBH,EAAeI,SACrCC,EAA8Brb,EAAOq
b,4BAMvCC,KAEJ,GAAoB,OAAhBR,IAA8C,IAAtB9a,EAAOub,WAC/BD,IACInG,kBAED,KAAAjJ,EACCsP,EAAkB5sB,OAAO4oB,OAAOyD,EAAqBQ,iBAC/B,IAAtBP,IACAM,EAAkBA,EAAgB3S,OAAO,SAAAra,GAAA,OAAKA,EAAEwR,OAAOob,WAAaD,KAGxE,IAAMO,EAAmBF,EAAgB3S,OAjB5B,SAAC8S,GAEd,OADe3b,EAAO4C,UAAa,kBAAM,IAC3B+Y,EAAO3b,KAeqCtF,IAAI,SAAAkhB,GAAA,OAAUA,EAAO5b,OAAOmV,WAEhFoF,KAEN,IAA0B,IAAtBW,EAA6B,CAC7B,IAAMW,EAAwBjtB,OAAO4oB,OAAOyD,EAAqBQ,gBAEjEI,EAAsBjgB,QAAQ,SAACkgB,GAC3B,IAAMC,EAAaD,EAAU9b,QACI,IAA7B+b,EAAWC,eAA2BD,EAAWH,SAAW5b,EAAO4b,QAC/DG,EAAWX,WAAaD,IAC5BZ,EAAc5hB,KAAKmjB,EAAUnH,QAC7BQ,EAAW0G,EAAsBhT,OAAO,SAAAra,GAAA,OAAKA,IAAMstB,IAAWphB,IAAI,SAAAlM,GAAA,OAAKA,EAAEwR,OAAOmV,YACvElhB,QAAUqnB,EAAU3iB,MACzBwc,WACA8G,OAAQH,EAAUnH,MAClBuH,KA/CU,SAACvH,GAC/B,IADoD,IAAduH,EAAcrkB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAC7C8c,EAAMwH,SACTD,EAAKvjB,KAAKgc,GACVA,EAAQA,EAAMwH,QAElB,OAAOD,EA0CmBE,CAAmBN,EAAUnH,YAOnDQ,GAAWjJ,MAAG/Q,OAAHnB,MAAAkS,KAAA/Q,OAAA6Z,GAAiB0G,IAAkBZ,KAAcjS,OAAO,SAAAra,GAAA,OAAW,OAANA,IACxE8sB,EAAU3iB,MACNwc,WACAoF,wBAAmBA,EAAnBvF,GAAqChV,EAAOua,sBAIpD,IAAM8B,EAAYtB,EAAWpG,MAEvB2H,EAAa1tB,OAAOqR,QACtBsc,kBAAmBzB,EACnBK,uBACDnb,GAEGwc,EAAmBzB,EAAW0B,aAChCpB,GAA+BmB,IAC/BrF,EAAYJ,GAAuByF,EAAkBrH,GACjD8B,gBAAiBoE,IAErBjB,GAAqBoC,EAAkBrF,EAAWmF,IAGtDhB,EAAU1f,QAAQ,SAAC8gB,GACf,IAAMC,EAAmB5F,GAAuBsF,EAAWK,EAAIvH,UACzD+G,EAAOQ,EAAIR,KAEjB,GAAIA,EAAM,CACN,IAAMU,EA1HO,SAACzF,EAAW+E,GACjC,IAAK,IAAIhuB,EAAI,EAAGmN,EAAM6gB,EAAKjoB,OAAQ/F,EAAImN,EAAKnN,IAAK,CAC7C,IAAMymB,EAAQuH,EAAKhuB,GACnBipB,EAAYqC,GAA8BrC,EAAWxC,GAEzD,OAAOwC,EAqHuB0F,CAAiBF,EAAkBT,EAAKY,WAC9DJ,EAAIT,OAAOzB,kBAAkBoC,EAAeN,QAE5ClC,GAAqBiC,EAAWM,EAAkBL,GAC9C/B,cAAemC,EAAInC,cACnBD,mBAAoBe,GAA+BmB,iQCqKpDO,cA3jBX,SAAAC,iGAAwBC,CAAA3qB,KAAA0qB,GACpB,IAAIE,SAEJ5qB,KAAK6pB,QAAU,KACf7pB,KAAKyiB,eACLziB,KAAKkjB,uBACLljB,KAAKmoB,aANe,QAAAjf,EAAA3D,UAAA5D,OAARgmB,EAAQve,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAARse,EAAQte,GAAA9D,UAAA8D,GAQE,IAAlBse,EAAOhmB,SAAkBipB,EAASjD,EAAO,cAAe+C,GAExD1qB,KAAK+V,eAAiB6U,EAAO7U,eAC7B/V,KAAKkR,YAAc0Z,EAAO1Z,
YAC1BlR,KAAKinB,YAAc2D,EAAO3D,YAC1BjnB,KAAK6pB,QAAUe,EACf5qB,KAAKgnB,mBAAqBhnB,KAAK6pB,QAAQ7C,mBACvChnB,KAAK6qB,gBAAkB9f,IACvB/K,KAAKoa,wBAAwBoK,0BAE7B+B,GAAUuE,cAAC9qB,MAAX6I,OAAoB8e,IACpB3nB,KAAK6qB,gBAAkB7qB,KAAKgnB,mBAAmB7qB,KAC/C6D,KAAKoa,wBAAwBoK,wBAC7BxkB,KAAK+qB,uBACD5B,kBACA6B,qEA0BR,OAAOhrB,KAAK2Q,gBAAgBxH,OAAOf,IAAI,SAAAlM,GAAA,OAAKA,EAAEmQ,6CAY9C,OAAOrM,KAAK6qB,wDAIZ,OAAO7qB,KAAKirB,4DAMZ,OAFAjrB,KAAKirB,YAAcvJ,IAAc1hB,KAAKkR,YAAalR,KAAK+V,gBACnD/V,KAAKukB,uBAAwBvkB,KAAK6qB,iBAChC7qB,oDAIP,OAAOA,KAAKgnB,gDAiCVkE,EAAU5a,GACZ,OAAOH,EAAanQ,KAAMkrB,EAAU5a,uCAuB3B4a,GACT,OAAO/a,EAAanQ,KAAMkrB,EAAU5Q,GAAkBta,KAAMkrB,IAAW,iCAqBpEC,GACH,OAAO1Q,GAAMza,KAAMmrB,sCAoBXC,GACR,OAAO1V,EAAW1V,KAAMorB,kCAkDpBhI,EAAU1V,GACd,IAAM2d,GACFtuB,KAAM8B,EAAcC,OACpB6mB,WAAW,GAITG,GAAgBH,WAFtBjY,EAASpR,OAAOqR,UAAW0d,EAAW3d,IAEEiY,WACpC2F,SAEA5d,EAAO3Q,OAAS8B,EAAcG,IAa9BssB,GAZiB1F,GACb5lB,KACAojB,GACErmB,KAAM8B,EAAcC,QACtBgnB,GAEaF,GACb5lB,KACAojB,GACErmB,KAAM8B,EAAcE,SACtB+mB,IAIJwF,EAAM1F,GACF5lB,KACAojB,EACA1V,EACAoY,GAIR,OAAOwF,oCAsBP,OAAQtrB,KAAKkR,YAAYvP,SAAW3B,KAAK+V,eAAepU,uCAUnC,IAAlBgkB,IAAkBpgB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,KAAAA,UAAA,GACf8e,EAAW,IAAIrkB,KAAKurB,YAAYvrB,MAMtC,OALI2lB,EACAtB,EAASmH,UAAUxrB,MAEnBqkB,EAASmH,UAAU,MAEhBnH,kCA8CF4B,EAAWvY,GAChB,IAAM2d,GACFtuB,KAAM8B,EAAcC,OACpB6mB,WAAW,GAEfjY,EAASpR,OAAOqR,UAAW0d,EAAW3d,GACtC,IAAM+d,EAAczrB,KAAKglB,kBACnBkB,EAAY5pB,OAAO2J,KAAKwlB,GACtB1uB,EAAS2Q,EAAT3Q,KAEJ2uB,EAAsBzF,EAAUlS,OAAO,SAACC,EAAK/H,GAM7C,MAL+B,WAA3BA,EAAMsf,YAAYpvB,KAClB6X,EAAI3N,KAAJqB,MAAAsM,wHAAA2X,CAAYzF,EAAU3P,OAAO,SAAAxB,GAAA,OAA0C,IAA7BA,EAAU6W,OAAO3f,OACpDA,KAASwf,GAChBzX,EAAI3N,KAAK4F,GAEN+H,OAGX0X,EAAsBtiB,MAAMI,KAAK,IAAIgS,IAAIkQ,IAAsBtjB,IAAI,SAAA6D,GAAA,OAASA,EAAMqR,SAClF,IAAIhF,SAEAvb,IAAS8B,EAAcG,IASvBsZ,GARsB0N,GAAiBhmB,KAAM0rB,GACzC3uB,KAAM8B,EAAcC,OACpB6mB,UAAWjY,EAAOiY,WACnBO,GACkBF,GAAiBhmB,KAAM0rB,GACxC3uB,KAAM8B,EAAcE,QACpB4mB,UAAWjY,EAAOiY,WACnBO,IAIH5N,EADsB0N,GAAiBhmB,KAAM0rB,EAAqBhe,EAAQwY,GAI9E,OAAO5N,4CAIP,OAAOtY,KAAK6rB
,6DAWZ,OAPA7rB,KAAK6rB,aAAe7rB,KAAKirB,YAAY9hB,OAAO4K,OAAO,SAACC,EAAK8X,EAAUlwB,GAK/D,OAJAoY,EAAI8X,EAAS3vB,SACT+F,MAAOtG,EACPwpB,KAAOjpB,KAAM2vB,EAAS3vB,OAAQmQ,KAAMwf,EAASxf,OAAQ2O,QAAS6Q,EAAS7Q,YAEpEjH,OAEJhU,uCAWPA,KAAK6pB,SAAW7pB,KAAK6pB,QAAQkC,YAAY/rB,MACzCA,KAAK6pB,QAAU,KACf7pB,KAAKmoB,UAAU7e,QAAQ,SAAC8e,GACpBA,EAAMyB,QAAU,OAEpB7pB,KAAKmoB,iDA6BIC,GACT,IAAInU,EAAMjU,KAAKmoB,UAAU6D,UAAU,SAAAC,GAAA,OAAWA,IAAY7D,KACjD,IAATnU,GAAajU,KAAKmoB,UAAU1gB,OAAOwM,EAAK,qCAQjCiY,GACPlsB,KAAK6pB,SAAW7pB,KAAK6pB,QAAQkC,YAAY/rB,MACzCA,KAAK6pB,QAAUqC,EACfA,GAAUA,EAAO/D,UAAU9hB,KAAKrG,0CA4BhC,OAAOA,KAAK6pB,8CA6BZ,OAAO7pB,KAAKmoB,mDA4BZ,OAAOnoB,KAAKyiB,6DA4BZ,OAAOziB,KAAKkjB,2rBCkFLlR,eA3lBX,SAAApU,IAAsB,IAAA+a,+FAAAwT,CAAAnsB,KAAApC,GAAA,QAAAsL,EAAA3D,UAAA5D,OAANwF,EAAMiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAANlC,EAAMkC,GAAA9D,UAAA8D,GAAA,IAAAwO,mKAAAuU,CAAApsB,MAAA2Y,EAAA/a,EAAAme,WAAAzf,OAAA0f,eAAApe,IAAA7B,KAAA2L,MAAAiR,GAAA3Y,MAAA6I,OACT1B,KADS,OAGlB0Q,EAAKwU,kBACLxU,EAAKyU,mBAJazU,qUArCF4S,wCAwGX3jB,GAQLA,EAAUxK,OAAOqR,WANb4e,MAAO,MACPlqB,UAAW,KACXmqB,SAAS,EACTC,cAAc,EACdzW,SAEoClP,GACxC,IAAMqC,EAASnJ,KAAKukB,uBAAuBpb,OAErCujB,EAAgBxY,EAAYnY,KAC9BiE,KACAA,KAAKukB,uBAAuBpb,OAC5BnJ,KAAKkR,YACLpK,EAAQ2lB,aAAetjB,EAAOf,IAAI,SAAAlM,GAAA,OAAKA,EAAEC,SAAQ8G,OAASjD,KAAK+V,eAC/DjP,EAAQkP,MAEJvB,WAA8B,WAAlB3N,EAAQylB,MACpB/X,SAAU1N,EAAQ0lB,UAI1B,IAAK1lB,EAAQzE,UACT,OAAOqqB,EAxBG,IA2BNrqB,EAAcyE,EAAdzE,UACAmJ,EAAuBkhB,EAAvBlhB,KAAMa,EAAiBqgB,EAAjBrgB,OAAQkI,EAASmY,EAATnY,KAChBoY,EAAatgB,EAAOjE,IAAK,SAAA/E,GAAA,OAAKA,EAAElH,OAEhCywB,EADgBtwB,OAAO2J,KAAK5D,GACA0R,OAAO,SAACC,EAAKxF,GAC3C,IAAMyF,EAAM0Y,EAAWvmB,QAAQoI,GAI/B,OAHa,IAATyF,GACAD,EAAI3N,MAAM4N,EAAK5R,EAAUmM,KAEtBwF,OAiCX,MA9BsB,WAAlBlN,EAAQylB,MACRK,EAAYtjB,QAAQ,SAACujB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBrhB,EAAKshB,GAAMxjB,QAAQ,SAACgK,EAAO0Z,GACvBxhB,EAAKshB,GAAME,GAAYD,EAAMhxB,UACzBmE,EACAoT,EACAiB,EAAKyY,GACL3gB,EAAOygB,QAKnBthB,EAAKlC,QAAQ,SAACgK,EAAO0Z,GACjBJ,EAAYtjB,QAAQ,SAACujB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQ
F,EAAK,GAEnBvZ,EAAMwZ,GAAQC,EAAMhxB,UAChBmE,EACAoT,EAAMwZ,GACNvY,EAAKyY,GACL3gB,EAAOygB,QAMhBJ,kCA2BFO,GAAwD,IAA7C1U,EAA6ChT,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAA9BmI,EAA8BnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,IAAnBogB,WAAW,GAC/CiC,KAAmBqF,EAAUhqB,OAC/B0kB,GAAU3nB,KAAMitB,EAAW1U,GACzBoB,EAAetB,GAAA6U,aAAWvF,GAgBhC,OAdAvF,GACIzI,EACA5K,EAAeG,SACb+d,YAAWrF,gBAAe3O,eAAgBb,GAAaa,kBACzDV,GAEJuK,GAA0B9iB,KAAM2Z,GAE5BjM,EAAOiY,UACPhM,EAAa6R,UAAUxrB,MAEvB2Z,EAAa6R,UAAU,MAGpB7R,+BAsDLtF,GACF,IAAM8Y,EAAUntB,KAAK8kB,SACjByH,MAAO,MACPvW,KAAM3B,IAGJ+Y,GADSD,EAAQ9gB,OAAOjE,IAAI,SAAA6D,GAAA,OAASA,EAAM9P,QACnB0M,OAAOskB,EAAQ3hB,MAEvC6hB,EAAW,IAAIrtB,KAAKurB,YAAY6B,EAAcD,EAAQ9gB,QAAUsS,WAAY,WAElF,OADA0O,EAASf,gBAAkBjY,EACpBgZ,oCAwBA/gB,EAAMxF,GACbwF,EAAOA,GAAQtM,KAAKinB,YACpBngB,EAAUxK,OAAOqR,WAAaqT,eAAgB,KAAOla,GAErD,IAAMqC,EAASnJ,KAAK2Q,gBAAgBxH,OAC9BmkB,EAAUnkB,EAAOf,IAAI,SAAAiN,GAAA,OAAKA,EAAEyR,kBAC5ByG,EAAYD,EAAQ,GAAG3rB,OACzB6rB,SACAC,SACAC,SAEJ,GAAIphB,IAASxO,EAAWC,UAEpB,IADAyvB,KACKC,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMtT,KACN,IAAKuT,EAAS,EAAGA,EAASvkB,EAAOxH,OAAQ+rB,IACrCvT,EAAIhR,EAAOukB,GAAQvxB,QAAUmxB,EAAQI,GAAQD,GAEjDD,EAAennB,KAAK8T,QAErB,GAAI7N,IAASxO,EAAWE,QAAS,CAEpC,IADAwvB,GAAkBrkB,EAAOf,IAAI,SAAAiN,GAAA,OAAKA,EAAElZ,SAAQ8G,KAAK6D,EAAQka,iBACpDyM,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMtT,KACN,IAAKuT,EAAS,EAAGA,EAASvkB,EAAOxH,OAAQ+rB,IACrCvT,EAAI9T,KAAKinB,EAAQI,GAAQD,IAE7BD,EAAennB,KAAK8T,EAAIlX,KAAK6D,EAAQka,iBAEzCwM,EAAiBA,EAAevqB,KAAK,UAClC,IAAIqJ,IAASxO,EAAWG,QAU3B,MAAM,IAAI+S,MAAJ,aAAuB1E,EAAvB,qBARN,IADAkhB,GAAkBrkB,EAAOf,IAAI,SAAAiN,GAAA,OAAKA,EAAElZ,UAC/BsxB,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMtT,KACN,IAAKuT,EAAS,EAAGA,EAASvkB,EAAOxH,OAAQ+rB,IACrCvT,EAAI9T,KAAKinB,EAAQI,GAAQD,IAE7BD,EAAennB,KAAK8T,IAM5B,OAAOqT,mCAGDvhB,GACN,IAAM8I,EAAY9I,EAAM9P,OACxB6D,KAAK+V,gBAAL,IAA2BhB,EAC3B,IAAM4M,EAAoB3hB,KAAKgnB,mBAE/B,GAAKrF,EAAkB5V,YAAYE,EAAM9P,QAElC,CACH,IAAMoN,EAAaoY,EAAkBxY,OAAO6iB,UAAU,SAAA2B,GAAA,OAAaA,EAAUxxB,SAAW4Y,IACxFxL,GAAc,IAAMoY,EAAk
BxY,OAAOI,GAAc0C,QAH3D0V,EAAkBxY,OAAO9C,KAAK4F,GAYlC,OALA0V,EAAkB3V,iBAAmB,KACrC2V,EAAkBlV,iBAAmB,KACrCkV,EAAkBvV,eAAiB,KAEnCpM,KAAKoa,wBAAwBoK,wBACtBxkB,+CAuCQqM,EAAQuhB,EAAYlgB,GAAQ,IAAAuK,EAAAjY,KAC3CqM,EAASga,GAAmBha,GAC5BqB,EAASpR,OAAOqR,WAAagY,WAAW,EAAMkI,YAAY,GAASngB,GAEnE,IAAMqX,EAAe/kB,KAAKglB,kBACpB8I,EAAUF,EAAWxY,MAAM,EAAGwY,EAAWjsB,OAAS,GAClDosB,EAAaH,EAAWA,EAAWjsB,OAAS,GAElD,GAAIojB,EAAa1Y,EAAOlQ,QAAUuR,EAAOmgB,WACrC,MAAM,IAAI7c,MAAS3E,EAAOlQ,KAApB,sCAGV,IAAM6xB,EAAkBF,EAAQ1lB,IAAI,SAAC6D,GACjC,IAAMgiB,EAAYlJ,EAAa9Y,GAC/B,IAAKgiB,EAED,MAAM,IAAIjd,MAAS/E,EAAb,gCAEV,OAAOgiB,EAAU/rB,QAGfoiB,EAAQtkB,KAAKskB,MAAM5W,EAAOiY,WAE1BuI,EAAK5J,EAAM3T,gBAAgBxH,OAC3BglB,EAAiBH,EAAgB5lB,IAAI,SAAA6L,GAAA,OAAOia,EAAGja,KAEjDgG,KACAC,EAAgB,kBAAMjC,EAAKrG,gBAEzBwc,KACNphB,EAAmBsX,EAAMpT,YAAa,SAACtV,GACnC,IAAMyyB,EAAaF,EAAe/lB,IAAI,SAAA6D,GAAA,OAASA,EAAMuF,aAAahG,KAAK5P,KACvEwyB,EAAexyB,GAAKmyB,qIAAAO,CAAcD,GAAdxlB,QAA0BjN,EAAGse,EAAeD,OAhCzB,IAAAsU,EAkC3BnQ,IAAcgQ,IAAkB/hB,IAAUA,EAAOlQ,OAA1D8P,EAlCoCuiB,GAAAD,EAAA,MAwC3C,OALAjK,EAAMmK,SAASxiB,GAEfmW,GAAkBkC,EAAOvV,EAAeK,SAAW1B,OAAQrB,EAAQlD,OAAQ2kB,GAAWC,GACtFjL,GAA0B9iB,KAAMskB,GAEzBA,oCAWAkE,GAA2D,IAA9C9a,EAA8CnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAAjCmpB,EAAiCnpB,UAAA,GAAjBykB,EAAiBzkB,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAC5DopB,EAAkBjhB,EAAOihB,gBACzB9F,EAAsBnb,EAAOob,SAC7B8F,EAAUlhB,EAAOkhB,QACjB7E,EFjKkB,SAAC1H,GAC7B,KAAOA,EAAMwH,SACTxH,EAAQA,EAAMwH,QAElB,OAAOxH,EE6JewM,CAAiB7uB,MAC7B2oB,EAAuBoB,EAAUgB,sBAEjCtC,GACF0B,aF5KuB,SAAC9H,GAChC,KAAOA,EAAMwH,SAAWxH,EAAMI,YAAYqM,KAAK,SAAA5yB,GAAA,OAAKA,EAAEymB,KAAO5T,EAAeG,WACxEmT,EAAQA,EAAMwH,QAElB,OAAOxH,EEsKsB0M,CAAoB/uB,MAGzCqiB,MAAO0H,GAgBX,OAbA2E,GFlD0B,SAAC/F,GAA6C,IAAvBjb,EAAuBnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,MAAV8c,EAAU9c,UAAA,GACxEypB,SACEL,EAAkBjhB,EAAOihB,gBACzB9L,EAAWnV,EAAOmV,SAClB1lB,EAASuQ,EAAO4b,OAAhB,IAA0B5b,EAAOob,SAGnCkG,EADAL,EACkBhG,EAAqBQ,eAErBR,EAAqBqC,iBAG1B,OAAbnI,SACOmM,EAAgB7xB,GAEvB6xB,EAAgB7xB,IACZklB,QACA3U,UEiCcuhB,C
AAmBtG,EAAsBjb,EAAQ1N,MACnEuoB,GAAyBC,EAAaC,GAAcE,uBAAsBG,SAAUD,GAChFvsB,OAAOqR,QACHihB,WACDlhB,IAEHihB,GF5E6B,SAAChG,EAAsBF,EAAYC,GACxE,IAAMsC,EAAmBrC,EAAqBqC,iBAE9C,IAAK,IAAM1B,KAAU0B,EAAkB,CACnC,IACMvB,EADYuB,EAAiB1B,GACN5b,OACvBmb,EAAsBH,EAAehb,OAAOob,SAC5CoG,GAAwBxG,EAAesB,WAAWkF,uBACpDxG,EAAesB,WAAWkF,sBAAsBzF,EAAYf,EAAehb,QAC/E,GAAI+b,EAAWX,WAAaD,GAAuBqG,EAAuB,CACtE,IAAMC,EAAgB1F,EAAW5G,SACjC0F,GAAyB4G,EAAe1G,GACpCE,uBACAC,mBAAmB,EACnBE,SAAUD,GACXY,KE8DH2F,CAA0BzG,EAAsBF,GAC5C/a,SACAsc,eAIDhqB,gCAUPqvB,EAAWniB,GACX,OAAQmiB,GACR,IrCnhBmB,cqCohBfrvB,KAAKqsB,eAAehmB,KAAK6G,GAG7B,OAAOlN,yCASEqvB,GACT,OAAQA,GACR,IrCliBmB,cqCmiBfrvB,KAAKqsB,kBAIT,OAAOrsB,+CAUQ6kB,EAAW+J,GAAS,IAAAnS,EAAAzc,KACfA,KAAKqsB,eACX/iB,QAAQ,SAAAoc,GAAA,OAAMA,EAAG3pB,KAAK0gB,EAAMoI,EAAW+J,iCA8CpDU,EAAkB5hB,GACnB,IAAMqX,EAAe/kB,KAAKglB,kBAE1B,IAAKD,EAAauK,GACd,MAAM,IAAIte,MAAJ,SAAmBse,EAAnB,kBAGV,IAAMC,EAAe7hB,EAAOvR,MAAWmzB,EAAlB,UAErB,GAAIvK,EAAawK,GACb,MAAM,IAAIve,MAAJ,SAAmBue,EAAnB,mBAGV,IAb2BC,EtCtjB5B,SAAgCC,EAAcxiB,EAAYS,GAAQ,IAC/Da,EAA4Cb,EAA5Ca,QAASmhB,EAAmChiB,EAAnCgiB,UAAWphB,EAAwBZ,EAAxBY,QAAShB,EAAeI,EAAfJ,MAAOC,EAAQG,EAARH,IAD2BoiB,EAEhDF,EAAahU,SAFmCmU,EAAAC,EAAAF,EAAA,GAE9DG,EAF8DF,EAAA,GAExDG,EAFwDH,EAAA,GAIhErhB,IACDjB,EAAmB,IAAVA,KAAiBA,GAASA,EAAQwiB,GAASA,EAAOxiB,EAC3DC,EAAe,IAARA,KAAeA,GAAOA,EAAMwiB,GAAUA,EAAO,EAAKxiB,EAErDmiB,IACAphB,EAAUtK,KAAKgsB,KAAKhsB,KAAKisB,IAAI1iB,EAAMD,GAASoiB,IAGhDnhB,EAAUF,EAAgBC,EAAShB,EAAOC,IAG1CgB,EAAQ,GAAKuhB,GACbvhB,EAAQ3G,QAAQkoB,GAEhBvhB,EAAQA,EAAQ5M,OAAS,IAAMouB,GAC/BxhB,EAAQlI,KAAK0pB,EAAO,GAIxB,IADA,IAAMrhB,KACG9S,EAAI,EAAGA,EAAI2S,EAAQ5M,OAAS,EAAG/F,IACpC8S,EAAarI,MACTiH,MAAOiB,EAAQ3S,GACf2R,IAAKgB,EAAQ3S,EAAI,KAIzB,IAAMs0B,KAYN,OAXAljB,EAAmBC,EAAY,SAACrR,GAC5B,IAAM0X,EAAQmc,EAAaje,aAAahG,KAAK5P,GAC7C,GAAI0X,aAAiBlF,EACjB8hB,EAAW7pB,KAAKiN,OADpB,CAKA,IAAM9R,EAAQiN,EAAgBC,EAAc4E,GAC5C4c,EAAW7pB,KAAQ7E,EAAM8L,MAAzB,IAAkC9L,EAAM+L,SAGnC2iB,aAAYtT,KAAMrO,GsC0hBM4hB,CADRnwB,KAAK2Q,gBAAgB5E,YAAYujB,GACWtvB,KAAKkR,YAAaxD,GAA3EwiB,EAdmBV,EA
cnBU,WAAYtT,EAdO4S,EAcP5S,KAEdwT,EAAWhS,IAAc8R,KAEvB/zB,KAAMozB,EACNjjB,KAAM5N,EAAUE,UAChBqc,QAAS9c,EAAiBI,OAC1Bqe,UACC2S,IAAe,GAElBjL,EAAQtkB,KAAKskB,MAAM5W,EAAOiY,WAMhC,OALArB,EAAMmK,SAAS2B,GAEfhO,GAAkBkC,EAAOvV,EAAeM,KAAOigB,mBAAkB5hB,SAAQ6hB,gBAAgB,MACzFzM,GAA0B9iB,KAAMskB,GAEzBA,yCA8BP,OAAO,IAAI1mB,EAHEoC,KAAKqwB,UAAUvyB,EAAWC,WACxBiC,KAAKswB,kEAtjBW5iB,GAC/B,OAAOU,EAAkBP,iBAAiBH,oCAf1C,OAAO0K,YCzFAmY,GAAoDzZ,GAApDN,IAAKga,GAA+C1Z,GAA/CH,IAAK8Z,GAA0C3Z,GAA1CI,IAAKwZ,GAAqC5Z,GAArCM,IAAKuZ,GAAgC7Z,GAAhC8Z,MAAOC,GAAyB/Z,GAAzBga,KAAMC,GAAmBja,GAAnBka,MAAYC,GAAOna,GAAZoa,ICsBjDC,IACFC,QtC8LmB,mBAAAC,EAAA9rB,UAAA5D,OAAI2vB,EAAJloB,MAAAioB,GAAAE,EAAA,EAAAA,EAAAF,EAAAE,IAAID,EAAJC,GAAAhsB,UAAAgsB,GAAA,OACnB,SAACrb,GAAqC,IAAjCxI,EAAiCnI,UAAA5D,OAAA,QAAAzB,IAAAqF,UAAA,GAAAA,UAAA,IAAtBogB,WAAW,GACnB6L,EAAYtb,EACZub,SACEtK,KAyBN,OAvBAmK,EAAWhoB,QAAQ,SAACgZ,GAChBkP,EAAYlP,EAAUkP,GACtBrK,EAAY9gB,KAAZqB,MAAAyf,wHAAAuK,CAAoBF,EAAU/O,cACzBgP,IACDA,EAAaD,KAIjBC,GAAcA,IAAeD,GAC7BC,EAAWE,UAGfvP,GAAkBoP,EAAWziB,EAAeI,QAAS,KAAMgY,GAE3DqK,EAAUtO,uBACVJ,GAA0B5M,EAAIsb,GAE1B9jB,EAAOiY,UACP6L,EAAUhG,UAAUtV,GAEpBsb,EAAUhG,UAAU,MAGjBgG,IsC1NXI,ItC4He,mBAAAC,EAAAtsB,UAAA5D,OAAIwF,EAAJiC,MAAAyoB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI3qB,EAAJ2qB,GAAAvsB,UAAAusB,GAAA,OAAa,SAAA5b,GAAA,OAAMA,EAAG0b,IAAHlqB,MAAAwO,EAAU/O,KsC3H5Cse,OtCgCkB,mBAAAvc,EAAA3D,UAAA5D,OAAIwF,EAAJiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAIlC,EAAJkC,GAAA9D,UAAA8D,GAAA,OAAa,SAAA6M,GAAA,OAAMA,EAAGuP,OAAH/d,MAAAwO,EAAa/O,KsC/BlD4qB,QtC+DmB,mBAAAC,EAAAzsB,UAAA5D,OAAIwF,EAAJiC,MAAA4oB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI9qB,EAAJ8qB,GAAA1sB,UAAA0sB,GAAA,OAAa,SAAA/b,GAAA,OAAMA,EAAG6b,QAAHrqB,MAAAwO,EAAc/O,KsC9DpD+lB,QtCsJmB,mBAAAgF,EAAA3sB,UAAA5D,OAAIwF,EAAJiC,MAAA8oB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAIhrB,EAAJgrB,GAAA5sB,UAAA4sB,GAAA,OAAa,SAAAjc,GAAA,OAAMA,EAAGgX,QAAHxlB,MAAAwO,EAAc/O,KsCrJpDirB,kBCvB6B,mBAAAlpB,EAAA3D,UAAA5D,OAAIwF,EAAJiC,MAAAF,GAAAG,EAAA,EAAAA,EAAAH,EAAAG,IAAIlC,EAAJkC,GAAA9D,UAAA8D,GAAA,OAAa,SAAA6M,GAAA,OAAMA,EAAGkc,kB
AAH1qB,MAAAwO,EAAwB/O,KDwBxE6O,KCfgB,mBAAAgc,EAAAzsB,UAAA5D,OAAIwF,EAAJiC,MAAA4oB,GAAAC,EAAA,EAAAA,EAAAD,EAAAC,IAAI9qB,EAAJ8qB,GAAA1sB,UAAA0sB,GAAA,OAAa,SAAA/b,GAAA,OAAMA,EAAGF,KAAHtO,MAAAwO,EAAW/O,KDgB9CgJ,eACAkiB,WAAA3c,EACA4c,YE/BG,SAAsB3X,EAAYC,GACrC,OAAOzK,EAAawK,EAAYC,EAAYN,GAAkBK,EAAYC,IAAa,IF+BvFF,iBACAG,kBACA0X,c3BxBG,SAAwB5X,EAAYC,EAAYtK,GACnD,OAAOmK,GAAMC,GAAcC,EAAYC,EAAYtK,GAAWuK,GAAeF,EAAYC,EAAYtK,K2BwBrGkiB,MAAA/X,IAGEgY,QAAcA,QACpBn2B,OAAOqR,OAAOqE,IACVmf,aACAuB,MAAAC,EACA5jB,iBACAjP,oBACA8yB,WAAA90B,EACA+0B,cAAAh0B,EACA2O,kBAAAY,EACAqkB,YACDK,GAEY,IAAAzY,GAAA0Y,EAAA","file":"datamodel.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n 
\t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn 
__webpack_require__(__webpack_require__.s = 1);\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? DataModel.default : DataModel;\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n GEO: 'geo',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Group by function names\n *\n * @readonly\n * @enum {string}\n */\nconst GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n\nexport default GROUP_BY_FUNCTIONS;\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n 
*/\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a number if its less than 1o. This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. 
It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need 
to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (val, field) {\n Object.defineProperty(this, '_value', {\n enumerable: false,\n configurable: false,\n writable: false,\n value: val\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import { persistDerivation, persistAncestorDerivation } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. 
Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... 
}\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const 
binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. 
result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n persistDerivation(currentDM, DM_DERIVATIVES.COMPOSE, null, derivations);\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistAncestorDerivation(dm, currentDM);\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n 
fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, 
field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = field.partialField.data[i];\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = field.partialField.data[ii];\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n 
tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort 
(arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray, } from '../utils';\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @param {integer} index - The index of the data which will be sorted.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType, index) {\n let retFunc;\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'desc') {\n retFunc = (a, b) => b[index] - a[index];\n } else {\n retFunc = (a, b) => a[index] - b[index];\n }\n break;\n default:\n retFunc = (a, b) => {\n const a1 = `${a[index]}`;\n const b1 = `${b[index]}`;\n if (a1 < b1) {\n return sortType === 'desc' ? 1 : -1;\n }\n if (a1 > b1) {\n return sortType === 'desc' ? 
-1 : 1;\n }\n return 0;\n };\n }\n return retFunc;\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData(data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data before return in dataBuilder.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction sortData(dataObj, sortingDetails) {\n const { data, schema } = dataObj;\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n 
if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n sortMeta = String(sortMeta).toLowerCase() === 'desc' ? 'desc' : 'asc';\n mergeSort(data, getSortFn(fDetails.type, sortMeta, fDetails.index));\n }\n }\n\n dataObj.uids = [];\n data.forEach((value) => {\n dataObj.uids.push(value.pop());\n });\n}\n\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. 
*/\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should 
match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : 
InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].value ===\n dm2Fields[fieldName].value && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = 
value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} 
rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n 
*/\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * 
@extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return 
this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n data.push(datum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, this.format()));\n }\n });\n return data;\n }\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n 
*\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n 
}\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = 
this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case 
DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n 
*\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr (arr, options) {\n const defaultOption = {\n firstRowHeader: true,\n };\n options = Object.assign({}, defaultOption, options);\n\n let header;\n const columns = [];\n const push = columnMajor(columns);\n\n if (options.firstRowHeader) {\n // If header present then mutate the array.\n // Do in-place mutation to save space.\n header = arr.splice(0, 1)[0];\n } else {\n header = [];\n }\n\n arr.forEach(field => push(...field));\n\n return [header, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return 
JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) 
{ eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n })).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(text) {\n return text == null ? \"\"\n : reFormat.test(text += \"\") ? \"\\\"\" + text.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : text;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of 
the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr) {\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n\n arr.forEach((item) => {\n const fields = [];\n for (let key in item) {\n if (key in header) {\n insertionIndex = header[key];\n } else {\n header[key] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[key];\n }\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config 
specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, i) {\n const resp = {};\n for (let field of fields) {\n resp[field.name()] = new Value(field.partialField.data[i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n Object.keys(fields).forEach((key) => { resp[key] = new Value(fields[key], key); });\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\n\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => {\n const newRowDiffSet = [];\n let lastInsertedValue = -1;\n let { mode } = config;\n let li;\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n let checker;\n if (mode === FilteringMode.INVERSE) {\n checker = index => !selectorHelperFn(index);\n } else {\n checker = index => selectorHelperFn(index);\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n li = newRowDiffSet.length - 1;\n newRowDiffSet[li] = `${newRowDiffSet[li].split('-')[0]}-${i}`;\n } else {\n newRowDiffSet.push(`${i}`);\n }\n lastInsertedValue = i;\n }\n });\n return newRowDiffSet.join(',');\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n 
partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n let fns = [];\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n const dataObj = dataModel.getData();\n const schema = dataObj.schema;\n const fieldsConfig = dataModel.getFieldsConfig();\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = Object.values(fieldsConfig).reduce((acc, v) => {\n acc[v.def.name] = fieldsSpace[v.def.name].domain();\n return acc;\n }, {});\n\n return (fields) => {\n const include = !data.length ? false : data.some(row => schema.every((propField) => {\n if (!(propField.name in fields)) {\n return true;\n }\n const value = fields[propField.name].valueOf();\n if (filterByMeasure && propField.type === FieldType.MEASURE) {\n return value >= domain[propField.name][0] && value <= domain[propField.name][1];\n }\n\n if (propField.type !== FieldType.DIMENSION) {\n return true;\n }\n const idx = fieldsConfig[propField.name].index;\n return row[idx] === fields[propField.name].valueOf();\n }));\n return include;\n };\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = cloneWithAllFields(model).select(fields => fns.every(fn => fn(fields)), {\n saveChild: false,\n mode: FilteringMode.ALL\n });\n } else {\n filteredModel = cloneWithAllFields(model).select(fields => fns.some(fn => fn(fields)), {\n mode: FilteringMode.ALL,\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n const cloned = 
sourceDm.clone(cloneConfig.saveChild);\n const rowDiffset = selectHelper(\n cloned._rowDiffset,\n cloned.getPartialFieldspace().fields,\n selectFn,\n selectConfig,\n sourceDm\n );\n cloned._rowDiffset = rowDiffset;\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn);\n persistAncestorDerivation(sourceDm, cloned);\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivation(\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n persistAncestorDerivation(sourceDm, cloned);\n\n return cloned;\n};\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema));\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = 
converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, options);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n type: schema[i].subtype || schema[i].type,\n index: i\n };\n }\n }\n return null;\n};\n\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel[0];\n let rejectionModel = propModel[1];\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n 
selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n rejectionModel = rejectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return [selectionModel, rejectionModel];\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n let [selectionModel, rejectionModel] = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, [selectionModel, rejectionModel], config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || 
(() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = 
getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport { updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of 
basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 
'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n\n const cloneConfig = { saveChild: config.saveChild };\n let oDm;\n\n if (config.mode === FilteringMode.ALL) {\n const selectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.NORMAL },\n cloneConfig\n );\n const rejectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.INVERSE },\n cloneConfig\n );\n oDm = [selectDm, rejectDm];\n } else {\n oDm = cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n return oDm;\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * 
{@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n\n let normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n\n normalizedProjField = Array.from(new Set(normalizedProjField)).map(field => field.trim());\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldDef, i) => {\n acc[fieldDef.name()] = {\n index: i,\n def: { name: fieldDef.name(), type: fieldDef.type(), subtype: fieldDef.subtype() }\n };\n return acc;\n }, {});\n return this;\n }\n\n\n 
/**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? 
this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = 
dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n 
getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat } from './enums';\nimport {\n persistDerivation,\n persistAncestorDerivation,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. 
DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n this._sortingDetails = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. 
All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. 
The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivation(\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n persistAncestorDerivation(this, newDataModel);\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is 
sorted by `Origin` field in `DESC` order in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n sortedDm._sortingDetails = sortingDetails;\n return sortedDm;\n }\n\n /**\n * 
Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n 
serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn);\n persistAncestorDerivation(this, clone);\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n 
return this;\n }\n\n /**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null);\n persistAncestorDerivation(this, clone);\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = 
this.getSchema();\n\n return new DataModel(data, schema);\n }\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version\n}, enums);\n\nexport default DataModel;\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return 
crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file diff --git a/src/export.js b/src/export.js index 625b162..ed7d7c6 100644 --- a/src/export.js +++ b/src/export.js @@ -22,7 +22,7 @@ import { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants'; import InvalidAwareTypes from './invalid-aware-types'; import pkg from '../package.json'; -DataModel.Operators = { +const Operators = { compose, bin, select, @@ -38,12 +38,17 @@ DataModel.Operators = { fullOuterJoin, union }; -DataModel.Stats = Stats; -Object.assign(DataModel, enums, { DM_DERIVATIVES }); -DataModel.DateTimeFormatter = DateTimeFormatter; -DataModel.DataFormat = DataFormat; -DataModel.FilteringMode = FilteringMode; -DataModel.InvalidAwareTypes = InvalidAwareTypes; -DataModel.version = pkg.version; + +const version = pkg.version; +Object.assign(DataModel, { + Operators, + Stats, + DM_DERIVATIVES, + DateTimeFormatter, + DataFormat, + FilteringMode, + InvalidAwareTypes, + version +}, enums); export default DataModel; From c5fc6ac4980d438b483754a6f222ae87e6bc08b9 Mon Sep 17 00:00:00 2001 From: Ranajit Banerjee Date: Fri, 22 Mar 2019 16:48:06 +0530 Subject: [PATCH 17/21] - Add sort enum in derivatives --- src/constants/index.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/constants/index.js b/src/constants/index.js index 86c5bb0..512970f 100644 --- a/src/constants/index.js +++ b/src/constants/index.js @@ -18,7 +18,8 @@ export const DM_DERIVATIVES = { GROUPBY: 'group', COMPOSE: 'compose', CAL_VAR: 'calculatedVariable', - BIN: 'bin' + BIN: 'bin', + SORT: 'sort' }; export const JOINS = { From 85042628a8e2eab43abbf0e2e5d9a965a7780f5d Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Fri, 22 Mar 2019 20:42:35 +0530 Subject: [PATCH 18/21] #58-Validate field type and subtype before use --- example/samples/example4.js | 72 ++++++++++++++++++++++++++++--------- src/helper.js | 34 ++++++++++++++++-- 
src/index.spec.js | 35 +++++++++++++++++- 3 files changed, 122 insertions(+), 19 deletions(-) diff --git a/example/samples/example4.js b/example/samples/example4.js index e39d2ef..471f457 100644 --- a/example/samples/example4.js +++ b/example/samples/example4.js @@ -1,17 +1,57 @@ -const data = [ - { age: 30, job: 'management', marital: 'married' }, - { age: 59, job: 'blue-collar', marital: 'married' }, - { age: 35, job: 'management', marital: 'single' }, - { age: 57, job: 'self-employed', marital: 'married' }, - { age: 28, job: 'blue-collar', marital: 'married' }, - { age: 30, job: 'blue-collar', marital: 'single' }, -]; -const schema = [ - { name: 'age', type: 'measure' }, - { name: 'job', type: 'dimension' }, - { name: 'marital', type: 'dimension' } -]; -const rootDm = new DataModel(data, schema); +/* eslint-disable */ -const dm = rootDm.select(fields => fields.age.value > 30); -const sortedDm = dm.sort([['age', 'ASC']]); \ No newline at end of file +d3.json('./data/cars.json', (data) => { + const jsonData = data, + schema = [ + { + "name": "Name", + "type": "dimension" + }, + { + "name": "Maker", + "type": "dimension" + }, + { + "name": "Miles_per_Gallon", + "type": "measure", + "defAggFn": "avg" + }, + { + "name": "Displacement", + "type": "measure", + "subtype": "continuous", + "defAggFn": "max" + }, + { + "name": "Horsepower", + "type": "measure", + "defAggFn": "avg" + }, + { + "name": "Weight_in_lbs", + "type": "measure", + "defAggFn": "min" + }, + { + "name": "Acceleration", + "type": "measure", + "defAggFn": "avg" + }, + { + "name": "Origin", + "type": "dimension" + }, + { + "name": "Cylinders", + "type": "dimension" + }, + { + "name": "Year", + "type": "dimension", + "subtype": "temporal", + "format": "%Y-%m-%d" + } + ] + + dm = new DataModel(jsonData, schema); +}); \ No newline at end of file diff --git a/src/helper.js b/src/helper.js index 637af3f..5b82f2e 100644 --- a/src/helper.js +++ b/src/helper.js @@ -217,7 +217,37 @@ export const 
sanitizeUnitSchema = (unitSchema) => { return unitSchema; }; -export const sanitizeSchema = schema => schema.map(unitSchema => sanitizeUnitSchema(unitSchema)); +export const validateUnitSchema = (unitSchema) => { + const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS]; + const supportedDimSubTypes = [ + DimensionSubtype.CATEGORICAL, + DimensionSubtype.BINNED, + DimensionSubtype.TEMPORAL, + DimensionSubtype.GEO + ]; + const { type, subtype } = unitSchema; + + switch (type) { + case FieldType.DIMENSION: + if (supportedDimSubTypes.indexOf(subtype) === -1) { + throw new Error(`DataModel doesn't support field subtype: ${subtype}`); + } + break; + case FieldType.MEASURE: + if (supportedMeasureSubTypes.indexOf(subtype) === -1) { + throw new Error(`DataModel doesn't support field subtype: ${subtype}`); + } + break; + default: + throw new Error(`DataModel doesn't support field type: ${type}`); + } +}; + +export const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => { + unitSchema = sanitizeUnitSchema(unitSchema); + validateUnitSchema(unitSchema); + return unitSchema; +}); export const resolveFieldName = (schema, dataHeader) => { schema.forEach((unitSchema) => { @@ -232,7 +262,7 @@ export const resolveFieldName = (schema, dataHeader) => { }; export const updateData = (relation, data, schema, options) => { - schema = sanitizeSchema(schema); + schema = sanitizeAndValidateSchema(schema); options = Object.assign(Object.assign({}, defaultConfig), options); const converterFn = converter[options.dataFormat]; diff --git a/src/index.spec.js b/src/index.spec.js index 8038c59..c959417 100644 --- a/src/index.spec.js +++ b/src/index.spec.js @@ -1,5 +1,5 @@ /* global beforeEach, describe, it, context */ -/* eslint-disable no-unused-expressions */ +/* eslint-disable no-unused-expressions, no-new */ import { expect } from 'chai'; import { FilteringMode, DataFormat } from './enums'; @@ -13,6 +13,39 @@ function avg(...nums) { } describe('DataModel', () => { + 
describe('#Constructor', () => { + it('should validate schema before use', () => { + const data = [ + { age: 30, job: 'unemployed', marital: null }, + { age: 'Age', job: 'services', marital: 'married' }, + { age: 22, job: undefined, marital: 'single' } + ]; + let schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'un-supported-type' }, + ]; + const mockedFn = () => { + new DataModel(data, schema); + }; + expect(mockedFn).to.throw(); + + schema = [ + { name: 'age', type: 'measure' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension', subtype: 'invalid-subtype' }, + ]; + expect(mockedFn).to.throw(); + + schema = [ + { name: 'age', type: 'measure', subtype: 'invalid-subtype' }, + { name: 'job', type: 'dimension' }, + { name: 'marital', type: 'dimension' }, + ]; + expect(mockedFn).to.throw(); + }); + }); + describe('#version', () => { it('should be same to the version value specified in package.json file', () => { expect(DataModel.version).to.equal(pkg.version); From d2da25da282a62bcd79f97c94722567dd1399be4 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Mon, 25 Mar 2019 14:01:09 +0530 Subject: [PATCH 19/21] Do some code refactore on derivations --- src/datamodel.js | 34 ++++++++++++++++++++++++---------- src/helper.js | 20 +++++++++++++++----- src/operator/compose.js | 11 ++++++++--- 3 files changed, 47 insertions(+), 18 deletions(-) diff --git a/src/datamodel.js b/src/datamodel.js index 2121d91..809eecb 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ -2,8 +2,7 @@ import { FieldType, DimensionSubtype, DataFormat } from './enums'; import { - persistDerivation, - persistAncestorDerivation, + persistDerivations, getRootGroupByModel, propagateToAllDataModels, getRootDataModel, @@ -239,13 +238,13 @@ class DataModel extends Relation { let params = [this, fieldsArr, reducers]; const newDataModel = groupBy(...params); - persistDerivation( + persistDerivations( + this, newDataModel, 
DM_DERIVATIVES.GROUPBY, { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() }, reducers ); - persistAncestorDerivation(this, newDataModel); if (config.saveChild) { newDataModel.setParent(this); @@ -317,8 +316,13 @@ class DataModel extends Relation { const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' }); - persistDerivation(sortedDm, DM_DERIVATIVES.SORT, config, sortingDetails); - persistAncestorDerivation(this, sortedDm); + persistDerivations( + this, + sortedDm, + DM_DERIVATIVES.SORT, + config, + sortingDetails + ); if (config.saveChild) { sortedDm.setParent(this); @@ -490,8 +494,13 @@ class DataModel extends Relation { const [field] = createFields([computedValues], [schema], [schema.name]); clone.addField(field); - persistDerivation(clone, DM_DERIVATIVES.CAL_VAR, { config: schema, fields: depVars }, retrieveFn); - persistAncestorDerivation(this, clone); + persistDerivations( + this, + clone, + DM_DERIVATIVES.CAL_VAR, + { config: schema, fields: depVars }, + retrieveFn + ); return clone; } @@ -646,8 +655,13 @@ class DataModel extends Relation { const clone = this.clone(config.saveChild); clone.addField(binField); - persistDerivation(clone, DM_DERIVATIVES.BIN, { measureFieldName, config, binFieldName }, null); - persistAncestorDerivation(this, clone); + persistDerivations( + this, + clone, + DM_DERIVATIVES.BIN, + { measureFieldName, config, binFieldName }, + null + ); return clone; } diff --git a/src/helper.js b/src/helper.js index 637af3f..fa3bea5 100644 --- a/src/helper.js +++ b/src/helper.js @@ -34,7 +34,7 @@ export const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fie return fieldStore.createNamespace(newFields, fieldStoreName); }; -export const persistDerivation = (model, operation, config = {}, criteriaFn) => { +export const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => { if (operation === DM_DERIVATIVES.COMPOSE) { model._derivation.length = 0; 
model._derivation.push(...criteriaFn); @@ -51,6 +51,11 @@ export const persistAncestorDerivation = (sourceDm, newDm) => { newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation); }; +export const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => { + persistCurrentDerivation(model, operation, config, criteriaFn); + persistAncestorDerivation(sourceDm, model); +}; + export const selectHelper = (rowDiffset, fields, selectFn, config, sourceDm) => { const newRowDiffSet = []; let lastInsertedValue = -1; @@ -167,8 +172,13 @@ export const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) = cloned._rowDiffset = rowDiffset; cloned.__calculateFieldspace().calculateFieldsConfig(); - persistDerivation(cloned, DM_DERIVATIVES.SELECT, { config: selectConfig }, selectFn); - persistAncestorDerivation(sourceDm, cloned); + persistDerivations( + sourceDm, + cloned, + DM_DERIVATIVES.SELECT, + { config: selectConfig }, + selectFn + ); return cloned; }; @@ -184,13 +194,13 @@ export const cloneWithProject = (sourceDm, projField, config, allFields) => { cloned._colIdentifier = projectionSet.join(','); cloned.__calculateFieldspace().calculateFieldsConfig(); - persistDerivation( + persistDerivations( + sourceDm, cloned, DM_DERIVATIVES.PROJECT, { projField, config, actualProjField: projectionSet }, null ); - persistAncestorDerivation(sourceDm, cloned); return cloned; }; diff --git a/src/operator/compose.js b/src/operator/compose.js index 0fb9f36..3930ecb 100644 --- a/src/operator/compose.js +++ b/src/operator/compose.js @@ -1,4 +1,4 @@ -import { persistDerivation, persistAncestorDerivation } from '../helper'; +import { persistDerivations } from '../helper'; import { DM_DERIVATIVES } from '../constants'; /** @@ -231,10 +231,15 @@ export const compose = (...operations) => firstChild.dispose(); } - persistDerivation(currentDM, DM_DERIVATIVES.COMPOSE, null, derivations); // reset all ancestorDerivation saved in-between 
compose currentDM._ancestorDerivation = []; - persistAncestorDerivation(dm, currentDM); + persistDerivations( + dm, + currentDM, + DM_DERIVATIVES.COMPOSE, + null, + derivations + ); if (config.saveChild) { currentDM.setParent(dm); From c3a0fa468a0dfe49edd8d9b6b306570e4643bd19 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Mon, 25 Mar 2019 18:08:32 +0530 Subject: [PATCH 20/21] Update travis config file --- .travis.yml | 13 +++++++------ src/helper.js | 8 ++++---- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index 797b21d..49d82f7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,11 +1,12 @@ language: node_js node_js: - - "8" +- '8' before_script: - - npm install - - npm install -g codecov +- npm install +- npm install -g codecov script: - - npm test - - codecov -f coverage/lcov.info +- npm test +- codecov -f coverage/lcov.info notifications: - slack: fusioncharts:JmooWfzCnyxe4p7KTTJU5xzP + slack: + secure: E/O+6gcjD2oTwLnt10w6qEpNQp9AkcuZsy6xsK/Hxw7z47IZwM5BHkUAJhTX0QcJyamZbFaMU9mJ5P4ClLnMPBBj4KV+mznu8yTSySfCub2LKpuGyKIqH3BHqepHbSZiAlMhQkq5OUfW8tOo2p8j6kc5AbvPx6pHCr/nQc0HkjwBQY4SwLb60LGXbPkYsoZhYnXmJmRg/iowu00qakjXH7FInsGist//ZlJp9MiaZH3Cfdo4l3rZn4AJ1naFBD4bNb+Wqqh6zVO4DdOiBVTsq3bZ6vcNZVb2IqlYZvCLODuwhvHiO4wKsQ9QAAhm1TXrraXfs9kR9pMeZeUtnlEeZURu/m7J1Wz2PkBOGGUjCb2xGucppgyg3/1eG3esEL6M6pqVGzuGH1CKjh4aRVZoq47UHDoN+N8Q4ix+TZMqztCFQV47bs56dlmc8hnlluANJsrlJha0p3myYQknv2qjgLjfbZDHoKKybpdAsfaZsDQ3aShw+EtdB38FA0YjsWYQNWh1YmKWBEz1W+jMZ7AlLrkNi20+JHmNngZPW4MQy1Mi0xN905Rlr4RwnzUt5o+pbP78zIlCnZFU5KIIoMDdlKXoZ9hC6gptTajhXkcIjm+FSezy6VUtNpMS0dCSM3RomYWx1MSEpo1XIMzzb6bujBD/XLckejJWAJcTF+PEzz8= diff --git a/src/helper.js b/src/helper.js index 47200e9..cd985e6 100644 --- a/src/helper.js +++ b/src/helper.js @@ -235,21 +235,21 @@ export const validateUnitSchema = (unitSchema) => { DimensionSubtype.TEMPORAL, DimensionSubtype.GEO ]; - const { type, subtype } = unitSchema; + const { type, subtype, name } = unitSchema; switch (type) { case 
FieldType.DIMENSION: if (supportedDimSubTypes.indexOf(subtype) === -1) { - throw new Error(`DataModel doesn't support field subtype: ${subtype}`); + throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`); } break; case FieldType.MEASURE: if (supportedMeasureSubTypes.indexOf(subtype) === -1) { - throw new Error(`DataModel doesn't support field subtype: ${subtype}`); + throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`); } break; default: - throw new Error(`DataModel doesn't support field type: ${type}`); + throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`); } }; From aab83469b2cd5467dbc7f6aa2df001d3148438fb Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Tue, 26 Mar 2019 17:16:48 +0530 Subject: [PATCH 21/21] Bump the version and make a build --- dist/datamodel.js | 2 ++ dist/datamodel.js.map | 1 + package.json | 3 ++- 3 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 dist/datamodel.js create mode 100644 dist/datamodel.js.map diff --git a/dist/datamodel.js b/dist/datamodel.js new file mode 100644 index 0000000..8cab24b --- /dev/null +++ b/dist/datamodel.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var a=t[r]={i:r,l:!1,exports:{}};return e[r].call(a.exports,a,a.exports,n),a.l=!0,a.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof 
e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)n.d(r,a,function(t){return e[t]}.bind(null,a));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports={name:"datamodel",description:"Relational algebra compliant in-memory tabular data store",homepage:"https://github.com/chartshq/datamodel",version:"2.2.0",license:"MIT",main:"dist/datamodel.js",keywords:["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],author:"Muzejs.org (https://muzejs.org/)",repository:{type:"git",url:"https://github.com/chartshq/datamodel.git"},contributors:[{name:"Akash Goswami",email:"akashgoswami90s@gmail.com"},{name:"Subhash Haldar"},{name:"Rousan Ali",email:"rousanali786@gmail.com",url:"https://rousan.io"},{name:"Ujjal Kumar Dutta",email:"duttaujjalkumar@live.com"}],dependencies:{"d3-dsv":"^1.0.8"},devDependencies:{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0",chai:"3.5.0","cross-env":"^5.0.5",eslint:"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0",jsdoc:"3.5.5",json2yaml:"^1.1.0",karma:"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3",marked:"^0.5.0",mocha:"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0",webpack:"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},scripts:{test:"npm run lint && npm run 
ut",ut:"karma start karma.conf.js",utd:"karma start --single-run false --browsers Chrome karma.conf.js ",build:"webpack --mode production",start:"webpack-dev-server --config webpack.config.dev.js --mode development --open",lint:"eslint ./src","lint-errors":"eslint --quiet ./src",docs:"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l}),n.d(r,"GROUP_BY_FUNCTIONS",function(){return s});var a={};n.r(a),n.d(a,"DSVArr",function(){return Ze}),n.d(a,"DSVStr",function(){return lt}),n.d(a,"FlatJSON",function(){return st}),n.d(a,"Auto",function(){return dt});var i={};n.r(i),n.d(i,"sum",function(){return Ct}),n.d(i,"avg",function(){return xt}),n.d(i,"min",function(){return Lt}),n.d(i,"max",function(){return Ut}),n.d(i,"first",function(){return Vt}),n.d(i,"last",function(){return Yt}),n.d(i,"count",function(){return Ht}),n.d(i,"sd",function(){return Bt});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"},s={SUM:"sum",AVG:"avg",MIN:"min",MAX:"max",FIRST:"first",LAST:"last",COUNT:"count",STD:"std"};function d(e){return e instanceof Date?e:new Date(e)}function p(e){return e<10?"0"+e:e}function h(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},h.TOKEN_PREFIX="%",h.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},h.defaultNumberParser=function(e){return function(t){var n;return 
isFinite(n=parseInt(t,10))?n:e}},h.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;aa.getFullYear()&&(t=""+(i-1)+r),d(t).getFullYear()},formatter:function(e){var t=d(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return d(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},h.formatAs=function(e,t){var n,r=d(e),a=h.findTokens(t),i=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var R=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:U.CROSS,i=[],o=[],u=n||B,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=H(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var 
t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),D(e._rowDiffset,function(n){var d=!1,h=void 0;D(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]=e.partialField.data[n]}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]=e.partialField.data[v]});var g=vt(y[l]),b=vt(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var O={};m.forEach(function(e,t){O[i[t].name]=e}),d&&U.CROSS!==a?o[h]=O:(o.push(O),d=!0,h=n)}else if((a===U.LEFTOUTER||a===U.RIGHTOUTER)&&!d){var w={},_=c.fields.length-1;m.forEach(function(e,t){w[i[t].name]=t<=_?e:null}),d=!0,h=n,o.push(w)}})}),new It(o,i,{name:d})}function J(e,t){var n=""+e,r=""+t;return nr?1:0}function z(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:J;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}function K(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);ti?"desc"===t?-1:1:0}}return r}function X(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function q(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function Z(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n=0;u--)a=t[u][0],i=t[u][1],(o=jt(r,a))&&("function"==typeof i?z(n,function(e,t){return i(e[o.index],t[o.index])}):E(i)?function(){var 
e=X(n,o.index),t=i[i.length-1],a=i.slice(0,i.length-1),u=a.map(function(e){return jt(r,e)});e.forEach(function(e){e.push(q(e,a,u))}),z(e,function(e,n){var r=e[2],a=n[2];return t(r,a)}),n.length=0,e.forEach(function(e){n.push.apply(n,K(e[1]))})}():(i="desc"===String(i).toLowerCase()?"desc":"asc",z(n,W(o.type,i,o.index))));e.uids=[],n.forEach(function(t){e.uids.push(t.pop())})}(i,r),a.columnWise){var f=Array.apply(void 0,K(Array(i.schema.length))).map(function(){return[]});i.data.forEach(function(e){e.forEach(function(e,t){f[t].push(e)})}),i.data=f}return i}function $(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!S(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t,r){D(e._rowDiffset,function(e){var o={},u="";a.forEach(function(n){var r=t[n].partialField.data[e];u+="-"+r,o[n]=r}),n[u]||(r&&i.push(o),n[u]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(t,f,!1),s(e,c,!0),new It(i,r,{name:l})}function Q(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function ee(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=me.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=me.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=pe)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=ye(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;D(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 
0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new zt(h,d,{name:c}),v}function be(e,t){var n=H(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].value!==t[n].value||!r)}),r}}function Oe(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!S(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){D(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new zt(i,r,{name:l})}function we(e,t,n){return G(e,t,n,!1,U.LEFTOUTER)}function _e(e,t,n){return G(t,e,n,!1,U.RIGHTOUTER)}var Ee=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),xe=function(){function e(e,t){for(var n=0;n9999?"+"+it(t,6):it(t,4))+"-"+it(e.getUTCMonth()+1,2)+"-"+it(e.getUTCDate(),2)+(i?"T"+it(n,2)+":"+it(r,2)+":"+it(a,2)+"."+it(i,3)+"Z":a?"T"+it(n,2)+":"+it(r,2)+":"+it(a,2)+"Z":r||n?"T"+it(n,2)+":"+it(r,2)+"Z":"")}var ut=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,a=[],i=e.length,o=0,u=0,c=i<=0,f=!1;function l(){if(c)return Qe;if(f)return f=!1,$e;var t,r,a=o;if(e.charCodeAt(a)===et){for(;o++=i?c=!0:(r=e.charCodeAt(o++))===tt?f=!0:r===nt&&(f=!0,e.charCodeAt(o)===tt&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 
0!==arguments[2]?arguments[2]:{},a=arguments[3];t===L.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,ht(a))):e._derivation.push({op:t,meta:r,criteria:a})}(t,n,arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},arguments[4]),function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,ht(e._ancestorDerivation).concat(ht(e._derivation)))}(e,t)},gt=function(e,t,n,r,a){var i=[],o=-1,u=r.mode,c=void 0,f={},s=function(){return a.detachedRoot()},d=function(e){return n(function(e,t){var n={},r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done);r=!0){var c=o.value;n[c.name()]=new k(c.partialField.data[t],c)}}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(t,e),e,s,f)},p=void 0;return p=u===l.INVERSE?function(e){return!d(e)}:function(e){return d(e)},D(e,function(e){p(e)&&(-1!==o&&e===o+1?(c=i.length-1,i[c]=i[c].split("-")[0]+"-"+e):i.push(""+e),o=e)}),i.join(",")},bt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map(function(e){return e.name()}).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},Ot=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.operation||V,a=n.filterByMeasure||!1,i=[];i=t.length?t.map(function(e){return n=(t=e).getData(),r=n.schema,i=t.getFieldsConfig(),o=t.getFieldspace().fieldsObj(),u=n.data,c=Object.values(i).reduce(function(e,t){return e[t.def.name]=o[t.def.name].domain(),e},{}),function(e){return!!u.length&&u.some(function(t){return r.every(function(n){if(!(n.name in e))return!0;var r=e[n.name].valueOf();if(a&&n.type===f.MEASURE)return r>=c[n.name][0]&&r<=c[n.name][1];if(n.type!==f.DIMENSION)return!0;var o=i[n.name].index;return t[o]===e[n.name].valueOf()})})};var t,n,r,i,o,u,c}):[function(){return!1}];return r===V?bt(e).select(function(e){return i.every(function(t){return 
t(e)})},{saveChild:!1,mode:l.ALL}):bt(e).select(function(e){return i.some(function(t){return t(e)})},{mode:l.ALL,saveChild:!1})},wt=function(e,t,n,r){var a=e.clone(r.saveChild),i=gt(a._rowDiffset,a.getPartialFieldspace().fields,t,n,e);return a._rowDiffset=i,a.__calculateFieldspace().calculateFieldsConfig(),yt(e,a,L.SELECT,{config:n},t),a},_t=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),yt(e,a,L.PROJECT,{projField:t,config:n,actualProjField:i},null),a},Et=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},At=function(e){return e.map(function(e){return function(e){var t=[c.CONTINUOUS],n=[u.CATEGORICAL,u.BINNED,u.TEMPORAL,u.GEO],r=e.type,a=e.subtype,i=e.name;switch(r){case f.DIMENSION:if(-1===n.indexOf(a))throw new Error("DataModel doesn't support dimension field subtype "+a+" used for "+i+" field");break;case f.MEASURE:if(-1===t.indexOf(a))throw new Error("DataModel doesn't support measure field subtype "+a+" used for "+i+" field");break;default:throw new Error("DataModel doesn't support field type "+r+" used for "+i+" field")}}(e=Et(e)),e})},St=function(e,t,n,r){n=At(n),r=Object.assign(Object.assign({},qe),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,r),c=pt(u,2),f=c[0],l=c[1];!function(e,t){e.forEach(function(e){var n=e.as;if(n){var r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}})}(n,f);var s=Xe(l,n,f),d=T.createNamespace(s,r.name);return e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"",e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?N(t):r.dataFormat,e},jt=function(e,t){for(var n=0;n2&&void 
0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];t!==i&&((!o.length||-1===o.indexOf(t))&&t.handlePropagation(n,r),t._children.forEach(function(t){var i=Nt(n,t),o=pt(i,2),u=o[0],c=o[1];e(t,[u,c],r,a)}))},Ft=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},kt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:Ft(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(ht(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,ht(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=Ot(g,a,{filterByMeasure:f}),Tt(g,i,y)),l.forEach(function(e){var t=Ot(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=e.reduce(function(e,t){return"RegExp"===t.constructor.name?e.push.apply(e,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 
0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=ge.apply(void 0,a);return yt(this,i,L.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:me.defaultReducer()},t),n.saveChild?i.setParent(this):i.setParent(null),i}},{key:"sort",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=[n.schema.map(function(e){return e.name})].concat(n.data),a=new this.constructor(r,n.schema,{dataFormat:"DSVArr"});return yt(this,a,L.SORT,t,e),t.saveChild?a.setParent(this):a.setParent(null),a}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[r]=e)}else n.fields.push(e);return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=Et(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];D(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=function(e){for(;e._parent;)e=e._parent;return 
e}(this),c=u._propagationNameSpace,f={groupByModel:function(e){for(;e._parent&&e._derivation.find(function(e){return e.op!==L.GROUPBY});)e=e._parent;return e}(this),model:u};return n&&function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}}(c,t,this),kt(e,f,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&function(e,t,n){var r=e.immutableActions;for(var a in r){var i=r[a].config,o=n.config.sourceId,u=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(i,n.config);if(i.sourceId!==o&&u){var c=i.criteria;kt(c,t,{propagationNameSpace:e,propagateToSource:!1,sourceId:o},i)}}}(c,f,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=I(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[];return t.forEach(function(e){r=e(r),i.push.apply(i,function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n 
return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return 
(hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter 
(val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = 
Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need 
to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (val, field) {\n Object.defineProperty(this, '_value', {\n enumerable: false,\n configurable: false,\n writable: false,\n value: val\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. 
Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... 
}\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const 
binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. 
result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n 
fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if 
(commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = field.partialField.data[i];\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = field.partialField.data[ii];\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => 
{\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if 
(hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray, } from '../utils';\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @param {integer} index - The index of the data which will be sorted.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType, index) {\n let retFunc;\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'desc') {\n retFunc = (a, b) => b[index] - a[index];\n } else {\n retFunc = (a, b) => a[index] - b[index];\n }\n break;\n default:\n retFunc = (a, b) => {\n const a1 = `${a[index]}`;\n const b1 = `${b[index]}`;\n if (a1 < b1) {\n return sortType === 'desc' ? 1 : -1;\n }\n if (a1 > b1) {\n return sortType === 'desc' ? 
-1 : 1;\n }\n return 0;\n };\n }\n return retFunc;\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData(data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data before return in dataBuilder.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction sortData(dataObj, sortingDetails) {\n const { data, schema } = dataObj;\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n 
if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n sortMeta = String(sortMeta).toLowerCase() === 'desc' ? 'desc' : 'asc';\n mergeSort(data, getSortFn(fDetails.type, sortMeta, fDetails.index));\n }\n }\n\n dataObj.uids = [];\n data.forEach((value) => {\n dataObj.uids.push(value.pop());\n });\n}\n\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. 
*/\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should 
match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : 
InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].value ===\n dm2Fields[fieldName].value && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = 
value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} 
rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n 
*/\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * 
@extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return 
this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n data.push(datum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, this.format()));\n }\n });\n return data;\n }\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n 
*\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n 
}\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = 
this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case 
DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n 
*\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr (arr, options) {\n const defaultOption = {\n firstRowHeader: true,\n };\n options = Object.assign({}, defaultOption, options);\n\n let header;\n const columns = [];\n const push = columnMajor(columns);\n\n if (options.firstRowHeader) {\n // If header present then mutate the array.\n // Do in-place mutation to save space.\n header = arr.splice(0, 1)[0];\n } else {\n header = [];\n }\n\n arr.forEach(field => push(...field));\n\n return [header, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return 
JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? 
customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) 
columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? \"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, options) {\n const 
defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr) {\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n\n arr.forEach((item) => {\n const fields = [];\n for (let key in item) {\n if (key in header) {\n insertionIndex = header[key];\n } else {\n header[key] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[key];\n }\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, 
MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, i) {\n const resp = {};\n for (let field of fields) {\n resp[field.name()] = new Value(field.partialField.data[i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n Object.keys(fields).forEach((key) => { resp[key] = new Value(fields[key], key); });\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\n\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nexport const selectHelper = (rowDiffset, 
fields, selectFn, config, sourceDm) => {\n const newRowDiffSet = [];\n let lastInsertedValue = -1;\n let { mode } = config;\n let li;\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n let checker;\n if (mode === FilteringMode.INVERSE) {\n checker = index => !selectorHelperFn(index);\n } else {\n checker = index => selectorHelperFn(index);\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n li = newRowDiffSet.length - 1;\n newRowDiffSet[li] = `${newRowDiffSet[li].split('-')[0]}-${i}`;\n } else {\n newRowDiffSet.push(`${i}`);\n }\n lastInsertedValue = i;\n }\n });\n return newRowDiffSet.join(',');\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n let fns = [];\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n const dataObj = dataModel.getData();\n const schema = dataObj.schema;\n const fieldsConfig = dataModel.getFieldsConfig();\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = Object.values(fieldsConfig).reduce((acc, v) => {\n acc[v.def.name] 
= fieldsSpace[v.def.name].domain();\n return acc;\n }, {});\n\n return (fields) => {\n const include = !data.length ? false : data.some(row => schema.every((propField) => {\n if (!(propField.name in fields)) {\n return true;\n }\n const value = fields[propField.name].valueOf();\n if (filterByMeasure && propField.type === FieldType.MEASURE) {\n return value >= domain[propField.name][0] && value <= domain[propField.name][1];\n }\n\n if (propField.type !== FieldType.DIMENSION) {\n return true;\n }\n const idx = fieldsConfig[propField.name].index;\n return row[idx] === fields[propField.name].valueOf();\n }));\n return include;\n };\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = cloneWithAllFields(model).select(fields => fns.every(fn => fn(fields)), {\n saveChild: false,\n mode: FilteringMode.ALL\n });\n } else {\n filteredModel = cloneWithAllFields(model).select(fields => fns.some(fn => fn(fields)), {\n mode: FilteringMode.ALL,\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const rowDiffset = selectHelper(\n cloned._rowDiffset,\n cloned.getPartialFieldspace().fields,\n selectFn,\n selectConfig,\n sourceDm\n );\n cloned._rowDiffset = rowDiffset;\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== 
-1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS];\n const supportedDimSubTypes = [\n DimensionSubtype.CATEGORICAL,\n DimensionSubtype.BINNED,\n DimensionSubtype.TEMPORAL,\n DimensionSubtype.GEO\n ];\n const { type, subtype, name } = unitSchema;\n\n switch (type) {\n case FieldType.DIMENSION:\n if (supportedDimSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`);\n }\n break;\n case FieldType.MEASURE:\n if (supportedMeasureSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n break;\n default:\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { 
return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? 
detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n type: schema[i].subtype || schema[i].type,\n index: i\n };\n }\n }\n return null;\n};\n\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel[0];\n let rejectionModel = propModel[1];\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n rejectionModel = rejectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return [selectionModel, rejectionModel];\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? 
excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n let [selectionModel, rejectionModel] = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, [selectionModel, rejectionModel], config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if 
(actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n 
propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport { updateFields, cloneWithSelect, cloneWithProject, updateData } from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n 
this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. 
If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n\n const cloneConfig = { saveChild: config.saveChild };\n let oDm;\n\n if (config.mode === FilteringMode.ALL) {\n const selectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.NORMAL },\n cloneConfig\n );\n const rejectDm = cloneWithSelect(\n this,\n selectFn,\n { mode: FilteringMode.INVERSE },\n cloneConfig\n );\n oDm = [selectDm, rejectDm];\n } else {\n oDm = cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n return oDm;\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * 
{@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n\n let normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n\n normalizedProjField = Array.from(new Set(normalizedProjField)).map(field => field.trim());\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current 
DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? 
this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = 
dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n 
getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. 
DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. 
All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. 
The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order 
in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n 
sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) 
{\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n 
/**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version\n}, enums);\n\nexport default DataModel;\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, 
dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file diff --git a/package.json b/package.json index f5ba3a7..f783694 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "datamodel", "description": "Relational algebra compliant in-memory tabular data store", "homepage": "https://github.com/chartshq/datamodel", - "version": "2.1.0", + "version": "2.2.0", "license": "MIT", "main": "dist/datamodel.js", "keywords": [ @@ -17,6 +17,7 @@ "tabular", "operation" ], + "author": "Muzejs.org (https://muzejs.org/)", "repository": { "type": "git", "url": "https://github.com/chartshq/datamodel.git"