I have an array of arrays; the outer array has 17672 entries and each inner array contains 7 elements. It looks like this:

id | product | client | reward | tariff | threshold
---|---------|--------|--------|--------|----------
 1 | guitar  | john   | 0.2    | 0.5    | 500
 1 | guitar  | john   | 0.3    | 0.6    | 1000
 1 | guitar  | john   | 0.4    | 0.7    | 2000
 2 | drums   | ringo  | 0.2    | 0.5    |
 2 | drums   | ringo  | 0.3    | 0.6    |
 2 | drums   | ringo  | 0.7    | 0.8    |
 3 | cello   | george | 0.1    | 0.2    | 500
 3 | cello   | george | 0.2    | 0.4    | 800
 3 | cello   | george | 0.4    | 0.6    | 3000

There are more columns in the original table, but I removed them without any problems.
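In JavaScript terms the data is just an array of rows in the same column order as the table above; a tiny illustrative excerpt (values copied from the table, removed columns not shown):

// Illustrative excerpt only: each inner array is one row in the column order
// id, product, client, reward, tariff, threshold (the removed columns are omitted).
var rows = [
  [1, "guitar", "john",  0.2, 0.5, 500],
  [1, "guitar", "john",  0.3, 0.6, 1000],
  [2, "drums",  "ringo", 0.2, 0.5, ""]
];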

From this table I want to get the following JSON

[
  {
    "limit": {
      "max": "10000",
      "min": "2"
    },
    "thesholds": [
      { "reward": 0.25653, "tariff": 0.41186, "threshold": "500" },
      { "reward": 0.58445, "tariff": 0.31421, "threshold": "1000" },
      { "reward": 6.71413, "tariff": 0.2757, "threshold": "2000" }
    ],
    "client": "john",
    "product": "guitar"
  },
  {
    "limit": {
      "max": "100",
      "min": "2"
    },
    "thesholds": [],
    "product": "drums",
    "client": "ringo"
  }
]

My previous code takes the data from two different tables, products and thresholds. There are currently 11514 products and 9237 thresholds, and the code runs for a very long time: more than 6 minutes, which is not acceptable. So I joined the two tables into one by key, which produces the table shown in the example above.

Question: how do I group the 3 rows that share an id into a single object, and turn the extra columns into an array nested inside it?

My thinking was that with everything merged into a single table, the conversion to JSON would be faster, and I could then hand the result over to the client.
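To make the transformation I am asking about concrete, here is a rough plain-JavaScript sketch of the grouping I have in mind (column order as in the table above; the limit values are hard-coded placeholders, since those columns are not shown in the example):

// Rough sketch only, no libraries. Groups the merged rows by id and collects
// reward / tariff / threshold into a nested array per product.
function groupRows(rows) {
  var byId = {};
  rows.forEach(function (row) {
    var id = row[0];
    if (!byId[id]) {
      byId[id] = {
        limit: { max: "10000", min: "2" },   // placeholders, real values come from the removed columns
        thesholds: [],                       // key name kept as in the desired JSON above
        client: row[2],
        product: row[1]
      };
    }
    if (row[5] !== "" && row[5] != null) {   // rows without a threshold add nothing
      byId[id].thesholds.push({ reward: row[3], tariff: row[4], threshold: row[5] });
    }
  });
  return Object.keys(byId).map(function (k) { return byId[k]; });
}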

These technologies have no tags, so I will list them here:

linqjs - http://neue.cc/reference.htm

Google Fusion Tables - data storage from Google

function getClientsProducts() {
  // indexes
  var prdInx = clientsProductsIndex(SHEET_ID, "clientsProducts");

  // Fusion Table query
  var sql = 'SELECT * FROM ' + tables["clientsProducts"];
  var fClientsProducts = FusionTables.Query.sqlGet(sql, { hdrs: false }).rows;

  return Enumerable.From(fClientsProducts)
    .Select(function(prd) {
      return {
        limit: {
          max: prd[prdInx.limit.max],
          min: prd[prdInx.limit.min]
        },
        // one extra Fusion Tables query per product row
        threshold: getClientsProductsThreshold(prd[prdInx.id]),
        client: prd[prdInx.client],
        product: prd[prdInx.product]
      };
    })
    .Take(2)
    .ToJSON(null, 4);
}

function getClientsProductsThreshold(id) {
  var sql = 'SELECT * FROM ' + tables["clientsProductsThreshold"] +
            " WHERE id = '" + id + "' LIMIT 3";
  var rows = FusionTables.Query.sqlGet(sql, { hdrs: false }).rows;

  return Enumerable.From(rows)
    .Select(function(row) {
      return {
        reward: row[1],
        tariff: row[2],
        threshold: row[3]
      };
    })
    .ToArray();
}

    1 answer

    In the end I did not change anything there; I reworked the query in a different way. It turns out that searching arrays is much more labor-intensive than looking values up in objects. So the decision was as follows: abandon Google Fusion Tables and, as a first step, sketch out the script along the following lines (a small illustration of the array-versus-object difference follows below).

    1. Build an object from the clients table.
    2. Make a lookup object for each client's products.
    3. Go through the clients.products and clients.thresholds tables and write the data into the clients / products lookups by key.
    4. Convert the clients / products lookups back into arrays.

    That is one pass over each table, i.e. 3 table scans in total (since there are only 3 tables).
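    A minimal illustration of why the lookup objects pay off (not part of the script below, names are made up): searching an array scans it on every access, while a keyed object is built once and then read directly.

    // Illustration only, not part of the actual script.
    var rows = [["john", "guitar"], ["ringo", "drums"], ["george", "cello"]];

    // Array search: every access walks the rows again.
    function findRowSlow(name) {
      return rows.filter(function (r) { return r[0] === name; })[0];
    }

    // Lookup object: one pass to index the rows, then direct access by key.
    var index = rows.reduce(function (acc, r) { acc[r[0]] = r; return acc; }, {});
    var ringo = index["ringo"]; // no scan needed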

    Result: the previous version took more than 6 minutes to process; this one finishes in less than 7 seconds. And I think there is still more that could be optimized.


    var ss = SpreadsheetApp.openById(SHEET_ID);

    // values: raw rows from each sheet (slice(1) drops the header row)
    var stor = {
      clients:  ss.getSheetByName("clients").getDataRange().getValues().slice(1),
      products: ss.getSheetByName("clientsProducts").getDataRange().getValues().slice(1),
      percents: ss.getSheetByName("clientsProductsPercents").getDataRange().getValues().slice(1)
    };

    // indexes
    var i = {
      clients:  clientsIndex(SHEET_ID, "clients"),
      products: clientsProductsIndex(SHEET_ID, "clientsProducts"),
      percents: clientsProductsPercentsIndex(SHEET_ID, "clientsProductsPercents")
    };

    // turn a clients row into a client object with an empty products lookup
    function mapClient(client) {
      var key = client[i.clients.key];
      return {
        key: key,
        name: client[i.clients.name],
        percent: {
          reward: client[i.clients.percent.reward],
          tariff: client[i.clients.percent.tariff]
        },
        products: {}
      };
    }

    // collect the client objects into a lookup keyed by client key
    function reduceClient(clients, client) {
      clients[client.key] = client;
      return clients;
    }

    // pass over the clients table: build the clients lookup
    var clients = Enumerable.From(stor.clients).Select(mapClient).ToArray().reduce(reduceClient, {});

    // write each product into its client's products lookup
    function mapClientsProducts(row) {
      var client  = row[i.products.key].split("-")[0];
      var product = row[i.products.key].split("-")[1];
      clients[client].products[product] = {
        limit: {
          max: row[i.products.limit.max],
          min: row[i.products.limit.min]
        },
        percents: {},
        product: product
      };
    }

    // pass over the products table (Count() just forces the enumeration)
    Enumerable.From(stor.products).Select(mapClientsProducts).Count();

    // write each percents row into the matching product's percents
    function mapClientsProductsPercents(row) {
      var client  = row[i.percents.key].split("-")[0];
      var product = row[i.percents.key].split("-")[1];
      clients[client].products[product].percents = {
        reward:    row[i.percents.percent.reward],
        tariff:    row[i.percents.percent.tariff],
        threshold: row[i.percents.percent.threshold]
      };
    }

    // pass over the percents table
    Enumerable.From(stor.percents).Select(mapClientsProductsPercents).Count();

    // convert each client's products lookup back into an array
    function mapProducts(client) {
      var products = Enumerable.From(client.Value.products).Select("$.Value").ToArray();
      client.Value.products = products;
      return client.Value;
    }

    // convert the clients lookup back into an array
    var result = Enumerable.From(clients).Select(mapProducts).ToArray();

    return JSON.stringify(result, null, 4);
    } // closes the enclosing function, whose declaration is not shown in the snippet
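    The index helpers used above (clientsIndex, clientsProductsIndex, clientsProductsPercentsIndex) are not shown in the answer. A hypothetical version that reads a sheet's header row and maps column names to positions could look roughly like this; the real helpers and header names may differ:

    // Hypothetical sketch of one index helper. It assumes the first row of the
    // sheet holds column headers and mirrors the nested shape the script uses
    // (e.g. i.products.limit.max). The header names here are guesses.
    function clientsProductsIndex(sheetId, sheetName) {
      var headers = SpreadsheetApp.openById(sheetId)
        .getSheetByName(sheetName)
        .getDataRange()
        .getValues()[0];
      var pos = {};
      headers.forEach(function (header, idx) { pos[header] = idx; });
      return {
        key: pos["key"],
        limit: { max: pos["limitMax"], min: pos["limitMin"] }
      };
    }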

      And why do you need an intermediate ToArray between map and reduce? - VladD
    • Well, that is so as not to write the function inline inside the Select parameter, otherwise it turns into a mess; and it reads like mapClients. - sbaet
    • I don't know about JS, but in .NET reduce works the same way on a materialized and a non-materialized sequence, so the intermediate materialization is redundant. // And you cannot turn the Select into a ToArray: the selector passed to Select works on individual elements, not on the whole sequence. - VladD
    • Good question; the name simply did not fit. I renamed it. - sbaet