
I have these datasets below and I want to join the data in each set:

    var data = {};
    var datasets = [
        {"_id":"58d6c806d7c80d5d44a35204","particles":{"timestamp":[1490470918708,1490470962915,1490470967186],"Particles":["108","108","109"]}},
        {"_id":"58d6caf62552b75f26f56f91","particles":{"timestamp":[1490471670888,1490473309103],"Particles":["109","100"]}}
    ];

    datasets.forEach(function(dataset, index) {
        for (var key in dataset.particles) {
            data[key] = dataset.particles[key];
        }
    });
    console.log(data);

Result:

{ timestamp: [ 1490471670888, 1490473309103 ],
  Particles: [ '109', '100' ] }

They are not joined. The result I am after:

{ timestamp: [ 1490470918708, 1490470962915, 1490470967186, 1490471670888, 1490473309103 ],
  Particles: [ '108', '108', '109', '109', '100' ] }

Any ideas?

EDIT

Is concat or push faster when it comes to very large data?

Run
  • As I see a lot of good answers, I am not going to write my own. Just a comment: I notice a lot of people are using `concat`. This creates a new array every time instead of just appending new items to the existing array. I recommend using `push` instead. – Luka Mar 25 '17 at 21:54
  • @Luka Is push faster than concat? – Run Mar 25 '17 at 21:57
  • 1
    In general `push` is faster if you call it with `Array.prototype.push.apply`. But be careful as this way has limit: you cannot append large amount of items this way. – Luka Mar 25 '17 at 22:01
  • @Luka I need to push a very large amount of data, so I think concat is better then? – Run Mar 25 '17 at 22:02
  • 1
    then use concat to be on the safe side. Using `push` may simply break the program. – Luka Mar 25 '17 at 22:04
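
To illustrate the trade-off discussed in the comments above, here is a minimal sketch (the arrays are made up for the example) of the two ways of appending one array to another. `concat` allocates a new array on every call, while `Array.prototype.push.apply` appends in place but passes every element of the source as a separate call argument, so a very large source array can exceed the engine's argument limit and throw a RangeError:

    var target = [1, 2, 3];
    var source = [4, 5, 6];

    // concat: returns a brand new array; `target` is left untouched.
    var merged = target.concat(source);         // [1, 2, 3, 4, 5, 6]

    // push.apply: mutates `target` in place, but each element of
    // `source` becomes a separate argument to push, which is the
    // limit mentioned in the comments for very large arrays.
    Array.prototype.push.apply(target, source); // target is now [1, 2, 3, 4, 5, 6]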

3 Answers


You have to concatenate the arrays, not just reassign the value.

var data = {};
var datasets = [
    {"_id":"58d6c806d7c80d5d44a35204","particles":{"timestamp":[1490470918708,1490470962915,1490470967186],"Particles":["108","108","109"]}},
    {"_id":"58d6caf62552b75f26f56f91","particles":{"timestamp":[1490471670888,1490473309103],"Particles":["109","100"]}}
];

datasets.forEach(function(dataset, index) {
  for (var key in dataset.particles) {

    // Check if key already exists.
    if( data[key] === undefined )
      data[key] = [];

    // Append the current values for this key.
    data[key] = data[key].concat(dataset.particles[key]);
  }
});
console.log(data);
pishpish

Hope this is what is required:

var data = {};
var datasets = [{
    "_id": "58d6c806d7c80d5d44a35204",
    "particles": {
      "timestamp": [1490470918708, 1490470962915, 1490470967186],
      "Particles": ["108", "108", "109"]
    }
  },
  {
    "_id": "58d6caf62552b75f26f56f91",
    "particles": {
      "timestamp": [1490471670888, 1490473309103],
      "Particles": ["109", "100"]
    }
  }
];
var output = {};
datasets.forEach(function(dataset, index) {
  for (var key in dataset.particles) {
    if (typeof output[key] == 'undefined') {
      output[key] = [];
    }
    // Push each value of this key onto the merged array.
    for (var k of dataset.particles[key]) {
      output[key].push(k);
    }
  }
});
console.log(output);
Shrijan Tiwari
  • thanks for the answer! Is concat or push **faster** when it comes to very large data? – Run Mar 25 '17 at 21:54
  • 1
    seems like concat is much faster then push checkout the difference from here https://jsperf.com/concat-vs-push-array/1 – Shrijan Tiwari Mar 26 '17 at 09:19

In a functional programming style, you could use reduce and Object.keys:

var datasets = [
    {"_id":"58d6c806d7c80d5d44a35204","particles":{"timestamp":[1490470918708,1490470962915,1490470967186],"Particles":["108","108","109"]}},
    {"_id":"58d6caf62552b75f26f56f91","particles":{"timestamp":[1490471670888,1490473309103],"Particles":["109","100"]}}
];

var data = datasets.reduce ( (acc, {particles}) =>
    Object.keys(particles).reduce ( (acc, key) =>
        (acc[key] = (acc[key] || []).concat(particles[key]), acc), acc
), {} );
    
console.log(data);

Alternative with push instead of concat, in case performance is an issue:

var datasets = [
    {"_id":"58d6c806d7c80d5d44a35204","particles":{"timestamp":[1490470918708,1490470962915,1490470967186],"Particles":["108","108","109"]}},
    {"_id":"58d6caf62552b75f26f56f91","particles":{"timestamp":[1490471670888,1490473309103],"Particles":["109","100"]}}
];

var data = datasets.reduce ( (acc, {particles}) =>
    Object.keys(particles).reduce ( (acc, key) =>
        ((acc[key] = (acc[key] || [])).push(...particles[key]), acc), acc
), {} );
    
console.log(data);

But be aware that in this version of the code, push receives each individual element of particles[key] as a separate argument, which occupies stack space, and stack space is limited. See "Is there a max number of arguments JavaScript functions can accept?". You could overcome this limitation by performing a separate push call for each individual element in particles[key], but that would degrade performance, so in the end you may be better off with the concat version.
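
One middle-ground approach (just a sketch, not part of the answer above; the chunk size is an arbitrary choice) is to push in fixed-size chunks, so no single call receives more arguments than the engine allows while still avoiding one call per element:

    // Append `source` onto `target` in chunks so that no single push
    // call receives an excessive number of arguments.
    function pushInChunks(target, source) {
        var CHUNK = 10000; // arbitrary, conservative chunk size
        for (var i = 0; i < source.length; i += CHUNK) {
            Array.prototype.push.apply(target, source.slice(i, i + CHUNK));
        }
        return target;
    }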

trincot
  • thanks. Is this faster than a for loop for concatenating very large data? – Run Mar 26 '17 at 08:36
  • 1
    Why not test it on your data? In general `for` loops are fast, but if you do a lot of manipulation inside those loops, then array methods can sometimes lead to code with fewer assignments, which can then be better optimised by the engine. It all depends on your actual case and data. Just test it. – trincot Mar 26 '17 at 08:51
  • 1
    I added a variant with `push` instead of `concat`, but it really is to be tested whether there is a performance difference for your case. – trincot Mar 26 '17 at 09:11
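
Following up on "just test it": a minimal sketch of how one might time both approaches on their own data in Node or a browser console (the test data and chunk size here are made up, not from the question):

    // Throwaway test data: one million numbers.
    var big = [];
    for (var i = 0; i < 1e6; i++) big.push(i);

    // Time concat: builds a new array from the source.
    console.time('concat');
    var a = [].concat(big);
    console.timeEnd('concat');

    // Time chunked push: appends in place, in chunks, to stay
    // under the engine's argument limit.
    console.time('chunked push');
    var b = [];
    var CHUNK = 10000; // arbitrary chunk size
    for (var j = 0; j < big.length; j += CHUNK) {
        Array.prototype.push.apply(b, big.slice(j, j + CHUNK));
    }
    console.timeEnd('chunked push');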