
I have a function that searches for the common elements in two arrays:

    /**
     * Return the common elements of two arrays.
     */
    function searchArrayInArray(array1, array2) {
        var result = [];
        for (var j = 0, e = array1.length; j < e; j++) {
            var element = array1[j];
            // indexOf does a linear scan of array2 for every element of array1
            if (array2.indexOf(element) !== -1) {
                result.push(element);
            }
        }
        return result;
    }

This method works, but I want to improve its performance because I call it many times.

Is there any performance improvement I can apply?

Side note: the elements in the arrays are unsorted strings.

Example usage:

    var result = searchArrayInArray(['a', 'b'], ['b', 'c']);
    console.log(result); // [ 'b' ]
ar099968
  • Check out the answers in [Simplest code for array intersection in javascript](https://stackoverflow.com/q/1885557/218196) – Felix Kling Mar 22 '19 at 17:02

3 Answers


If you're concerned about performance, you'll want to use a data structure that provides fast look-ups. Array methods like Array.prototype.indexOf, Array.prototype.includes, and Array.prototype.find all have linear look-up time. Map and Set provide sub-linear (typically constant-time) look-ups. I think Set is ideal in this situation.

A straightforward implementation of intersection -

    const intersection = (a1 = [], a2 = []) => {
      const s = new Set(a1)
      const result = []

      for (const x of a2)
        if (s.has(x))
          result.push(x)

      return result
    }

    console.log(intersection(['a', 'b'], ['b', 'c']))
    // [ 'b' ]

This can be simplified a bit using higher-order functions like Array.prototype.filter -

    const intersection = (a1 = [], a2 = []) => {
      const s = new Set(a1)
      return a2.filter(x => s.has(x))
    }

    console.log(intersection(['a', 'b'], ['b', 'c']))
    // [ 'b' ]

This concept can be expanded upon to support intersecting an arbitrary number of arrays -

    const intersection = (a1 = [], a2 = []) => {
      const s = new Set(a1)
      return a2.filter(x => s.has(x))
    }

    const intersectAll = (a = [], ...more) =>
      more.reduce(intersection, a)

    console.log(intersectAll(['a', 'b'], ['b', 'c'], ['b', 'd'], ['e', 'b']))
    // [ 'b' ]
Mulan
  • I have called your implementation 20,000 times and it is slower than mine: 2500 ms vs 20000 ms – ar099968 Mar 22 '19 at 17:17
  • You need to use a larger data set to see the performance gains from using `Map` or `Set`. Try merging two arrays of 100 elements. Then two arrays of 1,000 elements. Then two arrays of 1,000,000 elements. The differences will be night and day. – Mulan Mar 22 '19 at 17:20
  • I have created the set outside the function and it's faster. Thanks! – ar099968 Mar 22 '19 at 18:04
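
To put these comments in context, here is a rough benchmark sketch. The array sizes, the `makeRandomStrings` helper, and the loop counts are illustrative assumptions (not from the original posts), and it presumes the `searchArrayInArray` and `intersection` functions shown above are in scope:

    // Build two arrays of random short strings to compare the approaches on.
    // The sizes are arbitrary; increase them and the gap grows.
    function makeRandomStrings(n) {
        var out = [];
        for (var i = 0; i < n; i++) {
            out.push(Math.random().toString(36).slice(2, 8));
        }
        return out;
    }

    var big1 = makeRandomStrings(10000);
    var big2 = makeRandomStrings(10000);

    console.time('indexOf version');
    searchArrayInArray(big1, big2);   // original linear-scan approach
    console.timeEnd('indexOf version');

    console.time('Set version');
    intersection(big1, big2);         // Set-based approach from this answer
    console.timeEnd('Set version');

    // If one array is reused across many calls, build its Set once
    // outside the hot path and only query it with .has() each time.
    var s = new Set(big2);
    console.time('prebuilt Set');
    for (var i = 0; i < 1000; i++) {
        big1.filter(function (x) { return s.has(x); });
    }
    console.timeEnd('prebuilt Set');

With only a handful of elements per array, the overhead of constructing the Set on every call can dominate, which would explain the timings reported in the first comment.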

Well, indexOf() is O(n), so by using a Set instead you can improve the complexity from O(n^2) to roughly O(n), since Set look-ups are constant time on average.

    function searchArrayInArray(array1, array2) {
        var result = [];
        // put the elements of array2 in a Set for fast membership checks
        let set = new Set();
        for (const el of array2) {
            set.add(el);
        }

        for (var j = 0, e = array1.length; j < e; j++) {
            var element = array1[j];
            if (set.has(element)) {
                result.push(element);
            }
        }
        return result;
    }
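
For example, a quick usage check (the sample arrays are just illustrative):

    var result = searchArrayInArray(['a', 'b'], ['b', 'c']);
    console.log(result); // [ 'b' ]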
Photon

The easiest way:

    var a = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
    var b = [2, 4, 5, 7, 11, 15];

    var c = a.filter(value => b.includes(value));
    console.log(c);
md_salm