There is an array of the form:
ar=[283462197, 191777391, 243143621, 451231707, 217268739, ] // and there are 1,310,341 of them in total. I need to select the unique values, which I do with this function:
var uniqueAr = function(ar) { var existing = {}, result = []; var length = ar.length; for (i = length; i--;) { if (!existing.hasOwnProperty(ar[i])) { result.push(ar[i]); existing[ar[i]] = true; //any value will do } } return result;}; IMHO it works very quickly — 80.774 ms. As a result I get 114,262 elements; I do my processing, and it turns out that 73,928 of them need to be removed — and that is where the problems begin. I do it like this:
grdb.user_ids_clear // the array after deduplication banIds // the IDs that need to be removed; naturally, they are also unique console.time("исключение банов"); var tar = []; var exist = false; var banIdslength = banIds.length; for (let i = grdb.user_ids_clear.length; i--;) { exist = false; for (let ii = banIdslength; ii--;) { if (banIds[ii] === grdb.user_ids_clear[i]) { exist = true; break; } } if (!exist) tar.push(grdb.user_ids_clear[i]); } console.timeEnd("исключение банов"); And it took 893288.239 ms — that is simply unacceptable, sorry, 893288.239 ms. Please explain how this happens: why does the deduplication procedure, which has (supposedly) the same complexity, run 4 orders of magnitude faster on an array 10 times larger?