|
/**
 * Remove duplicate values from a sequence, preserving the order of the
 * first occurrence of each value.
 *
 * Time Complexity: O(n) (the original nested-loop version was O(n²))
 * Space Complexity: O(n)
 * Optimal Time Complexity: O(n)
 *
 * A Set performs membership checks in constant time and iterates in
 * insertion order, so spreading a Set built from the input yields the
 * unique values in first-occurrence order without any explicit loop.
 * See: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set
 *
 * NOTE(review): Set uses SameValueZero equality, so NaN values are also
 * deduplicated — slightly stronger than the original `===` comparison.
 *
 * @param {Array} inputSequence - Sequence to remove duplicates from
 * @returns {Array} New sequence with duplicates removed
 */
export function removeDuplicates(inputSequence) {
  // The Set constructor consumes any iterable and drops duplicates,
  // making the manual element-by-element add loop unnecessary.
  return [...new Set(inputSequence)];
}
0 commit comments