/**
 * Removes duplicate URLs from an array of URLs.
 *
 * @param {string[]} urls An array of URLs.
 * @returns {string[]} A new array containing only unique URLs.
 */
function removeDuplicateUrls(urls) {
  if (!Array.isArray(urls)) {
    console.error("Input must be an array.");
    return [];
  }

  const uniqueUrls = [];
  const seen = new Set(); // Use a Set for efficient duplicate checking

  for (const url of urls) {
    if (typeof url !== 'string') {
      console.warn("Skipping non-string value:", url);
      continue;
    }

    const normalizedUrl = url.trim().toLowerCase(); // Normalize for case-insensitive comparison
    if (!seen.has(normalizedUrl)) {
      uniqueUrls.push(url); // Keep the original (un-normalized) URL in the result
      seen.add(normalizedUrl);
    }
  }

  return uniqueUrls;
}
// Example Usage (for testing)
// const urlList = [
//   "https://www.example.com",
//   "example.com",
//   "https://www.example.com",
//   "https://example.com",
//   "example.com ",
//   "invalid url",
//   123,
//   "https://www.EXAMPLE.com"
// ];
// const uniqueList = removeDuplicateUrls(urlList);
// console.log(uniqueList);
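// Expected output (123 triggers a "Skipping non-string value" warning and is dropped):
// ["https://www.example.com", "example.com", "https://example.com", "invalid url"]

// Optional: a stricter normalizer, shown only as a sketch. It assumes absolute URLs and a
// runtime with the WHATWG URL global (Node.js 10+ or a modern browser); the helper name
// normalizeUrl is illustrative and not part of the original code. new URL() lowercases the
// scheme and host, drops default ports, and adds a trailing "/" for an empty path, while
// leaving the case-sensitive path untouched. Strings that fail to parse (no scheme, e.g.
// "example.com") fall back to the simple trim + lowercase approach used above.
function normalizeUrl(url) {
  try {
    return new URL(url.trim()).href; // Canonical form: lowercased scheme/host, default port removed
  } catch (err) {
    return url.trim().toLowerCase(); // Not a parseable absolute URL; fall back to the simple approach
  }
}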
