0

I am currently building a web scraper in NodeJS and I am facing a certain problem. After running my code, I receive this error:

undefined is not a valid uri or options object.

I am not sure how to bypass this error, I've looked at these examples: Example One, Example Two

Here is all my code:

var request = require('request');
var cheerio = require('cheerio');
var URL = require('url-parse');

var START_URL = "http://example.com";

var pagesVisited = {};
var numPagesVisited = 0;
var pagesToVisit = [];
var url = new URL(START_URL);
var baseUrl = url.protocol + "//" + url.hostname;

pagesToVisit.push(START_URL);
setInterval(crawl,5000);

function crawl() {
  var nextPage = pagesToVisit.pop();
  if (nextPage in pagesVisited) {
    // We've already visited this page, so repeat the crawl
    setInterval(crawl,5000);
  } else {
    // New page we haven't visited
    visitPage(nextPage, crawl);
  }
}

function visitPage(url, callback) {
  // Add page to our set
  pagesVisited[url] = true;
  numPagesVisited++;

  // Make the request
  console.log("Visiting page " + url);
  request(url, function(error, response, body) {
     // Check status code (200 is HTTP OK)
     console.log("Status code: " + response.statusCode);
     if(response.statusCode !== 200) {
       console.log(response.statusCode);
       callback();
       return;
     }else{
       console.log(error);
     }
     // Parse the document body
     var $ = cheerio.load(body);
       collectInternalLinks($);
       // In this short program, our callback is just calling crawl()
       callback();

  });
}

function collectInternalLinks($) {
    var relativeLinks = $("a[href^='/']");
    console.log("Found " + relativeLinks.length + " relative links on page");
    relativeLinks.each(function() {
        pagesToVisit.push(baseUrl + $(this).attr('href'));
    });
}
ZombieChowder
  • 1,187
  • 12
  • 36
  • Hate to break it to you man but you are going to need to attach a debugger and step through your code. Visual Studio offers a great NodeJS debugger but I'm sure there are other options too. – Adam H Jul 17 '18 at 15:18

2 Answers2

0

Once your pagesToVisit empties, the url will be undefined since calling pop on an empty array returns this value.

I would add a check in visitPage that url is not undefined, e.g.

function visitPage(url, callback) {
    if (!url) {
        // We're done
        return;
    }

Or in crawl, check that pagesToVisit has elements, e.g.

function crawl() {
  var nextPage = pagesToVisit.pop();
  if (!nextPage) {
      // We're done!
      console.log('Crawl complete!');
  } else if (nextPage in pagesVisited) {
    // We've already visited this page, so repeat the crawl
    setInterval(crawl,5000);
  } else {
    // New page we haven't visited
    visitPage(nextPage, crawl);
  }
}
Terry Lennox
  • 29,471
  • 5
  • 28
  • 40
  • yeah I just attached a `console.log` to `if(!url){}` and the error is there. Does this mean that I was at the end of my parsing or something is breaking somewhere? – ZombieChowder Jul 17 '18 at 15:23
  • I'd guess you've either reached the end of the array, or you have a url in the array that's empty. – Terry Lennox Jul 17 '18 at 17:18
0

Taking hints from Terry Lennox's answer, I modified the crawl() function slightly:

function crawl() {
    var nextPage = pagesToVisit.pop();
    if (nextPage in pagesVisited) {
        // We've already visited this page, so repeat the crawl
        setInterval(crawl, 5000);
    } else if(nextPage) {
        // New page we haven't visited
        visitPage(nextPage, crawl);
    } 
} 

All I am doing is check whether the popped element exists or not before calling visitPage().

I get the following output:

Visiting page http://example.com
Status code: 200
response.statusCode:  200
null
Found 0 relative links on page
^C
benSooraj
  • 447
  • 5
  • 18