
I am having a tough time understanding promises.

I am creating a file that scrapes a website with Node and npm packages and then writes the data to a CSV file. Right now I am gathering the data fine with multiple scrapes, but I want to call the function that writes the CSV file only after all of the scrapes are finished.

Can someone explain how to create a promise that waits until all of the scrapes in the "scraper" function are done before calling the FileWrite function?

Right now I am using request-promise to make each request and then do something with the data afterwards, but I am confused about how to make the FileWrite function run after multiple requests take place. I have tried placing the call to FileWrite inside one of the request-promises, but they all iterate over multiple elements to scrape, and I do not want the file to be written multiple times.

'use strict';

//require NPM packages


//I chose to use request to make the http calls because it is very easy to use.
//This npm package also has recent updates, within the last 2 days.
//Lastly it has a huge number of downloads, this means it has a solid reputation in the community
var request = require('request');


//I chose to use cheerio to write the jquery for our node scraper,
//This package is very simple to use, and it was easy to write jQuery I was already familiar with,
//Cheerio also makes it simple for us to work with HTML elements on the server.
//Lastly, Cheerio is popular within the community, with continuous updates and a lot of downloads.
var cheerio = require('cheerio');

var rp = require('request-promise');

var fs = require('fs');


//I used the json2csv npm package because it was easy to implement into my code,
//This module also has frequent updates and heavy download activity.
//This is the most elegant package to download for simple translation of json objects to a CSV file format.
var json2csv = require('json2csv');




//Array for shirts JSON object for json2csv to write.
var ShirtProps = [];
var Counter = 0;
var homeURL = "http://www.shirts4mike.com/";


//start the scraper
scraper()


//Initial scrape of the home page, looking for shirts
function scraper () {

  //use the datafolderexists function to check if data is a directory
  if (!DataFolderExists('data')) {
    fs.mkdir('data');
  }
  //initial request of the home url to find links that may have shirts in them
rp(homeURL).then(function (html) {

    //use cheerio to load the HTML for scraping
    var $ = cheerio.load(html);
    //For every link with shirt in it iterate over the link and make a request.
    $("a[href*=shirt]").each(function() {


        //request promise 
        rp('http://www.shirts4mike.com/' + $(this).attr("href")).then(function (html) {
            Counter ++;
            //pass the html into the shirt data creator, so if it wound up scraping individual shirts from any of the links it adds it to the data object
            var $ = cheerio.load(html);
            //if the add to cart input exists, log the data to the ShirtProps array.
            if ($('input[value="Add to Cart"]').length) {
              var ShirtURL = $(this).find('a').attr('href');
              var time = new Date();
              //json array for json2csv
              var ShirtData = {
              Title: $('title').html(),
              Price: $('.price').html(),
              ImageURL: $('img').attr('src'),
              URL: homeURL + ShirtURL,
              Time: time.toString() 
              };
                ShirtProps.push(ShirtData);
                console.log(ShirtData);
            } else {
              //else we are on a products page, scrape those links for shirt data
                $('ul.products li').each(function() {
                var ShirtURL = $(this).find('a').attr('href');
                    rp('http://www.shirts4mike.com/' + ShirtURL).then(function (html){

                    var $ = cheerio.load(html);
                    var time = new Date();
                    var ShirtData = {
                    Title: $('title').html(),
                    Price: $('.price').html(),
                    ImageURL: $('img').attr('src'),
                    Url: homeURL + ShirtURL,
                    Time: time.toString()
                  };
                  ShirtProps.push(ShirtData);
                  console.log(ShirtData);

          }).catch(function(error) {
          console.error(error.message);
          console.error('Scrape failed from: ' + homeURL + 'blah2' + ' The site may be down, or your connection may need troubleshooting.');
          }); //end catch error
      }); //end products li each
              } //end else



    }).catch(function(error) {  //end inner rp
      console.error(error.message);
  //tell the user in layman's terms why the scrape may have failed.
      console.error('Scrape failed from: ' + homeURL + 'blah' + ' The site may be down, or your connection may need troubleshooting.');
    }); //end catch error
  });  //end href each
    //one thing all shirts links have in common, they are contained in a div with class shirts, find the link to the shirts page based on this class.

    // //console.log testing purposes
    // console.log("This is the shirts link: " + findShirtLinks);

    // //call iterateLinks function, pass in the findShirtLinks variable to scrape that page
    // iterateLinks(findShirtLinks);

  }).catch(function(error) {
  console.error(error.message);
  //tell the user in layman's terms why the scrape may have failed.
  console.error('Scrape failed from: ' + homeURL + ' The site may be down, or your connection may need troubleshooting.');
  });//end catch error
 //end scraper

}



//create function to write the CSV file.
function FileWrite() {
  //fields variable holds the column headers
  var fields = ['Title', 'Price', 'ImageURL', 'URL', 'Time'];
  //CSV variable for injecting the fields and object into the converter.
  var csv = json2csv({data: ShirtProps, fields: fields}); 
  console.log(csv);

  //creating a simple date snagger for writing the file with date in the file name.
  var d = new Date();
  var month = d.getMonth()+1;
  var day = d.getDate();
  var output = d.getFullYear() + '-' +
  ((''+month).length<2 ? '0' : '') + month + '-' +
  ((''+day).length<2 ? '0' : '') + day;

  fs.writeFile('./data/' + output + '.csv', csv, function (error) {
    //only report a problem when the write actually failed
    if (error) {
      console.error('There was an error writing the CSV file.');
      throw error;
    }
  });

} //end FileWrite


//Check if data folder exists, source: http://stackoverflow.com/questions/4482686/check-synchronously-if-file-directory-exists-in-node-js
function DataFolderExists(folder) {
  try {
    // Query the entry
    var DataFolder = fs.lstatSync(folder);

    // Is it a directory?
    if (DataFolder.isDirectory()) {
        return true;
    } else {
        return false;
    }
} //end try
catch (error) {
    console.error(error.message);
    console.error('There was an error checking if the folder exists.');
}

}  //end DataFolderExists
  • Have you tried flattening promise chains? - http://solutionoptimist.com/2013/12/27/javascript-promise-chains-2/ – deeveeABC Aug 03 '16 at 07:15

3 Answers


request-promise uses bluebird for promises, as far as I know. There are a lot of helper methods built in; see http://bluebirdjs.com/docs/api-reference.html for details.

In general, if you want to wait for a bunch of promises to be resolved, you can use Promise.all:

var Promise = require("bluebird");
var promises = [];
for (var i = 0; i < 100; ++i) {
    promises.push(someAsyncFunction(i));
}
Promise.all(promises).then(function() {
    console.log("all the promises were resolved");
});
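
Applied to the scraper, a minimal sketch (reusing the selector and rp calls from the question; the scraping of each fetched page is left as a placeholder) could collect every request's promise instead of discarding it:

rp(homeURL).then(function (html) {
    var $ = cheerio.load(html);
    var promises = [];
    $("a[href*=shirt]").each(function () {
        //push each request's promise instead of firing and forgetting
        promises.push(rp(homeURL + $(this).attr("href")));
    });
    //resolves only when every collected request has resolved
    return Promise.all(promises);
}).then(function (pages) {
    //pages is an array of HTML strings, one per shirt link
    //...scrape each page into ShirtProps here...
    FileWrite();
});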

PS: at the start of the scraper you use an async fs method but aren't waiting for the result. You want to wait for the callback, or use the sync one (mkdirSync).
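
For example, a minimal tweak (mirroring the directory check from the question):

if (!DataFolderExists('data')) {
    //mkdirSync blocks until the folder exists, so no callback is needed
    fs.mkdirSync('data');
}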

Johannes Merz
  • Can you give a more specific example? I don't understand why our counter is set at 100 and someAsyncFunction takes the parameter of "i" in the loop; we don't want to push the function to the array 100 times, no? – BrokenWings Aug 04 '16 at 18:19
  • didn't you use a for loop in your original example? I just assumed you wanted to request things in parallel. – Johannes Merz Aug 05 '16 at 19:31
  • Like with every async operation, be it callbacks or promises, when calling them in a loop you should always group them together. The choice of grouping method is yours, but you'd generally want to go with the parallel option. Consider giving up on module-specific promisified versions, learning a more general library (which usually has its own .promisify() method anyway), and making use of its .parallel() method.

  • When dealing with nested promises, do not forget to always have a return statement inside .then(function () {...}). If you don't, your promise chain won't know it has to wait for the nested promises to resolve before moving forward.

  • You don't have to specify a .catch(function () {...}) for each promise, since errors bubble up in much the same way they would with a regular try {} catch (e) {} block for synchronous operations. A sketch combining these points follows below.
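
A minimal sketch combining these points (the selector, rp usage, and FileWrite are borrowed from the question; treat this as one possible shape, not a drop-in fix):

rp(homeURL).then(function (html) {
    var $ = cheerio.load(html);
    var links = $("a[href*=shirt]").map(function () {
        return $(this).attr("href");
    }).get();
    //RETURN the grouped nested promises so the outer chain waits for them
    return Promise.all(links.map(function (href) {
        return rp(homeURL + href);
    }));
}).then(function (pages) {
    //runs only after every nested request has resolved; pages holds the HTML
    FileWrite();
}).catch(function (error) {
    //a single catch at the end sees errors from any step above
    console.error(error.message);
});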

Radioreve

var elems = $("a[href*=shirt]").nextAll(), eachLength = elems.length;

Use .nextAll() to get all of the elements in an array. Now that we have the length, we can check the counter against it and call the file-write function once every request has finished.

   'use strict';

    //require NPM packages


    //I chose to use request to make the http calls because it is very easy to use.
    //This npm package also has recent updates, within the last 2 days.
    //Lastly it has a huge number of downloads, this means it has a solid reputation in the community
    var request = require('request');


    //I chose to use cheerio to write the jquery for our node scraper,
    //This package is very simple to use, and it was easy to write jQuery I was already familiar with,
    //Cheerio also makes it simple for us to work with HTML elements on the server.
    //Lastly, Cheerio is popular within the community, with continuous updates and a lot of downloads.
    var cheerio = require('cheerio');

    var rp = require('request-promise');

    var fs = require('fs');


    //I used the json2csv npm package because it was easy to implement into my code,
    //This module also has frequent updates and heavy download activity.
    //This is the most elegant package to download for simple translation of json objects to a CSV file format.
    var json2csv = require('json2csv');




    //Array for shirts JSON object for json2csv to write.
    var ShirtProps = [];
    var Counter = 0;
    var homeURL = "http://www.shirts4mike.com/";


    //start the scraper
    scraper()


    //Initial scrape of the home page, looking for shirts
    function scraper () {

      //use the datafolderexists function to check if data is a directory
      if (!DataFolderExists('data')) {
        fs.mkdir('data');
      }
      //initial request of the home url to find links that may have shirts in them

    rp(homeURL).then(function (html) {

    //use cheerio to load the HTML for scraping
    var $ = cheerio.load(html);
    //For every link with shirt in it iterate over the link and make a request.

    var elems = $("a[href*=shirt]").nextAll(),
        eachLength = elems.length;

    elems.each(function() {


        //request promise 
        rp('http://www.shirts4mike.com/' + $(this).attr("href")).then(function (html) {

            //pass the html into the shirt data creator, so if it wound up scraping individual shirts from any of the links it adds it to the data object
            var $ = cheerio.load(html);
            //if the add to cart input exists, log the data to the ShirtProps array.
            if ($('input[value="Add to Cart"]').length) {
              var ShirtURL = $(this).find('a').attr('href');
              var time = new Date();
              //json array for json2csv
              var ShirtData = {
                Title: $('title').html(),
                Price: $('.price').html(),
                ImageURL: $('img').attr('src'),
                URL: homeURL + ShirtURL,
                Time: time.toString() 
              };
                ShirtProps.push(ShirtData);
                console.log(ShirtData);
                Counter ++;
                if (eachLength == Counter ) {
                  FileWrite();
                };
            } else {
              //else we are on a products page, scrape those links for shirt data
                var InnerElm = $('ul.products li').nextAll(),
                    innereachLength = InnerElm.length;
                var innercount = 0;
                InnerElm.each(function() {
                var ShirtURL = $(this).find('a').attr('href');
                    rp('http://www.shirts4mike.com/' + ShirtURL).then(function (html){
                      innercount++;
                    var $ = cheerio.load(html);
                    var time = new Date();
                    var ShirtData = {
                      Title: $('title').html(),
                      Price: $('.price').html(),
                      ImageURL: $('img').attr('src'),
                      Url: homeURL + ShirtURL,
                      Time: time.toString()
                    };
                    ShirtProps.push(ShirtData);
                    if (innercount == innereachLength) {
                        Counter ++;
                        if (eachLength == Counter ) {
                          FileWrite();
                        };
                    };
                  console.log(ShirtData);

          }).catch(function(error) {
             Counter ++;
            if (eachLength == Counter ) {
                FileWrite();
            };
          console.error(error.message);
          console.error('Scrape failed from: ' + homeURL + 'blah2' + ' The site may be down, or your connection may need troubleshooting.');
          }); //end catch error
      }); //end products li each
              } //end else



    }).catch(function(error) {  //end inner rp
      console.error(error.message);
  //tell the user in layman's terms why the scrape may have failed.
      console.error('Scrape failed from: ' + homeURL + 'blah' + ' The site may be down, or your connection may need troubleshooting.');
    }); //end catch error
  });  //end href each
    //one thing all shirts links have in common, they are contained in a div with class shirts, find the link to the shirts page based on this class.

    // //console.log testing purposes
    // console.log("This is the shirts link: " + findShirtLinks);

    // //call iterateLinks function, pass in the findShirtLinks variable to scrape that page
    // iterateLinks(findShirtLinks);

  }).catch(function(error) {
  console.error(error.message);
  //tell the user in layman's terms why the scrape may have failed.
  console.error('Scrape failed from: ' + homeURL + ' The site may be down, or your connection may need troubleshooting.');
  });//end catch error
 //end scraper

}



//create function to write the CSV file.
function FileWrite() {
  //fields variable holds the column headers
  var fields = ['Title', 'Price', 'ImageURL', 'URL', 'Time'];
  //CSV variable for injecting the fields and object into the converter.
  var csv = json2csv({data: ShirtProps, fields: fields}); 
  console.log(csv);

  //creating a simple date snagger for writing the file with date in the file name.
  var d = new Date();
  var month = d.getMonth()+1;
  var day = d.getDate();
  var output = d.getFullYear() + '-' +
  ((''+month).length<2 ? '0' : '') + month + '-' +
  ((''+day).length<2 ? '0' : '') + day;

  fs.writeFile('./data/' + output + '.csv', csv, function (error) {
    //only report a problem when the write actually failed
    if (error) {
      console.error('There was an error writing the CSV file.');
      throw error;
    }
  });

} //end FileWrite


//Check if data folder exists, source: http://stackoverflow.com/questions/4482686/check-synchronously-if-file-directory-exists-in-node-js
function DataFolderExists(folder) {
  try {
    // Query the entry
    var DataFolder = fs.lstatSync(folder);

    // Is it a directory?
    if (DataFolder.isDirectory()) {
        return true;
    } else {
        return false;
    }
} //end try
catch (error) {
    console.error(error.message);
    console.error('There was an error checking if the folder exists.');
}

}  //end DataFolderExists
kannanfa
  • I like this idea, but it writes the file multiple times; I want to write the file once after all of the requests finish. – BrokenWings Aug 04 '16 at 01:52
  • I think this code will work: when a request succeeds or fails we increment the count, and when it equals the length we call the file write. – kannanfa Aug 04 '16 at 06:12