Last Updated: May 07, 2016 · 22.66K · peterd

Scraping web pages with Node.js using request + promise

The following example shows how you can grab a remote JSON page using Node.js by combining the request and promise modules.

var Promise = require("promise");
var request = require("request");

var url = "https://raw.github.com/mikeal/request/master/package.json";

// requestp is a function declaration, so it is hoisted and can be called here.
requestp(url, true).then(function (data) {
  console.log("%s@%s: %s", data.name, data.version, data.description);
}, function (err) {
  console.error("%s; %s", err.message, url);
  if (err.res) {
    // Only rejections caused by an unexpected status code carry the response.
    console.log("%j", err.res.statusCode);
  }
});

// Wrap request() in a promise: resolve with the body on a 200 response,
// reject with an Error (carrying the response) on anything else.
function requestp(url, json) {
  json = json || false;
  return new Promise(function (resolve, reject) {
    request({url: url, json: json}, function (err, res, body) {
      if (err) {
        return reject(err);
      } else if (res.statusCode !== 200) {
        err = new Error("Unexpected status code: " + res.statusCode);
        err.res = res;
        return reject(err);
      }
      resolve(body);
    });
  });
}
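
Because requestp returns a promise, it also composes with Promise.all (provided by the promise module as well as by native promises) to fetch several documents in parallel. The sketch below is not from the original post, and the second URL is only illustrative:

var urls = [
  "https://raw.github.com/mikeal/request/master/package.json",
  "https://raw.github.com/then/promise/master/package.json"  // illustrative second URL
];

// Kick off both requests at once and wait for all of them to resolve.
Promise.all(urls.map(function (u) { return requestp(u, true); }))
  .then(function (results) {
    results.forEach(function (data) {
      console.log("%s@%s", data.name, data.version);
    });
  }, function (err) {
    // A single failure rejects the whole batch.
    console.error(err.message);
  });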

2 Responses

Oh man, you just made my life much easier writing a scraper. This is gold!

over 1 year ago ·

If you use Request-Promise you get the same request + promise power with even fewer lines of code:

var rp = require('request-promise');

var url = "https://raw.github.com/mikeal/request/master/package.json";

rp({ url: url, json: true })
  .then(function (data) {
    console.log("%s@%s: %s", data.name, data.version, data.description);
  })
  .catch(function (reason) {
    console.error("%s; %s", reason.error.message, reason.options.url);
    console.log("%j", reason.response.statusCode);
  });
over 1 year ago ·
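
The post title is about scraping web pages, not just JSON, and the same request-promise call handles HTML as well. A minimal sketch (not from the original responses), with an illustrative URL and a deliberately simple regex for the <title> tag:

var rp = require('request-promise');

// Fetch an HTML page and pull out its <title>.
// example.com and the regex are illustrative only; real scraping usually
// calls for an HTML parser rather than a regular expression.
rp("https://example.com")
  .then(function (html) {
    var match = /<title>([^<]*)<\/title>/i.exec(html);
    console.log(match ? match[1] : "(no title found)");
  })
  .catch(function (reason) {
    console.error("Request failed: %s", reason.message);
  });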
