// se-scraper — library entry point (index.js)
const { Cluster } = require('./src/puppeteer-cluster/dist/index.js');
const handler = require('./src/node_scraper.js');
var fs = require('fs');
var os = require("os");
exports.scrape = async function(user_config, callback) {
// options for scraping
let config = {
// the user agent to scrape with
user_agent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36',
// if random_user_agent is set to True, a random user agent is chosen
random_user_agent: false,
// whether to select manual settings in visible mode
set_manual_settings: false,
// log ip address data
log_ip_address: false,
// log http headers
log_http_headers: false,
// how long to sleep between requests. a random sleep interval within the range [a,b]
// is drawn before every request. empty string for no sleeping.
sleep_range: '',
// which search engine to scrape
search_engine: 'google',
compress: false, // compress
debug: false,
verbose: true,
2019-03-07 13:16:12 +01:00
keywords: ['search engine scraping scrapeulous.com'],
// whether to start the browser in headless mode
headless: true,
// specify flags passed to chrome here
chrome_flags: [],
// the number of pages to scrape for each keyword
num_pages: 1,
// path to output file, data will be stored in JSON
output_file: '',
// whether to prevent images, css, fonts and media from being loaded
// will speed up scraping a great deal
block_assets: true,
// path to js module that extends functionality
// this module should export the functions:
// get_browser, handle_metadata, close_browser
//custom_func: resolve('examples/pluggable.js'),
custom_func: '',
// path to a proxy file, one proxy per line. Example:
// socks5://78.94.172.42:1080
// http://118.174.233.10:48400
proxy_file: '',
proxies: [],
// check if headless chrome escapes common detection techniques
// this is a quick test and should be used for debugging
test_evasion: false,
2019-03-07 15:50:36 +01:00
apply_evasion_techniques: true,
// settings for puppeteer-cluster
puppeteer_cluster_config: {
timeout: 30 * 60 * 1000, // max timeout set to 30 minutes
monitor: false,
concurrency: Cluster.CONCURRENCY_BROWSER,
2019-03-07 13:16:12 +01:00
maxConcurrency: 1,
}
};
2018-12-24 14:25:02 +01:00
// overwrite default config
for (var key in user_config) {
config[key] = user_config[key];
}
2018-12-24 14:25:02 +01:00
if (fs.existsSync(config.keyword_file)) {
config.keywords = read_keywords_from_file(config.keyword_file);
}
2018-12-24 14:25:02 +01:00
if (fs.existsSync(config.proxy_file)) {
config.proxies = read_keywords_from_file(config.proxy_file);
if (config.verbose) {
console.log(`${config.proxies.length} proxies loaded.`);
}
}
2018-12-24 14:25:02 +01:00
if (!callback) {
// called when results are ready
callback = function (err, response) {
if (err) {
console.error(err)
}
2018-12-24 14:25:02 +01:00
console.dir(response.results, {depth: null, colors: true});
}
}
await handler.handler(config, undefined, callback );
2018-12-24 14:25:02 +01:00
};
/**
 * Read newline-separated keywords (or proxies) from a file.
 *
 * BUGFIX: split on /\r?\n/ instead of os.EOL. Splitting on the *host's*
 * line ending breaks on files created on another platform — e.g. a CRLF
 * file read on Linux left a trailing '\r' on every entry, which corrupts
 * proxy URLs. Whitespace-only lines are dropped; kept lines are returned
 * verbatim (not trimmed), matching the original behavior.
 *
 * @param {string} fname - path to the file, one entry per line
 * @returns {string[]} non-blank lines of the file
 */
function read_keywords_from_file(fname) {
    let kws = fs.readFileSync(fname, 'utf8').split(/\r?\n/);
    // clean keywords
    kws = kws.filter((kw) => {
        return kw.trim().length > 0;
    });
    return kws;
}