// Example: run the same scrape job against multiple search engines,
// reusing a single ScrapeManager (one browser instance) for all of them.
const se_scraper = require('./../src/node_scraper.js');

(async () => {
    // Configuration for the shared browser/scraper instance.
    const browser_config = {
        random_user_agent: true,   // rotate user agents between requests
        write_meta_data: true,     // include scrape metadata in the output
        sleep_range: '[1,1]',      // sleep 1 second between requests
        debug_level: 1,
        headless: true,
        output_file: `examples/results/multiple_search_engines.json`,
    };

    // Job definition; `search_engine` is overwritten on each loop iteration.
    const scrape_job = {
        search_engine: 'google',
        keywords: ['news', 'se-scraper'],
        num_pages: 1,
    };

    const scraper = new se_scraper.ScrapeManager(browser_config);
    await scraper.start();

    try {
        // Run the identical job once per engine, sharing the browser session.
        for (const se of ['google', 'bing']) {
            scrape_job.search_engine = se;
            const results = await scraper.scrape(scrape_job);
            console.dir(results, { depth: null, colors: true });
        }
    } finally {
        // Always shut the browser down, even if a scrape throws —
        // otherwise a failed job leaks the headless browser process.
        await scraper.quit();
    }
})();
|
|
|
|
|