Nikolai Tschacher 2019-06-29 17:01:25 +02:00
commit 5e8ff1cb34
6 changed files with 949 additions and 28 deletions

package-lock.json (generated): 874 changes. File diff suppressed because it is too large.


@@ -21,12 +21,16 @@
   },
   "license": "ISC",
   "dependencies": {
-    "chai": "^4.2.0",
     "cheerio": "^1.0.0-rc.2",
     "debug": "^4.1.1",
     "got": "^9.6.0",
     "proxy-chain": "^0.2.7",
     "puppeteer": "^1.17.0",
     "puppeteer-cluster": "^0.13.0"
   },
   "devDependencies": {
+    "chai": "^4.2.0",
+    "chai-string": "^1.5.0",
     "mocha": "^6.1.4"
   }
 }
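chai-string is the only genuinely new dependency here (chai itself just moves to devDependencies). A minimal sketch of how the plugin is wired up and what it adds, mirroring the test changes further down:

// chai-string extends chai with string assertions such as startsWith.
const chai = require('chai');
chai.use(require('chai-string'));
const assert = chai.assert;

// Passes, because the first argument begins with the second.
assert.startsWith('<!DOCTYPE html><html lang="en">', '<!DOCTYPE html><html');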


@@ -47,6 +47,21 @@ class BingScraper extends Scraper {
     async load_start_page() {
         let startUrl = this.build_start_url('https://www.bing.com/search?') || 'https://www.bing.com/';
 
+        if (this.config.bing_settings) {
+            startUrl = `https://www.${this.config.bing_settings.bing_domain}/search?`;
+            if (this.config.bing_settings.bing_domain) {
+                startUrl = `https://www.${this.config.bing_settings.bing_domain}/search?`;
+            } else {
+                startUrl = `https://www.bing.com/search?`;
+            }
+
+            for (var key in this.config.bing_settings) {
+                if (key !== 'bing_domain') {
+                    startUrl += `${key}=${this.config.bing_settings[key]}&`
+                }
+            }
+        }
+
         try {
             await this.page.goto(startUrl);
             await this.page.waitForSelector('input[name="q"]', { timeout: this.STANDARD_TIMEOUT });
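For illustration, a small standalone sketch of what the new block computes; the bing_settings values below are hypothetical and not taken from this commit:

// Hypothetical settings: use bing.de and ask for 20 results per page.
const config = {
    bing_settings: {
        bing_domain: 'bing.de',
        count: 20,
    },
};

// Same logic as the loop above: every key except bing_domain becomes a query parameter.
let startUrl = `https://www.${config.bing_settings.bing_domain}/search?`;
for (var key in config.bing_settings) {
    if (key !== 'bing_domain') {
        startUrl += `${key}=${config.bing_settings[key]}&`;
    }
}
console.log(startUrl); // https://www.bing.de/search?count=20&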


@@ -33,6 +33,7 @@ module.exports = class Scraper {
         this.PROXY_TIMEOUT = 15000;
         this.SOLVE_CAPTCHA_TIME = 45000;
 
+        this.html_output = {};
         this.results = {};
         this.result_rank = 1;
         // keep track of the requests done
@@ -71,7 +72,10 @@ module.exports = class Scraper {
         await this.scraping_loop();
 
-        return this.results;
+        return {
+            'results': this.results,
+            'html_output': this.html_output,
+        };
     }
 
     /**
@@ -157,6 +161,7 @@ module.exports = class Scraper {
             this.num_keywords++;
             this.keyword = keyword;
             this.results[keyword] = {};
+            this.html_output[keyword] = {};
             this.result_rank = 1;
 
             if (this.pluggable && this.pluggable.before_keyword_scraped) {
@@ -191,6 +196,11 @@
             }
 
             let html = await this.page.content();
+
+            if (this.config.html_output) {
+                this.html_output[keyword][page_num] = html;
+            }
+
             let parsed = this.parse(html);
             this.results[keyword][this.page_num] = parsed ? parsed : await this.parse_async(html);
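Taken together, these Scraper changes mean run() now resolves to both the parsed results and, when config.html_output is set, the raw HTML, each keyed first by keyword and then by page number. A rough sketch of the returned shape; the keyword and page values below are illustrative:

// Illustrative shape of the object resolved by run() after this change.
const example_return_value = {
    results: {
        'some keyword': {
            1: { /* parsed SERP data for page 1 */ },
            2: { /* parsed SERP data for page 2 */ },
        },
    },
    html_output: {
        'some keyword': {
            // raw page HTML, only populated when config.html_output is truthy
            1: '<!DOCTYPE html><html> ... </html>',
            2: '<!DOCTYPE html><html> ... </html>',
        },
    },
};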


@@ -327,6 +327,7 @@ class ScrapeManager {
         Object.assign(this.config, scrape_config);
 
         var results = {};
+        var html_output = {};
         var num_requests = 0;
         var metadata = {};
@@ -391,11 +392,13 @@
             scraperInstances.push(obj);
         }
 
-        let resolved = await Promise.all(execPromises);
+        let promiseReturns = await Promise.all(execPromises);
 
-        for (var group of resolved) {
-            for (var key in group) {
-                results[key] = group[key];
+        // Merge results per keyword
+        for (let promiseReturn of promiseReturns) {
+            for (let keyword of this.config.keywords) {
+                results[keyword] = promiseReturn.results[keyword];
+                html_output[keyword] = promiseReturn.html_output[keyword];
             }
         }
@@ -448,6 +451,7 @@
         return {
             results: results,
+            html_output: (this.config.html_output) ? html_output : undefined,
             metadata: metadata || {},
         };
     }
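End to end, a caller only has to set html_output in its scrape_config to get the raw HTML back next to the parsed results. A minimal usage sketch, assuming the se_scraper.scrape(config, scrape_config) entry point exercised by the tests below and an otherwise default browser config:

const se_scraper = require('se-scraper');

(async () => {
    let browser_config = {
        headless: true,
    };

    let scrape_config = {
        search_engine: 'google',
        keywords: ['example query'],
        num_pages: 1,
        html_output: true,   // also adds response.html_output
    };

    let response = await se_scraper.scrape(browser_config, scrape_config);

    // response.results and response.html_output share the same keyword/page keys.
    for (let keyword in response.html_output) {
        for (let page in response.html_output[keyword]) {
            console.log(keyword, page, response.html_output[keyword][page].length);
        }
    }
})();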


@@ -1,5 +1,7 @@
 const se_scraper = require('./../index.js');
-var assert = require('chai').assert;
+const chai = require('chai');
+chai.use(require('chai-string'));
+const assert = chai.assert;
 
 /*
  * Use chai and mocha for tests.
@@ -184,8 +186,46 @@ function test_case_effective_query(response) {
     }
 }
 
-(async () => {
-    await normal_search_test();
-    await no_results_test();
-    await effective_query_test();
-})();
+async function html_output_query_test() {
+    let config = {
+        compress: false,
+        debug_level: 1,
+        keyword_file: '',
+        headless: true,
+        output_file: '',
+        block_assets: true,
+        user_agent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36',
+        random_user_agent: false,
+    };
+
+    let scrape_config = {
+        search_engine: 'google',
+        keywords: normal_search_keywords,
+        num_pages: 3,
+        html_output: true,
+    };
+
+    let output = await se_scraper.scrape(config, scrape_config);
+    normal_search_test_case( output );
+    check_html_output_test_case( output );
+}
+
+function check_html_output_test_case( response ) {
+    for (let query in response.html_output) {
+
+        assert.containsAllKeys(response.html_output, normal_search_keywords, 'not all keywords were scraped.');
+
+        for (let page_number in response.html_output[query]) {
+            assert.isNumber(parseInt(page_number), 'page_number must be numeric');
+            assert.startsWith(response.html_output[query][page_number], '<!DOCTYPE html><html');
+        }
+    }
+}
+
+describe('Google', function(){
+    this.timeout(30000);
+    it('normal search', normal_search_test);
+    it('no results', no_results_test);
+    it('effective query', effective_query_test);
+    it('html output query', html_output_query_test);
+});
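The new cases hook into the same mocha/chai tooling pulled in via devDependencies above; running something like "npx mocha" against this test file (the exact path and npm script are not part of this commit) executes all four cases, including the new html output check.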