forked from extern/se-scraper
Remove unneeded try catch block
Add proper error for ip matching test
This commit is contained in:
parent 50bda275a6
commit 0db6e068da
@@ -1,3 +1,4 @@
 'use strict';
 const meta = require('./metadata.js');
 const common = require('./common.js');
 var log = common.log;
@@ -41,12 +42,8 @@ module.exports = class Scraper {
         let settings = this.config[`${this.config.search_engine}_settings`];
         if (settings) {
             if (typeof settings === 'string') {
-                try {
-                    settings = JSON.parse(settings);
-                    this.config[`${this.config.search_engine}_settings`] = settings;
-                } catch (e) {
-                    console.error(e);
-                }
+                settings = JSON.parse(settings);
+                this.config[`${this.config.search_engine}_settings`] = settings;
             }
         }
     }
@@ -59,13 +56,8 @@ module.exports = class Scraper {
 
         await this.page.setViewport({ width: 1920, height: 1040 });
 
-        let do_continue = await this.load_search_engine();
-
-        if (!do_continue) {
-            console.error('Failed to load the search engine: load_search_engine()');
-        } else {
-            await this.scraping_loop();
-        }
+        await this.load_search_engine();
+        await this.scraping_loop();
 
         return this.results;
     }
@@ -120,24 +112,16 @@ module.exports = class Scraper {
         if (this.proxy && this.config.log_ip_address === true) {
             log(this.config, 3, `${this.metadata.ipinfo.ip} vs ${this.proxy}`);
 
-            try {
-                // if the ip returned by ipinfo is not a substring of our proxystring, get the heck outta here
-                if (!this.proxy.includes(this.metadata.ipinfo.ip)) {
-                    console.error(`Proxy ${this.proxy} does not work.`);
-                    return false;
-                } else {
-                    log(this.config, 1, `Using valid Proxy: ${this.proxy}`);
-                }
-            } catch (exception) {
+            // if the ip returned by ipinfo is not a substring of our proxystring, get the heck outta here
+            if (!this.proxy.includes(this.metadata.ipinfo.ip)) {
+                throw new Error(`Proxy output ip ${this.proxy} does not match with provided one`);
+            } else {
+                log(this.config, 1, `Using valid Proxy: ${this.proxy}`);
+            }
 
         }
 
-        try {
-            return await this.load_start_page();
-        } catch (e) {
-            console.error(e);
-            return false;
-        }
+        return await this.load_start_page();
     }
 
     /**
@@ -382,7 +366,6 @@ module.exports = class Scraper {
 // This is where we'll put the code to get around the tests.
 async function evadeChromeHeadlessDetection(page) {
 
-    try {
         // Pass the Webdriver Test.
         await page.evaluateOnNewDocument(() => {
             const newProto = navigator.__proto__;
@@ -509,8 +492,4 @@ async function evadeChromeHeadlessDetection(page) {
                 return null;
             };
         });
-
-    } catch (e) {
-        console.error(e);
-    }
 }
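The practical effect of this commit is that a proxy/IP mismatch and a failing start-page load now surface as thrown errors instead of console.error calls and false return values. Below is a minimal sketch of how calling code could handle that. It assumes the class in this file is required as Scraper, that the entry method shown in the second hunk is named run(), and that proxy and logging options are passed via the constructor config; none of those names are visible in this diff, only the throwing behaviour is.

'use strict';

// Hypothetical caller; the require path, constructor signature and run() name
// are assumptions for illustration, not part of this commit.
const Scraper = require('./scraper.js');

(async () => {
    const scraper = new Scraper({
        search_engine: 'google',       // the diff reads this.config.search_engine
        log_ip_address: true,          // the diff reads this.config.log_ip_address
        proxy: 'http://1.2.3.4:3128',  // example proxy string
    });

    try {
        const results = await scraper.run();
        console.log(results);
    } catch (err) {
        // After this commit, an IP reported by ipinfo that is not a substring of the
        // configured proxy string aborts the run with
        // "Proxy output ip ... does not match with provided one" instead of being
        // logged and swallowed.
        console.error('Scraping failed:', err.message);
    }
})();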