Mirror of https://github.com/Lissy93/web-check.git (synced 2025-08-09 13:04:52 +02:00)
Write lambda functions for getting DNS records and Robots.txt
server/lambda/read-robots-txt.js (new file, 37 lines)
@@ -0,0 +1,37 @@
const fetch = require('node-fetch');

// Lambda handler: fetches the robots.txt of the site passed via the ?url= query parameter.
exports.handler = async function(event, context) {
  const siteURL = event.queryStringParameters.url;

  if (!siteURL) {
    return {
      statusCode: 400,
      body: 'Missing URL parameter',
    };
  }

  // Build the robots.txt URL from the protocol and hostname of the requested site.
  const parsedURL = new URL(siteURL);
  const robotsURL = `${parsedURL.protocol}//${parsedURL.hostname}/robots.txt`;

  try {
    const response = await fetch(robotsURL);
    const text = await response.text();

    if (response.ok) {
      // Pass the raw robots.txt content straight through to the client.
      return {
        statusCode: 200,
        body: text,
      };
    } else {
      // Propagate the upstream status code when the fetch did not succeed.
      return {
        statusCode: response.status,
        body: `Failed to fetch robots.txt`,
      };
    }
  } catch (error) {
    // Invalid URLs and network failures end up here.
    return {
      statusCode: 500,
      body: `Error fetching robots.txt: ${error.toString()}`,
    };
  }
};
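Deployed as a serverless function, this handler would typically be invoked with the target site as a query parameter, e.g. GET .../read-robots-txt?url=https://example.com (the exact route depends on the project's function routing, so treat the path as an assumption).

The commit message also mentions a lambda for fetching DNS records, but that file is not shown in this diff. As a rough sketch only, a handler of the same shape could use Node's built-in dns.promises API; the record types, response format, and naming below are assumptions, not the code from this commit.

// Illustrative sketch only; not the DNS-records file from this commit.
const dns = require('dns').promises;

exports.handler = async function(event, context) {
  const siteURL = event.queryStringParameters.url;

  if (!siteURL) {
    return {
      statusCode: 400,
      body: 'Missing URL parameter',
    };
  }

  const hostname = new URL(siteURL).hostname;

  try {
    // resolveAny() returns whatever record types the resolver reports (A, AAAA, MX, TXT, NS, ...).
    const records = await dns.resolveAny(hostname);
    return {
      statusCode: 200,
      body: JSON.stringify(records),
    };
  } catch (error) {
    return {
      statusCode: 500,
      body: `Error resolving DNS records: ${error.toString()}`,
    };
  }
};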