Protocol error (Target.createTarget): Target closed error on AWS Lambda with the proxy-chain package

I am trying to run Puppeteer with the proxy-chain package on AWS Lambda, but I am getting this error message:

"errorType": "Error",
"errorMessage": "Protocol error (Target.createTarget): Target closed."
Code:

const chromium = require('chrome-aws-lambda');
const { addExtra } = require("puppeteer-extra");
const puppeteerExtra = addExtra(chromium.puppeteer);
const proxyChain = require('proxy-chain');

const StealthPlugin = require("puppeteer-extra-plugin-stealth");
puppeteerExtra.use(StealthPlugin());

exports.handler = async (event, context, callback) => {
    let finalResult = [];
    const url = ``;
    let browser;
    const oldProxyUrl = ''; // --> bright data proxy
    const newProxyUrl = await proxyChain.anonymizeProxy(oldProxyUrl);

    console.log("newProxyUrl", newProxyUrl);

    try {
        browser = await puppeteerExtra.launch({
            args: ['--no-sandbox', '--disable-setuid-sandbox', `--proxy-server=${newProxyUrl}`],
            defaultViewport: chromium.defaultViewport,
            executablePath: await chromium.executablePath,
            headless: chromium.headless
        });

        const page = await browser.newPage();

        await page.goto(url);

        finalResult = await extractElements(page);

    } catch (error) {
        return callback(error);
    } finally {
        await browser.close();
    }

    return callback(null, finalResult);
};
conscious-sapphire · 3y ago
PuppeteerCrawler | API | Crawlee
Provides a simple framework for parallel crawling of web pages using headless Chrome with Puppeteer. The URLs to crawl are fed either from a static list of URLs or from a dynamic queue of URLs, enabling recursive crawling of websites. Since PuppeteerCrawler uses headless Chrome to download web pages an...
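
For reference, here is a minimal, untested sketch of how that could look on Lambda with the crawlee package, reusing the chrome-aws-lambda Chromium build from your snippet. The proxy URL, the target URL, and the handler body are placeholders, not your real values; swap in your Bright Data credentials and your own extraction logic (e.g. your extractElements helper).

const chromium = require('chrome-aws-lambda');
const { PuppeteerCrawler, ProxyConfiguration } = require('crawlee');

exports.handler = async (event) => {
    // Placeholder proxy URL -- substitute your Bright Data proxy credentials.
    const proxyConfiguration = new ProxyConfiguration({
        proxyUrls: ['http://username:password@proxy.example.com:8000'],
    });

    const results = [];

    const crawler = new PuppeteerCrawler({
        proxyConfiguration,
        launchContext: {
            // Use the puppeteer-core bundled with chrome-aws-lambda,
            // plus its Lambda-compatible launch options.
            launcher: chromium.puppeteer,
            launchOptions: {
                args: chromium.args,
                defaultViewport: chromium.defaultViewport,
                executablePath: await chromium.executablePath,
                headless: chromium.headless,
            },
        },
        async requestHandler({ page, request }) {
            // Replace with your own extraction logic.
            results.push({ url: request.url, title: await page.title() });
        },
    });

    await crawler.run(['https://example.com']); // placeholder URL

    return results;
};

The crawler handles launching and closing the browser itself, so you don't need the manual try/catch/finally around browser.close(), and the proxy is applied per crawl via proxyConfiguration instead of a hand-built --proxy-server argument.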
