I am using Puppeteer to crawl data from a web page, which requires Chrome. Every time I finish crawling, the Chrome process keeps consuming a lot of CPU and memory, as shown in the following figure:
My code is as follows:
const puppeteer = require("puppeteer");

// Crawl the Douban book chart, extract the current award period and numbers,
// and make sure Chrome is fully shut down afterwards.
(async () => {
  const browser = await puppeteer.launch({
    args: ["--no-sandbox"],
    dumpio: false,
  });
  const page = await browser.newPage();
  await page.goto("https://book.douban.com/chart");
  try {
    // NOTE(review): "-sharp" in the original selectors/regex is almost
    // certainly a "#" mangled by the forum's markup — restored below;
    // confirm the ID selector "#Num" against the page's actual HTML.
    const issue = await page.$eval(
      "div.main > div.wrap > ul.lot-award > li.currentAward > div.period > span",
      (el) => el.innerHTML
    );
    const num = await page.$eval(
      "div.main > div.wrap > ul.lot-award > li.currentAward > #Num.lot-nums",
      (el) => el.innerHTML
    );
    // Strip whitespace, punctuation, and leftover markup tokens from the
    // scraped innerHTML, then split the cleaned string on "/".
    const num1 = num.replace(/ /g, "");
    const num2 = num1.replace(/[\@\#\$\%\^\&\*\(\)\{\}\:\"\L\<\>\?\[\]]/g, "");
    const num3 = num2
      .replace(/spanclass/g, "")
      .replace(/span/g, "")
      .replace(/no/g, "")
      .replace(/=/g, "");
    const num4 = num3.substring(0, num3.length - 1); // drop trailing character
    const num5 = num4.split("/");
    console.log(num3);
    console.log(num4);
    console.log(num5);
    console.log(issue);
  } catch (err) {
    console.log("pid: ", process.pid, " :", " : \r\n ", err.stack);
  } finally {
    // The original bug: browser.close() was not awaited, so the script
    // finished while Chrome was still alive, leaving it holding CPU/memory.
    // Await both the page and browser shutdown so Chrome exits cleanly.
    await page.close();
    await browser.close();
  }
})();
What should I do about this? Does anyone have a good method or idea for making Chrome release these resources after each crawl? Thank you!