import { RequestQueue, CheerioCrawler, log } from 'crawlee';

// Only print warnings and errors; suppress Crawlee's info-level logging.
log.setLevel(log.LEVELS.WARNING);

// When true, print a summary table after the crawl; otherwise log each result as it is scraped.
const tableResult = true;

// Open the default request queue and enqueue the start URLs.
const requestQueue = await RequestQueue.open();
const urls = ['https://crawlee.dev', 'https://google.com', 'https://c57.fr'];

for (const url of urls) {
    await requestQueue.addRequest({ url });
}

// Collects a { url, title } record for every page the crawler visits.
let websites = [];

const crawler = new CheerioCrawler({
    requestQueue,

    // Called for each successfully loaded page; `$` is a Cheerio handle on the parsed HTML.
    async requestHandler({ $, request }) {
        const title = $('title').text();
        if (!tableResult) {
            // Note: log.info() is suppressed by the WARNING level set above; console.log() still prints.
            log.info('Result:');
            console.log(`The title of "${request.url}" is: ${title}.`);
        }
        websites.push({ url: request.url, title });
    },
});

// Start the crawl. URLs passed to run() are added to the queue alongside those enqueued above.
await crawler.run(['https://apify.com']);

// Drop the 'https://' prefix, sort the results alphabetically by URL, and print them.
websites = websites
    .map((website) => ({ ...website, url: website.url.slice(8) }))
    .sort((a, b) => a.url.localeCompare(b.url));
if (tableResult) console.table(websites);
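
// Note: the top-level await used above requires an ES module context
// (e.g. a .mjs file or "type": "module" in package.json).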