# Run the apify/playwright-scraper Actor with inline input JSON and print the
# resulting dataset. Requires the Apify CLI to be installed and authenticated
# (`apify login`). The input is passed on stdin as strict RFC 8259 JSON:
# note single-escaped \n and \" inside the pageFunction / hook strings so the
# decoded values are valid JavaScript source.
echo '{
  "startUrls": [
    {
      "url": "https://crawlee.dev/js"
    }
  ],
  "globs": [
    {
      "glob": "https://crawlee.dev/js/*/*"
    }
  ],
  "pseudoUrls": [],
  "excludes": [
    {
      "glob": "/**/*.{png,jpg,jpeg,pdf}"
    }
  ],
  "linkSelector": "a",
  "respectRobotsTxtFile": true,
  "pageFunction": "async function pageFunction(context) {\n    const { page, request, log } = context;\n    const title = await page.title();\n    log.info(`URL: ${request.url} TITLE: ${title}`);\n    return {\n        url: request.url,\n        title\n    };\n}",
  "proxyConfiguration": {
    "useApifyProxy": true
  },
  "initialCookies": [],
  "launcher": "chromium",
  "waitUntil": "networkidle",
  "preNavigationHooks": "// We need to return array of (possibly async) functions here.\n// The functions accept two arguments: the \"crawlingContext\" object\n// and \"gotoOptions\".\n[\n    async (crawlingContext, gotoOptions) => {\n        const { page } = crawlingContext;\n        // ...\n    },\n]",
  "postNavigationHooks": "// We need to return array of (possibly async) functions here.\n// The functions accept a single argument: the \"crawlingContext\" object.\n[\n    async (crawlingContext) => {\n        const { page } = crawlingContext;\n        // ...\n    },\n]",
  "customData": {}
}' |
apify call apify/playwright-scraper --silent --output-dataset