$ echo '{
< "datasetType": "organisations",
< "listingItemsPerPage": 50,
< "inputExtendFromFunction": "\\n/**\\n * Inputs:\\n *\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async ({ io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Load Actor config from GitHub URL (public)\\n// const config = await sendRequest.get('\''https://raw.githubusercontent.com/username/project/main/config.json'\'').json();\\n// \\n// // Increase concurrency during off-peak hours\\n// // NOTE: Imagine we'\''re targetting a small server, that can be slower during the day\\n// const hours = new Date().getUTCHours();\\n// const isOffPeak = hours < 6 || hours > 20;\\n// config.maxConcurrency = isOffPeak ? 8 : 3;\\n// \\n// return config;\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "startUrlsFromFunction": "\\n/**\\n * Inputs:\\n *\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async ({ io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Create and load URLs from a Dataset by combining multiple fields\\n// const dataset = await io.openDataset(datasetNameOrId);\\n// const data = await dataset.getData();\\n// const urls = data.items.map((item) => `https://example.com/u/${item.userId}/list/${item.listId}`);\\n// return urls;\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "requestMaxEntries": 50,
< "requestTransform": "\\n/**\\n * Inputs:\\n * `request` - Request holding URL to be scraped.\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async (request, { io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Tag requests\\n// // (maybe because we use RequestQueue that pools multiple scrapers)\\n// request.userData.tag = \\"VARIANT_A\\";\\n// return requestQueue;\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "requestTransformBefore": "\\n/**\\n * Inputs:\\n *\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async ({ io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Fetch data or run code BEFORE requests are processed.\\n// state.categories = await sendRequest.get('\''https://example.com/my-categories'\'').json();\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "requestTransformAfter": "\\n/**\\n * Inputs:\\n *\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async ({ io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Fetch data or run code AFTER requests are processed.\\n// delete state.categories;\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "requestFilter": "\\n/**\\n * Inputs:\\n * `request` - Request holding URL to be scraped.\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async (request, { io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Filter requests based on their tag\\n// // (maybe because we use RequestQueue that pools multiple scrapers)\\n// return request.userData.tag === \\"VARIANT_A\\";\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "requestFilterBefore": "\\n/**\\n * Inputs:\\n *\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async ({ io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Fetch data or run code BEFORE requests are processed.\\n// state.categories = await sendRequest.get('\''https://example.com/my-categories'\'').json();\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "requestFilterAfter": "\\n/**\\n * Inputs:\\n *\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async ({ io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Fetch data or run code AFTER requests are processed.\\n// delete state.categories;\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "outputMaxEntries": 50,
< "outputTransform": "\\n/**\\n * Inputs:\\n * `entry` - Scraped entry.\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async (entry, { io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Add extra custom fields like aggregates\\n// return {\\n// ...entry,\\n// imagesCount: entry.images.length,\\n// };\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "outputTransformBefore": "\\n/**\\n * Inputs:\\n *\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async ({ io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Fetch data or run code BEFORE entries are scraped.\\n// state.categories = await sendRequest.get('\''https://example.com/my-categories'\'').json();\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "outputTransformAfter": "\\n/**\\n * Inputs:\\n *\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async ({ io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Fetch data or run code AFTER entries are scraped.\\n// delete state.categories;\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "outputFilter": "\\n/**\\n * Inputs:\\n * `entry` - Scraped entry.\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async (entry, { io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Filter entries based on number of images they have (at least 5)\\n// return entry.images.length > 5;\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "outputFilterBefore": "\\n/**\\n * Inputs:\\n *\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async ({ io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Fetch data or run code BEFORE entries are scraped.\\n// state.categories = await sendRequest.get('\''https://example.com/my-categories'\'').json();\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "outputFilterAfter": "\\n/**\\n * Inputs:\\n *\\n * `ctx.io` - Apify Actor class, see https://docs.apify.com/sdk/js/reference/class/Actor.\\n * `ctx.input` - The input object that was passed to this Actor.\\n * `ctx.state` - An object you can use to persist state across all your custom functions.\\n * `ctx.sendRequest` - Fetch remote data. Uses '\''got-scraping'\'', same as Apify'\''s `sendRequest`.\\n * See https://crawlee.dev/docs/guides/got-scraping\\n * `ctx.itemCacheKey` - A function you can use to get cacheID for current `entry`.\\n * It takes the entry itself, and a list of properties to be used for hashing.\\n * By default, you should pass `input.cachePrimaryKeys` to it.\\n *\\n */\\n// async ({ io, input, state, sendRequest, itemCacheKey }) => {\\n// // Example: Fetch data or run code AFTER entries are scraped.\\n// delete state.categories;\\n//\\n// /* ========== SEE BELOW FOR MORE EXAMPLES ========= */\\n//\\n// /**\\n// * ======= ACCESSING DATASET ========\\n// * To save/load/access entries in Dataset.\\n// * Docs:\\n// * - https://docs.apify.com/platform/storage/dataset\\n// * - https://docs.apify.com/sdk/js/docs/guides/result-storage#dataset\\n// * - https://docs.apify.com/sdk/js/docs/examples/map-and-reduce\\n// */\\n// // const dataset = await io.openDataset('\''MyDatasetId'\'');\\n// // const info = await dataset.getInfo();\\n// // console.log(info.itemCount);\\n// // // => 0\\n//\\n// /**\\n// * ======= ACCESSING REMOTE DATA ========\\n// * Use `sendRequest` to get data from the internet:\\n// * Docs:\\n// * - https://github.com/apify/got-scraping\\n// */\\n// // const catFact = await sendRequest.get('\''https://cat-fact.herokuapp.com/facts/5887e1d85c873e0011036889'\'').json();\\n// // console.log(catFact.text);\\n// // // => \\"Cats make about 100 different sounds. Dogs make only about 10.\\",\\n//\\n// /**\\n// * ======= USING CACHE ========\\n// * To save the entry to the KeyValue cache (or retrieve it), you can use\\n// * `itemCacheKey` to create the entry'\''s ID for you:\\n// */\\n// // const cacheId = itemCacheKey(item, input.cachePrimaryKeys);\\n// // const cache = await io.openKeyValueStore('\''MyStoreId'\'');\\n// // cache.setValue(cacheId, entry);\\n// };",
< "maxRequestRetries": 10,
< "maxRequestsPerMinute": 120,
< "minConcurrency": 1,
< "maxConcurrency": 5,
< "requestHandlerTimeoutSecs": 14400,
< "logLevel": "info",
< "errorReportingDatasetId": "REPORTING"
<}' |
<apify call jurooravec/skcris-scraper --silent --output-dataset
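
The same run can also be triggered programmatically. Below is a minimal sketch using the Apify JS client, not taken from this Actor's docs: it assumes the `apify-client` package is installed, `APIFY_TOKEN` is set in the environment, and an ESM context (for top-level await); the input is trimmed to a few fields for brevity.

```js
// Minimal sketch - call the Actor and print its dataset (assumptions noted above).
import { ApifyClient } from 'apify-client';

const client = new ApifyClient({ token: process.env.APIFY_TOKEN });

// `.call()` starts the run and waits for it to finish, like `apify call`.
const run = await client.actor('jurooravec/skcris-scraper').call({
  datasetType: 'organisations',
  listingItemsPerPage: 50,
  maxConcurrency: 5,
});

// Fetch the scraped entries from the run's default dataset -
// the programmatic equivalent of the `--output-dataset` flag above.
const { items } = await client.dataset(run.defaultDatasetId).listItems();
console.log(`Scraped ${items.length} entries`);
```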