Icon Burglar

yuri/icon-burglar

This Actor takes the URLs prepared by a crawler, downloads the images, and finally zips them.

Author: Yuri Manders
  • Users: 1
  • Runs: 67
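
The Actor expects its INPUT to contain the _id of a finished crawler execution, which is what a crawler finish webhook passes along (see main.js below). A minimal example input might look like the following; the execution ID is just a placeholder:

Example INPUT

{
    "_id": "EXAMPLE_EXECUTION_ID"
}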

Dockerfile

# This is a template Dockerfile used to run acts in the Apify Actor system.
# The base image name below is set during the act build, based on user settings.
# IMPORTANT: The base image must set a correct working directory, such as /usr/src/app or /home/user
FROM apify/actor-node-basic:v0.21.10

# Second, copy just package.json and package-lock.json, since these are
# the only files that affect "npm install" in the next step, to speed up the build
COPY package*.json ./

# Install NPM packages, skip optional and development dependencies to
# keep the image small. Avoid logging too much and print the dependency
# tree for debugging
RUN npm --quiet set progress=false \
 && npm install --only=prod --no-optional \
 && echo "Installed NPM packages:" \
 && (npm list --all || true) \
 && echo "Node.js version:" \
 && node --version \
 && echo "NPM version:" \
 && npm --version

# Copy source code to container
# Do this in the last step, to have fast build if only the source code changed
COPY . ./

# NOTE: The CMD is already defined by the base image.
# Uncomment this for local node inspector debugging:
# CMD [ "node", "--inspect=0.0.0.0:9229", "main.js" ]

package.json

{
    "name": "apify-project",
    "version": "0.0.1",
    "description": "",
    "author": "It's not you it's me",
    "license": "ISC",
    "dependencies": {
        "apify": "0.21.10",
        "underscore": "latest",
        "request-promise": "latest",
        "bluebird": "latest"
    },
    "scripts": {
        "start": "node main.js"
    }
}

main.js

// Load all the libraries this Actor uses
const Apify = require('apify');
const _ = require('underscore');
const rp = require('request-promise');
const Promise = require('bluebird');

Apify.main(async () => {
    // Get act input and validate it
    const input = await Apify.getValue('INPUT');
    console.log('Input:');
    console.dir(input);
    if (!input || !input._id) {
        throw new Error('Input is missing the "_id" attribute. Did you start it from crawler finish webhook?');
    }
    const executionId = input._id;
    
    // Print info about crawler run
    const crawlerRunDetails = await Apify.client.crawlers.getExecutionDetails({ executionId });
    if (!crawlerRunDetails) {
        throw new Error(`There is no crawler run with ID: "${executionId}"`);
    }
    console.log(`Details of the crawler run (ID: ${executionId}):`);
    console.dir(crawlerRunDetails);
    
    // Fetch the first page of results from the crawler run
    console.log(`Fetching results from crawler run...`);

    const limit = 100;
    const offset = 0;

    const results = await Apify.client.crawlers.getExecutionResults({
        executionId,
        limit,
        offset,
    });
    
    // Download each image and store it in the default key-value store.
    // Each pageFunctionResult item is expected to be an object with a "url"
    // (image URL) and a "file" (target file name without extension).
    await Promise.each(results.items[0].pageFunctionResult, async (value) => {
        console.log('url', value.url);

        // Download the image as a raw buffer
        const file = await rp({
            url: value.url,
            encoding: null,
        });

        // Store it under the right filename with the image content type
        await Apify.setValue(value.file + '.png', file, { contentType: 'image/png' });
    });
    
    // Zip the key-value store data using a different Actor
    // (https://www.apify.com/jaroslavhejlek/zip-key-value-store)
    const run = await Apify.call('jaroslavhejlek/zip-key-value-store', {
        keyValueStoreId: process.env.APIFY_DEFAULT_KEY_VALUE_STORE_ID,
        filesPerZipFile: 2000,
    });
    console.dir(run);
    
});