
Dropbox Upload Actor

aaron/dropbox-upload-actor

Streams a file from a URL to a Dropbox account.

Author: Aaron Jackson
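
The Actor reads its configuration from the INPUT record in its default key-value store. A minimal sketch of the expected input, based on the fields main.js reads below (the token, URL, and path values are placeholders):

{
    "accessToken": "<your Dropbox API access token>",
    "fileUrl": "https://example.com/some-file.pdf",
    "filePath": "/uploads/some-file.pdf"
}

accessToken is a Dropbox API token with write access to the account, and filePath is the destination path in Dropbox, which must start with a slash.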

Dockerfile

# This is a template for a Dockerfile used to run Actors on the Apify platform.
# The base image name below is set during the Actor build, based on user settings.
# IMPORTANT: The base image must set a correct working directory, such as /usr/src/app or /home/user
FROM apify/actor-node-basic:v0.21.10

# Second, copy just package.json and package-lock.json, since they should be
# the only files that affect "npm install" in the next step, to speed up the build
COPY package*.json ./

# Install NPM packages, skipping optional and development dependencies to
# keep the image small. Avoid logging too much, and print the dependency
# tree for debugging.
RUN npm --quiet set progress=false \
 && npm install --only=prod --no-optional \
 && echo "Installed NPM packages:" \
 && (npm list --all || true) \
 && echo "Node.js version:" \
 && node --version \
 && echo "NPM version:" \
 && npm --version

# Copy the source code to the container.
# Do this in the last step, so that builds are fast when only the source code changes.
COPY . ./

# NOTE: The CMD is already defined by the base image.
# Uncomment this for local node inspector debugging:
# CMD [ "node", "--inspect=0.0.0.0:9229", "main.js" ]
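
To experiment locally, the image can be built and run with standard Docker commands. This is only a sketch: the tag name is arbitrary, the -p flag matters only if the inspector CMD above is uncommented, and the Actor still expects an INPUT record, which the Apify platform normally supplies.

docker build -t dropbox-upload-actor .
docker run -p 9229:9229 dropbox-upload-actor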

package.json

{
    "name": "apify-project",
    "version": "0.0.1",
    "description": "",
    "author": "It's not you it's me",
    "license": "ISC",
    "dependencies": {
        "node-fetch": "latest",
        "dropbox": "latest",
        "apify": "0.21.10"
    },
    "scripts": {
        "start": "node main.js"
    }
}

main.js

const fetch = require('node-fetch');
const { Dropbox } = require('dropbox');
const Apify = require('apify');

Apify.main(async () => {
    const input = await Apify.getValue('INPUT');
    const accessToken = input.accessToken;
    // Dropbox destination paths must start with "/", e.g. "/uploads/file.pdf".
    const filePath = input.filePath ? input.filePath : '';
    const fileUrl = input.fileUrl;

    await uploadFile(fileUrl, filePath);

    async function uploadFile(fileUrl, filePath) {
        if (typeof fileUrl !== 'string') {
            throw new TypeError('Input field "fileUrl" must be a string.');
        }
        
        // Open a Dropbox client. The constructor is synchronous, so no await is needed.
        const dbx = new Dropbox({
            fetch,
            accessToken,
        });
  
        const UPLOAD_FILE_SIZE_LIMIT = 150 * 1024 * 1024; // 150 MB - Dropbox single-request upload limit
        const maxBlob = 8 * 1000 * 1000; // 8 MB - chunk size suggested by the Dropbox JavaScript SDK
  
        const buffer = await getFileBuffer(fileUrl);
        console.log(`File size: ${(buffer.byteLength / 1000000).toFixed(2)} MB`);
  
        // Decide between filesUpload and a chunked upload session.
        if (buffer.byteLength < UPLOAD_FILE_SIZE_LIMIT) {
            // Files under 150 MB can be uploaded in a single request.
            console.log('Uploading file.');
            await dbx.filesUpload({ path: filePath, contents: buffer });
            console.log('File successfully uploaded.');
        } else {
            // Split the buffer into chunks of at most maxBlob bytes.
            const chunks = [];
            let offset = 0;

            while (offset < buffer.byteLength) {
                const chunkSize = Math.min(maxBlob, buffer.byteLength - offset);
                chunks.push(buffer.slice(offset, offset + chunkSize));
                offset += chunkSize;
            }

            let sessionId;
            let uploadedBytes = 0;

            for (let index = 0; index < chunks.length; index++) {
                const chunk = chunks[index];

                if (index === 0) {
                    // Begin the upload session with the first chunk.
                    ({ session_id: sessionId } = await dbx.filesUploadSessionStart({ close: false, contents: chunk }));
                    console.log(`Opening session: ${sessionId}`);
                } else if (index < chunks.length - 1) {
                    // Append the chunk to the upload session. The cursor offset
                    // must equal the number of bytes uploaded so far.
                    const cursor = { session_id: sessionId, offset: uploadedBytes };
                    await dbx.filesUploadSessionAppendV2({ cursor, close: false, contents: chunk });
                } else {
                    // Append the final chunk and close the session.
                    const cursor = { session_id: sessionId, offset: uploadedBytes };
                    const commit = { path: filePath, mode: 'add', autorename: true, mute: false };
                    await dbx.filesUploadSessionFinish({ cursor, commit, contents: chunk });
                    console.log('File upload complete.');
                }

                uploadedBytes += chunk.byteLength;
                const percentUploaded = ((uploadedBytes / buffer.byteLength) * 100).toFixed(2);
                console.log(`Upload percentage: ${percentUploaded}%`);
            }
        }
    }
  
    async function getFileBuffer(fileUrl) {
        console.log(`Fetching file from: ${fileUrl}`);

        const response = await fetch(fileUrl);
        if (!response.ok) {
            throw new Error(`Failed to fetch file: ${response.status} ${response.statusText}`);
        }

        console.log('File successfully retrieved.');
        console.log('Creating buffer from file.');
        // node-fetch (v2) can read the whole body directly into a Buffer,
        // which avoids poking at the Blob's internal symbol properties.
        const buf = await response.buffer();

        console.log('Returning buffer.');
        return buf;
    }
});
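
As an optional sanity check (not part of the Actor itself), the upload can be verified with the Dropbox SDK's filesGetMetadata call. A minimal sketch, assuming the same access token and destination path as the Actor's input (the environment variable and path here are placeholders):

const fetch = require('node-fetch');
const { Dropbox } = require('dropbox');

const dbx = new Dropbox({ fetch, accessToken: process.env.DROPBOX_TOKEN });

// Fetch metadata for the uploaded file and print its name and size.
dbx.filesGetMetadata({ path: '/uploads/some-file.pdf' })
    .then((metadata) => console.log(`Uploaded ${metadata.name} (${metadata.size} bytes).`))
    .catch(console.error);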