
Firestore Import
Pricing
Pay per usage
Go to Store

Firestore Import
Imports dataset items to Firestore DB.
0.0 (0)
Pricing
Pay per usage
2
Total users
28
Monthly users
1
Last modified
2 years ago
.eslintrc
{ "extends": "@apify"}
.gitignore
apify_storage
node_modules
.npmignore
# This file tells Git which files shouldn't be added to source control
.idea
node_modules
Dockerfile
# Dockerfile with the instructions to build the Docker image that carries all
# the code and configuration needed to run this actor.
# Full Dockerfile reference: https://docs.docker.com/engine/reference/builder/

# Base image. Apify provides several convenience images:
#   apify/actor-node-basic        (Node.js 10 on Alpine Linux, small and fast)
#   apify/actor-node-chrome       (Node.js 10 + Chrome on Debian)
#   apify/actor-node-chrome-xvfb  (Node.js 10 + Chrome + Xvfb on Debian)
# See https://apify.com/docs/actor#base-images — any Docker Hub image works too.
FROM apify/actor-node-basic

# Copy only package.json and package-lock.json first; they are the sole inputs
# to the npm install step, so this layer is cached across source-only changes.
COPY package*.json ./

# Install production dependencies only (skip optional and dev deps) to keep
# the image small; keep logging terse but print versions for debugging.
RUN npm --quiet set progress=false \
 && npm install --only=prod --no-optional \
 && echo "Installed NPM packages:" \
 && npm list \
 && echo "Node.js version:" \
 && node --version \
 && echo "NPM version:" \
 && npm --version

# Copy the rest of the source code after npm install so that simple source
# edits rebuild quickly.
COPY . ./

# How to start the actor.
CMD npm start
INPUT_SCHEMA.json
{ "title": "Firestore Import input", "description": "Imports dataset to Firestore DB", "type": "object", "schemaVersion": 1, "properties": { "datasetId": { "title": "Dataset", "type": "string", "description": "Dataset ID of dataset you want to import to Firestore", "editor": "textfield" }, "apiKey": { "title": "Api key", "type": "string", "description": "Firestore API key", "editor": "textfield" }, "authDomain": { "title": "Auth domain", "type": "string", "description": "Firestore authentication domain", "editor": "textfield" }, "projectId": { "title": "Project ID", "type": "string", "description": "Firestore project ID", "editor": "textfield" }, "collectionName": { "title": "Collection name", "type": "string", "description": "Firestore collection name", "editor": "textfield" } }, "required": ["datasetId", "apiKey", "authDomain", "projectId", "collectionName"]}
apify.json
{ "name": "firestore-import", "version": "0.0", "buildTag": "latest", "env": null, "template": "hello_world"}
main.js
const Apify = require('apify');
const firebase = require('firebase');

/**
 * Firestore Import actor entry point.
 *
 * Reads actor input, optionally unwraps the payload of a legacy PhantomJS
 * crawler "task finished" webhook, then pages through the given Apify dataset
 * and writes every item into the configured Firestore collection.
 *
 * Expected input fields: datasetId, apiKey, authDomain, projectId,
 * collectionName (see INPUT_SCHEMA.json). Throws if input or datasetId
 * is missing.
 */
Apify.main(async () => {
    const input = await Apify.getInput();
    // getInput() resolves to null when the actor run has no input at all;
    // fail fast with a clear message instead of a TypeError below.
    if (!input) {
        throw new Error('Input is required.');
    }

    const { data } = input;
    let isLegacyPhantomJSTask = false;
    if (data && typeof data === 'string') {
        // The run was triggered by a legacy PhantomJS crawler task-finished
        // webhook: the real input arrives JSON-encoded in the `data` field.
        const legacyInput = JSON.parse(data);
        Object.assign(input, legacyInput);
        isLegacyPhantomJSTask = true;
    }

    const { datasetId, apiKey, authDomain, projectId, collectionName } = input;

    if (!datasetId) {
        throw new Error('DatasetId is required on input.');
    }

    firebase.initializeApp({
        apiKey,
        authDomain,
        projectId,
    });

    // Initialize Cloud Firestore through Firebase
    const db = firebase.firestore();
    console.log(`Start importing dataset ${datasetId} to firestore.`);
    const dataset = await Apify.openDataset(datasetId, { forceCloud: true });
    const datasetInfo = await dataset.getInfo();

    // Page through the dataset in chunks of `limit` items.
    const limit = 1000;
    let counter = 0;
    for (let offset = 0; offset < datasetInfo.itemCount; offset += limit) {
        const pagination = await dataset.getData({
            // Legacy crawler datasets need the `simplified` view; regular
            // actor datasets use the `clean` view.
            simplified: isLegacyPhantomJSTask,
            clean: !isLegacyPhantomJSTask,
            limit,
            offset,
        });
        console.log(`Get dataset items offset: ${pagination.offset}`);
        for (const item of pagination.items) {
            try {
                // add() stores the item under an auto-generated document ID.
                await db.collection(collectionName).add(item);
                counter++;
            } catch (err) {
                // Best-effort import: skip items Firestore rejects, keep going.
                console.log(`Cannot import item ${JSON.stringify(item)}: ${err.message}`);
            }
        }
    }

    console.log(`Imported ${counter} items from dataset ${datasetId}.`);

    console.log('Done!');
});
package.json
{ "name": "firestore-import", "version": "0.0.1", "description": "This is a boilerplate of an Apify actor.", "dependencies": { "apify": "^0.16.0", "firebase": "^7.2.0" }, "devDependencies": { "@apify/eslint-config": "0.0.3", "eslint": "^6.5.1" }, "scripts": { "start": "node main.js", "test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1" }, "author": "It's not you it's me", "license": "ISC"}