Firestore Import

Developed by Jakub Drobník
Maintained by Community

Imports dataset items to Firestore DB.

Pricing: Pay per usage
Monthly users: 2
Last modified: 2 years ago

.eslintrc

{
  "extends": "@apify"
}

.gitignore

apify_storage
node_modules

.npmignore

# This file tells npm which files shouldn't be included in the published package

.idea
node_modules

Dockerfile

# Dockerfile contains instructions how to build a Docker image that will contain
# all the code and configuration needed to run your actor. For a full
# Dockerfile reference, see https://docs.docker.com/engine/reference/builder/

# First, specify the base Docker image. Apify provides the following base images
# for your convenience:
#  apify/actor-node-basic (Node.js 10 on Alpine Linux, small and fast image)
#  apify/actor-node-chrome (Node.js 10 + Chrome on Debian)
#  apify/actor-node-chrome-xvfb (Node.js 10 + Chrome + Xvfb on Debian)
# For more information, see https://apify.com/docs/actor#base-images
# Note that you can use any other image from Docker Hub.
FROM apify/actor-node-basic

# Second, copy just package.json and package-lock.json since they are the only files
# that affect NPM install in the next step
COPY package*.json ./

# Install NPM packages, skip optional and development dependencies to keep the
# image small. Avoid logging too much and print the dependency tree for debugging
RUN npm --quiet set progress=false \
 && npm install --only=prod --no-optional \
 && echo "Installed NPM packages:" \
 && npm list \
 && echo "Node.js version:" \
 && node --version \
 && echo "NPM version:" \
 && npm --version

# Next, copy the remaining files and directories with the source code.
# Since we do this after NPM install, quick build will be really fast
# for simple source file changes.
COPY . ./

# Specify how to run the source code
CMD npm start

INPUT_SCHEMA.json

{
  "title": "Firestore Import input",
  "description": "Imports a dataset to Firestore DB",
  "type": "object",
  "schemaVersion": 1,
  "properties": {
    "datasetId": {
      "title": "Dataset",
      "type": "string",
      "description": "Dataset ID of the dataset you want to import to Firestore",
      "editor": "textfield"
    },
    "apiKey": {
      "title": "API key",
      "type": "string",
      "description": "Firestore API key",
      "editor": "textfield"
    },
    "authDomain": {
      "title": "Auth domain",
      "type": "string",
      "description": "Firestore authentication domain",
      "editor": "textfield"
    },
    "projectId": {
      "title": "Project ID",
      "type": "string",
      "description": "Firestore project ID",
      "editor": "textfield"
    },
    "collectionName": {
      "title": "Collection name",
      "type": "string",
      "description": "Firestore collection name",
      "editor": "textfield"
    }
  },
  "required": ["datasetId", "apiKey", "authDomain", "projectId", "collectionName"]
}
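
For reference, an example input matching this schema could look as follows. All values are placeholders; use the dataset ID and Firebase project credentials of your own setup:

{
  "datasetId": "YOUR_DATASET_ID",
  "apiKey": "YOUR_FIREBASE_API_KEY",
  "authDomain": "your-project.firebaseapp.com",
  "projectId": "your-project",
  "collectionName": "importedItems"
}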

apify.json

{
	"name": "firestore-import",
	"version": "0.0",
	"buildTag": "latest",
	"env": null,
	"template": "hello_world"
}

main.js

const Apify = require('apify');
const firebase = require('firebase');

Apify.main(async () => {
    const input = await Apify.getInput();

    const { data } = input;
    let isLegacyPhantomJSTask = false;
    if (data && typeof data === 'string') {
        // The actor was started from a legacy PhantomJS crawler task "finished" webhook,
        // which passes the original input serialized in the "data" field.
        const legacyInput = JSON.parse(data);
        Object.assign(input, legacyInput);
        isLegacyPhantomJSTask = true;
    }

    const { datasetId, apiKey, authDomain, projectId, collectionName } = input;

    if (!datasetId) {
        throw new Error('DatasetId is required on input.');
    }

    firebase.initializeApp({
        apiKey,
        authDomain,
        projectId,
    });

    // Initialize Cloud Firestore through Firebase
    const db = firebase.firestore();
    console.log(`Start importing dataset ${datasetId} to Firestore.`);
    const dataset = await Apify.openDataset(datasetId, { forceCloud: true });
    const datasetInfo = await dataset.getInfo();
    // Import the dataset from the actor/task in pages of up to 1000 items
    const limit = 1000;
    let counter = 0;
    for (let offset = 0; offset < datasetInfo.itemCount; offset += limit) {
        const pagination = await dataset.getData({
            simplified: isLegacyPhantomJSTask,
            clean: !isLegacyPhantomJSTask,
            limit,
            offset,
        });
        console.log(`Got dataset items, offset: ${pagination.offset}`);
        for (const item of pagination.items) {
            try {
                await db.collection(collectionName).add(item);
                counter++;
            } catch (err) {
                console.log(`Cannot import item ${JSON.stringify(item)}: ${err.message}`);
            }
        }
    }

    console.log(`Imported ${counter} items from dataset ${datasetId}.`);

    console.log('Done!');
});
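
If you want to trigger the import programmatically from another actor or a Node.js script, a minimal sketch using the Apify SDK's Apify.call() could look like this. The actor ID and all input values below are placeholders, not the actual published ID:

const Apify = require('apify');

Apify.main(async () => {
    // Call the Firestore Import actor and wait for the run to finish.
    // Replace the actor ID and the input values with your own.
    const run = await Apify.call('USERNAME/firestore-import', {
        datasetId: 'YOUR_DATASET_ID',
        apiKey: 'YOUR_FIREBASE_API_KEY',
        authDomain: 'your-project.firebaseapp.com',
        projectId: 'your-project',
        collectionName: 'importedItems',
    });
    console.log(`Import finished with status: ${run.status}`);
});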

package.json

{
	"name": "firestore-import",
	"version": "0.0.1",
	"description": "This is a boilerplate of an Apify actor.",
	"dependencies": {
		"apify": "^0.16.0",
		"firebase": "^7.2.0"
	},
	"devDependencies": {
		"@apify/eslint-config": "0.0.3",
		"eslint": "^6.5.1"
	},
	"scripts": {
		"start": "node main.js",
		"test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1"
	},
	"author": "It's not you it's me",
	"license": "ISC"
}

Pricing

Pricing model

Pay per usage

This Actor is billed per platform usage: the Actor itself is free, and you pay only for the Apify platform resources its runs consume.