Ingatlan.com Scraper

This Actor is deprecated: it is unavailable because the developer has decided to deprecate it.

gavilar/ingatlancom-scraper

Scraper for the Hungarian property market Ingatlan.com
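While the Actor was live, it could be called programmatically like any other. Below is a minimal sketch using the apify-client package; the client call is an assumption for illustration, and only the Actor ID gavilar/ingatlancom-scraper and the default search URL come from this repository:

import { ApifyClient } from 'apify-client';

const client = new ApifyClient({ token: process.env.APIFY_TOKEN });

// Run the Actor with a filtered search URL (the input schema's default)
// and wait for the run to finish.
const run = await client.actor('gavilar/ingatlancom-scraper').call({
    url: 'https://ingatlan.com/szukites/elado+haz+v-ker+u:K%C3%A1lvin_t%C3%A9r|35834+v-ker',
});

// Fetch the scraped listings from the run's default dataset.
const { items } = await client.dataset(run.defaultDatasetId).listItems();
console.log(items);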

.editorconfig

root = true

[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf

.eslintrc

{
    "extends": "@apify"
}

.gitignore

# This file tells Git which files shouldn't be added to source control

.idea
node_modules

Dockerfile

# First, specify the base Docker image. You can read more about
# the available images at https://sdk.apify.com/docs/guides/docker-images
# You can also use any other image from Docker Hub.
FROM apify/actor-node:16

# Second, copy just package.json and package-lock.json, since they should be
# the only files that affect "npm install" in the next step, to speed up the build.
COPY package*.json ./

# Install NPM packages, skipping optional and development dependencies to
# keep the image small. Avoid logging too much and print the dependency
# tree for debugging.
RUN npm --quiet set progress=false \
 && npm install --only=prod --no-optional \
 && echo "Installed NPM packages:" \
 && (npm list --only=prod --no-optional --all || true) \
 && echo "Node.js version:" \
 && node --version \
 && echo "NPM version:" \
 && npm --version

# Next, copy the remaining files and directories with the source code.
# Since we do this after NPM install, quick builds will be really fast
# for most source file changes.
COPY . ./

# Optionally, specify how to launch the source code of your actor.
# By default, Apify's base Docker images define the CMD instruction
# that runs the Node.js source code using the command specified
# in the "scripts.start" section of the package.json file.
# In short, the instruction looks something like this:
#
# CMD npm start

INPUT_SCHEMA.json

{
    "title": "Input schema for the apify_project actor.",
    "type": "object",
    "schemaVersion": 1,
    "properties": {
        "url": {
            "title": "URL of the results list to scrape.",
            "type": "string",
            "description": "Filter the search results on ingatlan.com to your liking first, then paste the URL of the filtered list here.",
            "editor": "textfield",
            "default": "https://ingatlan.com/szukites/elado+haz+v-ker+u:K%C3%A1lvin_t%C3%A9r|35834+v-ker"
        }
    },
    "required": ["url"]
}
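For reference, a run input is a single JSON object matching this schema; the URL below is the schema's own default (a filtered search for houses for sale around Kálvin tér):

{
    "url": "https://ingatlan.com/szukites/elado+haz+v-ker+u:K%C3%A1lvin_t%C3%A9r|35834+v-ker"
}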

apify.json

{
    "env": { "npm_config_loglevel": "silent" }
}

main.js

import Apify from 'apify';
import { gotScraping } from 'got-scraping';
import cheerio from 'cheerio';

const input = await Apify.getInput();
let homeUrl = input.url.split('?')[0]; // Remove query parameters

// The site is also served under localized domains; normalize them to
// ingatlan.com so the rest of the scraper works against a single host.
const foreignAddresses = ['realestatehungary.hu', 'immobilienungarn.net'];
foreignAddresses.forEach((foreignAddress) => {
    // String.prototype.replace() returns a new string, so reassign the result.
    homeUrl = homeUrl.replace(foreignAddress, 'ingatlan.com');
});

// Find the number of result pages from the pagination widget.
const home = await gotScraping(homeUrl);
const $home = cheerio.load(home.body);
const pageNumberText = $home('.pagination__page-number').text().trim();
let nPages = 1;
if (pageNumberText !== '') {
    // The widget text contains the total page count just before the
    // word "oldal" (Hungarian for "page").
    const regex = /(\d+) oldal/m;
    const m = regex.exec(pageNumberText);
    if (m) nPages = parseInt(m[1], 10);
}

// Generate one request per result page and add it to the queue.
const requestQueue = await Apify.openRequestQueue();
for (let n = 1; n <= nPages; n++) {
    await requestQueue.addRequest({
        url: `${homeUrl}?page=${n}`,
        userData: {
            label: 'LISTPAGE',
        },
    });
}

const crawler = new Apify.CheerioCrawler({
    requestQueue,
    handlePageFunction: async ({ request, $ }) => {
        // On a list page, enqueue the individual listing links and stop.
        if (request.userData.label === 'LISTPAGE') {
            await Apify.utils.enqueueLinks({
                $,
                requestQueue,
                selector: 'div a.listing__link.js-listing-active-area[href]',
                baseUrl: 'https://ingatlan.com/',
            });
            return;
        }
        // On a detail page, extract the listing parameters.
        const parameterValues = $('div.parametersContainer div.parameterValues');
        const data = {
            address: $('h1.address').text().trim(),
            price: parameterValues.eq(0).find('span').eq(0).text().trim(),
            sqm: parameterValues.eq(1).text().trim(),
            rooms: parameterValues.eq(2).text().trim(),
            url: request.loadedUrl,
        };
        await Apify.pushData(data);
    },
});

await crawler.run();
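Each property detail page yields one dataset item with the fields assembled in handlePageFunction. A hypothetical record might look like this (the values, including the listing URL, are illustrative, not real listing data):

{
    "address": "Budapest V. kerület, Kálvin tér",
    "price": "149,9 M Ft",
    "sqm": "86 m2",
    "rooms": "3",
    "url": "https://ingatlan.com/12345678"
}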

package.json

{
    "name": "ingatlancom-scraper",
    "version": "0.0.1",
    "description": "A scraper for the Hungarian property market Ingatlan.com",
    "dependencies": {
        "apify": "^2.0.7",
        "cheerio": "^1.0.0-rc.10",
        "got-scraping": "^3.2.9"
    },
    "devDependencies": {
        "@apify/eslint-config": "^0.1.3",
        "eslint": "^7.0.0"
    },
    "scripts": {
        "start": "node main.js",
        "lint": "./node_modules/.bin/eslint . --ext .js,.jsx",
        "lint:fix": "./node_modules/.bin/eslint . --ext .js,.jsx --fix",
        "test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1"
    },
    "author": "It's not you it's me",
    "license": "ISC",
    "type": "module"
}