From f0b4712c897ee35f2d79cf0408f480c2c0bb41da Mon Sep 17 00:00:00 2001 From: neodarz Date: Sun, 13 Jan 2019 11:22:16 +0100 Subject: Save all crawled data in database --- crawler/neodarznet/pipelines.py | 15 ++++++++++++++- crawler/neodarznet/settings.py | 2 ++ 2 files changed, 16 insertions(+), 1 deletion(-) (limited to 'crawler') diff --git a/crawler/neodarznet/pipelines.py b/crawler/neodarznet/pipelines.py index 71e7865..6703796 100644 --- a/crawler/neodarznet/pipelines.py +++ b/crawler/neodarznet/pipelines.py @@ -1,6 +1,19 @@ # -*- coding: utf-8 -*- +import logging + +from database.models import Page class NeodarznetPipeline(object): - def process_time(self, item, spider): + + def process_item(self, item, spider): + try: + page = Page.get(Page.url == item['url']) + q = Page.update(**item).where(Page.url == item['url']) + q.execute() + logging.info("Update item {}".format(page)) + except Page.DoesNotExist: + page = Page.create(**item) + logging.info("Create item {}".format(page)) + logging.info('Item {} stored in db'.format(page)) return item diff --git a/crawler/neodarznet/settings.py b/crawler/neodarznet/settings.py index 8d65b09..2e5f184 100644 --- a/crawler/neodarznet/settings.py +++ b/crawler/neodarznet/settings.py @@ -8,3 +8,5 @@ NEWSPIDER_MODULE = 'crawler.neodarznet.spiders' ROBOTSTXT_OBEY = True DEPTH_LIMIT = 0 + +ITEM_PIPELINES = {'crawler.neodarznet.pipelines.NeodarznetPipeline': 0} -- cgit v1.2.1