From f0b4712c897ee35f2d79cf0408f480c2c0bb41da Mon Sep 17 00:00:00 2001
From: neodarz
Date: Sun, 13 Jan 2019 11:22:16 +0100
Subject: Save all crawled data in database

---
 app.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/app.py b/app.py
index 2a80507..281f932 100644
--- a/app.py
+++ b/app.py
@@ -1,8 +1,19 @@
+import scrapy
+from scrapy.crawler import CrawlerProcess
+from scrapy.utils.project import get_project_settings
+
+from crawler.neodarznet.spiders.scrape import ScrapSpider
+
 from database.models import Page, db
 
 import config
 
+process = CrawlerProcess(get_project_settings())
+
 def main():
     db.create_tables([Page])
+    process.crawl(ScrapSpider)
+    process.start()
+
 if __name__ == '__main__':
     main()
--
cgit v1.2.1
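
For reference, below is a sketch of how app.py reads once this patch is applied, reconstructed from the hunk above with comments added. It assumes the crawler.neodarznet package and its ScrapSpider class exist in the repository as the imports suggest, and that database.models uses peewee-style create_tables.

import scrapy
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

from crawler.neodarznet.spiders.scrape import ScrapSpider

from database.models import Page, db

import config

# CrawlerProcess picks up the Scrapy project settings, so the spider runs
# with the same configuration as `scrapy crawl` would use from the CLI.
process = CrawlerProcess(get_project_settings())

def main():
    # Ensure the Page table exists before the spider starts storing results.
    db.create_tables([Page])
    # Schedule the spider and start the Twisted reactor; start() blocks
    # until the crawl has finished.
    process.crawl(ScrapSpider)
    process.start()

if __name__ == '__main__':
    main()

Running `python app.py` then creates the Page table (if needed) and launches the crawl in one step, which is presumably the point of wiring the CrawlerProcess into main().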