"""Entry point: ensure the crawl database/table exist, then run the spider."""
import scrapy
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
from psycopg2 import connect, sql
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT

from crawler.neodarznet.spiders.scrape import ScrapSpider
from database.models import Page, db
import config

# Crawler process built from the project's Scrapy settings (module-level,
# as in the original, so it is constructed exactly once at import time).
process = CrawlerProcess(get_project_settings())


def _create_database():
    """Create the database named by ``config.DB`` on the configured server.

    Connects as ``config.DB_USER`` to ``config.DB_HOST`` *without* selecting a
    database, switches to autocommit (CREATE DATABASE cannot run inside a
    transaction), and issues the statement. Connection and cursor are always
    closed, even if the CREATE fails.
    """
    con = connect(user=config.DB_USER, host=config.DB_HOST,
                  password=config.DB_PASS)
    try:
        con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cur = con.cursor()
        try:
            # CREATE DATABASE takes no bind parameters; use sql.Identifier
            # for safe quoting instead of raw string concatenation.
            cur.execute(sql.SQL('CREATE DATABASE {};')
                        .format(sql.Identifier(config.DB)))
        finally:
            cur.close()
    finally:
        con.close()


def main():
    """Ensure the ``Page`` table exists, then start the crawl.

    If ``db.create_tables`` fails — typically because the database itself does
    not exist yet — the database is created and table creation is retried once.
    """
    try:
        db.create_tables([Page])
    except Exception:
        # Narrowed from a bare ``except:`` so Ctrl-C / SystemExit still
        # propagate. Assumed cause: the target database is missing — create
        # it and retry; any second failure propagates to the caller.
        _create_database()
        db.create_tables([Page])

    process.crawl(ScrapSpider)
    process.start()


if __name__ == '__main__':
    main()