import scrapy
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

from psycopg2 import connect
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT

from crawler.neodarznet.spiders.scrape import ScrapSpider
from database.models import Page, db

import config
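
# Sketch of the settings the config module is assumed to expose, based on the
# attributes used below (the names come from this script, the values are only
# illustrative examples):
#
#     DB_USER = 'postgres'      # PostgreSQL role used to connect
#     DB_HOST = 'localhost'     # PostgreSQL server host
#     DB_PASS = 'secret'        # password for DB_USER
#     DB = 'crawler'            # name of the database holding the Page table
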
process = CrawlerProcess(get_project_settings())


def main():
    try:
        # Create the tables directly when the database already exists.
        db.create_tables([Page])
    except Exception:
        # Table creation fails when the target database does not exist yet:
        # connect to the PostgreSQL server, create the database, then retry.
        con = connect(user=config.DB_USER, host=config.DB_HOST, password=config.DB_PASS)
        con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cur = con.cursor()
        cur.execute('CREATE DATABASE ' + config.DB + ';')
        cur.close()
        con.close()
        db.create_tables([Page])

    # Start the crawl once the storage backend is ready.
    process.crawl(ScrapSpider)
    process.start()


if __name__ == '__main__':
    main()