Diffstat
-rw-r--r--  app.py  53
1 file changed, 38 insertions, 15 deletions
diff --git a/app.py b/app.py
index ee04e3d..42e9055 100644
--- a/app.py
+++ b/app.py
@@ -1,31 +1,54 @@
import scrapy
+import sys
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
-from psycopg2 import connect
-from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
+from flask import Flask, request, jsonify
+import json
+
+from sphinx import sphinx
from crawler.neodarznet.spiders.scrape import ScrapSpider
from database.models import Page, db
import config
-process = CrawlerProcess(get_project_settings())
+app = Flask(__name__)
-def main():
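+# Search endpoint: look up the `search` query parameter via Sphinx and return JSON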
+@app.route("/", methods=['GET', 'POST'])
+def search():
+ query = request.args.get('search')
+    if query is None:
+        response = {'error': 1, 'msg': 'Use the `search` parameter to make a search'}
+    elif query == '':
+        response = {'error': 1, 'msg': 'Query cannot be empty'}
+    else:
+        response = sphinx.search(query)
+ return jsonify(response)
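+# Ensure the Page table exists, then run the Scrapy spider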
+def crawl():
try:
db.create_tables([Page])
- except:
- con = connect(user=config.DB_USER, host=config.DB_HOST, password=config.DB_PASS)
- con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
- cur = con.cursor()
- cur.execute('CREATE DATABASE '+config.DB+';')
- cur.close()
- con.close()
- db.create_tables([Page])
+ process = CrawlerProcess(get_project_settings())
+ process.crawl(ScrapSpider)
+ process.start()
+ except Exception as e:
+ print(e)
+
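+# Default mode: start the Flask development server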
+def main():
+    app.run(debug=True, use_reloader=True)
- process.crawl(ScrapSpider)
- process.start()
+def show_help():
+ print("Launch server => "+str(sys.argv[0]))
+ print("Launch all crawler => "+str(sys.argv[0])+" crawl")
if __name__ == '__main__':
- main()
+ if len(sys.argv) == 1:
+ main()
+ elif len(sys.argv) == 2:
+ if sys.argv[1] == "crawl":
+ crawl()
+ else:
+ show_help()
+ else:
+ show_help()
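
For reference, a minimal usage sketch of the two entry points introduced above. The host and port are Flask's defaults (an assumption; nothing in the diff overrides them), and the `requests` client below is purely illustrative, not part of the change:

    # Start the server:        python app.py
    # Run the crawler instead: python app.py crawl

    # Hypothetical Python client for the search endpoint, assuming the
    # server is running on Flask's default http://127.0.0.1:5000:
    import requests

    resp = requests.get('http://127.0.0.1:5000/', params={'search': 'example'})
    print(resp.json())  # Sphinx results, or an {'error': 1, ...} payload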