author     neodarz <neodarz@neodarz.net>  2019-01-16 22:31:16 +0100
committer  neodarz <neodarz@neodarz.net>  2019-01-16 22:31:16 +0100
commit     a32379718277388baa692c6a2a9e3f3b3bbcb902 (patch)
tree       0aa06b679f0a91864dc11a99759cd955a1b038f7
parent     f2bd717b4d64bd3d9f183b92a22665282fc8b82e (diff)
download   khanindexer-a32379718277388baa692c6a2a9e3f3b3bbcb902.tar.xz
           khanindexer-a32379718277388baa692c6a2a9e3f3b3bbcb902.zip
Add app code
Diffstat
-rw-r--r--  app.py  53
1 file changed, 38 insertions(+), 15 deletions(-)
diff --git a/app.py b/app.py
index ee04e3d..42e9055 100644
--- a/app.py
+++ b/app.py
@@ -1,31 +1,54 @@
 import scrapy
+import sys
 from scrapy.crawler import CrawlerProcess
 from scrapy.utils.project import get_project_settings
-from psycopg2 import connect
-from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
+from flask import Flask, request, jsonify
+import json
+
+from sphinx import sphinx
 from crawler.neodarznet.spiders.scrape import ScrapSpider
 from database.models import Page, db
 import config
 
-process = CrawlerProcess(get_project_settings())
+app = Flask(__name__)
 
-def main():
+@app.route("/", methods=['GET', 'POST'])
+def search():
+    query = request.args.get('search')
+    if query != '' and query is not None:
+        response = sphinx.search(query)
+    elif query is None:
+        response = {'error': 1, 'msg': 'Use the `search` parameter to make a search'}
+    else:
+        response = {'error': 1, 'msg': 'Query cannot be null'}
+    return jsonify(response)
+
+def crawl():
     try:
         db.create_tables([Page])
-    except:
-        con = connect(user=config.DB_USER, host=config.DB_HOST, password=config.DB_PASS)
-        con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
-        cur = con.cursor()
-        cur.execute('CREATE DATABASE '+config.DB+';')
-        cur.close()
-        con.close()
-        db.create_tables([Page])
+        process = CrawlerProcess(get_project_settings())
+        process.crawl(ScrapSpider)
+        process.start()
+    except Exception as e:
+        print(e)
+
+def main():
+    app.run(debug=True, use_reloader=True)
 
-    process.crawl(ScrapSpider)
-    process.start()
+def show_help():
+    print("Launch server => "+str(sys.argv[0]))
+    print("Launch all crawlers => "+str(sys.argv[0])+" crawl")
 
 if __name__ == '__main__':
-    main()
+    if len(sys.argv) == 1:
+        main()
+    elif len(sys.argv) == 2:
+        if sys.argv[1] == "crawl":
+            crawl()
+        else:
+            show_help()
+    else:
+        show_help()
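
For illustration, a minimal client sketch of the search endpoint this commit adds. It is not part of the patch: it assumes the server was started with `python app.py`, is listening on the Flask development default (http://127.0.0.1:5000), and that the third-party `requests` package is installed.

# Hypothetical usage sketch, not part of the commit: query the new
# search endpoint on Flask's default development address.
import requests

# A valid query returns whatever sphinx.search() produced, as JSON.
print(requests.get("http://127.0.0.1:5000/", params={"search": "python"}).json())

# Omitting the `search` parameter returns the error payload from the diff:
# {'error': 1, 'msg': 'Use the `search` parameter to make a search'}
print(requests.get("http://127.0.0.1:5000/").json())

The crawler side has no HTTP interface; per show_help(), it is launched separately with `python app.py crawl`.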