diff --git a/.gitignore b/.gitignore
index 0f8b6b4..33f7fea 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,4 @@ visited.db
 test.py
 .idea/*
 .env
+dump.rdb
diff --git a/README.md b/README.md
index b9291c2..5c17fda 100644
--- a/README.md
+++ b/README.md
@@ -29,7 +29,10 @@ I've grown tired of SEO agencies making us pay hundreds of euros for simple tool
 
 ## Installation
 
-You need **Python3**
+You need:
+- **Python3**
+- **[Redis Server](https://redis.io/topics/quickstart)**
+
 ```Bash
 git clone https://github.com/StanGirard/SEOToolkit
@@ -42,24 +45,25 @@ Then install dependencies
 pip install -r requirements.txt
 ```
 
-or you can use Docker
+## Running
 
+### Flask
 ```Bash
-docker pull stangirard/osat:latest
+python3 run.py
 ```
 
-## Running
-
+### Redis Server
 ```Bash
-flask run
+redis-server
 ```
 
-or with docker
-
+### Celery Worker
 ```Bash
-docker run -d -p 5000:5000 stangirard/osat:latest
+celery worker -A celery_worker.celery --loglevel=info
 ```
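+
+If Redis is not running on its default local port, set the two variables that
+`config.py` reads before starting (shown here with their default values):
+
+```Bash
+export CELERY_BROKER_URL=redis://localhost:6379/0
+export CELERY_RESULT_BACKEND=redis://localhost:6379/0
+```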
"redis://localhost:6379/0" + broker = backend.replace("0", "1") + return Celery(app_name, backend=backend, broker=broker) -def create_app(): - """Construct the core application.""" - app = Flask(__name__) - app.config.from_object('config.Config') - dbAlchemy.init_app(app) - - with app.app_context(): - import toolkit.routes # Import routes - dbAlchemy.create_all() # Create sql tables for our data models - return app \ No newline at end of file +celery = make_celery() \ No newline at end of file diff --git a/toolkit/celery_utils.py b/toolkit/celery_utils.py new file mode 100644 index 0000000..5cfb50a --- /dev/null +++ b/toolkit/celery_utils.py @@ -0,0 +1,8 @@ +def init_celery(celery, app): + celery.conf.update(app.config) + TaskBase = celery.Task + class ContextTask(TaskBase): + def __call__(self, *args, **kwargs): + with app.app_context(): + return TaskBase.__call__(self, *args, **kwargs) + celery.Task = ContextTask \ No newline at end of file diff --git a/toolkit/celeryapp/__init__.py b/toolkit/celeryapp/__init__.py new file mode 100644 index 0000000..0d3fa0a --- /dev/null +++ b/toolkit/celeryapp/__init__.py @@ -0,0 +1,14 @@ +from celery import Celery + +def make_celery(app): + celery = Celery(app.import_name, backend=app.config['CELERY_RESULT_BACKEND'], + broker=app.config['CELERY_BROKER_URL']) + celery.conf.update(app.config) + TaskBase = celery.Task + class ContextTask(TaskBase): + abstract = True + def __call__(self, *args, **kwargs): + with app.app_context(): + return TaskBase.__call__(self, *args, **kwargs) + celery.Task = ContextTask + return celery \ No newline at end of file diff --git a/toolkit/celeryapp/tasks.py b/toolkit/celeryapp/tasks.py new file mode 100644 index 0000000..30edfea --- /dev/null +++ b/toolkit/celeryapp/tasks.py @@ -0,0 +1,88 @@ +import math +import time +from datetime import datetime +import json + +from celery.signals import task_prerun, worker_process_init +from flask import current_app as app +from sqlalchemy import update +from toolkit import celery +from toolkit import dbAlchemy as db +from toolkit.controller.graphs.core import generate_interactive_graph +from toolkit.controller.keywords.core import get_query_results +from toolkit.controller.seo.audit import get_all_links_website +from toolkit.controller.seo.headers import find_all_headers_url +from toolkit.controller.seo.images import find_all_images +from toolkit.controller.seo.lighthouse import audit_google_lighthouse_full +from toolkit.controller.seo.links import find_all_links +from toolkit.controller.serp.core import query_domain_serp +from toolkit.models import Audit, LighthouseScore + +# @task_prerun.connect +# def celery_prerun(*args, **kwargs): +# #print g +# print("Launching Celery App") + +@celery.task(bind=True,name="Lighthouse") +def LighthouseAudit(self,url): + new_score = LighthouseScore( + url = url,status_job="RUNNING",task_id=str(self.request.id), accessibility=0,pwa=0,seo=0, best_practices=0,performance=0, begin_date=datetime.now() + ) + db.session.add(new_score) + db.session.commit() + value = audit_google_lighthouse_full(url) + accessibility = int(math.floor(value["lighthouseResult"]["categories"]["accessibility"]["score"] * 100)) + seo = int(math.floor(value["lighthouseResult"]["categories"]["seo"]["score"] * 100)) + pwa = int(math.floor(value["lighthouseResult"]["categories"]["pwa"]["score"] * 100)) + best_practices = int(math.floor(value["lighthouseResult"]["categories"]["best-practices"]["score"] * 100)) + performance = 
int(math.floor(value["lighthouseResult"]["categories"]["performance"]["score"] * 100)) + conn = db.engine.connect() + smt = update(LighthouseScore).where(LighthouseScore.url == url).values(accessibility=accessibility,pwa=pwa,seo=seo, best_practices=best_practices,performance=performance, status_job="FINISHED") + conn.execute(smt) + return {'url': url, 'status': 'Task completed!'} + +@celery.task(bind=True,name="Graphs") +def GraphsGenerate(self,domain): + result = generate_interactive_graph(domain,str(self.request.id), False, 500) + return {'url': domain, 'status': 'Task completed!'} + +@celery.task(bind=True,name="SerpRank") +def SerpRank(self,query, domain, lang, tld): + result = query_domain_serp(query, domain, lang, tld, str(self.request.id)) + return {'url': domain, 'status': 'Task completed!'} + +@celery.task(bind=True,name="Keywords") +def KeywordsGet(self,query): + result = get_query_results(query, str(self.request.id)) + return {'url': query, 'status': 'Task completed!'} + +@celery.task(bind=True,name="Extract") +def Extractor(self,extract_type, url): + new_audit = Audit( + url=url, result=None, type_audit=extract_type,status_job="RUNNING",task_id=str(self.request.id), begin_date=datetime.now() + ) + db.session.add(new_audit) + db.session.commit() + if extract_type == "Headers": + value = find_all_headers_url(url) + conn = db.engine.connect() + smt = update(Audit).where(Audit.url == url).where(Audit.type_audit == extract_type).values(result=json.dumps(value), status_job="FINISHED") + conn.execute(smt) + if extract_type == "Links": + value = find_all_links(url) + conn = db.engine.connect() + smt = update(Audit).where(Audit.url == url).where(Audit.type_audit == extract_type).values(result=json.dumps(value), status_job="FINISHED") + conn.execute(smt) + if extract_type == "Links_Website": + value = get_all_links_website(url) + conn = db.engine.connect() + smt = update(Audit).where(Audit.url == url).where(Audit.type_audit == extract_type).values(result=json.dumps(value), status_job="FINISHED") + conn.execute(smt) + if extract_type == "Images": + print("hello") + value = find_all_images(url) + conn = db.engine.connect() + smt = update(Audit).where(Audit.url == url).where(Audit.type_audit == extract_type).values(result=json.dumps(value), status_job="FINISHED") + conn.execute(smt) + + return {'url': url,"Extract": extract_type, 'status': 'Task completed!'} diff --git a/toolkit/controller/graphs/core.py b/toolkit/controller/graphs/core.py index 3db62da..163a32b 100644 --- a/toolkit/controller/graphs/core.py +++ b/toolkit/controller/graphs/core.py @@ -1,22 +1,26 @@ +import logging import math +import urllib +import urllib.parse from datetime import datetime, timedelta -from bokeh.embed import components -import urllib.parse import networkx as nx +import seaborn as sns +from bokeh.embed import components +from bokeh.layouts import row +from bokeh.models import (BoxZoomTool, Circle, ColorBar, ColumnDataSource, + DataTable, HoverTool, MultiLine, Range1d, ResetTool, + TableColumn) +from bokeh.models.graphs import NodesAndLinkedEdges +from bokeh.palettes import Spectral4, Spectral6, Spectral8 from bokeh.plotting import figure, from_networkx -from bokeh.models import (BoxZoomTool, Circle, HoverTool, - MultiLine, Range1d, ResetTool, ColorBar, - ColumnDataSource, DataTable, TableColumn) from bokeh.transform import linear_cmap -from bokeh.palettes import Spectral4, Spectral8, Spectral6 -from bokeh.models.graphs import NodesAndLinkedEdges -from bokeh.layouts import row -from flask import 
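For reference, every task above is fired with `.delay()` and identified by its Celery
task id, which the routes later store in the matching model's `task_id` column. A
minimal sketch (not part of the patch) of enqueuing and inspecting one of them,
assuming Redis and a worker are running as described in the README:

```python
# Sketch: enqueue a Lighthouse audit and read its Celery state.
from celery_worker import app, celery          # celery_worker.py is added above
from toolkit.celeryapp.tasks import LighthouseAudit

task = LighthouseAudit.delay("https://example.com")  # returns immediately
print(task.id)                                       # stored in LighthouseScore.task_id
print(celery.AsyncResult(task.id).state)             # e.g. PENDING -> SUCCESS
```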
diff --git a/toolkit/controller/graphs/core.py b/toolkit/controller/graphs/core.py
index 3db62da..163a32b 100644
--- a/toolkit/controller/graphs/core.py
+++ b/toolkit/controller/graphs/core.py
@@ -1,22 +1,26 @@
+import logging
 import math
+import urllib
+import urllib.parse
 from datetime import datetime, timedelta
 
-from bokeh.embed import components
-import urllib.parse
 import networkx as nx
+import seaborn as sns
+from bokeh.embed import components
+from bokeh.layouts import row
+from bokeh.models import (BoxZoomTool, Circle, ColorBar, ColumnDataSource,
+                          DataTable, HoverTool, MultiLine, Range1d, ResetTool,
+                          TableColumn)
+from bokeh.models.graphs import NodesAndLinkedEdges
+from bokeh.palettes import Spectral4, Spectral6, Spectral8
 from bokeh.plotting import figure, from_networkx
-from bokeh.models import (BoxZoomTool, Circle, HoverTool,
-                          MultiLine, Range1d, ResetTool, ColorBar,
-                          ColumnDataSource, DataTable, TableColumn)
 from bokeh.transform import linear_cmap
-from bokeh.palettes import Spectral4, Spectral8, Spectral6
-from bokeh.models.graphs import NodesAndLinkedEdges
-from bokeh.layouts import row
-from flask import render_template
-
+from flask import render_template, request
+from sqlalchemy import update
+from toolkit import dbAlchemy as db
+from toolkit.lib.api_tools import generate_answer
 from toolkit.lib.http_tools import request_parse, request_status_code
-import seaborn as sns
-import logging
+from toolkit.models import Graphs
 
 palette = sns.color_palette("hls", 99)
 pal_hex_lst = palette.as_hex()
@@ -153,33 +157,49 @@ def update_or_insert_graph_in_db(conn, urls, maximum, update=False):
     return render_template("graphs/bokeh.jinja2", script=script, div=div, domain=domain, template="Flask", time=datetime.now().strftime("%m/%d/%Y, %H:%M:%S"))
 
-def generate_interactive_graph(conn, urls, relaunch, maxi_urls):
+
+
+def update_or_insert_graph_in_db(urls, maximum, updating=False):
+    plot, domain = generate_graph_internal_link_interactive(urls, maximum)
+    script, div = components(plot)
+    conn = db.engine.connect()
+    smt = update(Graphs).where(Graphs.urls == urls).values(script=script,
+        div=div, begin_date=datetime.now(), status_job="FINISHED")
+    conn.execute(smt)
+    return render_template("graphs/bokeh.jinja2", script=script, div=div, domain=domain, template="Flask", time=datetime.now())
+
+def generate_interactive_graph(urls, relaunch, task, maxi_urls):
     if urls is None:
         return "Empty Url paramaters"
     maximum_urls = 500
     if maxi_urls is not None:
         maximum_urls = int(maxi_urls)
-    stopped, already_exists = graphs.check_status_url(conn, urls, "FINISHED")
-
-    if stopped == True:
-
-        # If not first time
-        if already_exists:
-            query_result = graphs.select_visited(conn, urls)
+    urls_exists = Graphs.query.filter(Graphs.urls == urls).count()
+    if urls_exists > 0:
+        # a row already exists for this URL; check whether its last job finished
+        # (filter criteria are comma-separated: Python's `and` would silently
+        # drop the first condition)
+        query_result = Graphs.query.filter(Graphs.urls == urls).first()
+        if query_result.status_job == "FINISHED":
             # ALREADY VISITED IN THE LAST 24 HOURS
-            if datetime.strptime(query_result[0][2], '%m/%d/%Y, %H:%M:%S') + timedelta(hours=24) > datetime.now() and relaunch != "True":
-                return render_template("graphs/bokeh.jinja2", script=query_result[0][3], div=query_result[0][4], domain=urllib.parse.urlparse(query_result[0][1]).netloc, template="Flask", time=datetime.strptime(query_result[0][2], '%m/%d/%Y, %H:%M:%S'))
+            if query_result.begin_date + timedelta(hours=24) > datetime.now() and relaunch != "True":
+                return render_template("graphs/bokeh.jinja2", script=query_result.script, div=query_result.div, domain=urllib.parse.urlparse(query_result.urls).netloc, template="Flask", time=query_result.begin_date)
 
             # More than 24 hours or parameter redo is True
-            if (datetime.strptime(query_result[0][2], '%m/%d/%Y, %H:%M:%S') + timedelta(hours=24) < datetime.now() or relaunch == "True"):
-                graphs.update_running_db(conn, ("RUNNING", urls))
-                return update_or_insert_graph_in_db(conn, urls, maximum_urls, True)
+            if query_result.begin_date + timedelta(hours=24) < datetime.now() or relaunch == "True":
+                conn = db.engine.connect()
+                smt = update(Graphs).where(Graphs.urls == urls).values(status_job="RUNNING")
+                conn.execute(smt)
+                return update_or_insert_graph_in_db(urls, maximum_urls, True)
 
-        # If first time
         else:
-            graphs.insert_url_db(conn, (urls, datetime.now().strftime(
-                "%m/%d/%Y, %H:%M:%S"), "", "", "RUNNING"))
-            return update_or_insert_graph_in_db(conn, urls, maximum_urls)
+            return {"error": "Your graph is being generated. Please wait"}
+
     else:
-        return "JOB IS ALREADY RUNNING. PLEASE WAIT AND REFRESH."
+        new_graph = Graphs(
+            urls=urls, script="", div="", status_job="RUNNING", task_id=task, begin_date=datetime.now()
+        )
+        db.session.add(new_graph)
+        db.session.commit()
+        return update_or_insert_graph_in_db(urls, maximum_urls)
diff --git a/toolkit/controller/keywords/__init__.py b/toolkit/controller/keywords/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/toolkit/controller/keywords/core.py b/toolkit/controller/keywords/core.py
new file mode 100644
index 0000000..a14265d
--- /dev/null
+++ b/toolkit/controller/keywords/core.py
@@ -0,0 +1,30 @@
+import json
+from datetime import datetime
+
+from sqlalchemy import update
+
+from toolkit import dbAlchemy as db
+from toolkit.controller.analysis.keywords import generate_results
+from toolkit.models import Keywords
+
+
+def get_query_results(query, task, redo=False):
+    check_exist = Keywords.query.filter(Keywords.query_text == query).count()
+    if check_exist > 0:
+        result = Keywords.query.filter(Keywords.query_text == query).first()
+        if result.status_job == "RUNNING":
+            return {"error": "query is already running, please wait and then refresh"}
+        elif result.status_job == "FINISHED":
+            return json.loads(result.results)
+    else:
+        new_keywords = Keywords(query_text=query, results="",
+                                status_job="RUNNING", task_id=task, begin_date=datetime.now())
+        db.session.add(new_keywords)
+        db.session.commit()
+        results = generate_results(query, 20)
+        conn = db.engine.connect()
+        smt = update(Keywords).where(Keywords.query_text == query).values(results=json.dumps(results), status_job="FINISHED")
+        conn.execute(smt)
+        return results
+    return "error"
diff --git a/toolkit/controller/serp/__init__.py b/toolkit/controller/serp/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/toolkit/controller/serp/core.py b/toolkit/controller/serp/core.py
new file mode 100644
index 0000000..5d3f04e
--- /dev/null
+++ b/toolkit/controller/serp/core.py
@@ -0,0 +1,52 @@
+from datetime import datetime, timedelta
+from urllib.parse import urlparse
+
+from flask import current_app as app
+from flask import request
+from sqlalchemy import and_, update
+
+from toolkit import dbAlchemy as db
+from toolkit.controller.seo.rank import rank
+from toolkit.models import Serp
+
+
+def query_domain_serp(query, domain, lang, tld, task):
+    domain = urlparse(domain).netloc + urlparse(domain).path
+    if query and domain:
+        existing_serp_count = Serp.query.filter(
+            Serp.query_text == query, Serp.domain == domain
+        ).count()
+
+        if existing_serp_count > 0:
+            existing_serp = Serp.query.filter(
+                Serp.query_text == query, Serp.domain == domain
+            ).all()
+            if existing_serp[0].begin_date + timedelta(hours=24) < datetime.now():
+                # mark the row RUNNING first, then fetch the new ranking and store it
+                conn = db.engine.connect()
+                smt = update(Serp).where(and_(Serp.query_text == query, Serp.domain == domain)).values(begin_date=datetime.now(), status_job="RUNNING", task_id=task)
+                conn.execute(smt)
+                result = rank(domain, query, lang=lang, tld=tld)
+                smt = update(Serp).where(and_(Serp.query_text == query, Serp.domain == domain)).values(begin_date=datetime.now(), url=result["url"], status_job="FINISHED", pos=result["pos"])
+                conn.execute(smt)
+                return result
+            else:
+                return {"pos": existing_serp[0].pos, "url": existing_serp[0].url, "query": existing_serp[0].query_text}
+
+        all_results_count = Serp.query.order_by(Serp.begin_date.desc()).count()
+        if all_results_count >= 5:
+            all_results = Serp.query.order_by(Serp.begin_date.desc()).all()
+            if all_results[4].begin_date + timedelta(hours=1) > datetime.now():
+                waiting = datetime.now() - all_results[4].begin_date
+                secs = 3600 - int(waiting.total_seconds())
+                minutes = int(secs / 60) % 60
+                return {"limit": "Imposing a limit of 5 queries per hour to avoid a Google ban", "waiting_time": str(minutes) + "m " + str(int(secs % 60)) + "s"}
+
+        new_result = Serp(query_text=query, pos=-20, domain=domain, url=None, begin_date=datetime.now(), task_id=task, status_job="RUNNING")
+        db.session.add(new_result)
+        db.session.commit()
+        result = rank(domain, query, lang=lang, tld=tld)
+        conn = db.engine.connect()
+        smt = update(Serp).where(and_(Serp.query_text == query, Serp.domain == domain)).values(begin_date=datetime.now(), url=result["url"], status_job="FINISHED", pos=result["pos"])
+        conn.execute(smt)
+        return result
\ No newline at end of file
diff --git a/toolkit/extension.py b/toolkit/extension.py
new file mode 100644
index 0000000..3f8ff9e
--- /dev/null
+++ b/toolkit/extension.py
@@ -0,0 +1,38 @@
+import flask
+from flask_sqlalchemy import SQLAlchemy
+from celery import Celery
+from config import Config
+
+class FlaskCelery(Celery):
+
+    def __init__(self, *args, **kwargs):
+
+        super(FlaskCelery, self).__init__(*args, **kwargs)
+        self.patch_task()
+
+        if 'app' in kwargs:
+            self.init_app(kwargs['app'])
+
+    def patch_task(self):
+        TaskBase = self.Task
+        _celery = self
+
+        class ContextTask(TaskBase):
+            abstract = True
+
+            def __call__(self, *args, **kwargs):
+                if flask.has_app_context():
+                    return TaskBase.__call__(self, *args, **kwargs)
+                else:
+                    with _celery.app.app_context():
+                        return TaskBase.__call__(self, *args, **kwargs)
+
+        self.Task = ContextTask
+
+    def init_app(self, app):
+        self.app = app
+        self.config_from_object(app.config)
+
+celery = FlaskCelery(__name__, broker=Config.CELERY_BROKER_URL, backend=Config.CELERY_RESULT_BACKEND, include=["toolkit.celeryapp.tasks"])
+dbAlchemy = SQLAlchemy()
\ No newline at end of file
diff --git a/toolkit/factory.py b/toolkit/factory.py
new file mode 100644
index 0000000..f507689
--- /dev/null
+++ b/toolkit/factory.py
@@ -0,0 +1,21 @@
+from flask import Flask
+
+from config import Config
+from .celery_utils import init_celery
+from toolkit import dbAlchemy
+
+
+def create_app(**kwargs):
+    """Construct the core application."""
+    app = Flask(__name__)
+    app.config.from_object(Config)
+    dbAlchemy.init_app(app)
+    if kwargs.get("celery"):
+        init_celery(kwargs.get("celery"), app)
+    with app.app_context():
+        # Import routes
+        import toolkit.routes
+        dbAlchemy.create_all()  # Create sql tables for our data models
+    return app
\ No newline at end of file
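A note on the two `ContextTask` wrappers above (`celery_utils.init_celery` and
`celeryapp.make_celery`): they exist so that every task body runs inside
`app.app_context()`, which is what lets the tasks use Flask-SQLAlchemy from the
worker. A minimal sketch of a task relying on that — the task itself is
hypothetical, shown only to illustrate the mechanism:

```python
# Hypothetical task: db.session works in the worker solely because
# ContextTask.__call__ pushed an application context first.
from toolkit import celery
from toolkit import dbAlchemy as db
from toolkit.models import Audit

@celery.task(name="CountAudits")
def count_audits():
    return db.session.query(Audit).count()
```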
diff --git a/toolkit/models/audit.py b/toolkit/models/audit.py
index 486ffe1..192900b 100644
--- a/toolkit/models/audit.py
+++ b/toolkit/models/audit.py
@@ -19,7 +19,7 @@ class Audit(dbAlchemy.Model):
         dbAlchemy.Text,
         index=False,
         unique=False,
-        nullable=False
+        nullable=True
     )
     type_audit = dbAlchemy.Column(
         dbAlchemy.String(20),
@@ -27,6 +27,18 @@ class Audit(dbAlchemy.Model):
         unique=False,
         nullable=True
     )
+    task_id = dbAlchemy.Column(
+        dbAlchemy.String(40),
+        index=False,
+        unique=False,
+        nullable=True,
+    )
+    status_job = dbAlchemy.Column(
+        dbAlchemy.String(20),
+        index=False,
+        unique=False,
+        nullable=True
+    )
     begin_date = dbAlchemy.Column(
         dbAlchemy.DateTime,
         index=False,
diff --git a/toolkit/models/graphs.py b/toolkit/models/graphs.py
index cd9ae4e..adfae05 100644
--- a/toolkit/models/graphs.py
+++ b/toolkit/models/graphs.py
@@ -32,6 +32,12 @@ class Graphs(dbAlchemy.Model):
         unique=False,
         nullable=True
     )
+    task_id = dbAlchemy.Column(
+        dbAlchemy.String(40),
+        index=False,
+        unique=False,
+        nullable=True,
+    )
     begin_date = dbAlchemy.Column(
         dbAlchemy.DateTime,
         index=False,
diff --git a/toolkit/models/keywords.py b/toolkit/models/keywords.py
index 35bb1c2..482d7dd 100644
--- a/toolkit/models/keywords.py
+++ b/toolkit/models/keywords.py
@@ -20,6 +20,12 @@ class Keywords(dbAlchemy.Model):
         unique=False,
         nullable=False
     )
+    task_id = dbAlchemy.Column(
+        dbAlchemy.String(40),
+        index=False,
+        unique=False,
+        nullable=True,
+    )
     status_job = dbAlchemy.Column(
         dbAlchemy.String(20),
         index=False,
diff --git a/toolkit/models/score.py b/toolkit/models/score.py
index 288a9b5..ee98b8a 100644
--- a/toolkit/models/score.py
+++ b/toolkit/models/score.py
@@ -45,6 +45,19 @@ class LighthouseScore(dbAlchemy.Model):
         unique=False,
         nullable=False
     )
+    status_job = dbAlchemy.Column(
+        dbAlchemy.String(20),
+        index=False,
+        unique=False,
+        nullable=True,
+        default="FINISHED"
+    )
+    task_id = dbAlchemy.Column(
+        dbAlchemy.String(40),
+        index=False,
+        unique=False,
+        nullable=True,
+    )
     begin_date = dbAlchemy.Column(
         dbAlchemy.DateTime,
         index=False,
diff --git a/toolkit/models/serp.py b/toolkit/models/serp.py
index 526fe14..1c01749 100644
--- a/toolkit/models/serp.py
+++ b/toolkit/models/serp.py
@@ -32,6 +32,18 @@ class Serp(dbAlchemy.Model):
         unique=False,
         nullable=True
     )
+    status_job = dbAlchemy.Column(
+        dbAlchemy.String(20),
+        index=False,
+        unique=False,
+        nullable=True
+    )
+    task_id = dbAlchemy.Column(
+        dbAlchemy.String(40),
+        index=False,
+        unique=False,
+        nullable=True,
+    )
     begin_date = dbAlchemy.Column(
         dbAlchemy.DateTime,
         index=False,
diff --git a/toolkit/routes/audit/api.py b/toolkit/routes/audit/api.py
index 6ad544a..ae0e818 100644
--- a/toolkit/routes/audit/api.py
+++ b/toolkit/routes/audit/api.py
@@ -1,35 +1,75 @@
 import json
 import math
+import time
 from datetime import datetime
 
 from flask import current_app as app
-from flask import redirect, request, url_for, render_template
-from sqlalchemy import func
+from flask import redirect, render_template, request, url_for
+from sqlalchemy import func, update
 
+from toolkit import celery
 from toolkit import dbAlchemy as db
+from toolkit.celeryapp.tasks import LighthouseAudit
 from toolkit.controller.seo.lighthouse import audit_google_lighthouse_full
+from toolkit.lib.api_tools import generate_answer
 from toolkit.models import Audit, LighthouseScore
-from toolkit.lib.api_tools import generate_answer
+
+@app.route('/status/<task_id>')
+def taskstatus(task_id):
+    task = celery.AsyncResult(task_id)
+    if task.state == 'PENDING':
+        # job did not start yet
+        response = {
+            'state': task.state,
+            'current': 0,
+            'total': 1,
+            'status': 'Pending...'
+        }
+    elif task.state != 'FAILURE':
+        response = {
+            'state': task.state,
+            'current': task.info.get('current', 0),
+            'total': task.info.get('total', 1),
+            'status': task.info.get('status', '')
+        }
+        if 'result' in task.info:
+            response['result'] = task.info['result']
+    else:
+        # something went wrong in the background job
+        response = {
+            'state': task.state,
+            'current': 1,
+            'total': 1,
+            'status': str(task.info),  # this is the exception raised
+        }
+    return json.dumps(response)
+
+@app.route('/api/audit/lighthouse/score/test', methods=["GET"])
+def testi_test():
+    task = LighthouseAudit.delay("https://test.com")
+    return {"id": task.id}
 
 @app.route('/api/audit/lighthouse/score', methods=["POST"])
 def post_audit_lighthouse_score():
     try:
         url = request.form['url']
         if url:
-            value = audit_google_lighthouse_full(url)
-            accessibility = int(math.floor(value["lighthouseResult"]["categories"]["accessibility"]["score"] * 100))
-            seo = int(math.floor(value["lighthouseResult"]["categories"]["seo"]["score"] * 100))
-            pwa = int(math.floor(value["lighthouseResult"]["categories"]["pwa"]["score"] * 100))
-            best_practices = int(math.floor(value["lighthouseResult"]["categories"]["best-practices"]["score"] * 100))
-            performance = int(math.floor(value["lighthouseResult"]["categories"]["performance"]["score"] * 100))
-            new_score = LighthouseScore(
-                url = url, accessibility=accessibility,pwa=pwa,seo=seo, best_practices=best_practices,performance=performance, begin_date=datetime.now()
-            )
-            db.session.add(new_score)
-            db.session.commit()
-            print("Good")
-            return generate_answer()
+            task = LighthouseAudit.delay(url)
+            return generate_answer(data={"id": task.id})
+        else:
+            return generate_answer(success=False)
+    except Exception as e:
+        print(e)
+        return generate_answer(success=False)
+
+@app.route('/api/audit/lighthouse/score/status', methods=["POST"])
+def get_audit_status_by_task():
+    try:
+        task_id = request.form['task']
+        result = LighthouseScore.query.filter(LighthouseScore.task_id == task_id).first()
+        if result and result.status_job == "FINISHED":
+            return generate_answer(success=True)
         else:
             return generate_answer(success=False)
     except Exception as e:
@@ -40,17 +80,30 @@ def post_audit_lighthouse_score():
 def get_all_audit_lighthouse_score():
     try:
         LS = LighthouseScore
-        quer = db.session.query(LS.id,LS.url, LS.accessibility, LS.pwa, LS.seo, LS.best_practices, LS.performance, func.max(LS.begin_date).label('begin_date')).group_by(LS.url)
+        quer = db.session.query(LS.id, LS.url, LS.accessibility, LS.pwa, LS.seo, LS.best_practices, LS.performance, LS.status_job, LS.task_id, func.max(LS.begin_date).label('begin_date')).group_by(LS.url)
         results = quer.all()
         result_arr={"results": [], "google_error":False}
         if app.config['GOOGLE_API_KEY'] == "None":
             result_arr["google_error"] = True
         for i in results:
-            result_arr["results"].append({"id": i.id, "url": i.url, "accessibility": i.accessibility, "pwa": i.pwa, "seo": i.seo, "best_practices": i.best_practices, "performance": i.performance, "begin_date": i.begin_date})
+            result_arr["results"].append({"id": i.id, "url": i.url, "accessibility": i.accessibility, "pwa": i.pwa, "seo": i.seo, "best_practices": i.best_practices, "performance": i.performance, "status_job": i.status_job, "task_id": i.task_id, "begin_date": i.begin_date})
         return generate_answer(data=result_arr)
     except Exception as e:
         print(e)
         return generate_answer(success=False)
+
+@app.route('/api/audit/lighthouse/score/delete', methods=["POST"])
+def post_delete_lighthouse_score():
+    try:
+        id = request.form['id']
+        result = LighthouseScore.query.filter(LighthouseScore.id == id).first()
+        LighthouseScore.query.filter(LighthouseScore.url == result.url).delete()
+        db.session.commit()
+        return generate_answer(success=True)
+    except Exception as e:
+        print(e)
+        return generate_answer(success=False)
 
 @app.route('/api/audit/lighthouse/score/<id>', methods=["GET"])
 def get_audit_lighthouse_score_by_id(id):
@@ -92,4 +145,4 @@ def get_audit_lighthouse_score_all():
         return generate_answer(data=result_arr)
     except Exception as e:
         print(e)
-        return generate_answer(success=False)
\ No newline at end of file
+        return generate_answer(success=False)
diff --git a/toolkit/routes/audit/dashboard.py b/toolkit/routes/audit/dashboard.py
index b0e0de0..c7cf520 100644
--- a/toolkit/routes/audit/dashboard.py
+++ b/toolkit/routes/audit/dashboard.py
@@ -1,5 +1,6 @@
 import json
 import math
+import time
 from datetime import datetime
 
 from flask import current_app as app
@@ -21,6 +22,7 @@ def audit_home():
 @app.route('/audit/lighthouse/score', methods=["POST"])
 def add_audit_lighthouse_score():
     result = post_request_api('/api/audit/lighthouse/score', request.form)
+    time.sleep(0.3)  # give the Celery task a moment to create its DB row
     return redirect(url_for('dashboard_audit_lighthouse_score'))
 
 @app.route('/audit/lighthouse/score')
@@ -36,7 +38,12 @@ def dashboard_audit_lighthouse_score_get_id(id):
     return render_template("audit/lighthouse/lighthouse.jinja2", url=result["url"], id=result["id"], result=result["results"], seo_list=result["table"]["seo_list"], accessibility_list=result["table"]["accessibility_list"], pwa_list=result["table"]["pwa_list"], best_list=result["table"]["best_list"], performance_list=result["table"]["performance_list"], labels=result["table"]["labels"])
 
+@app.route('/audit/lighthouse/score/delete', methods=["GET"])
+def delete_audit_lighthouse():
+    id = request.args.get('id')
+    result = post_request_api('/api/audit/lighthouse/score/delete', {"id": id})
+    return redirect(url_for('dashboard_audit_lighthouse_score'))
 
 @app.route('/audit/lighthouse/score/all')
 def dashboard_audit_lighthouse_score_all():
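Each resource below gets a matching `/api/<resource>/status` endpoint of the same
shape: POST the Celery task id, and it answers with success once the backing row
is `FINISHED`. A client-side polling sketch, assuming `generate_answer` serializes
a `success` flag and the dev server is on localhost:5000:

```python
# Sketch: poll one of the new status endpoints until the job row is FINISHED.
import time
import requests  # already pinned in requirements.txt

def wait_until_finished(endpoint, task_id, every=1.0):
    # e.g. endpoint = "/api/audit/lighthouse/score/status"
    while True:
        r = requests.post("http://localhost:5000" + endpoint, data={"task": task_id})
        if r.json().get("success"):  # assumed response shape
            return
        time.sleep(every)
```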
diff --git a/toolkit/routes/extract/api.py b/toolkit/routes/extract/api.py
index 0918438..46ce4c7 100644
--- a/toolkit/routes/extract/api.py
+++ b/toolkit/routes/extract/api.py
@@ -1,5 +1,6 @@
 import json
 from datetime import datetime
+import time
 
 from flask import current_app as app
 from flask import redirect, render_template, request, url_for
@@ -10,6 +11,7 @@ from toolkit.controller.seo.images import find_all_images
 from toolkit.controller.seo.links import find_all_links
 from toolkit.lib.api_tools import generate_answer
 
+from toolkit.celeryapp.tasks import Extractor
 from toolkit.models import Audit
 
 
@@ -20,7 +22,7 @@ def get_extract_headers_all():
         result_arr = {"results": []}
         for i in results:
             result_arr["results"].append(
-                {"id": i.id, "url": i.url, "result": i.result, "begin_date": i.begin_date})
+                {"id": i.id, "url": i.url, "result": i.result, "begin_date": i.begin_date, "task_id": i.task_id, "status_job": i.status_job})
         return generate_answer(data=result_arr)
     except Exception as e:
         print(e)
@@ -43,7 +45,7 @@ def get_extract_links_status_all():
         result_arr = {"results": []}
         for i in results:
             result_arr["results"].append(
-                {"id": i.id, "url": i.url, "result": i.result, "begin_date": i.begin_date})
+                {"id": i.id, "url": i.url, "result": i.result, "begin_date": i.begin_date, "task_id": i.task_id, "status_job": i.status_job})
         return generate_answer(data=result_arr)
     except Exception as e:
         print(e)
@@ -68,12 +70,8 @@ def post_extract_headers():
         count = Audit.query.filter(Audit.url == url).filter(
             Audit.type_audit == "Headers").count()
         if url and count == 0:
-            value = find_all_headers_url(url)
-            new_audit = Audit(
-                url=url, result=json.dumps(value), type_audit="Headers", begin_date=datetime.now()
-            )
-            db.session.add(new_audit)
-            db.session.commit()
+            Extractor.delay("Headers", url)
+            time.sleep(0.3)
         return generate_answer(success=True)
     except Exception as e:
         print(e)
@@ -97,12 +95,8 @@ def post_extract_add_links():
         count = Audit.query.filter(Audit.url == url).filter(
             Audit.type_audit == "Links").count()
         if url and count == 0:
-            value = find_all_links(url)
-            new_audit = Audit(
-                url=url, result=json.dumps(value), type_audit="Links", begin_date=datetime.now()
-            )
-            db.session.add(new_audit)
-            db.session.commit()
+            Extractor.delay("Links", url)
+            time.sleep(0.3)
         return generate_answer(success=True)
     except Exception as e:
         print(e)
@@ -126,7 +120,7 @@ def get_extract_all_links_website():
         result_arr = {"results": []}
         for i in results:
             result_arr["results"].append(
-                {"id": i.id, "url": i.url, "result": i.result, "begin_date": i.begin_date})
+                {"id": i.id, "url": i.url, "result": i.result, "begin_date": i.begin_date, "task_id": i.task_id, "status_job": i.status_job})
         return generate_answer(data=result_arr)
     except Exception as e:
         print(e)
@@ -139,12 +133,8 @@ def post_extract_add_links_website():
         count = Audit.query.filter(Audit.url == url).filter(
             Audit.type_audit == "Links_Website").count()
         if url and count == 0:
-            value = get_all_links_website(url)
-            new_audit = Audit(
-                url=url, result=json.dumps(value), type_audit="Links_Website", begin_date=datetime.now()
-            )
-            db.session.add(new_audit)
-            db.session.commit()
+            Extractor.delay("Links_Website", url)
+            time.sleep(0.3)
         return generate_answer(success=True)
     except Exception as e:
         print(e)
@@ -179,7 +169,7 @@ def get_extract_images_all():
         result_arr = {"results": []}
         for i in results:
             result_arr["results"].append(
-                {"id": i.id, "url": i.url, "result": i.result, "begin_date": i.begin_date})
+                {"id": i.id, "url": i.url, "result": i.result, "begin_date": i.begin_date, "task_id": i.task_id, "status_job": i.status_job})
         return generate_answer(data=result_arr)
     except Exception as e:
         print(e)
@@ -192,12 +182,9 @@ def post_extract_add_images():
         count = Audit.query.filter(Audit.url == url).filter(
             Audit.type_audit == "Images").count()
         if url and count == 0:
-            value = find_all_images(url)
-            new_audit = Audit(
-                url=url, result=json.dumps(value), type_audit="Images", begin_date=datetime.now()
-            )
-            db.session.add(new_audit)
-            db.session.commit()
+            Extractor.delay("Images", url)
+            time.sleep(0.3)
+
         return generate_answer(success=True)
     except Exception as e:
         print(e)
@@ -225,3 +212,15 @@ def post_delete_extract_image():
         print(e)
         return generate_answer(success=False)
 
+@app.route('/api/extract/status', methods=["POST"])
+def get_extract_status_by_task():
+    try:
+        task_id = request.form['task']
+        result = Audit.query.filter(Audit.task_id == task_id).first()
+        if result and result.status_job == "FINISHED":
+            return generate_answer(success=True)
+        else:
+            return generate_answer(success=False)
+    except Exception as e:
+        print(e)
+        return generate_answer(success=False)
\ No newline at end of file
diff --git a/toolkit/routes/graphs/api.py b/toolkit/routes/graphs/api.py
index d6c69cd..c532b9c 100644
--- a/toolkit/routes/graphs/api.py
+++ b/toolkit/routes/graphs/api.py
@@ -4,76 +4,32 @@ from datetime import datetime, timedelta
 
 from toolkit import dbAlchemy as db
 from toolkit.models import Graphs
+import time
 import urllib
 from urllib.parse import urlparse
 
 from toolkit.controller.graphs.core import generate_graph_internal_link_interactive
 from toolkit.lib.api_tools import generate_answer
+from toolkit.celeryapp.tasks import GraphsGenerate
 from sqlalchemy import update
 
-
-def update_or_insert_graph_in_db( urls, maximum, updating=False):
-    plot, domain = generate_graph_internal_link_interactive(urls, maximum)
-    script, div = components(plot)
-    conn = db.engine.connect()
-    smt = update(Graphs).where(Graphs.urls == urls).values(script= script,
-        div = div, begin_date=datetime.now(), status_job="FINISHED")
-    conn.execute(smt)
-    return render_template("graphs/bokeh.jinja2", script=script, div=div, domain=domain, template="Flask", time=datetime.now())
-
-def generate_interactive_graph(urls, relaunch, maxi_urls):
-    if urls is None:
-        return "Empty Url paramaters"
-    maximum_urls = 500
-    if maxi_urls is not None:
-        maximum_urls = int(maxi_urls)
-    urls_exists = Graphs.query.filter(Graphs.urls == urls).count()
-    if urls_exists > 0:
-        stopped = Graphs.query.filter(Graphs.urls == urls and Graphs.status_job == "RUNNING").first()
-        if stopped.status_job == "FINISHED":
-            query_result = Graphs.query.filter(Graphs.urls == urls and Graphs.status_job == "RUNNING").first()
-            # ALREADY VISITED IN THE LAST 24 HOURS
-
-            if query_result.begin_date + timedelta(hours=24) > datetime.now() and relaunch != "True":
-                return render_template("graphs/bokeh.jinja2", script=query_result.script, div=query_result.div, domain=urllib.parse.urlparse(query_result.urls).netloc, template="Flask", time=query_result.begin_date)
-
-            # More than 24 hours or parameter redo is True
-            if query_result.begin_date + timedelta(hours=24) < datetime.now() or relaunch == "True":
-                conn = db.engine.connect()
-                smt = update(Graphs).where(Graphs.urls == urls).values(status_job="RUNNING")
-                conn.execute(smt)
-                return update_or_insert_graph_in_db(urls, maximum_urls, True)
-
-        else:
-            return {"error": "You graph is being generated. Please wait"}
-
-    else:
-        new_graph = Graphs(
-            urls = urls, script="", div="", status_job = "RUNNING", begin_date=datetime.now()
-        )
-        db.session.add(new_graph)
-        db.session.commit()
-        return update_or_insert_graph_in_db(urls, maximum_urls)
-
-
 @app.route('/api/graphs', methods=["POST", "GET"])
 def get_post_graphs():
     try:
         error = None
         if request.method == "POST":
             domain = request.form["domain"]
-            if urlparse(domain).scheme not in ["https", "http"]:
-                error = "Please input an url with https or http at the beginning"
+            if domain.startswith("https://") or domain.startswith("http://"):
+                result = GraphsGenerate.delay(domain)
             else:
-                result = generate_interactive_graph(domain, False, 500)
-                if "error" in result:
-                    error = result
+                result = GraphsGenerate.delay("https://" + domain)
+            time.sleep(0.3)
         results = Graphs.query.all()
-        result_arr= {"results":[], "error": error}
+        result_arr = {"results": []}
        for i in results:
-            result_arr["results"].append({"id": i.id, "urls": i.urls, "status_job": i.status_job, "begin_date": i.begin_date})
+            result_arr["results"].append({"id": i.id, "urls": i.urls, "status_job": i.status_job, "task_id": i.task_id, "begin_date": i.begin_date})
         return generate_answer(data=result_arr)
     except Exception as e:
         print(e)
@@ -100,3 +56,16 @@ def post_delete_graph():
     except Exception as e:
         print(e)
         return generate_answer(success=False)
+
+@app.route('/api/graphs/status', methods=["POST"])
+def get_graphs_status_by_task():
+    try:
+        task_id = request.form['task']
+        result = Graphs.query.filter(Graphs.task_id == task_id).first()
+        if result and result.status_job == "FINISHED":
+            return generate_answer(success=True)
+        else:
+            return generate_answer(success=False)
+    except Exception as e:
+        print(e)
+        return generate_answer(success=False)
\ No newline at end of file
diff --git a/toolkit/routes/graphs/dashboard.py b/toolkit/routes/graphs/dashboard.py
index ad37236..09161dd 100644
--- a/toolkit/routes/graphs/dashboard.py
+++ b/toolkit/routes/graphs/dashboard.py
@@ -5,7 +5,6 @@
 from toolkit import dbAlchemy
 from toolkit.models import Graphs
-from toolkit.routes.graphs.api import generate_interactive_graph
 from toolkit.lib.api_tools import post_request_api, get_request_api
diff --git a/toolkit/routes/keywords/api.py b/toolkit/routes/keywords/api.py
index 8a9bd12..daa7a9d 100644
--- a/toolkit/routes/keywords/api.py
+++ b/toolkit/routes/keywords/api.py
@@ -1,5 +1,6 @@
 import json
 from datetime import datetime
+import time
 
 from flask import current_app as app
 from flask import request
@@ -9,29 +10,7 @@ from toolkit.controller.analysis.keywords import generate_results
 from toolkit.models import Keywords
 from toolkit.lib.api_tools import generate_answer
 
-
-
-def get_query_results(query, redo=False):
-    check_exist = Keywords.query.filter(Keywords.query_text==query).count()
-    if check_exist > 0:
-        result = Keywords.query.filter(Keywords.query_text==query).first()
-        if result.status_job == "RUNNING":
-            return {"error": "query is already running, please wait and then refresh"}
-        elif result.status_job == "FINISHED":
-            return json.loads(result.results)
-    else:
-        new_keywords = Keywords(query_text=query, results="",
-                                status_job="RUNNING",begin_date=datetime.now())
-        db.session.add(new_keywords)
-        db.session.commit()
-        results = generate_results(query, 20)
-        conn = db.engine.connect()
-        smt = update(Keywords).where(Keywords.query_text==query).values(results=json.dumps(results), status_job="FINISHED")
-        conn.execute(smt)
-
-        #Serp.update().where(query_text==query and domain==domain).values(begin_date=datetime.now(),url=result["url"], pos=result["pos"])
-        return results
-    return "error"
+from toolkit.celeryapp.tasks import KeywordsGet
 
 
 @app.route('/api/keywords', methods=["POST", "GET"])
@@ -39,11 +18,13 @@ def get_post_keywords():
     try:
         if request.method == "POST":
             query = request.form["query"]
-            get_query_results(query)
+            result = KeywordsGet.delay(query)
+            time.sleep(0.3)
+
         keyw = Keywords.query.all()
         results = {"results": []}
         for keyword in keyw:
-            results["results"].append({"id": keyword.id, "query": keyword.query_text, "status_job": keyword.status_job})
+            results["results"].append({"id": keyword.id, "query": keyword.query_text, "status_job": keyword.status_job, "task_id": keyword.task_id})
         return generate_answer(data=results)
     except Exception as e:
         print(e)
@@ -72,4 +53,17 @@ def get_keywords_by_id(id):
         print(e)
         return generate_answer(success=False)
 
+@app.route('/api/keywords/status', methods=["POST"])
+def get_keywords_status_by_task():
+    try:
+        task_id = request.form['task']
+        result = Keywords.query.filter(Keywords.task_id == task_id).first()
+        if result and result.status_job == "FINISHED":
+            return generate_answer(success=True)
+        else:
+            return generate_answer(success=False)
+    except Exception as e:
+        print(e)
+        return generate_answer(success=False)
+
diff --git a/toolkit/routes/keywords/dashboard.py b/toolkit/routes/keywords/dashboard.py
index 52d4a95..37d2fd3 100644
--- a/toolkit/routes/keywords/dashboard.py
+++ b/toolkit/routes/keywords/dashboard.py
@@ -5,7 +5,6 @@
 from toolkit import dbAlchemy
 from toolkit.models import Keywords
-from toolkit.routes.keywords.api import get_query_results
 from toolkit.lib.api_tools import post_request_api, get_request_api
diff --git a/toolkit/routes/serp/api.py b/toolkit/routes/serp/api.py
index c81bbbe..5187d04 100644
--- a/toolkit/routes/serp/api.py
+++ b/toolkit/routes/serp/api.py
@@ -1,5 +1,6 @@
 from datetime import datetime, timedelta
 from urllib.parse import urlparse
+import time
 
 from flask import current_app as app
 from flask import request
@@ -9,41 +10,7 @@ from toolkit.controller.seo.rank import rank
 from toolkit.lib.api_tools import generate_answer
 from toolkit.models import Serp
 
-
-def query_domain_serp( query, domain, lang, tld):
-    domain = urlparse(domain).netloc + urlparse(domain).path
-    if query and domain:
-        existing_serp_count= Serp.query.filter(
-            Serp.query_text == query and Serp.domain == domain
-        ).count()
-
-        if existing_serp_count > 0:
-            existing_serp = Serp.query.filter(
-                Serp.query_text == query and Serp.domain == domain
-            ).all()
-            if existing_serp[0].begin_date + timedelta(hours=24) < datetime.now():
-                result = rank(domain, query, lang=lang, tld=tld)
-                update(Serp).where(Serp.query_text==query and Serp.domain==domain).values(begin_date=datetime.now(),url=result["url"], pos=result["pos"])
-                return result
-            else:
-                return {"pos": existing_serp[0].pos, "url": existing_serp[0].url, "query": existing_serp[0].query_text}
-
-        all_results_count = Serp.query.order_by(Serp.begin_date.desc()).count()
-        if all_results_count >= 5:
-            all_results = Serp.query.order_by(Serp.begin_date.desc()).all()
-            if all_results[4].begin_date+ timedelta(hours=1) > datetime.now():
-                waiting = datetime.now() - all_results[4].begin_date
-                secs = 3600 - int(waiting.total_seconds())
-                minutes = int(secs / 60) % 60
-                return {"limit": "Imposing a limit of 5 query per hour to avoid Google Ban", "waiting_time": str(minutes) + "m " + str(int(secs % 60)) + "s" }
-
-        result = rank(domain, query, lang=lang, tld=tld)
-        new_result = Serp(query_text=result["query"],pos=result["pos"], domain=domain, url=result["url"], begin_date=datetime.now() )
-        db.session.add(new_result)
-        db.session.commit()
-        return result
-
+from toolkit.celeryapp.tasks import SerpRank
 
 @app.route('/api/rank', methods=["POST", "GET"])
 def get_post_rank():
@@ -54,14 +21,12 @@ def get_post_rank():
             domain = request.form["domain"]
             if not (domain.startswith('//') or domain.startswith('http://') or domain.startswith('https://')):
                 domain = '//' + domain
-            result = query_domain_serp( query, domain, "en", "com")
-
-            if result and "limit" in result:
-                error = result
+            result = SerpRank.delay(query, domain, "en", "com")
+            time.sleep(0.3)
         result = Serp.query.order_by(Serp.begin_date.desc()).all()
         result_list = {"results": [], "error": error}
         for i in result:
-            result_list["results"].append({"id": i.id, "domain": i.domain, "pos": i.pos, "url": i.pos, "query": i.query_text, "time": i.begin_date})
+            result_list["results"].append({"id": i.id, "domain": i.domain, "pos": i.pos, "url": i.url, "query": i.query_text, "time": i.begin_date, "status_job": i.status_job, "task_id": i.task_id})
         return generate_answer(data=result_list)
     except Exception as e:
         print(e)
@@ -77,3 +42,17 @@ def post_delete_rank():
     except Exception as e:
         print(e)
         return generate_answer(success=False)
+
+
+@app.route('/api/rank/status', methods=["POST"])
+def get_rank_status_by_task():
+    try:
+        task_id = request.form['task']
+        result = Serp.query.filter(Serp.task_id == task_id).first()
+        if result and result.status_job == "FINISHED":
+            return generate_answer(success=True)
+        else:
+            return generate_answer(success=False)
+    except Exception as e:
+        print(e)
+        return generate_answer(success=False)
\ No newline at end of file
diff --git a/toolkit/routes/serp/dashboard.py b/toolkit/routes/serp/dashboard.py
index c03319a..f9a1120 100644
--- a/toolkit/routes/serp/dashboard.py
+++ b/toolkit/routes/serp/dashboard.py
@@ -7,7 +7,6 @@
 from toolkit import dbAlchemy
 from toolkit.models import Serp
-from toolkit.routes.serp.api import query_domain_serp
 from toolkit.lib.api_tools import post_request_api, get_request_api
diff --git a/toolkit/templates/audit/lighthouse/lighthouse_all.jinja2 b/toolkit/templates/audit/lighthouse/lighthouse_all.jinja2
index 66bca43..51954fa 100644
--- a/toolkit/templates/audit/lighthouse/lighthouse_all.jinja2
+++ b/toolkit/templates/audit/lighthouse/lighthouse_all.jinja2
@@ -19,98 +19,131 @@
[hunk markup lost in extraction — recoverable intent: the score table (URL, Accessibility, Best Practices, Performances, SEO, PWA, Update Time, Update) is rebuilt so each row renders its progress bars only when {% if item.status_job == "FINISHED" %}; while a job runs, a "Loading..." spinner is shown instead, and each row keeps {{item.begin_date|formatdatetime}} plus a delete control]
 {% endblock %}
\ No newline at end of file
diff --git a/toolkit/templates/base.jinja2 b/toolkit/templates/base.jinja2
index 3d1590d..2cec85b 100644
--- a/toolkit/templates/base.jinja2
+++ b/toolkit/templates/base.jinja2
@@ -18,6 +18,8 @@
[markup lost — adds two asset includes in the page head]
@@ -121,8 +123,9 @@
[markup lost — reorders and adds script includes at the end of the body]
@@ -130,6 +133,7 @@
[markup lost — adds a {% block script_js %}{% endblock %} hook so pages can ship their own polling scripts]
diff --git a/toolkit/templates/extract/headers/extract_headers_all.jinja2 b/toolkit/templates/extract/headers/extract_headers_all.jinja2
index 09d5e1a..1f3ba12 100644
--- a/toolkit/templates/extract/headers/extract_headers_all.jinja2
+++ b/toolkit/templates/extract/headers/extract_headers_all.jinja2
@@ -17,6 +17,7 @@
[markup lost — adds a "Status" column header between URL and Update Time]
@@ -31,7 +32,27 @@
[markup lost — each row shows {{item["status_job"]}}; {% if item.status_job == "RUNNING" %} it is paired with a live indicator tied to item.task_id, otherwise the plain status is printed]
diff --git a/toolkit/templates/extract/images/images_all.jinja2 b/toolkit/templates/extract/images/images_all.jinja2
index e719a30..2a3ba6f 100644
--- a/toolkit/templates/extract/images/images_all.jinja2
+++ b/toolkit/templates/extract/images/images_all.jinja2
@@ -17,6 +17,7 @@
[markup lost — adds a "Status" column header]
@@ -31,6 +32,27 @@
[markup lost — same RUNNING/FINISHED status cell pattern as the headers template, keyed to item.task_id]
diff --git a/toolkit/templates/extract/links/links_all.jinja2 b/toolkit/templates/extract/links/links_all.jinja2
index 19fb90d..0e19896 100644
--- a/toolkit/templates/extract/links/links_all.jinja2
+++ b/toolkit/templates/extract/links/links_all.jinja2
@@ -17,6 +17,7 @@
[markup lost — adds a "Status" column header]
@@ -31,6 +32,27 @@
[markup lost — same RUNNING/FINISHED status cell pattern, keyed to item.task_id]
diff --git a/toolkit/templates/extract/links_website/links_all_website.jinja2 b/toolkit/templates/extract/links_website/links_all_website.jinja2
index c1443a8..e9cab44 100644
--- a/toolkit/templates/extract/links_website/links_all_website.jinja2
+++ b/toolkit/templates/extract/links_website/links_all_website.jinja2
@@ -18,6 +18,7 @@
[markup lost — adds a "Status" column header]
@@ -32,7 +33,27 @@
[markup lost — same RUNNING/FINISHED status cell pattern, keyed to item.task_id]
diff --git a/toolkit/templates/graphs/graphs_all.jinja2 b/toolkit/templates/graphs/graphs_all.jinja2
index 1f3e9ed..8476f1a 100644
--- a/toolkit/templates/graphs/graphs_all.jinja2
+++ b/toolkit/templates/graphs/graphs_all.jinja2
@@ -40,7 +40,25 @@
[markup lost — {% if item.status_job == "RUNNING" %} the status cell gains a live indicator tied to item.task_id, {% else %} the finished status is printed as before {% endif %}]
diff --git a/toolkit/templates/keywords/keywords_all.jinja2 b/toolkit/templates/keywords/keywords_all.jinja2
index 0f0406f..41112a1 100644
--- a/toolkit/templates/keywords/keywords_all.jinja2
+++ b/toolkit/templates/keywords/keywords_all.jinja2
@@ -33,7 +33,24 @@
[markup lost — same RUNNING/FINISHED status cell pattern, keyed to item.task_id]
diff --git a/toolkit/templates/serp/rank.jinja2 b/toolkit/templates/serp/rank.jinja2
index fe4fdfa..aa4b007 100644
--- a/toolkit/templates/serp/rank.jinja2
+++ b/toolkit/templates/serp/rank.jinja2
@@ -39,7 +39,29 @@
         {{item.query}}
-        {{item.pos}}
+        {% if item.status_job == "FINISHED" %}
+        {{item.pos}}
+        {% else %}
[spinner markup lost — a "Loading..." indicator shown while the SERP task runs]
+        {% endif %}
diff --git a/wsgi.py b/wsgi.py
deleted file mode 100644
index 6e11243..0000000
--- a/wsgi.py
+++ /dev/null
@@ -1,7 +0,0 @@
-"""App entry point."""
-from toolkit import create_app
-
-app = create_app()
-
-if __name__ == "__main__":
-    app.run(host='0.0.0.0', port=5000)
\ No newline at end of file
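With `wsgi.py` removed, the stack is started through `run.py` — or through the new
`run.sh`, which also launches Redis and the Celery worker:

```Bash
./run.sh
# dashboard at http://localhost:5000
```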