Skip to content
Snippets Groups Projects
Commit cb9da5c7 authored by Patrick Jentsch's avatar Patrick Jentsch
Browse files

Simplify daemon logic

parent 0f30e518
No related branches found
No related tags found
No related merge requests found
Showing
with 308 additions and 226 deletions
...@@ -13,11 +13,3 @@ services: ...@@ -13,11 +13,3 @@ services:
- "./web/nopaque.py:/home/nopaque/nopaque.py" - "./web/nopaque.py:/home/nopaque/nopaque.py"
- "./web/requirements.txt:/home/nopaque/requirements.txt" - "./web/requirements.txt:/home/nopaque/requirements.txt"
- "./web/tests:/home/nopaque/tests" - "./web/tests:/home/nopaque/tests"
nopaqued:
volumes:
# Mount code as volumes
- "./daemon/app:/home/nopaqued/app"
- "./daemon/boot.sh:/home/nopaqued/boot.sh"
- "./daemon/config.py:/home/nopaqued/config.py"
- "./daemon/nopaqued.py:/home/nopaqued/nopaqued.py"
- "./daemon/requirements.txt:/home/nopaqued/requirements.txt"
version: "3.5" version: "3.5"
services: services:
db:
env_file: db.env
image: postgres:11
restart: unless-stopped
volumes:
- "${HOST_DB_DIR:-./db}:/var/lib/postgresql/data"
mq:
image: redis:6
restart: unless-stopped
volumes:
- "${HOST_MQ_DIR:-./mq}:/data"
nopaque: nopaque:
build: build:
args: args:
DOCKER_GID: ${HOST_DOCKER_GID}
GID: ${HOST_GID} GID: ${HOST_GID}
UID: ${HOST_UID} UID: ${HOST_UID}
context: ./web context: ./web
...@@ -16,31 +30,3 @@ services: ...@@ -16,31 +30,3 @@ services:
volumes: volumes:
- "${NOPAQUE_DATA_DIR:-/mnt/nopaque}:${NOPAQUE_DATA_DIR:-/mnt/nopaque}" - "${NOPAQUE_DATA_DIR:-/mnt/nopaque}:${NOPAQUE_DATA_DIR:-/mnt/nopaque}"
- "${HOST_NOPAQUE_LOG_FILE-./nopaque.log}:${NOPAQUE_LOG_FILE:-/home/nopaque/nopaque.log}" - "${HOST_NOPAQUE_LOG_FILE-./nopaque.log}:${NOPAQUE_LOG_FILE:-/home/nopaque/nopaque.log}"
nopaqued:
build:
args:
DOCKER_GID: ${HOST_DOCKER_GID}
GID: ${HOST_GID}
UID: ${HOST_UID}
context: ./daemon
depends_on:
- db
- nopaque
env_file: .env
image: nopaqued:development
restart: unless-stopped
volumes:
- "/var/run/docker.sock:/var/run/docker.sock"
- "${NOPAQUE_DATA_DIR:-/mnt/nopaque}:${NOPAQUE_DATA_DIR:-/mnt/nopaque}"
- "${HOST_NOPAQUE_DAEMON_LOG_FILE-./nopaqued.log}:${NOPAQUE_DAEMON_LOG_FILE:-/home/nopaqued/nopaqued.log}"
db:
env_file: db.env
image: postgres:11
restart: unless-stopped
volumes:
- "${HOST_DB_DIR:-./db}:/var/lib/postgresql/data"
mq:
image: redis:6
restart: unless-stopped
volumes:
- "${HOST_MQ_DIR:-./mq}:/data"
FLASK_APP=nopaque.py
...@@ -4,9 +4,9 @@ FROM python:3.9.0-slim-buster ...@@ -4,9 +4,9 @@ FROM python:3.9.0-slim-buster
LABEL authors="Patrick Jentsch <p.jentsch@uni-bielefeld.de>, Stephan Porada <sporada@uni-bielefeld.de>" LABEL authors="Patrick Jentsch <p.jentsch@uni-bielefeld.de>, Stephan Porada <sporada@uni-bielefeld.de>"
ARG DOCKER_GID
ARG UID ARG UID
ARG GID ARG GID
ENV FLASK_APP=nopaque.py
ENV LANG=C.UTF-8 ENV LANG=C.UTF-8
......
...@@ -38,6 +38,7 @@ def create_app(config_name): ...@@ -38,6 +38,7 @@ def create_app(config_name):
from .main import main as main_blueprint from .main import main as main_blueprint
from .services import services as services_blueprint from .services import services as services_blueprint
from .settings import settings as settings_blueprint from .settings import settings as settings_blueprint
app.register_blueprint(admin_blueprint, url_prefix='/admin') app.register_blueprint(admin_blueprint, url_prefix='/admin')
app.register_blueprint(auth_blueprint, url_prefix='/auth') app.register_blueprint(auth_blueprint, url_prefix='/auth')
app.register_blueprint(corpora_blueprint, url_prefix='/corpora') app.register_blueprint(corpora_blueprint, url_prefix='/corpora')
......
from email.message import EmailMessage
class Notification(EmailMessage):
    """An email notification about a nopaque Job/Corpus status change.

    Extends :class:`email.message.EmailMessage` with helpers that fill
    in the subject, a plain-text body and an HTML alternative from
    template files using ``str.format`` placeholder substitution.
    """

    def set_notification_content(self,
                                 subject_template,
                                 subject_template_values_dict,
                                 body_txt_template_path,
                                 body_html_template_path,
                                 body_template_values_dict):
        """Fill in subject and both body parts of the notification.

        :param subject_template: format string for the Subject header
        :param subject_template_values_dict: values for subject_template
        :param body_txt_template_path: path to the plain-text template
        :param body_html_template_path: path to the HTML template
        :param body_template_values_dict: values for both body templates
        """
        # Build the subject line from the subject template.
        # Canonical header casing ('Subject'); header lookup is
        # case-insensitive either way, so callers are unaffected.
        self['Subject'] = subject_template.format(
            **subject_template_values_dict)
        # Read both body templates and substitute the placeholder values.
        with open(body_txt_template_path) as nfile:
            self.body = nfile.read().format(**body_template_values_dict)
        with open(body_html_template_path) as nfile:
            self.html = nfile.read().format(**body_template_values_dict)
        # Plain-text content first, then the HTML alternative part.
        self.set_content(self.body)
        self.add_alternative(self.html, subtype='html')

    def set_addresses(self, sender, recipient):
        """Set the 'From' and 'To' headers (canonical casing)."""
        self['From'] = sender
        self['To'] = recipient
class NotificationService:
    """Thin wrapper around an SMTP connection for sending notifications.

    Tracks delivery state across runs: notifications that could not be
    delivered are kept for a retry, and a flag records whether the mail
    server stopped accepting mails (e.g. a sending limit was exceeded).
    """

    def __init__(self, smtp):
        self.smtp = smtp
        # Undelivered notifications, kept for a later retry.
        self.not_sent = {}
        # True while the mail server refuses further mails because its
        # sending limit was exceeded.
        self.mail_limit_exceeded = False

    def send(self, email):
        """Deliver a single email message over the SMTP connection."""
        self.smtp.send_message(email)

    def quit(self):
        """Terminate the underlying SMTP session."""
        self.smtp.quit()
<html>
<body>
<p>Dear <b>{username}</b>,</p>
<p>The status of your Job/Corpus({id}) with the title <b>"{title}"</b> has changed!</p>
<p>It is now <b>{status}</b>!</p>
<p>Time of this status update was: <b>{time} UTC</b></p>
<p>You can access your Job/Corpus here: <a href="{url}">{url}</a>
</p>
<p>Kind regards!<br>
Your nopaque team</p>
</body>
</html>
Dear {username},
The status of your Job/Corpus({id}) with the title "{title}" has changed!
It is now {status}!
Time of this status update was: {time} UTC
You can access your Job/Corpus here: {url}
Kind regards!
Your nopaque team
\ No newline at end of file
from sqlalchemy import asc
from .libnotify.notification import Notification
from .libnotify.service import NotificationService
from .. import configuration as config
from .. import Session
from ..decorators import background
from ..models import NotificationEmailData
import logging
import os
import smtplib
ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
@background
def notify():
    """Send queued email notifications; runs as a background thread.

    Opens an SMTP or SMTP_SSL connection according to the configuration
    (optionally upgrading via STARTTLS), logs in, renders pending
    NotificationEmailData rows into mails and sends them together with
    any notifications left unsent by a previous run.
    """
    session = Session()
    # Choose the connection type from the configuration.
    if config.SMTP_USE_SSL:
        smtp = smtplib.SMTP_SSL(host=config.SMTP_SERVER, port=config.SMTP_PORT)
    else:
        smtp = smtplib.SMTP(host=config.SMTP_SERVER, port=config.SMTP_PORT)
    if config.SMTP_USE_TLS:
        smtp.starttls()
    try:
        smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD)
    except smtplib.SMTPHeloError:
        logging.warning('The server didn’t reply properly to the HELO '
                        'greeting.')
        # NOTE(review): the early returns below leave the SMTP connection
        # open and skip Session.remove() — confirm this is acceptable.
        return
    except smtplib.SMTPAuthenticationError as e:
        logging.warning('The server didn’t accept the username/password '
                        'combination.')
        logging.warning(e)
        return
    except smtplib.SMTPNotSupportedError:
        logging.warning('The AUTH command is not supported by the server.')
        return
    except smtplib.SMTPException:
        logging.warning('No suitable authentication method was found.')
        return
    notification_service = NotificationService(smtp)
    # create notifications (content, recipient etc.)
    notifications = __create_mail_notifications(notification_service, session)
    # only login and send mails if there are any notifications
    # NOTE(review): the login already happened above; this guard only
    # skips the sending step, not the login.
    if (len(notifications) > 0):
        # combine new and unsent notifications
        notifications.update(notification_service.not_sent)
        # send all notifications
        __send_mail_notifications(notifications, notification_service)
        # remove unsent notifications because they have been sent now
        # but only if mail limit has not been exceeded
        if (notification_service.mail_limit_exceeded is not True):
            notification_service.not_sent = {}
    smtp.quit()
    Session.remove()
# Email notification functions
def __create_mail_notifications(notification_service, session):
    """Turn queued NotificationEmailData rows into Notification emails.

    Reads all NotificationEmailData rows (oldest first), renders one
    Notification per job from the libnotify templates and deletes each
    processed row. Returns a dict mapping job id -> Notification.

    NOTE(review): the notification_service parameter is unused here —
    confirm whether it can be dropped from the signature.
    """
    notification_email_data = session.query(NotificationEmailData).order_by(asc(NotificationEmailData.creation_date)).all() # noqa
    notifications = {}
    for data in notification_email_data:
        notification = Notification()
        notification.set_addresses(config.SMTP_DEFAULT_SENDER,
                                   data.job.user.email)
        subject_template = ('[nopaque] Status update for your Job/Corpora: '
                            '{title}!')
        subject_template_values_dict = {'title': data.job.title}
        # Link back to the job detail page.
        url = '{}://{}/{}/{}'.format(config.PROTOCOL,
                                     config.DOMAIN,
                                     'jobs',
                                     data.job.id)
        body_template_values_dict = {'username': data.job.user.username,
                                     'id': data.job.id,
                                     'title': data.job.title,
                                     'status': data.notify_status,
                                     'time': data.creation_date,
                                     'url': url}
        # Template files live next to this module under libnotify/.
        txt_tmplt = os.path.join(ROOT_DIR,
                                 'libnotify/templates/notification.txt')
        html_tmplt = os.path.join(ROOT_DIR,
                                  'libnotify/templates/notification.html')
        notification.set_notification_content(subject_template,
                                              subject_template_values_dict,
                                              txt_tmplt,
                                              html_tmplt,
                                              body_template_values_dict)
        notifications[data.job.id] = notification
        # Using a dictionary for notifications avoids sending multiple mails
        # if the status of a job changes in a few seconds. The user will not
        # get swamped with mails for queued, running and complete if those
        # happen in in a few seconds. Only the last update will be sent.
        # This depends on the sleep time interval though.
        session.delete(data)
        session.commit()
    return notifications
def __send_mail_notifications(notifications, notification_service):
    """Send the given notifications, remembering failed ones for retry.

    :param notifications: dict mapping job id -> Notification
    :param notification_service: NotificationService carrying the SMTP
        connection and retry state (not_sent, mail_limit_exceeded)
    """
    for key, notification in notifications.items():
        try:
            notification_service.send(notification)
            notification_service.mail_limit_exceeded = False
        except Exception as e:
            # The mail server rejected the mail, most likely because it
            # exceeded its limit for consecutive mail sending. Keep only
            # THIS notification for a retry on the next run.
            # (Previously the whole batch was re-added here via
            # not_sent.update(notifications), which re-sent mails that
            # had already gone out successfully.)
            logging.warning('Could not send notification {}: {}'.format(key, e))
            notification_service.not_sent[key] = notification
            notification_service.mail_limit_exceeded = True
from app import create_app from .. import db
from time import sleep from ..models import Corpus, Job
from ..decorators import background
import docker import docker
app = create_app()
docker_client = docker.from_env() docker_client = docker.from_env()
from . import corpus_utils, job_utils # noqa
app.app_context().push()
from . import check_corpora, check_jobs, notify # noqa
def run():
    """Supervise the three daemon worker threads forever.

    check_corpora, check_jobs and notify each return an object with an
    ``is_alive()`` method (a thread started by the @background
    decorator, judging by the call sites). Any worker that has died is
    restarted; the supervisor polls every 3 seconds and never returns.
    """
    check_corpora_thread = check_corpora()
    check_jobs_thread = check_jobs()
    notify_thread = notify()
    while True:
        # Restart any worker thread that has terminated (e.g. crashed).
        if not check_corpora_thread.is_alive():
            check_corpora_thread = check_corpora()
        if not check_jobs_thread.is_alive():
            check_jobs_thread = check_jobs()
        if not notify_thread.is_alive():
            notify_thread = notify()
        sleep(3)
@background
def check_corpora(): def check_corpora():
corpora = Corpus.query.all() corpora = Corpus.query.all()
for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora): for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora):
__create_build_corpus_service(corpus) corpus_utils.create_build_corpus_service(corpus)
for corpus in filter(lambda corpus: (corpus.status == 'queued' for corpus in filter(lambda corpus: (corpus.status == 'queued'
or corpus.status == 'running'), or corpus.status == 'running'),
corpora): corpora):
__checkout_build_corpus_service(corpus) corpus_utils.checkout_build_corpus_service(corpus)
for corpus in filter(lambda corpus: corpus.status == 'start analysis', for corpus in filter(lambda corpus: corpus.status == 'start analysis',
corpora): corpora):
__create_cqpserver_container(corpus) corpus_utils.create_cqpserver_container(corpus)
for corpus in filter(lambda corpus: corpus.status == 'stop analysis', for corpus in filter(lambda corpus: corpus.status == 'stop analysis',
corpora): corpora):
__remove_cqpserver_container(corpus) corpus_utils.remove_cqpserver_container(corpus)
db.session.commit()
def check_jobs():
    """Dispatch Docker service handling for every job by its status.

    Freshly submitted jobs get a service created; queued and running
    jobs have their service state checked out. All resulting model
    changes are committed in one go at the end.
    """
    jobs = Job.query.all()
    # Generator expressions keep the original lazy filtering semantics:
    # each job's status is read at iteration time, not up front.
    for job in (j for j in jobs if j.status == 'submitted'):
        job_utils.create_job_service(job)
    for job in (j for j in jobs if j.status == 'queued'):
        job_utils.checkout_job_service(job)
    for job in (j for j in jobs if j.status == 'running'):
        job_utils.checkout_job_service(job)
    db.session.commit()
from flask import current_app
from . import docker_client from . import docker_client
from .. import db
from ..decorators import background
from ..models import Corpus
import docker import docker
import logging import logging
import os import os
import shutil import shutil
@background def create_build_corpus_service(corpus):
def check_corpora(): corpus_dir = os.path.join(current_app.config['DATA_DIR'],
corpora = Corpus.query.all()
for corpus in filter(lambda corpus: corpus.status == 'submitted', corpora):
__create_build_corpus_service(corpus)
for corpus in filter(lambda corpus: (corpus.status == 'queued'
or corpus.status == 'running'),
corpora):
__checkout_build_corpus_service(corpus)
for corpus in filter(lambda corpus: corpus.status == 'start analysis',
corpora):
__create_cqpserver_container(corpus)
for corpus in filter(lambda corpus: corpus.status == 'stop analysis',
corpora):
__remove_cqpserver_container(corpus)
db.session.commit()
Session.remove()
def __create_build_corpus_service(corpus):
corpus_dir = os.path.join(config.DATA_DIR,
str(corpus.user_id), str(corpus.user_id),
'corpora', 'corpora',
str(corpus.id)) str(corpus.id))
...@@ -41,73 +20,75 @@ def __create_build_corpus_service(corpus): ...@@ -41,73 +20,75 @@ def __create_build_corpus_service(corpus):
shutil.rmtree(corpus_registry_dir) shutil.rmtree(corpus_registry_dir)
os.mkdir(corpus_data_dir) os.mkdir(corpus_data_dir)
os.mkdir(corpus_registry_dir) os.mkdir(corpus_registry_dir)
service_args = {'command': 'docker-entrypoint.sh build-corpus', service_args = {
'constraints': ['node.role==worker'], 'command': 'docker-entrypoint.sh build-corpus',
'labels': {'origin': 'nopaque', 'constraints': ['node.role==worker'],
'type': 'corpus.prepare', 'labels': {'origin': 'nopaque',
'corpus_id': str(corpus.id)}, 'type': 'corpus.prepare',
'mounts': [corpus_file + ':/root/files/corpus.vrt:ro', 'corpus_id': str(corpus.id)},
corpus_data_dir + ':/corpora/data:rw', 'mounts': [corpus_file + ':/root/files/corpus.vrt:ro',
corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], corpus_data_dir + ':/corpora/data:rw',
'name': 'build-corpus_{}'.format(corpus.id), corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
'restart_policy': docker.types.RestartPolicy()} 'name': 'build-corpus_{}'.format(corpus.id),
'restart_policy': docker.types.RestartPolicy()
}
service_image = \ service_image = \
'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest'
try:
service = docker_client.services.get(service_args['name'])
except docker.errors.NotFound:
pass
except docker.errors.DockerException:
return
else:
service.remove()
try: try:
docker_client.services.create(service_image, **service_args) docker_client.services.create(service_image, **service_args)
except docker.errors.DockerException: except docker.errors.APIError as e:
logging.error('create_build_corpus_service({}): '.format(corpus.id)
+ '{} (status: {} -> failed)'.format(e, corpus.status))
corpus.status = 'failed' corpus.status = 'failed'
else: else:
corpus.status = 'queued' corpus.status = 'queued'
finally:
# TODO: send email
pass
def __checkout_build_corpus_service(corpus): def checkout_build_corpus_service(corpus):
service_name = 'build-corpus_{}'.format(corpus.id) service_name = 'build-corpus_{}'.format(corpus.id)
try: try:
service = docker_client.services.get(service_name) service = docker_client.services.get(service_name)
except docker.errors.NotFound: except docker.errors.NotFound as e:
logging.error('__checkout_build_corpus_service({}):'.format(corpus.id) logging.error('checkout_build_corpus_service({}):'.format(corpus.id)
+ ' The service does not exist.' + ' {} (stauts: {} -> failed)'.format(e, corpus.status))
+ ' (stauts: {} -> failed)'.format(corpus.status))
corpus.status = 'failed' corpus.status = 'failed'
return # TODO: handle docker.errors.APIError and docker.errors.InvalidVersion
except docker.errors.DockerException: else:
return service_tasks = service.tasks()
service_tasks = service.tasks() if not service_tasks:
if not service_tasks: return
return task_state = service_tasks[0].get('Status').get('State')
task_state = service_tasks[0].get('Status').get('State') if corpus.status == 'queued' and task_state != 'pending':
if corpus.status == 'queued' and task_state != 'pending': corpus.status = 'running'
corpus.status = 'running' elif corpus.status == 'running' and task_state == 'complete':
elif corpus.status == 'running' and task_state == 'complete': service.remove()
service.remove() corpus.status = 'prepared'
corpus.status = 'prepared' elif corpus.status == 'running' and task_state == 'failed':
elif corpus.status == 'running' and task_state == 'failed': service.remove()
service.remove() corpus.status = task_state
corpus.status = task_state finally:
# TODO: send email
pass
def __create_cqpserver_container(corpus): def create_cqpserver_container(corpus):
corpus_dir = os.path.join(config.DATA_DIR, corpus_dir = os.path.join(current_app.config['DATA_DIR'],
str(corpus.user_id), str(corpus.user_id),
'corpora', 'corpora',
str(corpus.id)) str(corpus.id))
corpus_data_dir = os.path.join(corpus_dir, 'data') corpus_data_dir = os.path.join(corpus_dir, 'data')
corpus_registry_dir = os.path.join(corpus_dir, 'registry') corpus_registry_dir = os.path.join(corpus_dir, 'registry')
container_args = {'command': 'cqpserver', container_args = {
'detach': True, 'command': 'cqpserver',
'volumes': [corpus_data_dir + ':/corpora/data:rw', 'detach': True,
corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'], 'volumes': [corpus_data_dir + ':/corpora/data:rw',
'name': 'cqpserver_{}'.format(corpus.id), corpus_registry_dir + ':/usr/local/share/cwb/registry:rw'],
'network': 'nopaque_default'} 'name': 'cqpserver_{}'.format(corpus.id),
'network': 'nopaque_default'
}
container_image = \ container_image = \
'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest' 'gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/cqpserver:latest'
try: try:
...@@ -126,7 +107,7 @@ def __create_cqpserver_container(corpus): ...@@ -126,7 +107,7 @@ def __create_cqpserver_container(corpus):
corpus.status = 'analysing' corpus.status = 'analysing'
def __remove_cqpserver_container(corpus): def remove_cqpserver_container(corpus):
container_name = 'cqpserver_{}'.format(corpus.id) container_name = 'cqpserver_{}'.format(corpus.id)
try: try:
container = docker_client.containers.get(container_name) container = docker_client.containers.get(container_name)
......
from datetime import datetime from datetime import datetime
from .. import configuration as config from flask import current_app
from .. import docker_client, Session from . import docker_client
from ..decorators import background from .. import db
from ..models import Job, JobResult, NotificationData, NotificationEmailData from ..models import JobResult
import docker import docker
import logging import logging
import json import json
import os import os
@background def create_job_service(job):
def check_jobs(): job_dir = os.path.join(current_app.config['DATA_DIR'],
session = Session()
jobs = session.query(Job).all()
for job in filter(lambda job: job.status == 'submitted', jobs):
__create_job_service(job)
for job in filter(lambda job: job.status == 'queued', jobs):
__checkout_job_service(job, session)
__add_notification_data(job, 'queued', session)
for job in filter(lambda job: job.status == 'running', jobs):
__checkout_job_service(job, session)
__add_notification_data(job, 'running', session)
for job in filter(lambda job: job.status == 'complete', jobs):
__add_notification_data(job, 'complete', session)
for job in filter(lambda job: job.status == 'failed', jobs):
__add_notification_data(job, 'failed', session)
for job in filter(lambda job: job.status == 'canceling', jobs):
__remove_job_service(job)
session.commit()
Session.remove()
def __add_notification_data(job, notified_on_status, session):
# checks if user wants any notifications at all
if (job.user.setting_job_status_mail_notifications == 'none'):
return
# checks if user wants only notification on completed jobs
elif (job.user.setting_job_status_mail_notifications == 'end'
and notified_on_status != 'complete'):
return
else:
# check if a job already has associated NotificationData
notification_exists = len(job.notification_data)
# create notification_data for current job if there is none
if (notification_exists == 0):
notification_data = NotificationData(job_id=job.id)
session.add(notification_data)
# If no commit job will have no NotificationData
session.commit()
if (job.notification_data[0].notified_on != notified_on_status):
notification_email_data = NotificationEmailData(job_id=job.id)
notification_email_data.notify_status = notified_on_status
notification_email_data.creation_date = datetime.utcnow()
job.notification_data[0].notified_on = notified_on_status
session.add(notification_email_data)
def __create_job_service(job):
job_dir = os.path.join(config.DATA_DIR,
str(job.user_id), str(job.user_id),
'jobs', 'jobs',
str(job.id)) str(job.id))
...@@ -80,68 +33,69 @@ def __create_job_service(job): ...@@ -80,68 +33,69 @@ def __create_job_service(job):
'restart_policy': docker.types.RestartPolicy()} 'restart_policy': docker.types.RestartPolicy()}
service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/' service_image = ('gitlab.ub.uni-bielefeld.de:4567/sfb1288inf/'
+ job.service + ':' + job.service_version) + job.service + ':' + job.service_version)
try:
service = docker_client.services.get(service_args['name'])
except docker.errors.NotFound:
pass
except docker.errors.DockerException:
return
else:
service.remove()
try: try:
docker_client.services.create(service_image, **service_args) docker_client.services.create(service_image, **service_args)
except docker.errors.DockerException: except docker.errors.APIError as e:
logging.error('create_job_service({}): {} '.format(job.id, e)
+ '(status: {} -> failed)'.format(job.status))
job.status = 'failed' job.status = 'failed'
else: else:
job.status = 'queued' job.status = 'queued'
finally:
# TODO: send email
pass
def __checkout_job_service(job, session): def checkout_job_service(job):
service_name = 'job_{}'.format(job.id) service_name = 'job_{}'.format(job.id)
try: try:
service = docker_client.services.get(service_name) service = docker_client.services.get(service_name)
except docker.errors.NotFound: except docker.errors.NotFound as e:
logging.error('__checkout_job_service({}): '.format(job.id) logging.error('checkout_job_service({}): {} '.format(job.id, e)
+ 'The service does not exist. ' + '(status: {} -> submitted)'.format(job.status))
+ '(status: {} -> failed)'.format(job.status)) job.status = 'submitted'
job.status = 'failed' # TODO: handle docker.errors.APIError and docker.errors.InvalidVersion
return else:
except docker.errors.DockerException: service_tasks = service.tasks()
return if not service_tasks:
service_tasks = service.tasks() return
if not service_tasks: task_state = service_tasks[0].get('Status').get('State')
return if job.status == 'queued' and task_state != 'pending':
task_state = service_tasks[0].get('Status').get('State') job.status = 'running'
if job.status == 'queued' and task_state != 'pending': elif job.status == 'queued' and task_state == 'complete':
job.status = 'running' service.remove()
elif (job.status == 'running' job.end_date = datetime.utcnow()
and (task_state == 'complete' or task_state == 'failed')): job.status = task_state
service.remove() if task_state == 'complete':
job.end_date = datetime.utcnow() results_dir = os.path.join(current_app.config['DATA_DIR'],
job.status = task_state str(job.user_id),
if task_state == 'complete': 'jobs',
results_dir = os.path.join(config.DATA_DIR, str(job.id),
str(job.user_id), 'output')
'jobs', results = filter(lambda x: x.endswith('.zip'),
str(job.id), os.listdir(results_dir))
'output') for result in results:
results = filter(lambda x: x.endswith('.zip'), job_result = JobResult(dir=results_dir,
os.listdir(results_dir)) filename=result,
for result in results: job_id=job.id)
job_result = JobResult(dir=results_dir, db.session.add(job_result)
filename=result, elif job.status == 'running' and task_state == 'failed':
job_id=job.id) service.remove()
session.add(job_result) job.end_date = datetime.utcnow()
job.status = task_state
finally:
# TODO: send email
pass
def __remove_job_service(job): def remove_job_service(job):
service_name = 'job_{}'.format(job.id) service_name = 'job_{}'.format(job.id)
try: try:
service = docker_client.services.get(service_name) service = docker_client.services.get(service_name)
except docker.errors.NotFound: except docker.errors.NotFound:
# TODO: send email
job.status = 'canceled' job.status = 'canceled'
except docker.errors.DockerException: # TODO: handle docker.errors.APIError and docker.errors.InvalidVersion
return
else: else:
service.update(mounts=None) service.update(mounts=None)
service.remove() service.remove()
<p>Dear <b>{{ user.username }}</b>,</p>
<p>The status of your Job/Corpus({{ job.id }}) with the title <b>"{{ job.title }}"</b> has changed!</p>
<p>It is now <b>{{ job.status }}</b>!</p>
<p>Time of this status update was: <b>{time} UTC</b></p>
<p>You can access your Job/Corpus here: <a href="{{ url_for('jobs.job', job_id=job.id) }}">{{ url_for('jobs.job', job_id=job.id) }}</a></p>
<p>Kind regards!<br>Your nopaque team</p>
Dear {{ user.username }},
The status of your Job/Corpus({{ job.id }}) with the title "{{ job.title }}" has changed!
It is now {{ job.status }}!
Time of this status update was: {time} UTC
You can access your Job/Corpus here: {{ url_for('jobs.job', job_id=job.id) }}
Kind regards!
Your nopaque team
#!/bin/bash #!/bin/bash
source venv/bin/activate source venv/bin/activate
export FLASK_APP=nopaque.py while true; do
flask deploy
if [[ "$?" == "0" ]]; then
break
fi
echo Deploy command failed, retrying in 5 secs...
sleep 5
done
if [[ "$#" -eq 0 ]]; then if [[ "$#" -eq 0 ]]; then
while true; do
flask deploy
if [[ "$?" == "0" ]]; then
break
fi
echo Deploy command failed, retrying in 5 secs...
sleep 5
done
python nopaque.py python nopaque.py
elif [[ "$1" == "flask" ]]; then elif [[ "$1" == "flask" ]]; then
exec ${@:1} exec ${@:1}
......
...@@ -51,6 +51,13 @@ def deploy(): ...@@ -51,6 +51,13 @@ def deploy():
Role.insert_roles() Role.insert_roles()
@app.cli.command()
def tasks():
    """Run the corpus and job background task processors."""
    # Local import: presumably avoids importing app.tasks (and its
    # side effects) at CLI registration time — confirm.
    from app.tasks import process_corpora, process_jobs
    process_corpora()
    process_jobs()
@app.cli.command() @app.cli.command()
def test(): def test():
"""Run the unit tests.""" """Run the unit tests."""
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment