From b2fd03aebad93f69c19c7b47515ad3710180a820 Mon Sep 17 00:00:00 2001
From: "Hartung, Michael" <michael.hartung@uni-hamburg.de>
Date: Tue, 24 May 2022 11:38:09 +0200
Subject: [PATCH] renaming netex to drugstone and celery integration (with
 flower and celery-beat)

---
 .gitignore                               |   7 +-
 Dockerfile                               |   6 +-
 README.md                                |   2 +-
 docker-compose.yml                       |  44 ++++++--
 docker-django.env.dev                    |  16 +++
 docker-entrypoint.sh                     |   8 +-
 drugstone/__init__.py                    |   7 +-
 drugstone/backend_tasks.py               | 125 +++++++++++++++++++++
 drugstone/celery.py                      |   6 +-
 drugstone/celery_tasks.py                |  17 ---
 drugstone/settings/__init__.py           |   1 +
 drugstone/settings/celery_schedule.py    |   8 ++
 drugstone/{ => settings}/settings.py     |  41 +++----
 drugstone/tasks.py                       | 135 +++--------------------
 drugstone/views.py                       |   2 +-
 import-data.sh => scripts/import-data.sh |   2 +
 scripts/start_celery_beat.sh             |   1 +
 scripts/start_celery_worker.sh           |   1 +
 supervisord.conf                         |   6 +-
 19 files changed, 240 insertions(+), 195 deletions(-)
 create mode 100644 docker-django.env.dev
 create mode 100755 drugstone/backend_tasks.py
 delete mode 100644 drugstone/celery_tasks.py
 create mode 100644 drugstone/settings/__init__.py
 create mode 100644 drugstone/settings/celery_schedule.py
 rename drugstone/{ => settings}/settings.py (77%)
 mode change 100755 => 100644 drugstone/tasks.py
 rename import-data.sh => scripts/import-data.sh (93%)
 create mode 100644 scripts/start_celery_beat.sh
 create mode 100644 scripts/start_celery_worker.sh

diff --git a/.gitignore b/.gitignore
index d9d37c5..5cf8a77 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 .idea/
+.env/
 venv/
 *.pyc
 *.pydevproject
@@ -8,4 +9,8 @@ db.sqlite3
 supervisord.log
 supervisord.log
 supervisord.pid
-docker-entrypoint.lock
\ No newline at end of file
+docker-entrypoint.lock
+celerybeat-schedule.bak
+celerybeat-schedule.dat
+celerybeat-schedule.dir
+docker-django.env.prod
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index b5b8d7a..9f236cf 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -22,11 +22,11 @@ RUN pip install -r /usr/src/drugstone/requirements.txt
 RUN pip install gunicorn
 
 COPY ./supervisord.conf /etc/supervisor/conf.d/supervisord.conf
-COPY ./docker-entrypoint.sh /entrypoint.sh
-COPY ./import-data.sh /import.sh
+COPY ./docker-entrypoint.sh /usr/src/drugstone/docker-entrypoint.sh
+# COPY ./scripts/ /usr/src/drugstone/scripts/
 
 COPY . /usr/src/drugstone/
 
 #EXPOSE 8000
 
-ENTRYPOINT ["sh", "/entrypoint.sh"]
+# ENTRYPOINT ["sh", "/entrypoint.sh"]
diff --git a/README.md b/README.md
index 1101f52..3a8396b 100755
--- a/README.md
+++ b/README.md
@@ -25,4 +25,4 @@ python3 manage.py make_graphs
 
 
 ### Docker DEV environment (building is optional)
-``docker-compose -f docker-compose.yml -f docker-compose.dev.yml up -d --build``
+``docker-compose -f docker-compose.yml up -d --build``
diff --git a/docker-compose.yml b/docker-compose.yml
index b668cac..39ae760 100755
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,15 +1,18 @@
-version: '3.3'
+version: '3.8'
 
 services:
   app:
     image: drugstone_backend
     container_name: drugstone_backend
+    command: 
+      - "sh"
+      - "/usr/src/drugstone/docker-entrypoint.sh"
     build: .
     env_file:
-      - 'docker-django.env'
+      - 'docker-django.env.dev'
     restart: always
     volumes:
-      - drugstone_usr_volume:/usr/src/drugstone/data_drugstone/
+      - ./:/usr/src/drugstone/
     ports:
       - 8001:8000
     networks:
@@ -32,49 +35,66 @@ services:
       - POSTGRES_DB=drugstone
       - POSTGRES_USER=drugstone
       - POSTGRES_PASSWORD=t6278yczAH7rPKVMxaDD
-    command:
+    command: 
       - "postgres"
-      - "-c"
+      - "-c" 
       - "max_connections=10000"
       - "-c"
       - "shared_buffers=2GB"
   redis:
     image: redis
     container_name: drugstone_redis
-    hostname: drugstone_redis
     restart: always
     command: ["redis-server"]
     networks:
       - drugstone_net
+    ports:
+      - 6379:6379
   celery:
+    command: 
+      - "sh"
+      - "/usr/src/drugstone/scripts/start_celery_worker.sh"
     restart: always
     build: .
     container_name: drugstone_celery
     hostname: drugstone_celery
-    command: celery -A core worker -l info
+    env_file:
+      - './docker-django.env.dev'
     volumes:
-      - ./:/usr/src/app/
+      - ./:/usr/src/drugstone/
     depends_on:
       - redis
       - db
     networks:
       - drugstone_net
   celery-beat:
+    command: 
+      - "sh"
+      - "/usr/src/drugstone/scripts/start_celery_beat.sh"
     build: .
     container_name: drugstone_celery_beat
     hostname: drugstone_celery_beat
-    command: celery -A core beat -l info
+    env_file:
+      - './docker-django.env.dev'
     volumes:
-      - ./:/usr/src/app/
+      - ./:/usr/src/drugstone/
     depends_on:
       - redis
       - db
     networks:
       - drugstone_net
+  flower:  
+    image: mher/flower
+    container_name: drugstone_flower
+    env_file:
+      - './docker-django.env.dev'
+    ports:  
+      - 8888:8888
+    networks:
+      - drugstone_net
 
 networks:
   drugstone_net:
 
 volumes:
-  drugstone_postgres_volume:
-  drugstone_usr_volume:
\ No newline at end of file
+  drugstone_postgres_volume:
\ No newline at end of file
diff --git a/docker-django.env.dev b/docker-django.env.dev
new file mode 100644
index 0000000..3930f4b
--- /dev/null
+++ b/docker-django.env.dev
@@ -0,0 +1,16 @@
+DEBUG=1
+SECRET_KEY="0&y9v0@9%@c^woz8m+h2(^$#3gd^c@d82kmmq8tu*nesc_x9i+"
+SUPERUSER_NAME=admin
+SUPERUSER_PASS=Mb2R7CbqAPbpaKawKg7Z
+SQL_ENGINE=django.db.backends.postgresql
+SQL_DATABASE=drugstone
+SQL_USER=drugstone
+SQL_PASSWORD=t6278yczAH7rPKVMxaDD
+SQL_HOST=drugstone_postgres
+SQL_PORT=5432
+REDIS_HOST=redis
+REDIS_PORT=6379
+DJANGO_SETTINGS_MODULE=drugstone.settings
+CELERY_BROKER_URL=redis://redis:6379/0
+FLOWER_PORT=8888
+FLOWER_BASIC_AUTH=drugstone:test
\ No newline at end of file
diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh
index 28a92a0..bf97629 100755
--- a/docker-entrypoint.sh
+++ b/docker-entrypoint.sh
@@ -1,13 +1,11 @@
 #!/bin/bash
 
-python3 manage.py migrate --run-syncdb
-python3 manage.py createfixtures
-python3 manage.py cleanuptasks
-
 file="docker-entrypoint.lock"
 # exit if entrypoint.lock exists to prevent new import of data every time docker is restarted
 if ! test -f "$file"; then
-    sh import-data.sh
+    python3 manage.py createfixtures
+    python3 manage.py cleanuptasks
+    sh scripts/import-data.sh
     touch $file
 fi
 
diff --git a/drugstone/__init__.py b/drugstone/__init__.py
index 49cb76a..b4ad9c7 100755
--- a/drugstone/__init__.py
+++ b/drugstone/__init__.py
@@ -1 +1,6 @@
-from .wsgi import application  # noqa: F401
+from drugstone.wsgi import application  # noqa: F401
+# This will make sure the app is always imported when
+# Django starts so that shared_task will use this app.
+from drugstone.celery import app as celery_app
+
+__all__ = ['celery_app']
\ No newline at end of file
diff --git a/drugstone/backend_tasks.py b/drugstone/backend_tasks.py
new file mode 100755
index 0000000..e8a6134
--- /dev/null
+++ b/drugstone/backend_tasks.py
@@ -0,0 +1,125 @@
+import json
+from datetime import datetime
+
+import redis
+import rq
+import os
+
+from tasks.task_hook import TaskHook
+
+qr_r = redis.Redis(host=os.getenv('REDIS_HOST', 'redis'),
+                   port=os.getenv('REDIS_PORT', 6379),
+                   db=0,
+                   decode_responses=False)
+rq_tasks = rq.Queue('drugstone_tasks', connection=qr_r)
+
+
+r = redis.Redis(host=os.getenv('REDIS_HOST', 'redis'),
+                port=os.getenv('REDIS_PORT', 6379),
+                db=0,
+                decode_responses=True)
+
+
+def run_task(token, algorithm, parameters):
+    def set_progress(progress, status):
+        r.set(f'{token}_progress', f'{progress}')
+        r.set(f'{token}_status', f'{status}')
+
+    def set_result(results):
+        r.set(f'{token}_result', json.dumps(results, allow_nan=True))
+        r.set(f'{token}_finished_at', str(datetime.now().timestamp()))
+        r.set(f'{token}_done', '1')
+
+        set_progress(1.0, 'Done.')
+
+    set_progress(0.0, 'Computation started')
+
+    worker_id = os.getenv('RQ_WORKER_ID')
+    r.set(f'{token}_worker_id', f'{worker_id}')
+    job_id = os.getenv('RQ_JOB_ID')
+    r.set(f'{token}_job_id', f'{job_id}')
+    r.set(f'{token}_started_at', str(datetime.now().timestamp()))
+
+    task_hook = TaskHook(json.loads(parameters), './data_drugstone/Networks/', set_progress, set_result)
+
+    try:
+        if algorithm == 'dummy':
+            raise RuntimeError('Dummy algorithm for testing purposes.')
+        elif algorithm == 'multisteiner':
+            from tasks.multi_steiner import multi_steiner
+            multi_steiner(task_hook)
+        elif algorithm == 'keypathwayminer':
+            from tasks.keypathwayminer_task import kpm_task
+            kpm_task(task_hook)
+        elif algorithm == 'trustrank':
+            from tasks.trust_rank import trust_rank
+            trust_rank(task_hook)
+        elif algorithm == 'closeness':
+            from tasks.closeness_centrality import closeness_centrality
+            closeness_centrality(task_hook)
+        elif algorithm == 'degree':
+            from tasks.degree_centrality import degree_centrality
+            degree_centrality(task_hook)
+        elif algorithm == 'proximity':
+            from tasks.network_proximity import network_proximity
+            network_proximity(task_hook)
+        elif algorithm == 'betweenness':
+            from tasks.betweenness_centrality import betweenness_centrality
+            betweenness_centrality(task_hook)
+        elif algorithm in ['quick', 'super']:
+            from tasks.quick_task import quick_task
+            quick_task(task_hook)
+    except Exception as e:
+        r.set(f'{token}_status', f'{e}')
+        r.set(f'{token}_failed', '1')
+
+
+def refresh_from_redis(task):
+    task.worker_id = r.get(f'{task.token}_worker_id')
+    if not task.worker_id:
+        return
+
+    task.job_id = r.get(f'{task.token}_job_id')
+    task.progress = float(r.get(f'{task.token}_progress'))
+    task.done = True if r.get(f'{task.token}_done') else False
+    task.failed = True if r.get(f'{task.token}_failed') else False
+    status = r.get(f'{task.token}_status')
+    if not status or len(status) < 255:
+        task.status = status
+    else:
+        task.status = status[:255]
+    started_at = r.get(f'{task.token}_started_at')
+    if started_at:
+        task.started_at = datetime.fromtimestamp(float(started_at))
+    finished_at = r.get(f'{task.token}_finished_at')
+    if finished_at:
+        task.finished_at = datetime.fromtimestamp(float(finished_at))
+    task.result = r.get(f'{task.token}_result')
+
+
+def start_task(task):
+    job = rq_tasks.enqueue(run_task, task.token, task.algorithm, task.parameters, job_timeout=30*60)
+    task.job_id = job.id
+
+
+def task_stats(task):
+    pos = 1
+    for j in rq_tasks.jobs:
+        if j.id == task.job_id:
+            break
+        pos += 1
+
+    return {
+        'queueLength': rq_tasks.count,
+        'queuePosition': pos,
+    }
+
+
+def task_result(task):
+    if not task.done:
+        return None
+    return json.loads(task.result, parse_constant=lambda c: None)
+
+
+def task_parameters(task):
+    return json.loads(task.parameters)
diff --git a/drugstone/celery.py b/drugstone/celery.py
index c45b7b3..1dd349b 100644
--- a/drugstone/celery.py
+++ b/drugstone/celery.py
@@ -3,8 +3,8 @@ import os
 from celery import Celery
 
 
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings")
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "drugstone.settings")
 
-app = Celery("core")
+app = Celery("drugstone")
 app.config_from_object("django.conf:settings", namespace="CELERY")
-app.autodiscover_tasks()
\ No newline at end of file
+app.autodiscover_tasks()
diff --git a/drugstone/celery_tasks.py b/drugstone/celery_tasks.py
deleted file mode 100644
index a89c988..0000000
--- a/drugstone/celery_tasks.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from celery import shared_task
-from celery.utils.log import get_task_logger
-from drugstone.util.nedrex import fetch_nedrex_data, integrate_nedrex_data
-
-logger = get_task_logger(__name__)
-
-def task_update_db_from_nedrex():
-    logger.info("Updating DB from NeDRex.")
-    
-    logger.info("Fetching data...")
-    fetch_nedrex_data()
-
-    logger.info("Integrating data...")
-    integrate_nedrex_data()
-
-    logger.info("Done.")
-
diff --git a/drugstone/settings/__init__.py b/drugstone/settings/__init__.py
new file mode 100644
index 0000000..c70020d
--- /dev/null
+++ b/drugstone/settings/__init__.py
@@ -0,0 +1 @@
+from .settings import *
\ No newline at end of file
diff --git a/drugstone/settings/celery_schedule.py b/drugstone/settings/celery_schedule.py
new file mode 100644
index 0000000..88e8f9a
--- /dev/null
+++ b/drugstone/settings/celery_schedule.py
@@ -0,0 +1,8 @@
+from celery.schedules import crontab
+
+CELERY_BEAT_SCHEDULE = {
+    "update_db": {
+        "task": "drugstone.tasks.task_update_db_from_nedrex",
+        "schedule": crontab(minute="*/1000"),
+    },
+}
\ No newline at end of file
diff --git a/drugstone/settings.py b/drugstone/settings/settings.py
similarity index 77%
rename from drugstone/settings.py
rename to drugstone/settings/settings.py
index 73cc432..b943133 100755
--- a/drugstone/settings.py
+++ b/drugstone/settings/settings.py
@@ -11,9 +11,7 @@ https://docs.djangoproject.com/en/3.0/ref/settings/
 """
 
 import os
-
-# celery beat
-from celery.schedules import crontab
+from .celery_schedule import *
 
 # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
 BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -23,20 +21,15 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 # See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
 
 # SECURITY WARNING: keep the secret key used in production secret!
-SECRET_KEY = os.environ.get('SECRET_KEY', '0&y9v0@9%@c^woz8m+h2(^$#3gd^c@d82kmmq8tu*nesc_x9i+')
+SECRET_KEY = os.environ.get('SECRET_KEY')
 
 # SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = os.environ.get('DEBUG', True)
+DEBUG = os.environ.get('DEBUG', False)
 
 ALLOWED_HOSTS = [
-    'www.exbio.wzw.tum.de',
-    'exbio.wzw.tum.de',
-    '10.162.163.32',  # oskar
     'localhost',
     '127.0.0.1',
-    '10.162.163.20',  # alfred
-    'ml-s-zbhdock1.ad.uni-hamburg.de',
-    'cosy-test.zbh.uni-hamburg.de',
+    'drugstone-backend.zbh.uni-hamburg.de',
     'drugst.one'
 ]
 
@@ -94,11 +87,11 @@ WSGI_APPLICATION = 'drugstone.wsgi.application'
 DATABASES = {
     'default': {
         'ENGINE': os.environ.get('SQL_ENGINE', 'django.db.backends.postgresql'),
-        'NAME': os.environ.get('SQL_DATABASE', 'drugstone'),   # os.path.join(BASE_DIR, 'db.sqlite3')
-        'USER': os.environ.get('SQL_USER', 'drugstone'),
-        'PASSWORD': os.environ.get('SQL_PASSWORD', 't6278yczAH7rPKVMxaDD'),
-        'HOST': os.environ.get('SQL_HOST', 'drugstone_postgres'),
-        'PORT': os.environ.get('SQL_PORT', '5432'),
+        'NAME': os.environ.get('SQL_DATABASE'),
+        'USER': os.environ.get('SQL_USER'),
+        'PASSWORD': os.environ.get('SQL_PASSWORD'),
+        'HOST': os.environ.get('SQL_HOST'),
+        'PORT': os.environ.get('SQL_PORT'),
     }
 }
 
@@ -165,20 +158,14 @@ SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
 CACHES = {
     'default': {
         'BACKEND': 'django_redis.cache.RedisCache',
-        'LOCATION': f'redis://{os.environ.get("REDIS_HOST", "drugstone_redis")}: \
-            {os.environ.get("REDIS_PORT", "6379")}/1',
+        'LOCATION': f'redis://{os.environ.get("REDIS_HOST")}:'
+                    f'{os.environ.get("REDIS_PORT")}/1',
         'OPTIONS': {
             'CLIENT_CLASS': 'django_redis.client.DefaultClient',
         }
     }
 }
 
-CELERY_BROKER_URL = "redis://redis:6379"
-CELERY_RESULT_BACKEND = "redis://redis:6379"
-
-CELERY_BEAT_SCHEDULE = {
-    "update_db": {
-        "task": "control.celery_tasks.task_update_db_from_nedrex",
-        "schedule": crontab(minute="*/3"),
-    }
-}
+CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL")
+# timezones: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
+CELERY_TIMEZONE = 'Europe/Berlin'
diff --git a/drugstone/tasks.py b/drugstone/tasks.py
old mode 100755
new mode 100644
index 5211e34..4547ea4
--- a/drugstone/tasks.py
+++ b/drugstone/tasks.py
@@ -1,125 +1,18 @@
-import json
-from datetime import datetime
+from celery import shared_task
+from celery.utils.log import get_task_logger
+from drugstone.util.nedrex import fetch_nedrex_data, integrate_nedrex_data
 
-import redis
-import rq
-import os
+logger = get_task_logger(__name__)
 
-from tasks.task_hook import TaskHook
+@shared_task
+def task_update_db_from_nedrex():
+    logger.info("Updating DB from NeDRex.")
+
+
+    logger.info("Fetching data...")
+    fetch_nedrex_data()
 
-qr_r = redis.Redis(host=os.getenv('REDIS_HOST', 'drugstone_redis'),
-                   port=os.getenv('REDIS_PORT', 6379),
-                   db=0,
-                   decode_responses=False)
-rq_tasks = rq.Queue('drugstone_tasks', connection=qr_r)
+    logger.info("Integrating data...")
+    integrate_nedrex_data()
+    logger.info("Done.")
 
-
-r = redis.Redis(host=os.getenv('REDIS_HOST', 'drugstone_redis'),
-                port=os.getenv('REDIS_PORT', 6379),
-                db=0,
-                decode_responses=True)
-
-
-def run_task(token, algorithm, parameters):
-    def set_progress(progress, status):
-        r.set(f'{token}_progress', f'{progress}')
-        r.set(f'{token}_status', f'{status}')
-
-    def set_result(results):
-        r.set(f'{token}_result', json.dumps(results, allow_nan=True))
-        r.set(f'{token}_finished_at', str(datetime.now().timestamp()))
-        r.set(f'{token}_done', '1')
-
-        set_progress(1.0, 'Done.')
-
-    set_progress(0.0, 'Computation started')
-
-    worker_id = os.getenv('RQ_WORKER_ID')
-    r.set(f'{token}_worker_id', f'{worker_id}')
-    job_id = os.getenv('RQ_JOB_ID')
-    r.set(f'{token}_job_id', f'{job_id}')
-    r.set(f'{token}_started_at', str(datetime.now().timestamp()))
-
-    task_hook = TaskHook(json.loads(parameters), './data_drugstone/Networks/', set_progress, set_result)
-
-    try:
-        if algorithm == 'dummy':
-            raise RuntimeError('Dummy algorithm for testing purposes.')
-        elif algorithm == 'multisteiner':
-            from tasks.multi_steiner import multi_steiner
-            multi_steiner(task_hook)
-        elif algorithm == 'keypathwayminer':
-            from tasks.keypathwayminer_task import kpm_task
-            kpm_task(task_hook)
-        elif algorithm == 'trustrank':
-            from tasks.trust_rank import trust_rank
-            trust_rank(task_hook)
-        elif algorithm == 'closeness':
-            from tasks.closeness_centrality import closeness_centrality
-            closeness_centrality(task_hook)
-        elif algorithm == 'degree':
-            from tasks.degree_centrality import degree_centrality
-            degree_centrality(task_hook)
-        elif algorithm == 'proximity':
-            from tasks.network_proximity import network_proximity
-            network_proximity(task_hook)
-        elif algorithm == 'betweenness':
-            from tasks.betweenness_centrality import betweenness_centrality
-            betweenness_centrality(task_hook)
-        elif algorithm in ['quick', 'super']:
-            from tasks.quick_task import quick_task
-            quick_task(task_hook)
-    except Exception as e:
-        r.set(f'{token}_status', f'{e}')
-        r.set(f'{token}_failed', '1')
-
-
-def refresh_from_redis(task):
-    task.worker_id = r.get(f'{task.token}_worker_id')
-    if not task.worker_id:
-        return
-
-    task.job_id = r.get(f'{task.token}_job_id')
-    task.progress = float(r.get(f'{task.token}_progress'))
-    task.done = True if r.get(f'{task.token}_done') else False
-    task.failed = True if r.get(f'{task.token}_failed') else False
-    status = r.get(f'{task.token}_status')
-    if not status or len(status) < 255:
-        task.status = status
-    else:
-        task.status = status[:255]
-    started_at = r.get(f'{task.token}_started_at')
-    if started_at:
-        task.started_at = datetime.fromtimestamp(float(started_at))
-    finished_at = r.get(f'{task.token}_finished_at')
-    if finished_at:
-        task.finished_at = datetime.fromtimestamp(float(finished_at))
-    task.result = r.get(f'{task.token}_result')
-
-
-def start_task(task):
-    job = rq_tasks.enqueue(run_task, task.token, task.algorithm, task.parameters, job_timeout=30*60)
-    task.job_id = job.id
-
-
-def task_stats(task):
-    pos = 1
-    for j in rq_tasks.jobs:
-        if j.id == task.job_id:
-            break
-        pos += 1
-
-    return {
-        'queueLength': rq_tasks.count,
-        'queuePosition': pos,
-    }
-
-
-def task_result(task):
-    if not task.done:
-        return None
-    return json.loads(task.result, parse_constant=lambda c: None)
-
-
-def task_parameters(task):
-    return json.loads(task.parameters)
diff --git a/drugstone/views.py b/drugstone/views.py
index abc81f5..6381476 100755
--- a/drugstone/views.py
+++ b/drugstone/views.py
@@ -24,7 +24,7 @@ from drugstone.models import Protein, Task, ProteinDrugInteraction, \
 from drugstone.serializers import ProteinSerializer, TaskSerializer, \
     ProteinDrugInteractionSerializer, DrugSerializer, TaskStatusSerializer, TissueSerializer, NetworkSerializer, \
     ProteinDisorderAssociationSerializer, DisorderSerializer, DrugDisorderIndicationSerializer
-from drugstone.tasks import start_task, refresh_from_redis, task_stats, task_result, task_parameters
+from drugstone.backend_tasks import start_task, refresh_from_redis, task_stats, task_result, task_parameters
 
 
 # we might want to replace this class with some ProteinProteinInteraction view of user input proteins
diff --git a/import-data.sh b/scripts/import-data.sh
similarity index 93%
rename from import-data.sh
rename to scripts/import-data.sh
index 328b3aa..8da89e5 100755
--- a/import-data.sh
+++ b/scripts/import-data.sh
@@ -1,4 +1,6 @@
 #!/bin/bash
+python3 manage.py migrate --run-syncdb
+
 python3 manage.py populate_db --delete_model PPI,PDI,Drug,Protein,Tissue,Disorder,PDiAssociations
 
 python3 manage.py populate_db --data_dir . -p protein-file.txt
diff --git a/scripts/start_celery_beat.sh b/scripts/start_celery_beat.sh
new file mode 100644
index 0000000..2ac9f6d
--- /dev/null
+++ b/scripts/start_celery_beat.sh
@@ -0,0 +1 @@
+celery -A drugstone beat -l INFO
\ No newline at end of file
diff --git a/scripts/start_celery_worker.sh b/scripts/start_celery_worker.sh
new file mode 100644
index 0000000..bd9fa81
--- /dev/null
+++ b/scripts/start_celery_worker.sh
@@ -0,0 +1 @@
+celery -A drugstone worker -l INFO
\ No newline at end of file
diff --git a/supervisord.conf b/supervisord.conf
index b6d10b4..00d6fbf 100755
--- a/supervisord.conf
+++ b/supervisord.conf
@@ -1,8 +1,8 @@
 [supervisord]
 nodaemon=true
-user=root
+# user=root
 
-[program:drugstone_app]
+[program:drugstone_django]
 command=gunicorn --bind 0.0.0.0:8000 --timeout 1200 --workers 8 --log-level debug drugstone:application
 directory=/usr/src/drugstone/
 user=nobody
@@ -13,7 +13,7 @@ stdout_logfile_maxbytes=0
 
 [program:drugstone_worker]
 process_name=drugstone_worker_%(process_num)02d
-command=rq worker --url redis://redis:6379/0 drugsotne_tasks
+command=rq worker --url redis://redis:6379/0 drugstone_tasks
 directory=/usr/src/drugstone/
 numprocs=20
 autostart=true
-- 
GitLab