diff --git a/backend/.coveragerc b/.coveragerc
similarity index 100%
rename from backend/.coveragerc
rename to .coveragerc
diff --git a/.dockerignore b/.dockerignore
index fb00b3c09226ce7db6d952f2933f56ced7c53450..5ea3785654ca768cbd4c7842d4ca01888e9c187f 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -3,13 +3,13 @@
 Dockerfile
 
 # Django
-
 */db.sqlite3
 */__pycache__*
 *.pyc
 *.pyo
 *.pyd
 */env*
+*/.venv*
 pip-log.txt
 pip-delete-this-directory.txt
 .tox
@@ -19,6 +19,8 @@ pip-delete-this-directory.txt
 coverage.xml
 *,cover
 *.log
+static/
+public/
 
 
 # node
diff --git a/backend/.editorconfig b/.editorconfig
similarity index 100%
rename from backend/.editorconfig
rename to .editorconfig
diff --git a/.gitignore b/.gitignore
index 04005159a65f154e523f2f6be8ee5e9e6c839f25..5209d36326a554e319a3ad32903a82e352f67226 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,37 @@
-# operation system
-.DS_Store
+# python specific
+*.DS_Store
+*.egg-info
+*.pot
+*.py[co]
+.tox/
+__pycache__
+MANIFEST
+.coverage
+cache/
+.mypy_cache/
 
-# ide specific
+# Django specific
+dist/
+docs/_build/
+docs/locale/
+tests/coverage_html/
+tests/.coverage
+build/
+tests/report/
+*.sqlite3
+static/
+
+# project specific
+env-grady/
+env/
+.venv/
+scripts/
+coverage_html/
+public/
+*.csv
+.importer*
 *.sublime-*
 .idea/
+
+# node
+node_modules
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 2d62fe511bd1a7a860181ea7531b90a426d44536..eca8a569428ffef5f98df9509758254551feff34 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -22,7 +22,7 @@ build_backend:
         stage: test
         image: $IMAGE_TAG
         before_script:
-                - cd backend/
+                - pip install -r requirements.dev.txt
 
 test_pytest:
         <<: *test_definition_backend
@@ -32,7 +32,7 @@ test_pytest:
                 - DJANGO_SETTINGS_MODULE=grady.settings pytest --cov
         artifacts:
                 paths:
-                        - backend/.coverage
+                        - .coverage
 
 test_prospector:
         <<: *test_definition_backend
@@ -54,16 +54,18 @@ test_frontend:
 
 # =========================== Gitlab pages section =========================== #
 test_coverage:
-        <<: *test_definition_backend
+        image: $IMAGE_TAG
         stage:
                 pages
         script:
-                - coverage html -d ../public
+                - coverage html -d public
         dependencies:
                 - test_pytest
         artifacts:
                 paths:
                         - public
+        only:
+                - master
 
 # ============================== Staging section ============================= #
 .staging_template: &staging_definition
@@ -71,6 +73,8 @@ test_coverage:
         image: docker:latest
         only:
                 - master
+        before_script:
+                - apk add --update py-pip && pip install docker-compose
 
 staging:
         <<: *staging_definition
@@ -79,15 +83,12 @@ staging:
                 url: https://staging.grady.janmax.org
                 on_stop: staging_stop
         script:
-                - apk add --update py-pip && pip install docker-compose
                 - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
-                - docker-compose pull
-                - docker-compose up -d --build
+                - docker-compose up -d --force-recreate
 
 staging_stop:
         <<: *staging_definition
         script:
-                - apk add --update py-pip && pip install docker-compose
                 - docker-compose rm --force --stop
         when: manual
         environment:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b2f4c009357ff738a45f0edd40bdd00f00fb1899..e63378ac57329d9a0728dce6758f4d2e7585e6d0 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,8 +1,21 @@
-repos:
--   repo: local
-    hooks:
-    -   id: prospector
-        name: prospector
-        entry: ./pre-commit-scripts/prospector.sh
-        language: script
-        types: [python]
+- repo: git@github.com:pre-commit/pre-commit-hooks
+  sha: v1.1.1
+  hooks:
+  - id: trailing-whitespace
+  - id: end-of-file-fixer
+  - id: debug-statements
+  - id: flake8
+    args:
+    - --exclude=*/migrations/*,docs/*
+  - id: check-added-large-files
+  - id: requirements-txt-fixer
+    args:
+    - requirements.txt
+    - requirements.dev.txt
+- repo: local
+  hooks:
+  - id: prospector
+    name: prospector
+    entry: ./pre-commit-scripts/prospector.sh
+    language: script
+    types: [python]
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000000000000000000000000000000000000..2bfbcef12a25402143b5aa779e21246a103e01b2
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,20 @@
+[MASTER]
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS, migrations, static, env, docs, manage.py
+
+# Use multiple processes to speed up Pylint.
+jobs=4
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages
+suggestion-mode=yes
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=pylint_django
+
diff --git a/Dockerfile b/Dockerfile
index acec75a13f543e9177e51379f3f534c70ba76d44..bbb5fbb57527031bf4fdb3826b46f8638e41813f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,9 +1,4 @@
-# Build Python files
-FROM python:3.6 as python
-COPY backend/requirements.txt .
-RUN pip install -r requirements.txt
-RUN curl https://gitlab.gwdg.de/snippets/51/raw --output words
-
+# Build Vue files
 FROM node:carbon as node
 
 WORKDIR /app/
@@ -19,17 +14,18 @@ ENV PYTHONUNBUFFERED 1
 
 # This set is need otherwise the postgres driver wont work
 RUN apk update \
-  && apk add --virtual build-deps gcc python3-dev musl-dev \
-  && apk add --no-cache postgresql-dev \
-  && pip install psycopg2 \
-  && apk del build-deps
+  && apk add --virtual build-deps gcc python3-dev musl-dev curl \
+  && apk add --no-cache postgresql-dev
 
 RUN mkdir -p /usr/share/dict
-WORKDIR /code/backend
+RUN curl -s https://gitlab.gwdg.de/snippets/51/raw --output /usr/share/dict/words
+
+WORKDIR /code
 
 COPY . /code
-COPY --from=python /root/.cache /root/.cache
-COPY --from=python /words /usr/share/dict/words
-COPY --from=node /app/dist /code/backend/static
+COPY --from=node /app/dist /code/frontend/dist
+COPY --from=node /app/dist/index.html /code/core/templates/index.html
 
 RUN pip install -r requirements.txt && rm -rf /root/.cache
+RUN python manage.py collectstatic --noinput
+RUN apk del build-deps
diff --git a/Makefile b/Makefile
index 4b670d2d353e708df4dbc2cea23f966761ce9111..b9dc21017bb8b37e07b0a1ee88195f1f6580b0ce 100644
--- a/Makefile
+++ b/Makefile
@@ -1,17 +1,18 @@
 APP_LIST ?= core grady util
 DB_NAME = postgres
+VENV_PATH = .venv
 
-.PHONY: collectstatic run install migrations-check isort isort-check
+.PHONY: run install migrations-check isort isort-check test
 
-collectstatic: # used only in production
-	./backend/manage.py collectstatic --ignore node_modules
-	./manage.py compress --force
+.venv:
+	@python3.6 -m venv $(VENV_PATH) || exit 1
+	@echo To enter: source .venv/bin/activate
 
 run:
-	python backend/manage.py runserver 0.0.0.0:8000
+	python manage.py runserver 0.0.0.0:8000
 
 migrations-check:
-	python backend/manage.py makemigrations --check --dry-run
+	python manage.py makemigrations --check --dry-run
 
 isort:
 	isort -rc $(APP_LIST)
@@ -19,30 +20,19 @@ isort:
 isort-check:
 	isort -c -rc $(APP_LIST)
 
-loaddata:
-	./backend/manage.py loaddata core/fixtures/testdata-groups.json
-
-loadexamples:
-	./backend/manage.py loaddata core/fixtures/testdata-user.json
-	./backend/manage.py loaddata core/fixtures/testdata-core.json
+migrate:
+	python manage.py migrate
 
 install:
-	pip install -r backend/requirements.txt
-	yarn --cwd frontend/ 
-	yarn --cwd backend/
+	pip install -r requirements.txt
+	pip install -r requirements.dev.txt
 
 test:
-	python backend/manage.py run test
+	DJANGO_SETTINGS_MODULE=grady.settings pytest
 
 coverage:
-	coverage run backend/manage.py test
-	coverage report
-
-docker-db-create:
-	docker create --name $(DB_NAME) -p 5432:5432 postgres:9.5
-
-docker-db-start:
-	docker start $(DB_NAME)
+	DJANGO_SETTINGS_MODULE=grady.settings pytest --cov
+	coverage html
 
-docker-db-stop:
-	docker stop $(DB_NAME)
+db:
+	docker run --rm --name $(DB_NAME) -p 5432:5432 postgres:9.5
diff --git a/README.md b/README.md
index dc345b7c440baaccfc7a6fedd7aef604f25a4dde..908435fe2987f9a327936a8158bf13462ee9c42d 100644
--- a/README.md
+++ b/README.md
@@ -41,8 +41,8 @@ To set up a new instance perform the following steps:
 1.  Create a virtual environment with a Python3.6 interpreter and
     activate it. It works like this:
 
-        virtualenv -p python3.6 env
-        source env/bin/activate
+        make .venv
+        source .venv/bin/activate
 
     Just type `deactivate` the get out.
 
@@ -50,33 +50,21 @@ To set up a new instance perform the following steps:
 
         export DJANGO_DEV=True
 
-3.  Install dependencies:
-
-        pip install -r backend/requirements.txt
-        yarn --cwd frontend/ 
-        yarn --cwd backend/
-
-    or alternatively with the make task:
+3.  Install backend dependencies with:
 
         make install
 
-    Some systems (like Ubuntu Xenial) come with a preinstalled "yarn -
-    scenario testing of Unix command line tools". Using this will
-    **not** work. The [yarn package
-    manager](<https://yarnpkg.com/en/docs/install>) is needed.
-
 4.  Set up a Postgres 9.5 database. If you have docker installed the
     easiest way is to just run it in a docker container, like this:
 
         docker run -d --rm --name postgres -p 5432:5432 postgres:9.5
 
     Alternatively, take a look at the Makefile targets that should make your
-    life easier.
+    life easier, e.g. `make db`.
 
-    And apply database migrations:
+    And apply database migrations once the database is up:
 
         python manage.py migrate
-        python manage.py loaddata core/fixtures/testdata-groups.json
 
 5.  Create a superuser if necessary:
 
@@ -85,18 +73,12 @@ To set up a new instance perform the following steps:
     More users can be added in the admin interface. You should be able
     to reach it via <http://localhost:8000/admin>.
 
-6.  To import some test data in order to see how the application might look like
-    run:
-        make loadexamples
-
 7.  Everything is set. You can start the development server with:
 
-        python manage.py runserver
-
-    or just:
-
         make run
 
+8.  Congratulations! Your backend should now be up and running. To set up the frontend
+    see the README in the `frontend` folder.
 
 Testing
 -------
diff --git a/backend/.gitignore b/backend/.gitignore
deleted file mode 100644
index 86ca4a4842208f80247e2f5f30648fa73444dabe..0000000000000000000000000000000000000000
--- a/backend/.gitignore
+++ /dev/null
@@ -1,33 +0,0 @@
-# python specific
-*.egg-info
-*.pot
-*.py[co]
-.tox/
-__pycache__
-MANIFEST
-.coverage
-cache/
-.mypy_cache/
-
-# Django specific
-dist/
-docs/_build/
-docs/locale/
-tests/coverage_html/
-tests/.coverage
-build/
-tests/report/
-*.sqlite3
-static/
-
-# project specific
-env-grady/
-env/
-scripts/
-coverage_html/
-public/
-*.csv
-.importer*
-
-# node
-node_modules
diff --git a/backend/Makefile b/backend/Makefile
deleted file mode 100644
index 64d359b2954d6172665aec7772875bbe9e4e9b98..0000000000000000000000000000000000000000
--- a/backend/Makefile
+++ /dev/null
@@ -1,42 +0,0 @@
-APP_LIST ?= core grady util
-DB_NAME = postgres
-
-.PHONY: collectstatic run install migrations-check isort isort-check
-
-collectstatic: # used only in production
-	python manage.py collectstatic --ignore node_modules
-	python manage.py compress --force
-
-run:
-	python manage.py runserver 0.0.0.0:8000
-
-migrations-check:
-	python manage.py makemigrations --check --dry-run
-
-isort:
-	isort -rc $(APP_LIST)
-
-isort-check:
-	isort -c -rc $(APP_LIST)
-
-loaddata:
-	python manage.py loaddata core/fixtures/testdata-groups.json
-
-loadexamples:
-	python manage.py loaddata core/fixtures/testdata-user.json
-	python manage.py loaddata core/fixtures/testdata-core.json
-
-install:
-	pip install -r requirements.txt
-	yarn
-
-test:
-	python manage.py test
-
-coverage:
-	coverage run manage.py test
-	coverage report
-
-db:
-	docker run --rm --name $(DB_NAME) -p 5432:5432 postgres:9.5
-
diff --git a/backend/scripts/README.rst b/backend/scripts/README.rst
deleted file mode 100644
index 46af791e4de48682ed54ba493832131d7dae2f81..0000000000000000000000000000000000000000
--- a/backend/scripts/README.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-What is this directory about?
-=============================
-
-Well, it just serves as a collection of files that currently live in folders
-not part of the git repository, since they contain volatile or test data. I
-include them here for the sake of completeness, but they will be removed in
-later versions, since their work has to be encapsulated in the overall process.
-When documentations becomes more accurate a detailed explanation on how to use
-them will be added.
-
-.. note:: Please keep in mind: These file are of poor quality and are likely to fail if not used in a correct manner.
diff --git a/backend/scripts/compile.py b/backend/scripts/compile.py
deleted file mode 100755
index 091669a73d967085f2668d1f80a007a0221302df..0000000000000000000000000000000000000000
--- a/backend/scripts/compile.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/local/bin/python3
-"""This script adds compiler output to the json output of the convert script
-
-[description]
-"""
-import subprocess
-import json
-import os
-
-TEMP_DIR        = 'temp_code'
-OUTFILE         = 'submissions_compiled.json'
-INFILE          = 'submissions.json'
-OBJECT_DIR      = 'klausur_tag_01/objects'
-
-ALL_OK          = 0
-COMPILE_ERROR   = 1
-LINKER_ERROR    = 2
-WARNINGS        = 3
-
-
-def get_compiler_output(task_id, text):
-    dst = open(os.path.join(TEMP_DIR, task_id + '.o'), 'w')
-    try:
-        compile_cmd = subprocess.run(
-            [
-                "gcc-6", "-Wall", "-std=c11", "-c", "-xc",
-                "-o", dst.name,
-                f"-I{TEMP_DIR}",
-                "-"
-            ],
-            stderr=subprocess.PIPE,
-            input=text,
-            encoding='utf-8',
-        )
-
-        if compile_cmd.returncode:
-            return compile_cmd, None, COMPILE_ERROR # it fucking failed
-
-        object_code = os.path.join(OBJECT_DIR, f"{task_id}-testing.o")
-        if os.path.exists(object_code): # this is ok
-            link_cmd = subprocess.run(
-                [
-                    "gcc-6",
-                    "-o", "/dev/null",
-                    dst.name,
-                    object_code
-
-                ],
-                stderr=subprocess.PIPE,
-                encoding='utf-8',
-            )
-        else: # this case is weird
-            if task_id == 'a05': # for day 2 task a05 its ok to just compile
-                return compile_cmd, None, compile_cmd.returncode
-            elif task_id == 'a06':
-                link_cmd = subprocess.run(
-                    [
-                        "gcc-6",
-                        "-o", "/dev/null",
-                        dst.name,
-                        os.path.join(TEMP_DIR, "a05.o"),
-                    ],
-                    stderr=subprocess.PIPE,
-                    encoding='utf-8',
-                )
-
-        if link_cmd.returncode:
-            return compile_cmd, link_cmd, LINKER_ERROR
-
-        return compile_cmd, link_cmd, ALL_OK
-    finally:
-        dst.close()
-
-
-def main():
-    with open(INFILE, 'r', encoding='utf-8') as submissions:
-        data = json.JSONDecoder().decode(submissions.read())
-
-    total = len(data)
-    for i, (username, userinfo) in enumerate(data.items()):
-        print(f"\r- {i+1}/{total} done. processing submissions of {username}\t\t\t\t", end='')
-
-        # create new entry
-        co = userinfo['compiler_output'] = {}
-
-        for task_id, submission in userinfo['submissions'].items():
-            if submission:
-                _compile, _link, err = get_compiler_output(task_id, submission)
-                msg = {
-                    COMPILE_ERROR: "\n\n[FAIL] DID NOT COMPILE",
-                    LINKER_ERROR: "\n\n[FAIL] COULD NOT LINK",
-                    ALL_OK: "",
-                }[err]
-                co[task_id] = _compile.stderr + (_link.stderr if _link else "") + msg
-            else:
-                co[task_id] = ""
-
-    print()
-
-    with open(OUTFILE, 'w', encoding='utf-8') as submissions:
-        submissions.write(json.JSONEncoder().encode(data))
-
-if __name__ == '__main__':
-    main()
diff --git a/backend/yarn.lock b/backend/yarn.lock
deleted file mode 100644
index ea3497394831d53bde26b826dee5f20e46a8f682..0000000000000000000000000000000000000000
--- a/backend/yarn.lock
+++ /dev/null
@@ -1,32 +0,0 @@
-# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
-# yarn lockfile v1
-
-
-ace-editor-builds@^1.2.4:
-  version "1.2.4"
-  resolved "https://registry.yarnpkg.com/ace-editor-builds/-/ace-editor-builds-1.2.4.tgz#5213874b1b23f9d79ac62d106e32970e2af3727f"
-
-bootstrap@4.0.0-beta.2:
-  version "4.0.0-beta.2"
-  resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-4.0.0-beta.2.tgz#4d67d2aa2219f062cd90bc1247e6747b9e8fd051"
-
-datatables.net-bs4@^1.10.15:
-  version "1.10.16"
-  resolved "https://registry.yarnpkg.com/datatables.net-bs4/-/datatables.net-bs4-1.10.16.tgz#9eee67cfa8565bd3807a603a188305f7d0e20e32"
-  dependencies:
-    datatables.net "1.10.16"
-    jquery ">=1.7"
-
-datatables.net@1.10.16, datatables.net@^1.10.15:
-  version "1.10.16"
-  resolved "https://registry.yarnpkg.com/datatables.net/-/datatables.net-1.10.16.tgz#4b052d1082824261b68eed9d22741b711d3d2469"
-  dependencies:
-    jquery ">=1.7"
-
-jquery@>=1.7:
-  version "3.2.1"
-  resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.2.1.tgz#5c4d9de652af6cd0a770154a631bba12b015c787"
-
-popper.js@^1.12.3:
-  version "1.12.5"
-  resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.12.5.tgz#229e4dea01629e1f1a1e26991ffade5024220fa6"
diff --git a/backend/core/__init__.py b/core/__init__.py
similarity index 100%
rename from backend/core/__init__.py
rename to core/__init__.py
diff --git a/backend/core/admin.py b/core/admin.py
similarity index 100%
rename from backend/core/admin.py
rename to core/admin.py
diff --git a/backend/core/apps.py b/core/apps.py
similarity index 100%
rename from backend/core/apps.py
rename to core/apps.py
diff --git a/backend/core/grady_speak.py b/core/grady_speak.py
similarity index 100%
rename from backend/core/grady_speak.py
rename to core/grady_speak.py
diff --git a/backend/core/migrations/0001_initial.py b/core/migrations/0001_initial.py
similarity index 100%
rename from backend/core/migrations/0001_initial.py
rename to core/migrations/0001_initial.py
diff --git a/backend/core/migrations/0002_auto_20171110_1612.py b/core/migrations/0002_auto_20171110_1612.py
similarity index 100%
rename from backend/core/migrations/0002_auto_20171110_1612.py
rename to core/migrations/0002_auto_20171110_1612.py
diff --git a/backend/core/migrations/0003_student_matrikel_no.py b/core/migrations/0003_student_matrikel_no.py
similarity index 100%
rename from backend/core/migrations/0003_student_matrikel_no.py
rename to core/migrations/0003_student_matrikel_no.py
diff --git a/backend/core/migrations/__init__.py b/core/migrations/__init__.py
similarity index 100%
rename from backend/core/migrations/__init__.py
rename to core/migrations/__init__.py
diff --git a/backend/core/models.py b/core/models.py
similarity index 100%
rename from backend/core/models.py
rename to core/models.py
diff --git a/backend/core/permissions.py b/core/permissions.py
similarity index 87%
rename from backend/core/permissions.py
rename to core/permissions.py
index e9b879409c9f89bd1a918676f001e243769be3bb..bb478558c4f08d7f219bd683d7ab42eadea1d00b 100644
--- a/backend/core/permissions.py
+++ b/core/permissions.py
@@ -16,9 +16,6 @@ class IsUserGenericPermission(permissions.BasePermission):
     def has_permission(self, request: HttpRequest, view: View) -> bool:
         """ required by BasePermission. Check if user is instance of any
         of the models provided in class' models attribute """
-        log.warn("Checking permission of request %s on view %s for user %s",
-                 request, view, request.user)
-
         assert self.models is not None, (
             "'%s' has to include a `models` attribute"
             % self.__class__.__name__
@@ -29,7 +26,7 @@ class IsUserGenericPermission(permissions.BasePermission):
             user.get_associated_user(), models) for models in self.models)
 
         if not is_authorized:
-            log.warn('User %s has no permission to view %s',
+            log.warn('User "%s" has no permission to view %s',
                      user.username, view.__class__.__name__)
 
         return is_authorized
diff --git a/backend/core/serializers.py b/core/serializers.py
similarity index 100%
rename from backend/core/serializers.py
rename to core/serializers.py
diff --git a/core/templates/index.html b/core/templates/index.html
new file mode 100644
index 0000000000000000000000000000000000000000..5f3e8da1c8220ecabe0055fe0609b9796a11eff5
--- /dev/null
+++ b/core/templates/index.html
@@ -0,0 +1,9 @@
+<!DOCTYPE html>
+<html>
+<head>
+  <title>Grady Frontend placeholder</title>
+</head>
+<body>
+This will be replaced in production.
+</body>
+</html>
diff --git a/backend/core/tests/__init__.py b/core/tests/__init__.py
similarity index 100%
rename from backend/core/tests/__init__.py
rename to core/tests/__init__.py
diff --git a/backend/core/tests/data_factories.py b/core/tests/data_factories.py
similarity index 100%
rename from backend/core/tests/data_factories.py
rename to core/tests/data_factories.py
diff --git a/backend/core/tests/test_access_rights.py b/core/tests/test_access_rights.py
similarity index 100%
rename from backend/core/tests/test_access_rights.py
rename to core/tests/test_access_rights.py
diff --git a/backend/core/tests/test_auth.py b/core/tests/test_auth.py
similarity index 100%
rename from backend/core/tests/test_auth.py
rename to core/tests/test_auth.py
diff --git a/backend/core/tests/test_examlist.py b/core/tests/test_examlist.py
similarity index 100%
rename from backend/core/tests/test_examlist.py
rename to core/tests/test_examlist.py
diff --git a/backend/core/tests/test_factory_and_feedback.py b/core/tests/test_factory_and_feedback.py
similarity index 100%
rename from backend/core/tests/test_factory_and_feedback.py
rename to core/tests/test_factory_and_feedback.py
diff --git a/backend/core/tests/test_student_page.py b/core/tests/test_student_page.py
similarity index 100%
rename from backend/core/tests/test_student_page.py
rename to core/tests/test_student_page.py
diff --git a/backend/core/tests/test_student_reviewer_viewset.py b/core/tests/test_student_reviewer_viewset.py
similarity index 100%
rename from backend/core/tests/test_student_reviewer_viewset.py
rename to core/tests/test_student_reviewer_viewset.py
diff --git a/backend/core/tests/test_submissiontypeview.py b/core/tests/test_submissiontypeview.py
similarity index 100%
rename from backend/core/tests/test_submissiontypeview.py
rename to core/tests/test_submissiontypeview.py
diff --git a/backend/core/tests/test_tutor_api_endpoints.py b/core/tests/test_tutor_api_endpoints.py
similarity index 100%
rename from backend/core/tests/test_tutor_api_endpoints.py
rename to core/tests/test_tutor_api_endpoints.py
diff --git a/backend/core/urls.py b/core/urls.py
similarity index 88%
rename from backend/core/urls.py
rename to core/urls.py
index e64d529790c3f805c984de185817ff3983bb6fb4..845578227821504d6eb67111ecf6ea7c33593140 100644
--- a/backend/core/urls.py
+++ b/core/urls.py
@@ -1,5 +1,6 @@
 from django.conf.urls import include, url
 from django.contrib.staticfiles.urls import staticfiles_urlpatterns
+from django.views.generic.base import TemplateView
 from rest_framework.routers import DefaultRouter
 from rest_framework_jwt.views import obtain_jwt_token, refresh_jwt_token
 
@@ -18,6 +19,7 @@ urlpatterns = [
     url(r'^api/', include(router.urls)),
     url(r'^api-token-auth/', obtain_jwt_token),
     url(r'^api-token-refresh', refresh_jwt_token),
+    url(r'^$', TemplateView.as_view(template_name='index.html')),
 ]
 
 urlpatterns += staticfiles_urlpatterns()
diff --git a/backend/core/views.py b/core/views.py
similarity index 100%
rename from backend/core/views.py
rename to core/views.py
diff --git a/backend/delbert.py b/delbert.py
similarity index 93%
rename from backend/delbert.py
rename to delbert.py
index 4e05dabd289dca8aa829315cb3e21ab923eca288..d3526487ca037b4a1d0fc1c89116c25e768bc6cf 100755
--- a/backend/delbert.py
+++ b/delbert.py
@@ -14,13 +14,12 @@ import util.importer
 from core.models import Student, Submission
 
 
+unused_variable = []
 
 
-unused_variable =  []
-
 def parseme():
-    parser      = argparse.ArgumentParser()
-    subparsers  = parser.add_subparsers(dest="command")
+    parser = argparse.ArgumentParser()
+    subparsers = parser.add_subparsers(dest="command")
 
     parser.add_argument(
         '-o', '--output',
@@ -109,7 +108,7 @@ def handle_enableusers(switch, exclude, include, **kwargs):
         for user in User.objects.filter(username__in=include):
             user.is_active = switch == 'on'
             user.save()
-    else: # this includes nothing set
+    else:  # this includes nothing set
         for user in User.objects.exclude(username__in=exclude):
             user.is_active = switch == 'on'
             user.save()
@@ -126,16 +125,19 @@ def handle_replaceusernames(matno2username_dict, **kwargs):
 
 def handle_extractsubmissions(output, **kwargs):
     for submission in Submission.objects.filter(feedback__isnull=False).order_by('type'):
-        print(submission.feedback.score, repr(submission.text), file=open(str(submission.type).replace(' ', '_'), 'a'))
+        print(submission.feedback.score, repr(submission.text),
+              file=open(str(submission.type).replace(' ', '_'), 'a'))
 
 
 def handle_importer(**kwargs):
     util.importer.start()
 
+
 def main():
     args = parseme()
     if args.command:
         globals()['handle_' + args.command](**vars(args))
 
+
 if __name__ == '__main__':
     main()
diff --git a/docker-compose.yml b/docker-compose.yml
index d85cfb636e4bbdaba7325592190f96790d8520cb..2fcc62daccd8edd17354897348a63f6e376b9963 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,19 +1,30 @@
 version: '3'
 
 services:
+
   postgres:
     image: postgres:9.5
-  web:
-    build: .
+    restart: always
+
+  grady:
+    image: docker.gitlab.gwdg.de/j.michal/grady:master
     command:
       - /bin/sh
       - -c
       - |
+        sleep 5
         python manage.py migrate --noinput
-        gunicorn --bind 0.0.0.0:8000 grady.wsgi:application &
-        cd static/ && python -m http.server 8080
-    ports:
-      - "8000:8000"
-      - "8080:8080"
+        gunicorn \
+          --bind 0.0.0.0:8000 \
+          --workers=2 \
+          --worker-class=gevent \
+          grady.wsgi:application
     depends_on:
       - postgres
+    restart: always
+    networks:
+      - default
+    expose:
+      - "8000"
+    ports:
+      - "8000:8000"
diff --git a/backend/docs/Database Design/Entity Relation Model.erdplus b/docs/Database Design/Entity Relation Model.erdplus
similarity index 100%
rename from backend/docs/Database Design/Entity Relation Model.erdplus
rename to docs/Database Design/Entity Relation Model.erdplus
diff --git a/backend/docs/Database Design/Relational Schema.erdplus b/docs/Database Design/Relational Schema.erdplus
similarity index 100%
rename from backend/docs/Database Design/Relational Schema.erdplus
rename to docs/Database Design/Relational Schema.erdplus
diff --git a/backend/docs/Database Design/er-model.graphml b/docs/Database Design/er-model.graphml
similarity index 100%
rename from backend/docs/Database Design/er-model.graphml
rename to docs/Database Design/er-model.graphml
diff --git a/backend/docs/Database Design/erdplus-diagram-pdf-export.pdf b/docs/Database Design/erdplus-diagram-pdf-export.pdf
similarity index 100%
rename from backend/docs/Database Design/erdplus-diagram-pdf-export.pdf
rename to docs/Database Design/erdplus-diagram-pdf-export.pdf
diff --git a/backend/docs/Database Design/erdplus-er-model-export.pdf b/docs/Database Design/erdplus-er-model-export.pdf
similarity index 100%
rename from backend/docs/Database Design/erdplus-er-model-export.pdf
rename to docs/Database Design/erdplus-er-model-export.pdf
diff --git a/backend/docs/core_uml.png b/docs/core_uml.png
similarity index 100%
rename from backend/docs/core_uml.png
rename to docs/core_uml.png
diff --git a/frontend/README.md b/frontend/README.md
index 450a662ef31342b1705aeffdd2e0f591579850df..216c4d327e5915323421b342c78af5384bfc2883 100644
--- a/frontend/README.md
+++ b/frontend/README.md
@@ -1,4 +1,4 @@
-# frontend
+# Frontend
 
 > Vue.js frontend for Grady
 
@@ -25,3 +25,9 @@ npm test
 ```
 
 For a detailed explanation on how things work, check out the [guide](http://vuejs-templates.github.io/webpack/) and [docs for vue-loader](http://vuejs.github.io/vue-loader).
+
+### A note on yarn package manager
+
+Some systems (like Ubuntu Xenial) come with a preinstalled "yarn -
+scenario testing of Unix command line tools". Using this will
+**not** work. The [yarn package manager](<https://yarnpkg.com/en/docs/install>) is needed.
diff --git a/backend/grady/__init__.py b/grady/__init__.py
similarity index 100%
rename from backend/grady/__init__.py
rename to grady/__init__.py
diff --git a/backend/grady/settings/__init__.py b/grady/settings/__init__.py
similarity index 100%
rename from backend/grady/settings/__init__.py
rename to grady/settings/__init__.py
diff --git a/backend/grady/settings/default.py b/grady/settings/default.py
similarity index 87%
rename from backend/grady/settings/default.py
rename to grady/settings/default.py
index f839d3ef2724d87949af12e66706728ef32f2154..9b1b6e817f8b75c03f88077ccd33b0c6a4a4329e 100644
--- a/backend/grady/settings/default.py
+++ b/grady/settings/default.py
@@ -13,8 +13,6 @@ https://docs.djangoproject.com/en/1.10/ref/settings/
 import datetime
 import os
 
-from django.contrib.messages import constants as messages
-
 # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
 BASE_DIR = os.path.dirname(os.path.dirname(
     os.path.dirname(os.path.abspath(__file__))))
@@ -55,6 +53,7 @@ MIDDLEWARE = [
     'django.contrib.auth.middleware.AuthenticationMiddleware',
     'django.contrib.messages.middleware.MessageMiddleware',
     'django.middleware.clickjacking.XFrameOptionsMiddleware',
+    'whitenoise.middleware.WhiteNoiseMiddleware',
 ]
 
 ROOT_URLCONF = 'grady.urls'
@@ -106,13 +105,16 @@ USE_TZ = True
 
 # Static files (CSS, JavaScript, Images)
 # https://docs.djangoproject.com/en/1.10/howto/static-files/
+STATIC_URL = '/static/'
+STATIC_ROOT = os.path.join(BASE_DIR, 'static')
+STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
+STATICFILES_FINDERS = (
+    'django.contrib.staticfiles.finders.FileSystemFinder',
+    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
+)
 STATICFILES_DIRS = (
-    os.path.join(BASE_DIR, 'node_modules'),
+    'frontend/dist/static',
 )
-STATIC_URL = '/static/'
-STATIC_ROOT = os.path.join(BASE_DIR, 'static/')
-
-FIXTURE_DIRS = ['/core/fixtures/']
 
 GRAPH_MODELS = {
     'all_applications': True,
@@ -122,20 +124,6 @@ GRAPH_MODELS = {
 LOGIN_REDIRECT_URL = '/'
 LOGIN_URL = '/'
 
-
-MESSAGE_TAGS = {
-    messages.DEBUG: 'alert-info',
-    messages.INFO: 'alert-info',
-    messages.SUCCESS: 'alert-success',
-    messages.WARNING: 'alert-warning',
-    messages.ERROR: 'alert-danger',
-}
-
-STATICFILES_FINDERS = (
-    'django.contrib.staticfiles.finders.FileSystemFinder',
-    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
-)
-
 AUTH_USER_MODEL = 'core.UserAccount'
 AUTH_PASSWORD_VALIDATORS = []
 CORS_ORIGIN_WHITELIST = (
@@ -163,13 +151,9 @@ LOGGING = {
     "version": 1,
     "disable_existing_loggers": False,
     "formatters": {
-        'django.server': {
-            'datefmt': '%d/%b/%Y %H:%M:%S',
-            'format': '[%(asctime)s] %(levelname)-10s %(name)-20s %(message)s',
-        },
         'core': {
-            'datefmt': '%d/%b/%Y %H:%M:%S',
-            'format': '[%(asctime)s] %(levelname)-10s %(name)-20s "%(message)s"',
+            'datefmt': '%Y-%m-%d %H:%M:%S',
+            'format': '[%(asctime)s] [%(levelname)s] %(name)-20s %(message)s',
         },
     },
     'filters': {
diff --git a/backend/grady/settings/live.py b/grady/settings/live.py
similarity index 63%
rename from backend/grady/settings/live.py
rename to grady/settings/live.py
index 2f3bfe34cbc7ee4a56a476881f397bf744af5135..50c7887bbc239fc30911c9e2dee491da44f693c9 100644
--- a/backend/grady/settings/live.py
+++ b/grady/settings/live.py
@@ -11,7 +11,9 @@ X_FRAME_OPTIONS = 'DENY'
 DEBUG = False
 
 # adjust this setting to your needs
-ALLOWED_HOSTS = ['localhost', '.grady.janmax.org']
+ALLOWED_HOSTS = [
+    'localhost', '.grady.janmax.org', 'grady.informatik.uni-goettingen.de'
+]
 
 # sample postgres sql database configuration
 DATABASES = {
@@ -29,16 +31,8 @@ DATABASES = {
 # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
 
 AUTH_PASSWORD_VALIDATORS = [
-    {
-        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
-    },
-    {
-        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
-    },
-    {
-        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
-    },
-    {
-        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
-    },
+    {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
+    {'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
+    {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},
+    {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator'}
 ]
diff --git a/backend/grady/urls.py b/grady/urls.py
similarity index 93%
rename from backend/grady/urls.py
rename to grady/urls.py
index 95d879bbd63467ba2d213c45201a3227139e0b78..5a75e52c268589248d4dca2f4b51c4dadcfe2dfd 100644
--- a/backend/grady/urls.py
+++ b/grady/urls.py
@@ -21,5 +21,5 @@ urlpatterns = [
     url(r'^', include('core.urls')),
 
     url(r'^api-auth/', include('rest_framework.urls',
-        namespace='rest_framework')),
+                               namespace='rest_framework')),
 ]
diff --git a/backend/grady/wsgi.py b/grady/wsgi.py
similarity index 100%
rename from backend/grady/wsgi.py
rename to grady/wsgi.py
diff --git a/backend/manage.py b/manage.py
similarity index 100%
rename from backend/manage.py
rename to manage.py
diff --git a/pre-commit-scripts/prospector.sh b/pre-commit-scripts/prospector.sh
index dd41b303bcb966282958a05885d2e9344ba84eb4..f6d218ff3bdf21de8df9dc93d0cbf80bcfd80452 100755
--- a/pre-commit-scripts/prospector.sh
+++ b/pre-commit-scripts/prospector.sh
@@ -1,11 +1,8 @@
 #!/bin/bash
 
-cd backend
-unset GIT_DIR
-
-diff_files="$(git diff  --cached --name-only --relative --diff-filter=AM)" 
+diff_files="$(git diff  --cached --name-only --relative --diff-filter=AM)"
 if [ -n "$diff_files" ]; then
 	prospector --uses django $diff_files
 else
 	exit 0
-fi
\ No newline at end of file
+fi
diff --git a/requirements.dev.txt b/requirements.dev.txt
new file mode 100644
index 0000000000000000000000000000000000000000..63b69b7d1ecfc80af3daa2c7027332dadfc8178a
--- /dev/null
+++ b/requirements.dev.txt
@@ -0,0 +1,4 @@
+pre-commit~=1.4.1
+prospector~=0.12.7
+pytest-cov~=2.5.1
+pytest-django~=3.1.2
diff --git a/backend/requirements.txt b/requirements.txt
similarity index 68%
rename from backend/requirements.txt
rename to requirements.txt
index bd5a9cac3be9f45f6dd82e0c1ebb5032c36423c4..e93aa26a6366fff21e0052ae2b604a3d8046c27f 100644
--- a/backend/requirements.txt
+++ b/requirements.txt
@@ -1,11 +1,10 @@
-Django~=1.11.3
+django-cors-headers~=2.1.0
 django-extensions~=1.7.7
-djangorestframework~=3.6.3
 djangorestframework-jwt~=1.11.0
-django-cors-headers~=2.1.0
+djangorestframework~=3.6.3
+Django~=1.11.8
+gevent~=1.2.2
 gunicorn~=19.7.0
 psycopg2~=2.7.1
+whitenoise~=3.3.1
 xlrd~=1.0.0
-pytest-cov~=2.5.1
-pytest-django~=3.1.2
-prospector~=0.12.7
diff --git a/backend/util/__init__.py b/util/__init__.py
similarity index 100%
rename from backend/util/__init__.py
rename to util/__init__.py
diff --git a/backend/util/convert.py b/util/convert.py
similarity index 89%
rename from backend/util/convert.py
rename to util/convert.py
index e038b7f39257b3661101332d117452cc6b87f392..44995cbb2dd26be2e0d23e20e1778473ae8a887c 100755
--- a/backend/util/convert.py
+++ b/util/convert.py
@@ -48,18 +48,18 @@ parser.add_argument('OUTFILE', help='Where to write the final file')
 parser.add_argument('-u', '--usernames', help='a json dict matno -> email')
 parser.add_argument(
     '-n', '--NUMBER_OF_TASKS',
-    default=0, # don't check
+    default=0,  # don't check
     metavar='NUMBER_OF_TASKS',
     type=int,
     help='Where to write the final file')
 
 
-
 # one user has one submission (code) per task
 # yes, I know it is possible to name match groups via (?P<name>) but
 # I like this solution better since it gets the job done nicely
 user_head = namedtuple('user_head', 'kohorte, name')
-user_head_re = re.compile(r'^Ergebnisse von Testdurchlauf (?P<kohorte>\d+) für (?P<name>[\w\s\.,-]+)$')
+user_head_re = re.compile(
+    r'^Ergebnisse von Testdurchlauf (?P<kohorte>\d+) für (?P<name>[\w\s\.,-]+)$')
 
 # one task has a title and id and hpfly code
 task_head_re = re.compile(r'^Quellcode Frage(?P<title>.*) \d{8}$')
@@ -67,6 +67,7 @@ task_head_re = re.compile(r'^Quellcode Frage(?P<title>.*) \d{8}$')
 # nor parsing the weird mat no
 matno_re = re.compile(r'^(?P<matrikel_no>\d{8})-(\d{3})-(\d{3})$')
 
+
 def converter(infile, usernames=None, number_of_tasks=0,):
 
     # Modify these iterators in order to change extraction behaviour
@@ -77,7 +78,6 @@ def converter(infile, usernames=None, number_of_tasks=0,):
             m = re.search(matno_re, row[1].value)
             yield row[0].value, m.group('matrikel_no') if m else row[1].value
 
-
     def sheet_iter_data(sheet):
         """ yields all rows that are not of empty type as one string """
         for row in (sheet.row(i) for i in range(sheet.nrows)):
@@ -102,7 +102,7 @@ def converter(infile, usernames=None, number_of_tasks=0,):
                 root.append([user_head(*user.groups())])
             elif task:
                 root[-1].append(task.group('title'))
-            else: # should be code
+            else:  # should be code
                 root[-1].append(urllib.parse.unquote(row).strip())
 
     if number_of_tasks:
@@ -119,25 +119,26 @@ def converter(infile, usernames=None, number_of_tasks=0,):
             return mat_to_email[name2mat[user.name]].split('@')[0]
         return ''.join(filter(str.isupper, user.name)) + name2mat[user.name]
 
-    usernames = {user.name : get_username(user) for (user, *_) in root}
+    usernames = {user.name: get_username(user) for (user, *_) in root}
 
     # form list to json_like via comprehension
     # the format {userinitials + matrikel_no : {name:, matrikel_no:, tasklist: {id:, ..., id:}}}
     return {
-        usernames[user.name] : {
-            'name' : user.name,
-            'email' : mat_to_email[name2mat[user.name]],
-            'matrikel_no' : name2mat[user.name],
-            'submissions' : [
+        usernames[user.name]: {
+            'name': user.name,
+            'email': mat_to_email[name2mat[user.name]],
+            'matrikel_no': name2mat[user.name],
+            'submissions': [
                 {
-                    "type" : task,
-                    "code" : code,
-                    "tests" : {},
+                    "type": task,
+                    "code": code,
+                    "tests": {},
                 } for task, code in zip(task_list[::2], task_list[1::2])
             ]
         } for (user, *task_list) in sorted(root, key=lambda u: u[0].name)
     }
 
+
 def write_to_file(json_dict, outfile):
     # just encode python style
     with open(outfile, "w") as out:
@@ -151,6 +152,7 @@ def main():
     json_dict = converter(args.INFILE, args.usernames, args.NUMBER_OF_TASKS)
     write_to_file(json_dict, args.OUTFILE)
 
+
 if __name__ == '__main__':
     SCRIPT = True
     main()
diff --git a/backend/util/factories.py b/util/factories.py
similarity index 96%
rename from backend/util/factories.py
rename to util/factories.py
index 32c2d67fc03200556e3a44015bdf9525f7ff93b0..a4161cfd62634c76c0e3480571b4f0f421455c6a 100644
--- a/backend/util/factories.py
+++ b/util/factories.py
@@ -1,7 +1,8 @@
 import configparser
 import secrets
 
-from core.models import UserAccount as User, Student, Tutor, Reviewer
+from core.models import UserAccount as User
+from core.models import Reviewer, Student, Tutor
 
 STUDENTS = 'students'
 TUTORS = 'tutors'
@@ -22,7 +23,7 @@ def store_password(username, groupname, password):
     storage = configparser.ConfigParser()
     storage.read(PASSWORDS)
 
-    if not groupname in storage:
+    if groupname not in storage:
         storage[groupname] = {}
 
     storage[groupname][username] = password
diff --git a/backend/util/importer.py b/util/importer.py
similarity index 96%
rename from backend/util/importer.py
rename to util/importer.py
index f9fed844f9d36970a105c7191533ad38f9b94998..d6f3080403a80c7b6f08eaa4c8e9240aa38837b9 100644
--- a/backend/util/importer.py
+++ b/util/importer.py
@@ -7,13 +7,12 @@ from typing import Callable
 import util.convert
 import util.processing
 from core.models import UserAccount as User
-from core.models import (ExamType, Feedback, Reviewer, Student, Submission,
-                         SubmissionType, Test, Tutor)
+from core.models import (ExamType, Feedback, Student, Submission,
+                         SubmissionType, Test)
+from util.factories import REVIEWERS, STUDENTS, TUTORS, GradyUserFactory
 from util.messages import info, warn
 from util.processing import EmptyTest
 
-from util.factories import STUDENTS, REVIEWERS, TUTORS, GradyUserFactory
-
 HISTFILE = '.importer_history'
 RECORDS = '.importer'
 PASSWORDS = '.importer_passwords'
@@ -60,7 +59,8 @@ class chdir_context(object):
 
 def i(prompt: str, default: str='', is_path: bool=False, is_file: bool=False):
     if default is YES or default is NO:
-        answer = valid[input(f'[Q] {prompt} ({default}): ').lower() or ('y' if YES == default else 'n')]
+        answer = valid[input(f'[Q] {prompt} ({default}): ').lower() or (
+            'y' if YES == default else 'n')]
     elif default:
         answer = input(f'[Q] {prompt} ({default}): ') or default
     else:
@@ -72,7 +72,6 @@ def i(prompt: str, default: str='', is_path: bool=False, is_file: bool=False):
 
     return answer
 
-# TODO more factories
 
 def add_user(username, group, **kwargs):
     user = GradyUserFactory()._make_base_user(
@@ -277,8 +276,8 @@ def do_preprocess_submissions():
 
     print('''
     Preprocessing might take some time depending on the amount of data
-    and the complexity of the programs and the corresponding unit tests. You can
-    specify what test you want to run.
+    and the complexity of the programs and the corresponding unit tests. You
+    can specify what test you want to run.
 
     Tests do depend on each other. Therefore specifying a test will also
     result in running all its dependencies\n''')
@@ -360,9 +359,9 @@ def start():
 
     print('''Welcome to the Grady importer!
 
-    This script aims at making the setup of the database as easy as possible. It
-    at the same time serves as a documentation on how data is imported in Grady.
-    Let\'s dive right in.\n''')
+    This script aims at making the setup of the database as easy as possible.
+    It at the same time serves as a documentation on how data is imported in
+    Grady. Let\'s dive right in.\n''')
 
     try:
         print('The following importers are available:\n')
diff --git a/backend/util/messages.py b/util/messages.py
similarity index 98%
rename from backend/util/messages.py
rename to util/messages.py
index 8f236313b598099954497a5e3a78e93a5d61ae12..7f1aeac441028341a09f0d688e62ce55d32e892a 100644
--- a/backend/util/messages.py
+++ b/util/messages.py
@@ -4,18 +4,23 @@ import sys
 def warn(*message):
     print('[W]', *message)
 
+
 def debug(*message):
     print('[DEBUG]', *message)
 
+
 def info(*message):
     print('[I]', *message)
 
+
 def error(*message):
     print('[E]', *message)
 
+
 def abort(*message):
     print('[FATAL]', *message)
     sys.exit('exiting...')
 
+
 def exit(message='exiting...'):
     sys.exit(*message)
diff --git a/backend/util/processing.py b/util/processing.py
similarity index 80%
rename from backend/util/processing.py
rename to util/processing.py
index e83a44eb791ff800ab9dcd1dff54ee20bcde682e..26671595d88231c04482b571310aad6055d3a858 100644
--- a/backend/util/processing.py
+++ b/util/processing.py
@@ -12,11 +12,11 @@ try:
 except ModuleNotFoundError:
     from util import testcases
 
-DESCFILE    = '../data/descfile.txt'
-BINARIES    = '../data/klausur_zweittermin/bin'
-OBJECTS     = '../data/klausur_zweittermin/objects'
+DESCFILE = '../data/descfile.txt'
+BINARIES = '../data/klausur_zweittermin/bin'
+OBJECTS = '../data/klausur_zweittermin/objects'
 SUBMISSIONS = '../data/binf1801_pre.json'
-HEADER      = '../data/klausur_zweittermin/code-testing'
+HEADER = '../data/klausur_zweittermin/code-testing'
 
 
 def run_cmd(cmd, stdin=None, check=False, timeout=1):
@@ -46,18 +46,18 @@ class Test(metaclass=abc.ABCMeta):
 
     @classmethod
     def available_tests(cls):
-        return {sub.__name__ : sub for sub in all_subclasses(cls)}
+        return {sub.__name__: sub for sub in all_subclasses(cls)}
 
     def __new__(cls, *args, **kwargs):
-        assert hasattr(cls, 'depends'),         "depends not defined"
-        assert hasattr(cls, 'label_success'),   "label_success not defined"
-        assert hasattr(cls, 'label_failure'),   "label_failure not defined"
+        assert hasattr(cls, 'depends'), "depends not defined"
+        assert hasattr(cls, 'label_success'), "label_success not defined"
+        assert hasattr(cls, 'label_failure'), "label_failure not defined"
         return super().__new__(cls)
 
     def __init__(self, submission_obj, **kwargs):
 
         if not self.dependencies_satisfied(submission_obj):
-            self.result     = False
+            self.result = False
             self.annotation = "TEST DEPENDENCY NOT MET"
             self.serialize(submission_obj)
 
@@ -79,13 +79,13 @@ class Test(metaclass=abc.ABCMeta):
         return all(dep(submission_obj).result for dep in self.depends)
 
     def deserialize(self, test):
-        self.result     = test['label'] == self.label_success
+        self.result = test['label'] == self.label_success
         self.annotation = test['annotation']
 
     def serialize(self, submission_obj):
         as_dict = {
-            'name'       : str(self),
-            'annotation' : self.annotation
+            'name': str(self),
+            'annotation': self.annotation
         }
 
         if self.result:
@@ -103,9 +103,9 @@ class Test(metaclass=abc.ABCMeta):
 class EmptyTest(Test):
     """docstring for EmptyTest"""
 
-    depends         = ()
-    label_success   = 'NOT_EMPTY'
-    label_failure   = 'EMPTY'
+    depends = ()
+    label_success = 'NOT_EMPTY'
+    label_failure = 'EMPTY'
 
     def run_test(self, submission_obj):
         return bool(submission_obj['code'].strip()), ""
@@ -113,9 +113,9 @@ class EmptyTest(Test):
 
 class CompileTest(Test):
 
-    depends         = (EmptyTest, )
-    label_success   = 'COMPILATION_SUCCESSFUL'
-    label_failure   = 'COMPILATION_FAILED'
+    depends = (EmptyTest, )
+    label_success = 'COMPILATION_SUCCESSFUL'
+    label_failure = 'COMPILATION_FAILED'
 
     def run_test(self, submission_obj):
 
@@ -126,9 +126,9 @@ class CompileTest(Test):
 
 class LinkTest(Test):
 
-    depends         = (CompileTest, )
-    label_success   = 'LINKING_SUCCESSFUL'
-    label_failure   = 'LINKING_FAILED'
+    depends = (CompileTest, )
+    label_success = 'LINKING_SUCCESSFUL'
+    label_failure = 'LINKING_FAILED'
 
     def run_test(self, submission_obj):
 
@@ -142,9 +142,9 @@ class LinkTest(Test):
 class UnitTestTest(Test):
     """docstring for UnitTestTest"""
 
-    depends         = (LinkTest, )
-    label_success   = 'UNITTEST_SUCCSESSFUL'
-    label_failure   = 'UNITTEST_FAILED'
+    depends = (LinkTest, )
+    label_success = 'UNITTEST_SUCCSESSFUL'
+    label_failure = 'UNITTEST_FAILED'
 
     @staticmethod
     def testcase(i, args, stdout):
diff --git a/backend/util/testcases.py b/util/testcases.py
similarity index 68%
rename from backend/util/testcases.py
rename to util/testcases.py
index b94fab230a0c11b6cb850b00e1e83d95431ab705..a95e657d6244c298d7da500ac2b6888bd98fb5a2 100644
--- a/backend/util/testcases.py
+++ b/util/testcases.py
@@ -12,8 +12,7 @@ except ModuleNotFoundError:
 types = ('integer', 'unsigned_integer', 'character', 'string')
 list_sep = '...'
 
-re_task = re.compile(
-    r'^-- (?P<title>.*)\n(USAGE: (?P<cmd>[\./\w]+) (?P<syntax>.*)|NO EXECUTABLE)', re.MULTILINE)
+re_task = re.compile(r'^-- (?P<title>.*)\n(USAGE: (?P<cmd>[\./\w]+) (?P<syntax>.*)|NO EXECUTABLE)', re.MULTILINE)
 re_args = re.compile(rf"<({'|'.join(types)}|{'|'.join(t + '_list' for t in types)})>")
 
 
@@ -30,7 +29,7 @@ def unsigned_integer(upper=50):
 
 
 def character():
-    return random.choice(10*ascii_letters + 2*digits + '%*+,-./:?@[]^_{}~')
+    return random.choice(10 * ascii_letters + 2 * digits + '%*+,-./:?@[]^_{}~')
 
 
 def string(lenght=31):
@@ -48,8 +47,7 @@ def rubbish():
 
 
 def argument_generator(syntax):
-    syntax, _ = re.subn(
-        r'<([\w\s]+)> <\1> \.\.\. <\1> <\1>', r'<\1_list>', syntax)
+    syntax, _ = re.subn(r'<([\w\s]+)> <\1> \.\.\. <\1> <\1>', r'<\1_list>', syntax)
     syntax, _ = re.subn(r'<(\w+)\s(\w+)>', r'<\1_\2>', syntax)
 
     return ' '.join(str(call_function(arg)) for arg in re.findall(re_args, syntax))
@@ -64,7 +62,7 @@ def testcases_generator(task, n=10):
     yield ''
     yield '0'
 
-    for i in range(n//2):
+    for i in range(n // 2):
         yield rubbish()
 
     for i in range(n):
@@ -73,26 +71,31 @@ def testcases_generator(task, n=10):
 
 def testcases(description_path):
     for t in types:
-        globals()[t + '_list'] = type_list(t) # I fucking love it
+        globals()[t + '_list'] = type_list(t)  # I fucking love it
 
     with open(description_path) as description_file:
         description = description_file.read()
 
     return {
-        task['title'] : {
-            'cmd' : task['cmd'],
-            'cases' : [t for t in testcases_generator(task)]
+        task['title']: {
+            'cmd': task['cmd'],
+            'cases': [t for t in testcases_generator(task)]
         } for task in re.finditer(re_task, description)
     }
 
+
 def evaluated_testcases(description_path):
     task_testcases = testcases(description_path)
 
     for task in filter(lambda t: t['cmd'], task_testcases.values()):
-        path_to_binary = os.path.join(os.path.join(processing.BINARIES, os.path.basename(task['cmd'])))
-        task['results'] = [processing.run_cmd(f"{path_to_binary} {case}").stdout for case in task['cases']]
+        path_to_binary = os.path.join(os.path.join(
+            processing.BINARIES, os.path.basename(task['cmd'])))
+        task['results'] = [processing.run_cmd(
+            f"{path_to_binary} {case}").stdout for case in task['cases']]
 
     return task_testcases
 
+
 if __name__ == '__main__':
-    print(json.dumps(evaluated_testcases(processing.DESCFILE), sort_keys=True, indent=4))
+    print(json.dumps(evaluated_testcases(
+        processing.DESCFILE), sort_keys=True, indent=4))