diff --git a/.gitignore b/.gitignore
index fee446607cb45af1d19e25d89757bc1311571dd5..76818e987e90fea2666187a33953e9a8e76d94ee 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,11 +21,14 @@ build/
 static/
 tests/report/
 *.sqlite3
 
 # project specific
 env-grady/
 env/
+scripts/
 *.csv
+*.json
+.importer*
 
 # operation system
 .DS_Store
diff --git a/Makefile b/Makefile
index 7eab7e0ecbef9a1465fabdac4cdebe6002b1c0e1..c53e5fae320758b94b14d8a89572b54883ca395e 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,4 @@
-APP_LIST ?= core grady
+APP_LIST ?= core grady util
 
 
 .PHONY: collectstatics run install migrations-check isort isort-check build-webpack
diff --git a/convert.py b/convert.py
deleted file mode 100755
index 94eb5ed5e1f61e149ea184867529bb4c0c7743d3..0000000000000000000000000000000000000000
--- a/convert.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/local/bin/python3
-""" a simple script that converts ilias exam output to readable json
-
-The json output will look like this:
-{
-    "max.mustermann": { <<--- OR all uppercase letter of the name + username/matrikel_no
-        "matrikel_no": "12345678",
-        "name": "Mustermann, Max",
-        "task_list": {
-            "[task_id_1]": "print Hello World!",
-            ....,
-            "[task_id_n]": "#include <stdio.h> etc."
-        }
-    },
-    ... ans so on
-}
-
-usage: convert.py [-h] [-n NUMBER_OF_TASKS] INFILE OUTFILE
-
-positional arguments:
-  INFILE                Ilias exam data
-  OUTFILE               Where to write the final file
-
-optional arguments:
-  -h, --help            show this help message and exit
-  -n NUMBER_OF_TASKS, --NUMBER_OF_TASKS NUMBER_OF_TASKS
-                        Where to write the final file
-
-
-Author: Jan Maximilian Michal
-Date: 30 March 2017
-"""
-
-import json
-import os
-import re
-import argparse
-import urllib.parse
-from collections import namedtuple
-
-from xlrd import open_workbook
-
-parser = argparse.ArgumentParser()
-parser.add_argument('INFILE', help='Ilias exam data')
-parser.add_argument('OUTFILE', help='Where to write the final file')
-parser.add_argument('-u', '--usernames', help='a json dict matno -> email')
-parser.add_argument(
-    '-n', '--NUMBER_OF_TASKS',
-    default=0, # don't check
-    metavar='NUMBER_OF_TASKS',
-    type=int,
-    help='Where to write the final file')
-args = parser.parse_args()
-
-# meta sheet contains ilias evaluation names usernames etc - data contains code
-meta, *data = open_workbook(args.INFILE, open(os.devnull, 'w')).sheets()
-
-# one user has one submission (code) per task
-# yes, I know it is possible to name match groups via (?P<name>) but
-# I like this solution better since it gets the job done nicely
-user_head = namedtuple('user_head', 'kohorte, name')
-user_head_re = re.compile(r'^Ergebnisse von Testdurchlauf (?P<kohorte>\d+) für (?P<name>[\w\s\.,-]+)$')
-
-# one task has a title and id and hpfly code
-task_head = namedtuple('task_head', 'id, title')
-task_head_re = re.compile(r'^Quellcode Frage\[(?P<id>[a-z]\d{2})\] (?P<title>.*) \d{8}$')
-
-# nor parsing the weird mat no
-matno_re = re.compile(r'^(?P<matrikel_no>\d{8})-(\d{3})-(\d{3})$')
-
-# Modify these iterators in order to change extraction behaviour
-
-
-def sheet_iter_meta(sheet):
-    """ yield first and second col entry as tuple of (name, matnr) """
-    for row in (sheet.row(i) for i in range(1, sheet.nrows)):
-        m = re.search(matno_re, row[1].value)
-        yield row[0].value, m.group('matrikel_no') if m else row[1].value
-
-
-def sheet_iter_data(sheet):
-    """ yields all rows that are not of empty type as one string """
-    for row in (sheet.row(i) for i in range(sheet.nrows)):
-        if any(map(lambda c: c.ctype, row)):
-            yield ''.join(c.value for c in row)
-
-# nice!
-name2mat = dict(sheet_iter_meta(meta))
-
-# from xls to lists and namedtuples
-# [ [user0, task0_h, code0, ..., taskn, coden ], ..., [...] ]
-root = []
-for sheet in data:
-    for row in sheet_iter_data(sheet):
-        user = re.search(user_head_re, row)
-        task = re.search(task_head_re, row)
-        if user:
-            root.append([user_head(*user.groups())])
-        elif task:
-            root[-1].append(task_head(*task.groups()))
-        else: # should be code
-            root[-1].append(urllib.parse.unquote(row).strip())
-
-if args.NUMBER_OF_TASKS:
-    for (user, *task_list) in sorted(root, key=lambda u: u[0].name):
-        assert len(task_list) == args.NUMBER_OF_TASKS * 2
-
-if args.usernames:
-    with open(args.usernames) as data:
-        mat_to_email = json.JSONDecoder().decode(data.read())
-    usernames = {user.name : mat_to_email[name2mat[user.name]].split('@')[0] if name2mat[user.name] in mat_to_email else ''.join(filter(str.isupper, user.name)) + name2mat[user.name] for (user, *_) in root}
-
-else: # legacy support / fallback
-    usernames = {user.name : ''.join(filter(str.isupper, user.name)) + name2mat[user.name] for (user, *_) in root}
-
-# form list to json_like via comprehension
-# the format {userinitials + matrikel_no : {name:, matrikel_no:, tasklist: {id:, ..., id:}}}
-json_dict = {
-    usernames[user.name] : {
-        'name' : user.name,
-        'matrikel_no' : name2mat[user.name],
-        'submissions' : {
-            f"{task.id}" : code
-            for task, code in zip(task_list[::2], task_list[1::2])
-        }
-    } for (user, *task_list) in sorted(root, key=lambda u: u[0].name)
-}
-
-# just encode python style
-with open(args.OUTFILE, "w") as out:
-    out.write(json.JSONEncoder().encode(json_dict))
-
-print(f"Wrote data to {args.OUTFILE}. Done.")
diff --git a/core/admin.py b/core/admin.py
index eec44cbba967b3dac3c8da81fb4e54684897f6fd..300130b8b737f58e64454245a483d9f13d00d77f 100644
--- a/core/admin.py
+++ b/core/admin.py
@@ -1,10 +1,11 @@
 from django.contrib import admin
 
-from .models import Feedback, Student, Submission, SubmissionType
+from .models import Feedback, Student, Submission, SubmissionType, Test
 
 # Register your models here.
 
 admin.site.register(SubmissionType)
 admin.site.register(Feedback)
 admin.site.register(Student)
+admin.site.register(Test)
 admin.site.register(Submission)
diff --git a/core/fixtures/testdata-core.json b/core/fixtures/testdata-core.json
index ff4385a6ca4c846b14debc333abfeef9452b277d..fa913e9d82a430eb60e4042eb63a026da7ed8115 100644
--- a/core/fixtures/testdata-core.json
+++ b/core/fixtures/testdata-core.json
@@ -1 +1,92 @@
-[{"model": "core.submissiontype", "pk": 1, "fields": {"name": "Aufgabe 01", "slug": "brezmaphgocfuikw", "full_score": 10, "task_description": "description", "possible_solution": "solution", "correction_guideline": "guideline"}}, {"model": "core.submissiontype", "pk": 2, "fields": {"name": "Aufgabe 02", "slug": "zbjfwldsuhqgxvmn", "full_score": 20, "task_description": "description", "possible_solution": "solution", "correction_guideline": "guideline"}}, {"model": "core.student", "pk": 1, "fields": {"matrikel_no": "12345678", "has_logged_in": false, "name": "Student 01 Vorname und Nachname", "user": 4}}, {"model": "core.student", "pk": 2, "fields": {"matrikel_no": "87654321", "has_logged_in": false, "name": "Student 02 Vorname und Nachname", "user": 5}}, {"model": "core.submission", "pk": 1, "fields": {"slug": "qgleatcwzfxsdnjr", "seen": false, "type": 1, "text": "function generate(timeout){\r\n\r\n\t$('#menu_button_img').attr('src', 'style/menu_blink.gif'); \r\n\r\n\tif(timeout == 0)\t\t\t\t\t\t\t\t\r\n\t\t$('#config_form').attr('action', $('#config_form').attr('action') + '#title'); \t\t\t\t// show directly the question\r\n\telse\r\n\t\ttimeout = 0;\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// disable timeout\r\n\t\r\n\tsetTimeout(function(){ $('#config_form').submit(); }, timeout);\r\n\r\n}", "pre_corrections": "COMPILER", "student": 1}}, {"model": "core.submission", "pk": 2, "fields": {"slug": "mrthqgsloaydjfnc", "seen": false, "type": 2, "text": "function showTextEditor(){\r\n\r\n\t$('.ilc_question_Standard').hide('slow');\r\n\t$('.ilc_question_ml_Standard').hide('slow');\r\n\t$('.text_editor').show('slow');\r\n\t\r\n}\r\n\r\nfunction showConfig(){\r\n\r\n\t$('#config_wrapper').animate(\r\n\t\t{\r\n\t\t\tright: ($('#config_wrapper').css('right') == '0px' ? 
'-322px' : '0px')\r\n\t\t}, \r\n\t500);\r\n\r\n}", "pre_corrections": "LINKER ERROR", "student": 1}}, {"model": "core.submission", "pk": 3, "fields": {"slug": "hunkgevtcfdobyxw", "seen": false, "type": 2, "text": "$(document).keydown(function(evt){\r\n\r\n\tif(evt.which == 9){\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// #9 = TAB\r\n\t\tgenerate(0);\r\n\t\tevt.preventDefault();\r\n\t}\r\n\t\r\n});", "pre_corrections": "ALL GOOD", "student": 2}}, {"model": "core.submission", "pk": 4, "fields": {"slug": "gurvbyzxjfmhdiep", "seen": false, "type": 1, "text": "function showTextEditor(){\r\n\r\n\t$('.ilc_question_Standard').hide('slow');\r\n\t$('.ilc_question_ml_Standard').hide('slow');\r\n\t$('.text_editor').show('slow');\r\n\t\r\n}\r\n\r\nfunction showConfig(){\r\n\r\n\t$('#config_wrapper').animate(\r\n\t\t{\r\n\t\t\tright: ($('#config_wrapper').css('right') == '0px' ? '-322px' : '0px')\r\n\t\t}, \r\n\t500);\r\n\r\n}", "pre_corrections": "QUACK", "student": 2}}]
\ No newline at end of file
+[
+    {
+        "fields": {
+            "full_score": 10,
+            "name": "Aufgabe 01",
+            "possible_solution": "solution",
+            "slug": "brezmaphgocfuikw",
+            "task_description": "description"
+        },
+        "model": "core.submissiontype",
+        "pk": 1
+    },
+    {
+        "fields": {
+            "full_score": 20,
+            "name": "Aufgabe 02",
+            "possible_solution": "solution",
+            "slug": "zbjfwldsuhqgxvmn",
+            "task_description": "description"
+        },
+        "model": "core.submissiontype",
+        "pk": 2
+    },
+    {
+        "fields": {
+            "has_logged_in": false,
+            "matrikel_no": "12345678",
+            "name": "Student 01 Vorname und Nachname",
+            "user": 4
+        },
+        "model": "core.student",
+        "pk": 1
+    },
+    {
+        "fields": {
+            "has_logged_in": false,
+            "matrikel_no": "87654321",
+            "name": "Student 02 Vorname und Nachname",
+            "user": 5
+        },
+        "model": "core.student",
+        "pk": 2
+    },
+    {
+        "fields": {
+            "pre_corrections": "COMPILER",
+            "seen_by_student": false,
+            "slug": "qgleatcwzfxsdnjr",
+            "student": 1,
+            "text": "function generate(timeout){\r\n\r\n\t$('#menu_button_img').attr('src', 'style/menu_blink.gif'); \r\n\r\n\tif(timeout == 0)\t\t\t\t\t\t\t\t\r\n\t\t$('#config_form').attr('action', $('#config_form').attr('action') + '#title'); \t\t\t\t// show directly the question\r\n\telse\r\n\t\ttimeout = 0;\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// disable timeout\r\n\t\r\n\tsetTimeout(function(){ $('#config_form').submit(); }, timeout);\r\n\r\n}",
+            "type": 1
+        },
+        "model": "core.submission",
+        "pk": 1
+    },
+    {
+        "fields": {
+            "pre_corrections": "LINKER ERROR",
+            "seen_by_student": false,
+            "slug": "mrthqgsloaydjfnc",
+            "student": 1,
+            "text": "function showTextEditor(){\r\n\r\n\t$('.ilc_question_Standard').hide('slow');\r\n\t$('.ilc_question_ml_Standard').hide('slow');\r\n\t$('.text_editor').show('slow');\r\n\t\r\n}\r\n\r\nfunction showConfig(){\r\n\r\n\t$('#config_wrapper').animate(\r\n\t\t{\r\n\t\t\tright: ($('#config_wrapper').css('right') == '0px' ? '-322px' : '0px')\r\n\t\t}, \r\n\t500);\r\n\r\n}",
+            "type": 2
+        },
+        "model": "core.submission",
+        "pk": 2
+    },
+    {
+        "fields": {
+            "pre_corrections": "ALL GOOD",
+            "seen_by_student": false,
+            "slug": "hunkgevtcfdobyxw",
+            "student": 2,
+            "text": "$(document).keydown(function(evt){\r\n\r\n\tif(evt.which == 9){\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// #9 = TAB\r\n\t\tgenerate(0);\r\n\t\tevt.preventDefault();\r\n\t}\r\n\t\r\n});",
+            "type": 2
+        },
+        "model": "core.submission",
+        "pk": 3
+    },
+    {
+        "fields": {
+            "pre_corrections": "QUACK",
+            "seen_by_student": false,
+            "slug": "gurvbyzxjfmhdiep",
+            "student": 2,
+            "text": "function showTextEditor(){\r\n\r\n\t$('.ilc_question_Standard').hide('slow');\r\n\t$('.ilc_question_ml_Standard').hide('slow');\r\n\t$('.text_editor').show('slow');\r\n\t\r\n}\r\n\r\nfunction showConfig(){\r\n\r\n\t$('#config_wrapper').animate(\r\n\t\t{\r\n\t\t\tright: ($('#config_wrapper').css('right') == '0px' ? '-322px' : '0px')\r\n\t\t}, \r\n\t500);\r\n\r\n}",
+            "type": 1
+        },
+        "model": "core.submission",
+        "pk": 4
+    }
+]
diff --git a/core/migrations/0001_initial.py b/core/migrations/0001_initial.py
index 0a1832a515a9b716324bfae5134e2c92024de6d1..41d82568a409984d883a048591eb3171deb98e6a 100644
--- a/core/migrations/0001_initial.py
+++ b/core/migrations/0001_initial.py
@@ -2,10 +2,11 @@
 # Generated by Django 1.10.6 on 2017-04-05 20:11
 from __future__ import unicode_literals
 
-import core.models
+import django.db.models.deletion
 from django.conf import settings
 from django.db import migrations, models
-import django.db.models.deletion
+
+import core.models
 
 
 class Migration(migrations.Migration):
diff --git a/core/migrations/0002_auto_20170412_1447.py b/core/migrations/0002_auto_20170412_1447.py
index 592dc9d2199eb7dc7250b274b44dd3bbba027ae6..f92b185386910b6e770fa8403e32b8b2502ba24d 100644
--- a/core/migrations/0002_auto_20170412_1447.py
+++ b/core/migrations/0002_auto_20170412_1447.py
@@ -2,8 +2,8 @@
 # Generated by Django 1.10.7 on 2017-04-12 14:47
 from __future__ import unicode_literals
 
-from django.db import migrations, models
 import django.utils.timezone
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
diff --git a/core/migrations/0005_auto_20170413_0124.py b/core/migrations/0005_auto_20170413_0124.py
index 571477e152adb23bfe722f5ceeb6c98c2dd9c7ab..b25f249e46b66aab720450b013058e13bc31beda 100644
--- a/core/migrations/0005_auto_20170413_0124.py
+++ b/core/migrations/0005_auto_20170413_0124.py
@@ -2,8 +2,8 @@
 # Generated by Django 1.10.7 on 2017-04-13 01:24
 from __future__ import unicode_literals
 
-from django.db import migrations, models
 import django.db.models.deletion
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
diff --git a/core/migrations/0007_auto_20170522_1827.py b/core/migrations/0007_auto_20170522_1827.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb6c0578ab3202c63ea9376750c9fff4fc7b54af
--- /dev/null
+++ b/core/migrations/0007_auto_20170522_1827.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-05-22 18:27
+from __future__ import unicode_literals
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0006_auto_20170413_1102'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='submission',
+            name='student',
+            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='submissions', to='core.Student'),
+        ),
+        migrations.AlterUniqueTogether(
+            name='submission',
+            unique_together=set([('type', 'student')]),
+        ),
+    ]
diff --git a/core/migrations/0008_auto_20170522_1834.py b/core/migrations/0008_auto_20170522_1834.py
new file mode 100644
index 0000000000000000000000000000000000000000..7eb42fad8d01012fdfeba12b7f068dde59af52fe
--- /dev/null
+++ b/core/migrations/0008_auto_20170522_1834.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-05-22 18:34
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0007_auto_20170522_1827'),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name='submission',
+            old_name='seen',
+            new_name='seen_by_student',
+        ),
+        migrations.RemoveField(
+            model_name='submissiontype',
+            name='correction_guideline',
+        ),
+    ]
diff --git a/core/migrations/0009_auto_20170710_1308.py b/core/migrations/0009_auto_20170710_1308.py
new file mode 100644
index 0000000000000000000000000000000000000000..0a06d82f61967fa295b5b9783e80a2f5a2cfbf62
--- /dev/null
+++ b/core/migrations/0009_auto_20170710_1308.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-07-10 13:08
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0008_auto_20170522_1834'),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name='submissiontype',
+            old_name='task_description',
+            new_name='description',
+        ),
+        migrations.RenameField(
+            model_name='submissiontype',
+            old_name='possible_solution',
+            new_name='solution',
+        ),
+    ]
diff --git a/core/migrations/0010_auto_20170710_1604.py b/core/migrations/0010_auto_20170710_1604.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab702ede9c84a59e0762885ed0aaef9d40d5ae2d
--- /dev/null
+++ b/core/migrations/0010_auto_20170710_1604.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-07-10 16:04
+from __future__ import unicode_literals
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0009_auto_20170710_1308'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Test',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('name', models.CharField(max_length=30, unique=True)),
+                ('label', models.CharField(max_length=50, unique=True)),
+                ('annotation', models.TextField()),
+                ('submission', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tests', to='core.Submission')),
+            ],
+            options={
+                'verbose_name': 'Test',
+                'verbose_name_plural': 'Tests',
+            },
+        ),
+        migrations.AlterUniqueTogether(
+            name='test',
+            unique_together=set([('submission', 'name')]),
+        ),
+    ]
diff --git a/core/migrations/0011_auto_20170710_1610.py b/core/migrations/0011_auto_20170710_1610.py
new file mode 100644
index 0000000000000000000000000000000000000000..7bf4689a3ab7df1d1112e9930660aedbc85da745
--- /dev/null
+++ b/core/migrations/0011_auto_20170710_1610.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-07-10 16:10
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0010_auto_20170710_1604'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='test',
+            name='label',
+            field=models.CharField(max_length=50),
+        ),
+        migrations.AlterField(
+            model_name='test',
+            name='name',
+            field=models.CharField(max_length=30),
+        ),
+    ]
diff --git a/core/migrations/0012_auto_20170711_1104.py b/core/migrations/0012_auto_20170711_1104.py
new file mode 100644
index 0000000000000000000000000000000000000000..9cc1976430b11665b40073f68b976860fd163598
--- /dev/null
+++ b/core/migrations/0012_auto_20170711_1104.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-07-11 11:04
+from __future__ import unicode_literals
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0011_auto_20170710_1610'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='ExamType',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('module_reference', models.CharField(max_length=50)),
+                ('total_score', models.PositiveIntegerField()),
+                ('pass_score', models.PositiveIntegerField()),
+                ('pass_only', models.BooleanField(default=False)),
+            ],
+            options={
+                'verbose_name': 'ExamType',
+                'verbose_name_plural': 'ExamTypes',
+            },
+        ),
+        migrations.AddField(
+            model_name='student',
+            name='exam',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='students', to='core.ExamType'),
+        ),
+    ]
diff --git a/core/migrations/0013_auto_20170712_1643.py b/core/migrations/0013_auto_20170712_1643.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd526207077997edd6dd8b0d2277dd1537d96f66
--- /dev/null
+++ b/core/migrations/0013_auto_20170712_1643.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-07-12 16:43
+from __future__ import unicode_literals
+
+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0012_auto_20170711_1104'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='examtype',
+            name='module_reference',
+            field=models.CharField(max_length=50, unique=True),
+        ),
+        migrations.AlterField(
+            model_name='student',
+            name='user',
+            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='student', to=settings.AUTH_USER_MODEL),
+        ),
+    ]
diff --git a/core/migrations/0014_auto_20170712_1704.py b/core/migrations/0014_auto_20170712_1704.py
new file mode 100644
index 0000000000000000000000000000000000000000..65a9c943d2aa857d13e44a062bacf5b24629bd5b
--- /dev/null
+++ b/core/migrations/0014_auto_20170712_1704.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.7 on 2017-07-12 17:04
+from __future__ import unicode_literals
+
+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0013_auto_20170712_1643'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='feedback',
+            name='of_tutor',
+            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='feedback_list', to=settings.AUTH_USER_MODEL),
+        ),
+    ]
diff --git a/core/models.py b/core/models.py
index 8be60cc193a0d78425ab9bab416f18e00e3dc9a7..31880da0ddbec12922118cfc5246d57e17989253 100644
--- a/core/models.py
+++ b/core/models.py
@@ -41,15 +41,20 @@
 ##########################################################################
 
 
-from random import sample, randrange
+from collections import OrderedDict
+from random import randrange, sample
 from string import ascii_lowercase
 
 from django.contrib.auth.models import User
 from django.db import models
-from django.db.models import Q
+from django.db.models import Value as V
+from django.db.models import (BooleanField, Case, Count, F, IntegerField, Q,
+                              Sum, When)
+from django.db.models.functions import Coalesce
 
 SLUG_LENGTH = 16
 
+
 def random_slug():
     return ''.join(sample(ascii_lowercase, SLUG_LENGTH))
 
@@ -58,64 +63,168 @@ def random_matrikel_no():
     return str(2e7 + randrange(1e8))
 
 
+def get_annotated_tutor_list():
+    return User.objects\
+        .annotate(Count('feedback_list'))\
+        .filter(groups__name='Tutors')\
+        .order_by('-feedback_list__count')
+
+
+class ExamType(models.Model):
+
+    class Meta:
+        verbose_name = "ExamType"
+        verbose_name_plural = "ExamTypes"
+
+    def __str__(self):
+        return self.module_reference
+
+    module_reference = models.CharField(max_length=50, unique=True)
+    total_score      = models.PositiveIntegerField()
+    pass_score       = models.PositiveIntegerField()
+    pass_only        = models.BooleanField(default=False)
+
+
 class SubmissionType(models.Model):
     # Fields
-    name = models.CharField(max_length=50, unique=True)
-    slug = models.SlugField(editable=False, unique=True, default=random_slug)
-    full_score = models.PositiveIntegerField(default=0)
-    task_description = models.TextField()
-    possible_solution = models.TextField()
-    correction_guideline = models.TextField()
+    name        = models.CharField(max_length=50, unique=True)
+    full_score  = models.PositiveIntegerField(default=0)
+    description = models.TextField()
+    solution    = models.TextField()
+    slug        = models.SlugField(
+        editable=False, unique=True, default=random_slug)
+
+    def __str__(self):
+        return self.name
 
     class Meta:
-        verbose_name = "SubmissionType"
+        verbose_name        = "SubmissionType"
         verbose_name_plural = "SubmissionType Set"
 
-    def __str__(self):
-        return self.name
+    @classmethod
+    def get_annotated_feedback_count(cls):
+        """ Annotates submission lists with counts
+
+        count both
+            * number of submission per submission type
+            * count of received feedback per submission type
+            *
+        Alternative with case
+            Count(Case(
+                When(submissions__feedback_list__origin=Feedback.MANUAL,
+                    then=Value(1)), output_field=IntegerField())
+            )
+
+        Returns:
+            annotated queryset
+        """
+        return cls.objects\
+            .annotate(  # count only accepted feedback
+                feedback_count=Count(
+                    Case(
+                        When(
+                                Q(submissions__feedback__isnull=False) &
+                                Q(submissions__feedback__status=Feedback.ACCEPTED),
+                            then=V(1)), output_field=IntegerField(),
+                    )
+                )
+            ).annotate(
+                submission_count=Count('submissions')
+            ).annotate(
+                percentage=(F('feedback_count') * 100 / F('submission_count'))
+            ).all().order_by('name')
 
 
 class Student(models.Model):
     # Fields
-    matrikel_no = models.CharField(
+    has_logged_in   = models.BooleanField(default=False)
+    exam            = models.ForeignKey('ExamType', related_name='students', null=True)
+    name            = models.CharField(max_length=50, default="__no_name__")
+    matrikel_no     = models.CharField(
         unique=True, max_length=8, default=random_matrikel_no)
-    has_logged_in = models.BooleanField(default=False)
-    name = models.CharField(max_length=50, default="__no_name__")
     user = models.OneToOneField(
         User, on_delete=models.CASCADE,
+        related_name='student',
         limit_choices_to={'groups__name': 'Students'},
     )
 
+    def score_per_submission(self):
+        if self.submissions.all():
+            return OrderedDict({
+                s.type : s.feedback.score if hasattr(s, 'feedback') else 0
+                for s in self.submissions.all()
+            })
+        else:
+            return OrderedDict({
+                t.name : 0 for t in SubmissionType.objects.all()
+            })
+
+    @classmethod
+    def get_overall_score_annotated_submission_list(cls):
+        return cls.objects.annotate(
+            overall_score=Coalesce(Sum('submissions__feedback__score'), V(0)),
+        ).annotate(
+            done=Case(
+                When(exam__pass_score__lt=F('overall_score'), then=V(1)),
+                default=V(0),
+                output_field=BooleanField()
+            )
+        )
+
     def disable(self):
         self.has_logged_in = True
         self.save()
 
+    def __str__(self):
+        return self.user.username
+
     class Meta:
-        verbose_name = "Student"
+        verbose_name        = "Student"
         verbose_name_plural = "Student Set"
 
+
+class Test(models.Model):
+
+    name       = models.CharField(max_length=30)
+    label      = models.CharField(max_length=50)
+    annotation = models.TextField()
+    submission = models.ForeignKey(
+        'submission',
+        related_name='tests',
+        on_delete=models.CASCADE,
+    )
+
+    class Meta:
+        verbose_name        = "Test"
+        verbose_name_plural = "Tests"
+        unique_together     = (('submission', 'name'),)
+
     def __str__(self):
-        return self.user.username
+        return f'{self.name} {self.label}'
 
 
 class Submission(models.Model):
 
     # Fields
-    slug = models.SlugField(editable=False, unique=True, default=random_slug)
-
-    # This indicates that the student has seen his feedback
-    seen = models.BooleanField(default=False)
-    type = models.ForeignKey(
-        SubmissionType,
-        related_name='submissions'
-    )
-    text = models.TextField(blank=True)
+    seen_by_student = models.BooleanField(default=False)
+    text            = models.TextField(blank=True)
     pre_corrections = models.TextField(blank=True)
-    student = models.ForeignKey(Student, on_delete=models.CASCADE)
+    slug            = models.SlugField(
+        editable=False,
+        unique=True,
+        default=random_slug)
+    type            = models.ForeignKey(
+        SubmissionType,
+        related_name='submissions')
+    student         = models.ForeignKey(
+        Student,
+        on_delete=models.CASCADE,
+        related_name='submissions')
 
     class Meta:
-        verbose_name = "Submission"
+        verbose_name        = "Submission"
         verbose_name_plural = "Submission Set"
+        unique_together     = (('type', 'student'),)
 
     def __str__(self):
         return "Submission of type '{}' from Student '{}'".format(
@@ -145,9 +254,10 @@ class Submission(models.Model):
 
         candidates = cls.objects.filter(
             (
-                  Q(feedback__isnull=True)
+                Q(feedback__isnull=True)
                 | Q(feedback__origin=Feedback.DID_NOT_COMPILE)
                 | Q(feedback__origin=Feedback.COULD_NOT_LINK)
+                | Q(feedback__origin=Feedback.FAILED_UNIT_TESTS)
             )
             & ~Q(feedback__of_tutor=tutor)
         )
@@ -161,7 +271,8 @@ class Submission(models.Model):
             return False
 
         submission = candidates[0]
-        feedback = submission.feedback if hasattr(submission, 'feedback') else Feedback()
+        feedback = submission.feedback if hasattr(
+            submission, 'feedback') else Feedback()
         feedback.origin = Feedback.MANUAL
         feedback.status = Feedback.EDITABLE
         feedback.of_tutor = tutor
@@ -180,21 +291,18 @@ class Feedback(models.Model):
     slug = models.SlugField(
         editable=False,
         unique=True,
-        default=random_slug
-    )
+        default=random_slug)
     of_submission = models.OneToOneField(
         Submission,
         related_name='feedback',
-        blank=False, null=False
-    )
+        unique=True,
+        blank=False, null=False)
     of_tutor = models.ForeignKey(
-        User, related_name='corrected_submissions',
-    )
+        User, related_name='feedback_list',)
     of_reviewer = models.ForeignKey(
         User,
         related_name='reviewed_submissions',
-        blank=True, null=True
-    )
+        blank=True, null=True)
 
     # what is the current status of our feedback
     (
@@ -217,14 +325,14 @@ class Feedback(models.Model):
     # how was this feedback created
     (
         WAS_EMPTY,
-        PASSED_UNIT_TESTS,
+        FAILED_UNIT_TESTS,
         DID_NOT_COMPILE,
         COULD_NOT_LINK,
         MANUAL,
     ) = range(5)
     ORIGIN = (
         (WAS_EMPTY,         'was empty'),
-        (PASSED_UNIT_TESTS, 'passed unittests'),
+        (FAILED_UNIT_TESTS, 'failed unittests'),
         (DID_NOT_COMPILE,   'did not compile'),
         (COULD_NOT_LINK,    'could not link'),
         (MANUAL,            'created by a human. yak!'),
@@ -235,7 +343,7 @@ class Feedback(models.Model):
     )
 
     class Meta:
-        verbose_name = "Feedback"
+        verbose_name        = "Feedback"
         verbose_name_plural = "Feedback Set"
 
     def __str__(self):
@@ -247,6 +355,14 @@ class Feedback(models.Model):
     def get_full_score(self):
         return self.of_submission.type.full_score
 
+    @classmethod
+    def get_open_feedback(cls, user):
+        return cls.objects.filter(
+            Q(status=Feedback.OPEN) &
+            ~Q(of_tutor=user)  # you shall not request your own feedback
+        )
+
+
     @classmethod
     def tutor_unfinished_feedback(cls, user):
         """Gets only the feedback that is assigned and not accepted. A tutor
diff --git a/core/serializers.py b/core/serializers.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ff9df315a2fe0893112916ef40eb5126d4d6c17
--- /dev/null
+++ b/core/serializers.py
@@ -0,0 +1,91 @@
+
+import hashlib
+
+from django.contrib.auth.models import User
+from rest_framework import serializers
+
+from core.models import Feedback, Student, Submission, SubmissionType
+
+
+class SubmissionTypeSerializer(serializers.ModelSerializer):
+
+    def create(self, validated_data):
+        return SubmissionType.objects.create(**validated_data)
+
+    class Meta:
+        model = SubmissionType
+        exclude = ('slug',)
+
+
+class CreateStudentSerializer(serializers.ModelSerializer):
+
+    username = serializers.CharField(source='user.username')
+    email    = serializers.CharField(source='user.email')
+    password = serializers.CharField(source='user.password')
+
+    class Meta:
+        model = Student
+        fields = ('username', 'name', 'email', 'matrikel_no', 'password')
+        extra_kwargs = {'password': {'write_only': True}}
+
+    def to_representation(self, obj):
+        return {
+            'username'      : obj.user.username,
+            'name'          : obj.name,
+            'matrikel_no'   : obj.matrikel_no,
+        }
+
+    def create(self, validated_data):
+        user_data = validated_data['user']  # dotted `source` fields nest here
+        user = User(
+            email=user_data['email'],
+            username=user_data['username'])
+        user.set_password(user_data['password'])
+        user.save()  # must persist before FK assignment below
+        return Student.objects.create(
+            name=validated_data['name'],
+            matrikel_no=validated_data['matrikel_no'],
+            user=user,
+        )
+
+
+class CreateSubmissionSerializer(serializers.ModelSerializer):
+
+    type = serializers.SlugRelatedField(
+        queryset=SubmissionType.objects.all(),
+        slug_field='name'
+    )
+
+    student = serializers.SlugRelatedField(
+        queryset=User.objects.all(),
+        slug_field='username'
+    )
+
+    class Meta:
+        model = Submission
+        fields = ('type', 'text', 'pre_corrections', 'student')
+
+    def create(self, validated_data):
+        validated_data['student'] = validated_data['student'].student
+        return Submission.objects.create(**validated_data)
+
+
+class AnonymousFeedbackSerializer(serializers.ModelSerializer):
+
+    def to_representation(self, obj):
+        return {
+            'feedback'  : obj.text,
+            'score'     : obj.score,
+            'tutor'     : obj.of_tutor.username,
+            'student'   : hashlib.sha256(
+                obj.of_submission.student.matrikel_no.encode() +
+                obj.of_submission.student.name.encode()).hexdigest(),
+            'code'      : obj.of_submission.text
+        }
+
+    def create(self, validated_data):
+        raise NotImplementedError('AnonymousFeedbackSerializer is read-only')
+
+    class Meta:
+        model = Feedback
+        fields = ()
diff --git a/core/static/css/custom.css b/core/static/css/custom.css
index a0efe89049800d1402390316fe859b468726bc51..8e2e7f1cd81a4773dae66f4408bf3a3e7640ccb9 100644
--- a/core/static/css/custom.css
+++ b/core/static/css/custom.css
@@ -58,3 +58,4 @@ table.dataTable {
     white-space: nowrap;
     width: 1%;
 }
+
diff --git a/core/templates/core/component/tests_editor.html b/core/templates/core/component/tests_editor.html
new file mode 100644
index 0000000000000000000000000000000000000000..362397783a8d19310265168dee86069754c1aa49
--- /dev/null
+++ b/core/templates/core/component/tests_editor.html
@@ -0,0 +1,27 @@
+{# Custom feedback from the compiler #}
+<div class="card my-1">
+  <a data-toggle="collapse" href="#collapse4">
+    <h5 class="card-header">Tester Output</h5>
+  </a>
+  <div id="collapse4" class="collapse hide" role="tabpanel">
+    <div class="card-block m-2">
+      <div id="tests_editor" class="editor editor-pre">{% for test in submission.tests.all %}
+# {{test.name}}
+{{test.annotation}}
+RESULT: {{test.label}}
+-------------------------------------------------
+{% endfor %}
+      </div>
+    </div>
+  </div>
+  <script>
+    var editor_pre = ace.edit("tests_editor");
+    editor_pre.setOptions({
+      readOnly: true,
+      showGutter: false,
+      highlightActiveLine: false,
+      maxLines: Infinity,
+    })
+  </script>
+</div>
+
diff --git a/core/templates/core/feedback_form.html b/core/templates/core/feedback_form.html
index 9f38500d3fee6ff5af8a0e54f0c7d7459590c428..601aa89b6a881f939fc29b0aab08d994f2f4a283 100644
--- a/core/templates/core/feedback_form.html
+++ b/core/templates/core/feedback_form.html
@@ -25,9 +25,14 @@
       <a data-toggle="collapse" href="#collapse4">
         <h5 class="card-header">Custom Feedback</h5>
       </a>
-      <div id="collapse4" class="collapse {% if feedback.of_submission.pre_corrections %}show{% else %}hide{% endif %}" role="tabpanel">
+      <div id="collapse4" class="collapse show" role="tabpanel">
         <div class="card-block m-2">
-          <div id="pre_corrections" class="editor editor-pre">{{feedback.of_submission.pre_corrections}}</div>
+          <div id="tests_editor" class="editor editor-pre">{% for test in feedback.of_submission.tests.all %}
+# {{test.name}}
+{{test.annotation}}
+RESULT: {{test.label}}
+-------------------------------------------------
+          {% endfor %}</div>
         </div>
       </div>
     </div>
@@ -39,12 +44,12 @@
       </a>
       <div id="collapse5" class="collapse show" role="tabpanel">
         <div class="card-block m-2">
-          <div id="solution" class="editor editor-code">{{feedback.of_submission.type.possible_solution}}</div>
+          <div id="solution" class="editor editor-code">{{feedback.of_submission.type.solution}}</div>
         </div>
       </div>
     </div>
 
-    {% include "core/component/feedback_card.html" with unique="1" header="Description" content=feedback.of_submission.type.task_description expanded="hide" %}
+    {% include "core/component/feedback_card.html" with unique="1" header="Description" content=feedback.of_submission.type.description expanded="hide" %}
 
     <div class="my-2">
       <button type="button" id="collapseAllOpen"  class="btn btn-secondary">Open All</button>
@@ -69,7 +74,7 @@
         <div class="form-inline">
 
           {# Score field #}
-          <div class="input-group col-5 nopadding mr-1 mb-1">
+          <div class="input-group col-5 nopadding mr-1 mb-2">
             <span class="input-group-addon">Score:</span>
             <input
             class="form-control"
@@ -87,30 +92,32 @@
 
           {# status select #}
           {% with form.fields.status as status %}
-          <div class="form-group mr-1 mb-1">
+          <div class="form-group mr-2 mb-2">
             <select class="custom-select" id="id_status" name="status">
               {% for val, name in status.choices %}
-              <option value="{{val}}" {% if val == feedback.status %}selected{% endif %}> {{name}}</option>
+              <option value="{{val}}" {% if val == feedback.status %}selected{% endif %}>{{name}}</option>
               {% endfor %}
             </select>
           </div>
           {% endwith %}
 
-          <button type="submit" form="form1" class="btn btn-secondary mr-1 mb-1" name="update" value="Submit">Submit</button>
+          <div>
+            {# Beware! compares status and origin #}
+            <button type="submit" form="form1" class="btn btn-outline-success mb-2" name="update" value="Save">Save</button>
+            <button type="submit" form="form1" class="btn btn-outline-success mb-2" name="update" value="Submit">Save and return</button>
 
-          {# Beware! compares status and origin #}
-          {% if feedback.status == feedback.NEEDS_REVIEW or feedback.status == feedback.EDITABLE %}
-          <button type="submit" form="form1" class="btn btn-success mr-1 mb-1" name="update" value="Next">Next</button>
-          <button type="submit" form="form1" class="btn btn-success mr-1 mb-1" name="update" value="Save">Save</button>
-          {% endif %}
+            {% if feedback.status == feedback.NEEDS_REVIEW or feedback.status == feedback.EDITABLE %}
+            <button type="submit" onclick="set_accepted();" form="form1" class="btn btn-outline-success mb-2" name="update" value="Next">Accept, Save and Next</button>
+            {% endif %}
 
-          {% if feedback.origin != feedback.MANUAL %}
-          <a href="{% url 'FeedbackDelete' feedback.slug %}" class="btn btn-outline-danger mr-1 mb-1" name="delete" value="Delete">Delete auto feedback</a>
-          {% endif %}
+            {% if feedback.origin != feedback.MANUAL %}
+            <a href="{% url 'FeedbackDelete' feedback.slug %}" class="btn btn-outline-danger mb-2" name="delete" value="Delete">Delete auto feedback</a>
+            {% endif %}
 
-          {% if feedback.status == feedback.ACCEPTED %}
-          <button class="btn btn-secondary mr-1 mb-1" value="Submit" disabled>View is read only</button>
-          {% endif %}
+            {% if feedback.status == feedback.ACCEPTED %}
+            <button class="btn btn-secondary mr-1 mb-2" value="Submit" disabled>View is read only</button>
+            {% endif %}
+          </div>
         </div>
       </form>
     </div>
@@ -126,6 +133,10 @@
 {% block script_block %}
 <script>
 
+  function set_accepted() {
+    $('#id_status')[0].value = {{feedback.ACCEPTED}};
+  }
+
   $('#collapseAllOpen').click(function(){
     $('.collapse').collapse('show');
   });
@@ -141,7 +152,7 @@
   {% endif %}
 
   // we need this one for the compiler erros readonly
-  var editor_pre = ace.edit("pre_corrections");
+  var editor_pre = ace.edit("tests_editor");
   editor_pre.setOptions({
     readOnly: true,
     showGutter: false,
diff --git a/core/templates/core/r/reviewer_base.html b/core/templates/core/r/reviewer_base.html
index 6ab586329f900be8c86ad062ac30ebdf0775ad8f..c79f575f6e3321ba897604d6c1dd143394a65e58 100644
--- a/core/templates/core/r/reviewer_base.html
+++ b/core/templates/core/r/reviewer_base.html
@@ -4,7 +4,8 @@
 
 {% block navbar %}
 <a class="nav-item nav-link" href="{% url 'start' %}">Feedback</a>
-<a class="nav-item nav-link" href="{% url 'submission_list' %}">Submissions</a>
+<a class="nav-item nav-link" href="{% url 'ReviewerSubmissionListView' %}">Submissions</a>
+<a class="nav-item nav-link" href="{% url 'ReviewerStudentListView' %}">Students</a>
 <div class="nav-item dropdown">
   <a class="nav-link dropdown-toggle" href="http://example.com" id="navbarDropdownMenuLink" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
     Export
@@ -17,7 +18,7 @@
 
 {% block body_block %}
 <div class="row my-3">
-  <div class="col-4">
+  <div class="col-3">
     {% block sidebar %}
     {# This just gives an overview about what has been done already #}
     {% include "core/r/progress_card.html" %}
@@ -27,7 +28,7 @@
     {% endblock sidebar %}
   </div>
 
-  <div class="col-8">
+  <div class="col-9">
     {% block main %}
 
     {% endblock main %}
diff --git a/core/templates/core/r/single_submission.html b/core/templates/core/r/single_submission.html
index 09d388b3849725e3530c8e7dce92124626a0c654..59f26832868b3da5e9b84b151cd713c043dabea7 100644
--- a/core/templates/core/r/single_submission.html
+++ b/core/templates/core/r/single_submission.html
@@ -39,7 +39,7 @@
           Create Feedback
           {% endif %}
         </a>
-        <a href="{% url 'submission_list' %}" class="btn btn-outline-success">Back</a>
+        <button class="btn btn-outline-success" onclick="window.history.go(-1); return false;">Back</button>
       </div>
     </div>
   </div>
@@ -47,10 +47,11 @@
   <div class="col-4 my-4">
     <div class="card">
       <div class="card-block">
-        <div class="card-header">Your submission</div>
+        <div class="card-header">Student submission</div>
         <div class="editor-code" id="textarea_submission">{{submission.text}}</div>
       </div>
     </div>
+    {% include "core/component/tests_editor.html" %}
   </div>
 
   {% if feedback %}
diff --git a/core/templates/core/r/student_list.html b/core/templates/core/r/student_list.html
new file mode 100644
index 0000000000000000000000000000000000000000..57f433f05720ecc6e29f96ea93772ad86635c508
--- /dev/null
+++ b/core/templates/core/r/student_list.html
@@ -0,0 +1,59 @@
+{% extends 'core/r/reviewer_base.html' %}
+
+{% load staticfiles %}
+
+{% block main %}
+
+<div class="card">
+  <h5 class="card-header">Student Overview</h5>
+  <div class="card-block">
+    <table id="list-id-submission_list" class="table nomargin">
+      <thead class="rotate">
+        <tr class="high">
+          <th>Name</th>
+          <th>Username</th>
+          <th>Module</th>
+          {% for submission_type in submission_type_list %}
+            <th><font size="1">{{submission_type.name}}</font></th>
+          {% endfor %}
+          <th>Total score</th>
+          <th>Done</th>
+        </tr>
+      </thead>
+      <tbody>
+      {% for student in student_list %}
+        <tr>
+          <td class="fit">{{student.name}}</td>
+          <td>{{student.user.username}}</td>
+          <td>{{student.exam}}</td>
+          {% for sub in student.submissions.all %}
+            <td>{% if sub.feedback %}
+              <a href="{% url 'ReviewerSubmissionView' sub.slug %}" role="button" class="btn-link btn-sm"><code>{{sub.feedback.score}} / {{sub.type.full_score}}</code></a>
+            {% else %}
+              <a href="{% url 'ReviewerSubmissionView' sub.slug %}" role="button" class="btn btn-outline-primary btn-sm">View</a>
+            {% endif %} </td>
+          {% endfor %}
+          <td><code>{{student.overall_score}}</code></td>
+          <td>{% if student.done %}<span class="badge badge-success">yes</span>{% else %}<span class="badge badge-danger">no</span>{% endif %}</td>
+        </tr>
+      {% endfor %}
+      </tbody>
+    </table>
+  </div>
+</div>
+
+{% endblock main %}
+
+{% block script_block %}
+<script>
+  $(document).ready(function() {
+    $('[id^=list-id-]').DataTable({
+      "paging":     false,
+      "info":       false,
+      "searching":  false,
+      "stateSave":  true,
+      "order":      [[ 0, 'desc' ]],
+    });
+  });
+</script>
+{% endblock script_block %}
diff --git a/core/templates/core/r/student_submission_list.html b/core/templates/core/r/student_submission_list.html
index 59bea6014bdaabec8d182e335d77db81e34ed8be..484bdd8e06323d57ae8256cd09ad8ef8f53fe145 100644
--- a/core/templates/core/r/student_submission_list.html
+++ b/core/templates/core/r/student_submission_list.html
@@ -21,7 +21,7 @@
       <tbody>
         {% for submission in submission_list %}
         <tr>
-          <td class="align-middle fit"> <a href="{% url 'SubmissionViewReviewer' submission.slug %}" class="btn btn-outline-primary mb-1" name="edit" value="View">View submission</a></td>
+          <td class="align-middle fit"> <a href="{% url 'ReviewerSubmissionView' submission.slug %}" class="btn btn-outline-primary mb-1" name="edit" value="View">View submission</a></td>
           <td class="align-middle"> {{ submission.type }} </td>
           <td class="align-middle"> {{ submission.student }} </td>
           <td class="align-middle fit">
diff --git a/core/templates/core/r/tutor_list_card.html b/core/templates/core/r/tutor_list_card.html
index 4fac5db444705dd4a9a48af36cef6f9d9b98bb74..455927dc5cb645f7150f0bca91d4a60e9d3b6b0e 100644
--- a/core/templates/core/r/tutor_list_card.html
+++ b/core/templates/core/r/tutor_list_card.html
@@ -10,7 +10,7 @@
       <tbody>
         <tr>
           <td>{{tutor.username}}</td>
-          <td><code>{{tutor.corrected_submissions__count}}</code></td>
+          <td><code>{{tutor.feedback_list__count}}</code></td>
         </tr>
       </tbody>
       {% endfor %}
diff --git a/core/templates/core/s/single_submission.html b/core/templates/core/s/single_submission.html
index 0a018c7ce279971284553026f7157c3f5a547929..cf08272e7c32364a1e96e5c3a246bd59ba0b48d7 100644
--- a/core/templates/core/s/single_submission.html
+++ b/core/templates/core/s/single_submission.html
@@ -15,22 +15,9 @@
         <ul class="list-group list-group-flush">
           <li class="list-group-item"><strong class="mr-2">Submission Type: </strong> {{ submission.type }} </li>
           <li class="list-group-item"><strong class="mr-2">Student: </strong> {{ submission.student }}</li>
-          {% if feedback and is_reviewer %}
-          <li class="list-group-item">
-            <strong class="mr-2">Status: </strong> {% include "core/feedback_badge.html" %}
-            <span class="badge badge-warning ml-2">Only visible to reviewer</span>
-          </li>
-          <li class="list-group-item">
-            <strong class="mr-2">Tutor: </strong> {{ feedback.of_tutor }}
-            <span class="badge badge-warning ml-2">Only visible to reviewer</span>
-          </li>
-          {% endif %}
           <li class="list-group-item"><strong class="mr-2">Score: </strong>
             {% if feedback and feedback.status == feedback.ACCEPTED %}
             <code> {{ feedback.score }} / {{submission.type.full_score}} </code>
-            {% elif feedback and is_reviewer %}
-            <code> {{ feedback.score }} / {{submission.type.full_score}} </code>
-            <span class="badge badge-warning ml-2">Only visible to reviewer</span>
             {% else %}
             <span class="badge badge-danger">No Feedback</span>
             {% endif %}
@@ -38,18 +25,7 @@
         </ul>
       </div>
       <div class="card-footer">
-        {% if is_reviewer %}
-        <a href="{% url 'create_feedback_for_submission' submission.slug %}" class="btn btn-success">
-          {% if feedback %}
-          Edit Feedback
-          {% else %}
-          Create Feedback
-          {% endif %}
-        </a>
-        <a href="{% url 'submission_list' %}" class="btn btn-outline-success">Back</a>
-        {% else %}
         <a href="{% url 'start' %}" class="btn btn-success">Back</a>
-        {% endif %}
       </div>
     </div>
   </div>
@@ -61,18 +37,15 @@
         <div class="editor-code" id="textarea_submission">{{submission.text}}</div>
       </div>
     </div>
+    {% include "core/component/tests_editor.html" %}
   </div>
 
-
   {% if feedback %}
-  {% if feedback.status == feedback.ACCEPTED or is_reviewer %}
+  {% if feedback.status == feedback.ACCEPTED %}
   <div class="col-4 my-4">
     <div class="card">
       <div class="card-block">
         <div class="card-header">Our feedback
-          {% if is_reviewer %}
-          <span class="badge badge-warning ml-2">Only visible to reviewer</span>
-          {% endif %}
         </div>
         <div class="editor-code" id="textarea_feedback">{{ feedback.text }}</div>
       </div>
diff --git a/core/templates/core/s/student_startpage.html b/core/templates/core/s/student_startpage.html
index ca5a7f723c14abdcf7244beeedf3af11925638dc..cd016dd7f5b03e445b895b4b2149f082118e580c 100644
--- a/core/templates/core/s/student_startpage.html
+++ b/core/templates/core/s/student_startpage.html
@@ -5,11 +5,10 @@
 {% block navbar %} Student Exam View {% endblock navbar %}
 
 {% block body_block %}
-
 <div class="row justify-content-center">
   <div class="col-6">
     <div class="row my-3">
-      <h2>Hello {{ student.student }}</h2>
+      <h2>Submissions of {{ student.name }}</h2>
     </div>
 
     <div class="row my-2">
@@ -21,10 +20,10 @@
           <th></th>
         </thead>
         <tbody>
-          {% for submission in submission_list %}
+          {% for submission in student.submissions.all %}
           <tr class="align-middle">
             <td class="align-middle">
-              {% if submission.seen %}
+              {% if submission.seen_by_student %}
               <span class="badge badge-success">Seen</span>
               {% endif %}
             </td>
@@ -38,7 +37,7 @@
               {% endif %}
               {% endwith %}
             </td>
-            <td class="align-middle"><a class="btn btn-primary" href="{% url 'SubmissionViewStudent' submission.slug %}">View</a></td>
+            <td class="align-middle"><a class="btn btn-primary" href="{% url 'StudentSubmissionView' submission.slug %}">View</a></td>
           </tr>
           {% endfor %}
         </tbody>
@@ -46,7 +45,4 @@
     </div>
   </div>
 </div>
-
-
-
 {% endblock body_block %}
diff --git a/core/templates/core/t/tutor_startpage.html b/core/templates/core/t/tutor_startpage.html
index c9c3277ad5f2ca06d345280e6b3e6cc24b5d240a..52836ebf0bb4bb65d215a602108ca300b513f097 100644
--- a/core/templates/core/t/tutor_startpage.html
+++ b/core/templates/core/t/tutor_startpage.html
@@ -38,7 +38,7 @@
         <tbody>
           <tr>
             <td class="fit"><strong>Your contribution:</strong></td>
-            <td colspan="6"><code>{% if feedback_list|length > 0 %} {{feedback_list|length}} {% else %} None. Sad. {% endif %}</code></td>
+            <td colspan="6"><code>{% if tutor.feedback_list.all|length > 0 %} {{tutor.feedback_list.all|length}} {% else %} None. Sad. {% endif %}</code></td>
           </tr>
         </tbody>
       </table>
@@ -88,7 +88,7 @@
           </tr>
         </thead>
         <tbody>
-          {% for feedback in feedback_list %}
+          {% for feedback in tutor.feedback_list.all %}
           <tr>
             <td>
               {% include "core/component/feedback_badge.html" %}
diff --git a/core/urls.py b/core/urls.py
index 42c5917be07fa9905c8f360a0ffe5ec3b7ff47ab..867d523a9e2629c36d74dcdda61c6a9514a50d8c 100644
--- a/core/urls.py
+++ b/core/urls.py
@@ -14,12 +14,13 @@ urlpatterns = [
     url(r'^feedback/edit/(?P<feedback_slug>\w+)/$', views.FeedbackEdit.as_view(), name='FeedbackEdit'),
     url(r'^feedback/delete/(?P<feedback_slug>\w+)/$', views.delete_feedback, name='FeedbackDelete'),
 
-    url(r'^r/submission/list/$', views.get_submission_list, name='submission_list'),
-    url(r'^r/submission/view/(?P<slug>\w+)/$', views.SubmissionViewReviewer.as_view(), name='SubmissionViewReviewer'),
+    url(r'^r/student/list/$', views.ReviewerStudentListView.as_view(), name='ReviewerStudentListView'),
+    url(r'^r/submission/list/$', views.ReviewerSubmissionListView.as_view(), name='ReviewerSubmissionListView'),
+    url(r'^r/submission/view/(?P<slug>\w+)/$', views.ReviewerSubmissionView.as_view(), name='ReviewerSubmissionView'),
     url(r'^r/submission/create-feedback-for/(?P<slug>\w+)/$', views.create_feedback_for_submission, name='create_feedback_for_submission'),
     url(r'^r/submission/download/(?P<slug>\w+)/$', views.download_submissions, name='download_submissions'),
 
-    url(r'^s/submission/view/(?P<slug>\w+)/$', views.SubmissionViewStudent.as_view(), name='SubmissionViewStudent'),
+    url(r'^s/submission/view/(?P<slug>\w+)/$', views.StudentSubmissionView.as_view(), name='StudentSubmissionView'),
 
     url(r'^csv/$', views.export_csv, name='export')
 ]
diff --git a/core/views/__init__.py b/core/views/__init__.py
index 9c990fbac6cd13d181bd5e7d190e563dc61067e2..67868d83114fb61ae463b7c2cd5b9bb0127f5bb1 100644
--- a/core/views/__init__.py
+++ b/core/views/__init__.py
@@ -1,6 +1,9 @@
 from .login import *
 from .feedback import *
+from .generics import *
+
 from .submission import *
 from .user_startpages import *
 from .index import *
 from .export_csv import *
+
diff --git a/core/views/export_csv.py b/core/views/export_csv.py
index 3606541433009a0b9ddd26a528cfb9e35e820995..1e1a7fdd12d212085b0f36a0bef223019bdd2106 100644
--- a/core/views/export_csv.py
+++ b/core/views/export_csv.py
@@ -1,8 +1,9 @@
 import csv
+
 from django.http import HttpResponse
 
-from core.models import Student, SubmissionType, Submission
 from core.custom_annotations import group_required
+from core.models import Student, SubmissionType
 
 
 @group_required('Reviewers')
@@ -15,17 +16,13 @@ def export_csv(request):
     writer.writerow(['Matrikel', 'Username',  'Name', 'Sum'] +
                     [s.name for s in SubmissionType.objects.all().order_by('name')])
 
-    for student in Student.objects.all():
-        submissions = Submission.objects.filter(student=student)
-        score_list  = [s.feedback.score if hasattr(s, 'feedback') else 0 for s in submissions.order_by('type__name')]
-        if not score_list:
-            score_list = [0] * SubmissionType.objects.count()
+    for student in Student.get_overall_score_annotated_submission_list():
         writer.writerow([
             student.matrikel_no,
             student.user.username,
             student.name,
-            sum(score_list),
-            *score_list
+            student.overall_score,
+            *student.score_per_submission().values()
         ])
 
     return response
diff --git a/core/views/feedback.py b/core/views/feedback.py
index 5c60bc4e254c638feede522bbc7906ff1b613861..db95a26d8457c2286f871350787b1fa5846f4b9b 100644
--- a/core/views/feedback.py
+++ b/core/views/feedback.py
@@ -1,7 +1,7 @@
 from random import choice
 
 from django.contrib import messages
-from django.http import Http404, HttpResponseRedirect, HttpResponse
+from django.http import Http404, HttpResponse, HttpResponseRedirect
 from django.urls import reverse
 from django.utils.decorators import method_decorator
 from django.views.generic.edit import UpdateView
diff --git a/core/views/generics.py b/core/views/generics.py
new file mode 100644
index 0000000000000000000000000000000000000000..6afd8c675c3dc4707226755f5d773e2852fedf20
--- /dev/null
+++ b/core/views/generics.py
@@ -0,0 +1,58 @@
+from django.utils.decorators import method_decorator
+from django.views.generic import DetailView, ListView, View
+
+from core.custom_annotations import group_required
+from core.models import SubmissionType, get_annotated_tutor_list
+
+
+class StudentView(View):
+
+    @method_decorator(group_required('Students',))
+    def dispatch(self, *args, **kwargs):
+        return super().dispatch(*args, **kwargs)
+
+
+class StudentListView(StudentView, ListView):
+    pass
+
+
+class StudentDetailView(StudentView, DetailView):
+    pass
+
+
+class TutorView(View):
+
+    @method_decorator(group_required('Tutors',))
+    def dispatch(self, *args, **kwargs):
+        return super().dispatch(*args, **kwargs)
+
+
+class TutorListView(TutorView, ListView):
+    pass
+
+
+class TutorDetailView(TutorView, DetailView):
+    pass
+
+
+class ReviewerView(View):
+
+    @method_decorator(group_required('Reviewers',))
+    def dispatch(self, *args, **kwargs):
+        return super().dispatch(*args, **kwargs)
+
+
+class ReviewerListView(ReviewerView, ListView):
+
+    def get_context_data(self, **kwargs):
+        context = super().get_context_data(**kwargs)
+
+        return {
+            'submission_type_list': SubmissionType.get_annotated_feedback_count(),
+            'tutor_list':           get_annotated_tutor_list(),
+            **context,
+        }
+
+
+class ReviewerDetailView(ReviewerView, DetailView):
+    pass
diff --git a/core/views/index.py b/core/views/index.py
index 9a6e56f787a88d764a4b487e906d041d782f7413..8c55022c3c84be1d0c2a0cb25f2d2b73bff9cce6 100644
--- a/core/views/index.py
+++ b/core/views/index.py
@@ -1,11 +1,5 @@
 from django.shortcuts import render
 
-from core.models import SubmissionType
-
 
 def index(request):
-    context = {
-        'boldmessage': 'Delbert Grady says hey there world!',
-        'submission_types': SubmissionType.objects.all(),
-    }
-    return render(request, 'core/index.html', context)
+    return render(request, 'core/index.html')
diff --git a/core/views/login.py b/core/views/login.py
index 4ebc8c7620d762bce09e61acd43440443c571bbd..708a1a89a1f8e67add915f8541d86dbb64111e02 100644
--- a/core/views/login.py
+++ b/core/views/login.py
@@ -1,12 +1,11 @@
+from django.contrib import messages
 from django.contrib.auth import authenticate, login, logout
 from django.contrib.auth.decorators import login_required
-from django.contrib import messages
 from django.http import HttpResponseRedirect
 from django.urls import reverse
 
 from core.custom_annotations import in_groups
 
-
 __all__ = ('user_login', 'user_logout')
 
 def is_disabled(user):
diff --git a/core/views/submission.py b/core/views/submission.py
index 99d4b13c3fb70592a19c7c503bf1497d8c9ecd52..8d7eff78bfe5f9a5572f091d1c2284826a87916d 100644
--- a/core/views/submission.py
+++ b/core/views/submission.py
@@ -1,48 +1,45 @@
-from django.utils.decorators import method_decorator
 from django.views.generic import DetailView
-from django.shortcuts import render
 
-from core.custom_annotations import group_required, in_groups
-from core.models import Submission, Feedback
+from core.custom_annotations import in_groups
+from core.models import Feedback, Student, Submission
+from core.views.generics import (ReviewerDetailView, ReviewerListView,
+                                 StudentView)
 
-from .user_startpages import get_annotated_feedback_count, get_annotated_tutor_list
 
+class StudentSubmissionView(StudentView, DetailView):
 
-class SubmissionViewStudent(DetailView):
-
-    template_name = 'core/s/single_submission.html'
-    model = Submission
-
-    @method_decorator(group_required('Students',))
-    def dispatch(self, *args, **kwargs):
-        return super(SubmissionViewStudent, self).dispatch(*args, **kwargs)
+    template_name   = 'core/s/single_submission.html'
+    model           = Submission
 
     def get_object(self):
         obj = Submission.objects.get(slug=self.kwargs['slug'])
         if in_groups(self.request.user, ('Students', )) and hasattr(obj, 'feedback') and obj.feedback.status == Feedback.ACCEPTED:
-            obj.seen = True
+            obj.seen_by_student = True
             obj.save()
         return obj
 
 
-class SubmissionViewReviewer(DetailView):
+class ReviewerSubmissionView(ReviewerDetailView):
 
+    model         = Submission
     template_name = 'core/r/single_submission.html'
-    model = Submission
-
-    @method_decorator(group_required('Reviewers',))
-    def dispatch(self, *args, **kwargs):
-        return super(SubmissionViewReviewer, self).dispatch(*args, **kwargs)
 
     def get_object(self):
         return Submission.objects.get(slug=self.kwargs['slug'])
 
 
-@group_required('Reviewers')
-def get_submission_list(request):
-    context = {
-        'submission_list': Submission.objects.all(),
-        'submission_type_list': get_annotated_feedback_count(),
-        'tutor_list': get_annotated_tutor_list(),
-    }
-    return render(request, 'core/r/student_submission_list.html', context)
+class ReviewerSubmissionListView(ReviewerListView):
+
+    model               = Submission
+    template_name       = 'core/r/student_submission_list.html'
+    context_object_name = 'submission_list'
+
+
+class ReviewerStudentListView(ReviewerListView):
+
+    model               = Student
+    template_name       = 'core/r/student_list.html'
+    context_object_name = 'student_list'
+
+    def get_queryset(self):
+        return self.model.get_overall_score_annotated_submission_list()
diff --git a/core/views/user_startpages.py b/core/views/user_startpages.py
index f6fd29f16b8398882e791c24200b08170bca4b7b..9d1ae9803b5204044d8e9825b28a45b4dd3d78b1 100644
--- a/core/views/user_startpages.py
+++ b/core/views/user_startpages.py
@@ -1,87 +1,67 @@
 from django.contrib.auth.decorators import login_required
 from django.contrib.auth.models import User
-from django.db.models import Count, Q, F, Case, When, IntegerField, Value
 from django.http import HttpResponseRedirect
-from django.shortcuts import render
 from django.urls import reverse
 
-from core.custom_annotations import group_required, in_groups
-from core.models import Feedback, Submission, SubmissionType
+from core.custom_annotations import in_groups
+from core.models import Feedback, Student, SubmissionType
+from core.views.generics import (ReviewerListView, StudentDetailView,
+                                 TutorDetailView)
 
 
 @login_required(login_url='/')
 def user_home(request):
     if in_groups(request.user, ('Students', )):
-        return student_view(request)
+        return StudentStartPage.as_view()(request)
     elif in_groups(request.user, ('Tutors', )):
-        return tutor_view(request)
+        return TutorStartPage.as_view()(request)
     elif in_groups(request.user, ('Reviewers', )):
-        return reviewer_view(request)
+        return ReviewerFeedbackListView.as_view()(request)
     else:
         return HttpResponseRedirect(reverse('index'))
 
-def get_annotated_feedback_count():
-    """ Annotates submission lists with counts
-
-    count both
-        * number of submission per submission type
-        * count of received feedback per submission type
-        *
-    Alternative with case
-        Count(Case(
-            When(submissions__feedback_list__origin=Feedback.MANUAL,
-                then=Value(1)), output_field=IntegerField())
-        )
-
-    Returns:
-        annotated queryset
-    """
-    return SubmissionType.objects\
-        .annotate( # to display only manual
-            feedback_count=Count(
-                Case(
-                    When(Q(submissions__feedback__isnull=False) & Q(submissions__feedback__status=Feedback.ACCEPTED),
-                        then=Value(1)), output_field=IntegerField(),
-                )
-            )
-        ).annotate(
-            submission_count=Count('submissions')
-        ).annotate(
-            percentage=(F('feedback_count') * 100 / F('submission_count'))
-        ).all().order_by('name')
-
-def get_annotated_tutor_list():
-    return User.objects.annotate(Count('corrected_submissions')).filter(groups__name='Tutors') \
-                                                                .order_by('-corrected_submissions__count')
-
-@group_required('Tutors')
-def tutor_view(request):
-    submission_type = get_annotated_feedback_count()
-    context = {
-        'submission_type_list': submission_type,
-        'feedback_list': Feedback.objects.filter(of_tutor=request.user),
-        'feedback_open_list': Feedback.objects.filter(Q(status=Feedback.OPEN) & ~Q(of_tutor=request.user)),
-    }
-    return render(request, 'core/t/tutor_startpage.html', context)
-
-
-@group_required('Students')
-def student_view(request):
-    context = {
-        'student': request.user,
-        'submission_list': Submission.objects.filter(student__user=request.user)
-    }
-    return render(request, 'core/s/student_startpage.html', context)
-
-
-@group_required('Reviewers')
-def reviewer_view(request):
-    context = {
-        'submission_type_list': get_annotated_feedback_count(),
-        'tutor_list': get_annotated_tutor_list(),
-        'feedback_list_manual': Feedback.objects.filter(origin=Feedback.MANUAL),
-        'feedback_list_empty': Feedback.objects.filter(origin=Feedback.WAS_EMPTY),
-        'feedback_list_did_not_compile': Feedback.objects.filter(origin=Feedback.DID_NOT_COMPILE),
-        'feedback_list_could_not_link': Feedback.objects.filter(origin=Feedback.COULD_NOT_LINK),
-    }
-    return render(request, 'core/r/reviewer_startpage.html', context)
+
+class TutorStartPage(TutorDetailView):
+
+    model               = User
+    template_name       = 'core/t/tutor_startpage.html'
+    context_object_name = 'tutor'
+
+    def get_object(self):
+        return self.request.user
+
+    def get_context_data(self, **kwargs):
+        context = super().get_context_data(**kwargs)
+
+        return {
+            'submission_type_list': SubmissionType.get_annotated_feedback_count(),
+            'feedback_open_list': Feedback.get_open_feedback(self.get_object()),
+            **context
+        }
+
+
+class StudentStartPage(StudentDetailView):
+
+    model           = Student
+    template_name   = 'core/s/student_startpage.html'
+
+    def get_object(self):
+        return self.request.user.student
+
+
+class ReviewerFeedbackListView(ReviewerListView):
+    """ This is the de facto startpage of the reviewer accounts"""
+
+    model         = Feedback
+    template_name = 'core/r/reviewer_startpage.html'
+
+    def get_context_data(self, **kwargs):
+        context = super().get_context_data(**kwargs)
+
+        return {
+            'feedback_list_manual': self.model.objects.filter(origin=Feedback.MANUAL),
+            'feedback_list_empty': self.model.objects.filter(origin=Feedback.WAS_EMPTY),
+            'feedback_list_did_not_compile': self.model.objects.filter(origin=Feedback.DID_NOT_COMPILE),
+            'feedback_list_could_not_link': self.model.objects.filter(origin=Feedback.COULD_NOT_LINK),
+            **context
+        }
diff --git a/grady/database_router.py b/grady/database_router.py
deleted file mode 100644
index 03721181cd262785fc0275a2f3b833cac256cce6..0000000000000000000000000000000000000000
--- a/grady/database_router.py
+++ /dev/null
@@ -1,40 +0,0 @@
-
-class AuthRouter(object):
-    """
-    A router to control all database operations on models in the
-    auth application.
-    """
-    def db_for_read(self, model, **hints):
-        """
-        Attempts to read auth models go to auth_db.
-        """
-        if model._meta.app_label == 'auth':
-            return 'auth_db'
-        return None
-
-    def db_for_write(self, model, **hints):
-        """
-        Attempts to write auth models go to auth_db.
-        """
-        if model._meta.app_label == 'auth':
-            return 'auth_db'
-        return None
-
-    def allow_relation(self, obj1, obj2, **hints):
-        """
-        Allow relations if a model in the auth app is involved.
-        """
-        if obj1._meta.app_label == 'auth' or \
-           obj2._meta.app_label == 'auth':
-           return True
-        return True
-
-    def allow_migrate(self, db, app_label, model_name=None, **hints):
-        """
-        Make sure the auth app only appears in the 'auth_db'
-        database.
-        """
-        if app_label == 'auth':
-            return db == 'auth_db'
-        return None
-
diff --git a/grady/settings/default.py b/grady/settings/default.py
index beabda381a278e282bf350d221b088141f89a0e1..cdd76ebdd3bafa7746c0e9c36d25b131fdaf57fb 100644
--- a/grady/settings/default.py
+++ b/grady/settings/default.py
@@ -91,9 +91,6 @@ AUTH_PASSWORD_VALIDATORS = [
     {
         'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
     },
-    {
-        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
-    },
     {
         'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
     },
@@ -126,6 +123,9 @@ GRAPH_MODELS = {
     'group_models': True,
 }
 
+LOGIN_REDIRECT_URL  = '/'
+LOGIN_URL           = '/'
+
 
 MESSAGE_TAGS = {
     messages.DEBUG: 'alert-info',
diff --git a/grrr.py b/grrr.py
new file mode 100644
index 0000000000000000000000000000000000000000..d97add4cdc76dd8c8a857ba4c502994d2a1c4d3d
--- /dev/null
+++ b/grrr.py
@@ -0,0 +1,137 @@
+import argparse
+import csv
+import os
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'grady.settings')
+import secrets
+import sys
+import json
+
+import django
+django.setup()
+
+from django.contrib.auth.models import User
+
+from core.models import Student, Submission
+import util.importer
+
+def parseme():
+    parser      = argparse.ArgumentParser()
+    subparsers  = parser.add_subparsers(dest="command")
+
+    parser.add_argument(
+        '-o', '--output',
+        help='Where the output goes (not info messages)',
+        default=sys.stdout,
+        type=argparse.FileType(mode='w'),
+    )
+
+    ### parser for printing out new passwordlists ###
+    passwordlist = subparsers.add_parser(
+        'passwordlist',
+        help='all student passwords will be changed and a list of these passwords will be printed'
+    )
+    passwordlist.add_argument(
+        'instance',
+        default='',
+        help='name of the instance that generated the passwords'
+    )
+
+    ### parser for replacing usernames ###
+    replaceusernames = subparsers.add_parser(
+        'replaceusernames',
+        help='replaces all usernames based on a matrikel_no -> new_name dict (input should be JSON)'
+    )
+    replaceusernames.add_argument(
+        'matno2username_dict',
+        help='the mapping as a JSON file',
+        default=sys.stdin,
+        type=argparse.FileType('r')
+    )
+
+    ### parser for enabling or disabling users ###
+    enableusers = subparsers.add_parser(
+        'enableusers',
+        help='enable or disable all user accounts'
+    )
+    enableusers.add_argument(
+        'switch',
+        choices=('on', 'off'),
+        default='on',
+        help='enable all users (on) or disable all (off)'
+    )
+    filter_group = enableusers.add_mutually_exclusive_group()
+    filter_group.add_argument(
+        '-e', '--exclude',
+        default=(),
+        nargs='+',
+        help='give exceptions by username in a space separated list'
+    )
+    filter_group.add_argument(
+        '-i', '--include',
+        help='only apply to these users',
+        nargs='+',
+        default=())
+
+    ### parser for extracting submissions ###
+    subparsers.add_parser('extractsubmissions')
+
+    ### parser for running the interactive importer ###
+    subparsers.add_parser('importer')
+
+    return parser.parse_args()
+
+
+def handle_passwordlist(output=sys.stdout, instance="", **kwargs):
+    with open('/usr/share/dict/words') as words:
+        choose_from = list({word.strip().lower()
+                            for word in words if 5 < len(word) < 8})
+
+    writer = csv.writer(output)
+    writer.writerow(['Name', 'Matrikel', 'Username', 'password', 'instance'])
+
+    for student in Student.objects.all():
+        password = ''.join(secrets.choice(choose_from) for _ in range(3))
+
+        student.user.set_password(password)
+        student.user.save()
+
+        writer.writerow([student.name, student.matrikel_no,
+                         student.user.username, password, instance])
+
+
+def handle_enableusers(switch, exclude, include, **kwargs):
+
+    if include:
+        for user in User.objects.filter(username__in=include):
+            user.is_active = switch == 'on'
+            user.save()
+    else: # also applies when neither --include nor --exclude is set
+        for user in User.objects.exclude(username__in=exclude):
+            user.is_active = switch == 'on'
+            user.save()
+
+
+def handle_replaceusernames(matno2username_dict, **kwargs):
+    matno2username = json.JSONDecoder().decode(matno2username_dict.read())
+    for student in Student.objects.all():
+        if student.matrikel_no in matno2username:
+            new_name = matno2username[student.matrikel_no]
+            student.user.username = new_name
+            student.user.save()
+
+
+def handle_extractsubmissions(output, **kwargs):
+    for submission in Submission.objects.filter(feedback__isnull=False).order_by('type'):
+        print(submission.feedback.score, repr(submission.text), file=open(str(submission.type).replace(' ', '_'), 'a'))
+
+
+def handle_importer(**kwargs):
+    util.importer.start()
+
+def main():
+    args = parseme()
+    if args.command:
+        globals()['handle_' + args.command](**vars(args))
+
+if __name__ == '__main__':
+    main()
diff --git a/populatedb.py b/populatedb.py
deleted file mode 100644
index 3b237129cc704c8b5ec720d87260d0f7561b32e5..0000000000000000000000000000000000000000
--- a/populatedb.py
+++ /dev/null
@@ -1,294 +0,0 @@
-import os
-import csv
-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'grady.settings')
-
-import django
-import xkcdpass.xkcd_password as xp
-import json
-import argparse
-from collections import namedtuple
-django.setup()
-
-INFO = 1
-
-HTML_DIR = 'html'
-SOLUTION_DIR = 'code/code-lsg'
-
-wordfile = xp.locate_wordfile()
-wordlist = xp.generate_wordlist(wordfile=wordfile, min_length=5, max_length=8)
-
-
-from django.contrib.auth.models import Group, User
-
-from core.models import Student, Submission, SubmissionType, Feedback
-
-if INFO:
-    info = print
-else:
-    info = lambda _: None
-
-
-def parseme():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        '--create-users',
-        help='Will just populate auth_db',
-        action='store_true')
-    parser.add_argument(
-        '--compiler-output-only',
-        help='Will only add compiler output to existing data',
-        action='store_true')
-    parser.add_argument(
-        'DATADIR',
-        help='a folder containing a predefined set of files with information',
-        default='data')
-    parser.add_argument(
-        '-s', '--submissions',
-        help='A file with submission code and student user names',
-        default='submissions.json',
-        metavar='FILE')
-    parser.add_argument(
-        '-t', '--tutors',
-        help='A list of tutor names',
-        default='tutors',
-        metavar="FILE")
-    parser.add_argument(
-        '-r', '--reviewers',
-        help='A list of reviewer names',
-        default='reviewers',
-        metavar="FILE")
-    parser.add_argument(
-        '-st', '--submission_types',
-        help='some kind of descriptions for all the submission types',
-        default='submission_types.csv',
-        metavar="FILE")
-
-    args = parser.parse_args()
-
-    args.tutors             = os.path.join(args.DATADIR, args.tutors)
-    args.reviewers          = os.path.join(args.DATADIR, args.reviewers)
-    args.submissions        = os.path.join(args.DATADIR, args.submissions)
-    args.submission_types   = os.path.join(args.DATADIR, args.submission_types)
-
-    return args
-
-
-def add_submission_type(name,
-                        score,
-                        task_description="__task_description: what the student saw",
-                        possible_solution="__possible_solution: a sample solution",
-                        correction_guideline="__possible_solution: a way to correct the task",):
-    task, created = SubmissionType.objects.get_or_create(name=name)
-    task.full_score = int(score)
-    task.task_description = task_description
-    task.possible_solution = possible_solution
-    task.correction_guideline = correction_guideline
-    task.save()
-    if created:
-        info(f"- Created Task {task.name}")
-    else:
-        info(f"- Task {task.name} was already created")
-    return task
-
-
-def student_has_all_submissions(student):
-    return Submission.objects.filter(student=student).count() \
-        == SubmissionType.objects.all().count()
-
-
-def add_submission(type, text, student, compiler_output):
-    if student_has_all_submissions(student):
-        return None
-
-    sub = Submission()
-    sub.type = type
-    sub.text = text
-    sub.student = student
-    sub.pre_corrections = compiler_output
-    sub.save()
-    add_auto_feedback(sub, compiler_output)
-    info(f"- Created Submission of Type {sub.type}")
-    return sub
-
-
-def add_compiler_output_only(type, text, student, compiler_output):
-
-    sub = Submission.objects.get(type=type, student=student)
-    if not sub:
-        return
-    sub.pre_corrections = compiler_output
-    sub.save()
-    info(f"- Added compiler output to submission {sub.type}")
-    return sub
-
-
-def add_auto_feedback(submission, compiler_output):
-    if submission.text and not compiler_output:
-        return # let the tutor do his job
-
-    def deduct_feedback_type() -> (str, str):
-        if not submission.text:
-            return Feedback.WAS_EMPTY, Feedback.ACCEPTED
-        elif compiler_output.endswith('DID NOT COMPILE'):
-            return Feedback.DID_NOT_COMPILE, Feedback.NEEDS_REVIEW
-        elif compiler_output.endswith('COULD NOT LINK'):
-            return Feedback.COULD_NOT_LINK, Feedback.NEEDS_REVIEW
-        return None, None
-
-    auto_correct, _ = User.objects.get_or_create(username='auto_correct')
-    feedback = Feedback()
-    feedback.text = "--- Was generated automatically ---"
-    feedback.origin, feedback.status = deduct_feedback_type()
-    if feedback.origin is None and feedback.status is None:
-        return
-    feedback.of_submission = submission
-    feedback.of_tutor = auto_correct
-    feedback.save()
-    if feedback.origin == Feedback.WAS_EMPTY:
-        submission.final_feedback = feedback
-        submission.save()
-    info(f"- Created {feedback.origin} Feedback for Submission {submission}")
-    return feedback
-
-
-def add_student(username, name, matrikel_no):
-    student_group, _ = Group.objects.get_or_create(name='Students')
-    student, created = Student.objects.get_or_create(
-        matrikel_no=matrikel_no,
-        user=add_user(username, student_group)
-    )
-    if created:
-        student.name = name
-        student.matrikel_no = matrikel_no
-        student.save()
-    return student
-
-
-def add_user(username, group):
-    user, created = User.objects.get_or_create(username=username)
-
-    if created:
-        password = xp.generate_xkcdpassword(wordlist, numwords=2)
-        login_writer.writerow([username, password])
-        user.set_password(password)
-        group.user_set.add(user)
-        info(f"- Created user {user} and added him to group {group}")
-        user.save()
-    else:
-        info(f"- User {user} of group {group} was already created.")
-
-    return user
-
-
-def add_group(group_name):
-    group, _ = Group.objects.get_or_create(name=group_name)
-    info(f"- Created group {group}")
-    return group
-
-
-def create_superuser():
-    try:
-        username = 'doncamillo'
-        password = xp.generate_xkcdpassword(wordlist, numwords=2)
-        login_writer.writerow(username, password)
-        User.objects.create_superuser(
-            username=username, password=password, email='mail-gardy@jmx.io')
-    except Exception as e:
-        info("- Superuser was already created.")
-        return
-
-
-class PopulateDatabase:
-
-    """docstring for PopulateDatabase"""
-
-    __slots__ = (
-        'args',
-        'type_dict',
-        'student_group',
-        'tutor_group',
-        'reviewer_group',
-    )
-
-    def __init__(self, args):
-        self.args = args
-        if self.args.create_users:
-            self.create_groups()
-            self.create_user_accounts()
-        else: # dirty
-            self.create_submission_types()
-            self.populate_submissions()
-
-    def create_groups(self):
-        self.student_group  = add_group('Students')
-        self.tutor_group    = add_group('Tutors')
-        self.reviewer_group = add_group('Reviewers')
-
-    def create_user_accounts(self):
-        with open(self.args.tutors) as tutors:
-            for tutor in tutors:
-                add_user(tutor.strip(), self.tutor_group)
-
-        with open(self.args.reviewers) as reviewers:
-            for reviewer in reviewers:
-                add_user(reviewer.strip(), self.reviewer_group)
-
-    def create_submission_types(self):
-        submission_type = namedtuple('submission_type', 'id name score')
-        with open(args.submission_types) as data:
-            types = list(submission_type(*line.strip().split(', '))
-                         for line in data if line)
-
-        self.type_dict = {}
-        for t in types:
-            with \
-                    open(os.path.join(self.args.DATADIR, SOLUTION_DIR, t.id + '-lsg.c' )) as lsg, \
-                    open(os.path.join(self.args.DATADIR, HTML_DIR, t.id + '.html' )) as desc:
-                self.type_dict[t.id] = add_submission_type(
-                    f"[{t.id}] {t.name}",
-                    t.score,
-                    desc.read(),
-                    lsg.read(),
-                )
-
-    def populate_submissions(self):
-        with open(self.args.submissions) as data:
-            stud_data = json.JSONDecoder().decode(data.read())
-
-        for user, userdata in stud_data.items():
-            student = add_student(
-                user, userdata['name'], userdata['matrikel_no'])
-            for s, code in userdata['submissions'].items():
-                if self.args.compiler_output_only:
-                    add_compiler_output_only(
-                        self.type_dict[s], code, student,
-                        userdata['compiler_output'][s]
-                    )
-                else:
-                    add_submission(
-                        self.type_dict[s], code, student,
-                        userdata['compiler_output'][s]
-                    )
-
-
-# Start execution here!
-if __name__ == '__main__':
-    args = parseme()
-    print("Starting population script...")
-
-    LOGIN_FILE = 'login_data.csv'
-
-    try:
-        login_data_f = open(LOGIN_FILE, 'a')
-        if os.stat(LOGIN_FILE).st_size == 0:
-            login_writer = csv.writer(login_data_f)
-            login_writer.writerow(['username', 'password'])
-        else:
-            login_writer = csv.writer(login_data_f)
-
-        # start the actual population
-        create_superuser()
-        PopulateDatabase(args)
-
-    finally:
-        login_data_f.close()
diff --git a/requirements.txt b/requirements.txt
index 738aeb1fbf3b3846632bb180edd110e5b8b2b543..b6bb6dad38921a3b22dcd34bd9b20d0ea06c89d4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,8 @@
 Django~=1.10.6
 django-extensions~=1.7.7
+djangorestframework~=3.6.3
+django_compressor~=2.1.1
 gunicorn~=19.7.0
 psycopg2~=2.7.1
-xkcdpass~=1.9.5
 xlrd~=1.0.0
-django_compressor~=2.1.1
 lxml~=3.8.0
diff --git a/scripts/README.rst b/scripts/README.rst
index f07fc7c98533c47e545e02655b1c317d173e884e..46af791e4de48682ed54ba493832131d7dae2f81 100644
--- a/scripts/README.rst
+++ b/scripts/README.rst
@@ -1,7 +1,7 @@
 What is this directory about?
 =============================
 
-Well, it just servers as a collection of files that currently live in folders
+Well, it just serves as a collection of files that currently live in folders
 not part of the git repository, since they contain volatile or test data. I
 include them here for the sake of completeness, but they will be removed in
 later versions, since their work has to be encapsulated in the overall process.
diff --git a/scripts/matrikel_to_email.py b/scripts/matrikel_to_email.py
deleted file mode 100644
index 920a278fbbf3c4adae90750c8936aedb414d1831..0000000000000000000000000000000000000000
--- a/scripts/matrikel_to_email.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from itertools import chain
-import re, json
-
-OUTFILE = 'matno2email.json'
-
-with \
-        open('binf1801-flexnow-20170329.csv') as inf, \
-        open('bphy1601-flexnow-20170328.csv') as phy, \
-        open(OUTFILE, "w") as out:
-    out.write(json.JSONEncoder().encode({matno : email for (matno, email) in (re.split(r'[\t;]', line.strip()) for line in chain(inf, phy) if line)})) # i just love one liners
diff --git a/util/__init__.py b/util/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/util/convert.py b/util/convert.py
new file mode 100755
index 0000000000000000000000000000000000000000..5798b2458bbc61f25700a90283b73efa379644c3
--- /dev/null
+++ b/util/convert.py
@@ -0,0 +1,155 @@
+#!/usr/local/bin/python3
+""" a simple script that converts ilias exam output to readable json
+
+The json output will look like this:
+{
+    "max.mustermann": { <<--- OR all uppercase letter of the name + username/matrikel_no
+        "matrikel_no": "12345678",
+        "name": "Mustermann, Max",
+        "task_list": {
+            "[task_id_1]": "print Hello World!",
+            ....,
+            "[task_id_n]": "#include <stdio.h> etc."
+        }
+    },
+    ... and so on
+}
+
+usage: convert.py [-h] [-u USERNAMES] [-n NUMBER_OF_TASKS] INFILE OUTFILE
+
+positional arguments:
+  INFILE                Ilias exam data
+  OUTFILE               Where to write the final file
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -u USERNAMES, --usernames USERNAMES
+                        a json dict matno -> email
+  -n NUMBER_OF_TASKS, --NUMBER_OF_TASKS NUMBER_OF_TASKS
+                        Number of tasks every student should have answered
+
+
+Author: Jan Maximilian Michal
+Date: 30 March 2017
+"""
+
+import argparse
+import json
+import os
+import re
+import urllib.parse
+from collections import defaultdict, namedtuple
+
+from xlrd import open_workbook
+
+parser = argparse.ArgumentParser()
+parser.add_argument('INFILE', help='Ilias exam data')
+parser.add_argument('OUTFILE', help='Where to write the final file')
+parser.add_argument('-u', '--usernames', help='a json dict matno -> email')
+parser.add_argument(
+    '-n', '--NUMBER_OF_TASKS',
+    default=0, # don't check
+    metavar='NUMBER_OF_TASKS',
+    type=int,
+    help='number of tasks every student should have answered (for verification)')
+
+
+
+# one user has one submission (code) per task
+# yes, I know it is possible to name match groups via (?P<name>) but
+# I like this solution better since it gets the job done nicely
+user_head = namedtuple('user_head', 'kohorte, name')
+user_head_re = re.compile(r'^Ergebnisse von Testdurchlauf (?P<kohorte>\d+) für (?P<name>[\w\s\.,-]+)$')
+
+# one task has a title and id and hpfly code
+task_head_re = re.compile(r'^Quellcode Frage(?P<title>.*) \d{8}$')
+
+# for parsing the weird mat no
+matno_re = re.compile(r'^(?P<matrikel_no>\d{8})-(\d{3})-(\d{3})$')
+
+# Modify these iterators in order to change extraction behaviour
+
+def converter(infile, usernames=None, number_of_tasks=0,):
+
+    def sheet_iter_meta(sheet):
+        """ yield first and second col entry as tuple of (name, matnr) """
+        for row in (sheet.row(i) for i in range(1, sheet.nrows)):
+            m = re.search(matno_re, row[1].value)
+            yield row[0].value, m.group('matrikel_no') if m else row[1].value
+
+
+    def sheet_iter_data(sheet):
+        """ yields all rows that are not of empty type as one string """
+        for row in (sheet.row(i) for i in range(sheet.nrows)):
+            if any(map(lambda c: c.ctype, row)):
+                yield ''.join(c.value for c in row)
+
+    # meta sheet contains ilias evaluation names usernames etc - data contains code
+    meta, *data = open_workbook(infile, open(os.devnull, 'w')).sheets()
+
+    # nice!
+    name2mat = dict(sheet_iter_meta(meta))
+
+    # from xls to lists and namedtuples
+    # [ [user0, task0_h, code0, ..., taskn, coden ], ..., [...] ]
+    root = []
+    for sheet in data:
+        for row in sheet_iter_data(sheet):
+            user = re.search(user_head_re, row)
+            task = re.search(task_head_re, row)
+            if user:
+                root.append([user_head(*user.groups())])
+            elif task:
+                root[-1].append(task.group('title'))
+            else: # should be code
+                root[-1].append(urllib.parse.unquote(row).strip())
+
+    if number_of_tasks:
+        for (user, *task_list) in sorted(root, key=lambda u: u[0].name):
+            assert len(task_list) == number_of_tasks * 2
+
+    mat_to_email = defaultdict(str)
+    if usernames:
+        with open(usernames) as data:
+            mat_to_email.update(json.JSONDecoder().decode(data.read()))
+
+    def get_username(user):
+        if name2mat[user.name] in mat_to_email:
+            return mat_to_email[name2mat[user.name]].split('@')[0]
+        return ''.join(filter(str.isupper, user.name)) + name2mat[user.name]
+
+    usernames = {user.name : get_username(user) for (user, *_) in root}
+
+    # form list to json_like via comprehension
+    # the format {userinitials + matrikel_no : {name:, matrikel_no:, tasklist: {id:, ..., id:}}}
+    return {
+        usernames[user.name] : {
+            'name' : user.name,
+            'email' : mat_to_email[name2mat[user.name]],
+            'matrikel_no' : name2mat[user.name],
+            'submissions' : [
+                {
+                    "type" : task,
+                    "code" : code,
+                    "tests" : {},
+                } for task, code in zip(task_list[::2], task_list[1::2])
+            ]
+        } for (user, *task_list) in sorted(root, key=lambda u: u[0].name)
+    }
+
+def write_to_file(json_dict, outfile):
+    # just encode python style
+    with open(outfile, "w") as out:
+        out.write(json.JSONEncoder().encode(json_dict))
+
+    print(f"Wrote data to {outfile}. Done.")
+
+
+def main():
+    args = parser.parse_args()
+    json_dict = converter(args.INFILE, args.usernames, args.NUMBER_OF_TASKS)
+    write_to_file(json_dict, args.OUTFILE)
+
+if __name__ == '__main__':
+    SCRIPT = True
+    main()
diff --git a/util/importer.py b/util/importer.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b6226ec326e7367607ce16e6263b0c874820b40
--- /dev/null
+++ b/util/importer.py
@@ -0,0 +1,443 @@
+import csv
+import json
+import os
+import readline
+import secrets
+import configparser
+from typing import Callable
+
+from django.contrib.auth.models import Group, User
+
+import util.convert
+import util.processing
+from core.models import (ExamType, Feedback, Student, Submission,
+                         SubmissionType, Test)
+from util.messages import *
+from util.processing import EmptyTest
+
# Django auth groups every imported account is sorted into. Resolved once at
# import time, so these groups must already exist in the database.
STUDENTS  = Group.objects.get(name='Students')
TUTORS    = Group.objects.get(name='Tutors')
REVIEWERS = Group.objects.get(name='Reviewers')

# State files written into the current working directory.
HISTFILE  = '.importer_history'    # readline history of the interactive session
RECORDS   = '.importer'            # one line per completed loader function
PASSWORDS = '.importer_passwords'  # INI file holding generated credentials

# Sentinel defaults for i(): which answer a bare <enter> selects.
YES = 'Y/n'
NO  = 'y/N'

# Maps lowercase user input to boolean answers for yes/no questions.
valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}

# Maps a util.processing test class name to the Feedback origin constant
# stored with auto-generated feedback.
FEEDBACK_MAPPER = {
    util.processing.EmptyTest.__name__    : Feedback.WAS_EMPTY,
    util.processing.CompileTest.__name__  : Feedback.DID_NOT_COMPILE,
    util.processing.LinkTest.__name__     : Feedback.COULD_NOT_LINK,
    util.processing.UnitTestTest.__name__ : Feedback.FAILED_UNIT_TESTS,
}

# Order in which test results are imported for each submission (mirrors the
# dependency chain declared in util.processing).
TEST_ORDER = (
    util.processing.EmptyTest.__name__,
    util.processing.CompileTest.__name__,
    util.processing.LinkTest.__name__,
    util.processing.UnitTestTest.__name__,
)
+
+
class chdir_context(object):
    """Step into a directory temporarily.

    On entry the process working directory is changed to *path*; on exit it
    is restored to whatever it was when this object was constructed.
    """

    def __init__(self, path):
        self.old_dir = os.getcwd()  # remember where to return to
        self.new_dir = path

    def __enter__(self):
        info(f'Changing to {self.new_dir}')
        os.chdir(self.new_dir)

    def __exit__(self, *args):
        # BUG FIX: the original logged self.new_dir here -- the directory
        # being left -- instead of the directory actually returned to.
        info(f'Returning to {self.old_dir}')
        os.chdir(self.old_dir)
+
+
def get_xkcd_password(k=2):
    """Concatenate *k* random dictionary words into an xkcd-style password.

    Candidate words are the lowercased lines of the system word list whose
    raw line length (newline included) is 6 or 7 characters.
    """
    with open('/usr/share/dict/words') as words:
        unique_words = {word.strip().lower()
                        for word in words if 5 < len(word) < 8}
        pool = list(unique_words)

    return ''.join(secrets.choice(pool) for _ in range(k))
+
+
def i(prompt: str, default: str='', is_path: bool=False, is_file: bool=False):
    """Ask a question on stdin and return the (possibly validated) answer.

    Args:
        prompt: question text, shown as '[Q] <prompt>'.
        default: either the YES/NO sentinel (boolean question resolved via
            the module-level `valid` mapping) or a plain string returned
            when the user just hits enter.
        is_path: re-prompt until the answer is an existing path.
        is_file: re-prompt until the answer is an existing regular file.

    Returns:
        bool for YES/NO questions, otherwise the entered string.
    """
    # Identity comparison is deliberate: only the module sentinels YES/NO
    # switch this into boolean mode.
    if default is YES or default is NO:
        # NOTE(review): input not present in `valid` raises KeyError here --
        # confirm whether a re-prompt would be preferable.
        answer = valid[input(f'[Q] {prompt} ({default}): ').lower() or ('y' if YES == default else 'n')]
    elif default:
        answer = input(f'[Q] {prompt} ({default}): ') or default
    else:
        answer = input(f'[Q] {prompt}: ')

    # Recurse until the filesystem constraint (if any) is satisfied.
    if (is_path or is_file) and not os.path.exists(answer) or is_file and not os.path.isfile(answer):
        warn(f'The {"path" if is_path else "file"} does not exist. Please try again.')
        return i(prompt, default, is_path, is_file)

    return answer
+
+
def store_password(username, group, password):
    """Persist a generated password in the INI-style PASSWORDS file."""
    credentials = configparser.ConfigParser()
    credentials.read(PASSWORDS)

    if group not in credentials:
        credentials[group] = {}

    credentials[group][username] = password

    with open(PASSWORDS, 'w') as passwd_file:
        credentials.write(passwd_file)
+
+
def add_user(username: str, group: str, **kwargs):
    """ This is a specific wrapper for the django update_or_create method of
    objects.
        * A new user is created, and password and group are set accordingly.
        * If the user existed before, the password is NOT changed but the
          group is. A user must only ever belong to one group.

    Args:
        username (str): the username is the login name
        group (Group): the (only) Django auth Group the user should belong to
        **kwargs: more attributes for user creation

    Returns:
        User: the created or updated user object
    """
    username = username.strip()

    user, created = User.objects.update_or_create(
        username=username,
        defaults=kwargs
    )

    # Only brand-new users get a generated password; it is persisted to the
    # PASSWORDS file so credentials can be handed out later.
    if created:
        password = get_xkcd_password()
        user.set_password(password)
        user.save()

        store_password(username, group.name, password)

    user.groups.clear() # remove all other groups
    group.user_set.add(user)

    return user
+
+
def add_student(username, email, submissions, **kwargs):
    """Create or update a student account together with its Student profile.

    Args:
        username: login name for the underlying User.
        email: stored on the User object.
        submissions: accepted but ignored here, so a student record from the
            submissions JSON can be splatted in directly; the submissions
            themselves are imported separately via add_submission().
        **kwargs: extra fields for the Student profile (e.g. name,
            matrikel_no, exam).

    Returns:
        Student: the created or updated profile.
    """

    user        = add_user(username, STUDENTS, email=email)
    student, _  = Student.objects.update_or_create(
        user=user,
        defaults={'user' : user, **kwargs}
    )

    return student
+
+
def add_submission(student_obj, code, tests, type):
    """Import one submission with its pre-computed test results.

    Creates/updates the Submission for (student, type), stores every test
    result from *tests* in TEST_ORDER, and auto-generates Feedback (score 0,
    authored by the inactive 'auto_correct' user) for submissions that
    failed a non-unit test and have no feedback yet.

    Args:
        student_obj: Student the submission belongs to.
        code: submitted source text.
        tests: mapping of test class name -> {'name', 'label', 'annotation'}
            as produced by util.processing.
        type: SubmissionType name (shadows the builtin; kept for interface
            compatibility with the submissions JSON keys).
    """

    submission_type = SubmissionType.objects.get(name=type)

    submission_obj, _ = Submission.objects.update_or_create(
        type=submission_type,
        student=student_obj,
        defaults={'text' : code}
    )

    # Pseudo-user that authors all automatically generated feedback.
    auto_correct, _ = User.objects.get_or_create(
        username='auto_correct',
        defaults={'is_active': False}
    )

    available_tests = util.processing.Test.available_tests()

    for name, test_data in ((name, tests[name]) for name in TEST_ORDER):
        test_obj, created = Test.objects.update_or_create(
            name=test_data['name'],
            submission=submission_obj,
            defaults={
                'label': test_data['label'],
                'annotation': test_data['annotation'],
            }
        )

        # Auto-feedback only for failed non-unit tests of submissions that
        # do not have feedback yet. Empty submissions are finalized
        # (ACCEPTED); everything else remains editable by tutors.
        if test_obj.label == available_tests[test_obj.name].label_failure\
                and not hasattr(test_obj.submission, 'feedback')\
                and not test_obj.name == util.processing.UnitTestTest.__name__:
            Feedback.objects.update_or_create(
                of_submission=submission_obj,
                defaults={
                    'of_tutor'  : auto_correct,
                    'score'     : 0,
                    'text'      : test_obj.label,
                    'origin'    : FEEDBACK_MAPPER[test_obj.name],
                    'status'    : Feedback.ACCEPTED if test_obj.name == EmptyTest.__name__ else Feedback.EDITABLE,
                }
            )
+
+
def add_user_list(lst, group, **kwargs):
    """Create one account per entry of *lst* inside *group*."""
    for username in lst:
        add_user(username, group, **kwargs)
+
+
def call_loader(func: Callable) -> None:
    """ This function handles if a function will be executed at all. Currently
    it just checks in the RECORDS file for the name of the function. If it is
    present the user is asked whether to run it again. After a successful run
    the function name is recorded -- but only once, so re-running a loader
    does not grow the RECORDS file.

    Args:
        func (Callable): the loader specified below
    """
    done = []
    if os.path.exists(RECORDS):
        with open(RECORDS, 'r') as records_f:
            done = [line.strip() for line in records_f]

        if func.__name__ in done and not \
                i(f'{func.__name__} has already been processed once. Proceed anyway?', NO):
            return

    func() # This executes the specified loader

    # BUG FIX: previously the name was appended unconditionally, producing a
    # duplicate line in RECORDS every time a loader was re-run.
    if func.__name__ not in done:
        with open(RECORDS, 'a') as records_f:
            records_f.write(func.__name__)
            records_f.write('\n')

    info(f'{func.__name__} is done.')
+
+
def do_convert_xls():
    """Interactively convert the ILIAS .xls export into the submissions JSON."""

    ans = i('''Do you want to convert the ILIAS .xls output to .json?''', YES)
    if not ans:
        return

    infile  = i('Please provide the path to the .xls file', is_file=True)
    # BUG FIX: the default was misspelled 'submissons.json', which did not
    # match the 'submissions.json' default expected by do_load_submissions.
    outfile = i('Where should the output go?', 'submissions.json')

    json_dict = util.convert.converter(infile)
    util.convert.write_to_file(json_dict, outfile)
+
+
def do_load_submission_types():
    """Import SubmissionTypes from a CSV plus solution and description files."""

    print(
    '''For the following import you need three files:

    1) A .csv file where the columns are: id, name, score
    2) A path to a directory where I can find sample solutions named
        <id>-lsg.c
    3) A path to a directory where I can find HTML files with an accurate
        description of the task. File name pattern has to be: <id>.html

    Example:
        $ cat submission_types.csv
        a01, Alpha Team, 10
        a02, Beta Distribution, 10
        a03, Gamma Ray, 20

        $ tree -L 2
        .
        ├── code-lsg
        │   ├── a01-lsg.c
        │   ├── a02-lsg.c
        │   └── a03-lsg.c
        └── html
            ├── a01.html
            ├── a02.html
            └── a03.html
    ''')

    path = i('Where are your files located?', '.', is_path=True)

    # All file names below are interpreted relative to the chosen directory.
    with chdir_context(path):
        submission_types_csv    = i('CSV file',         'submission_types.csv')
        lsg_dir                 = i('solution dir',     'code-lsg')
        desc_dir                = i('descriptions dir', 'html')

        with open(submission_types_csv, encoding='utf-8') as tfile:
            csv_rows = [row for row in csv.reader(tfile)]

        for row in csv_rows:
            tid, name, score = (col.strip() for col in row)
            # The sample solution and the HTML description are looked up by
            # the task id from the CSV.
            with \
                    open(os.path.join(lsg_dir, tid + '-lsg.c'), encoding='utf-8') as lsg,\
                    open(os.path.join(desc_dir, tid + '.html'), encoding='utf-8') as desc:
                data={
                    'name'          : name,
                    'description'   : desc.read(),
                    'solution'      : lsg.read(),
                    'full_score'    : int(score),
                }
            _, created = SubmissionType.objects.update_or_create(
                name=name,
                defaults=data
            )
            info(f'{"Created" if created else "Updated"} {name}')
+
+
def do_load_module_descriptions():
    """Import ExamType records from a CSV of module metadata (optional step)."""

    print('''
    This loader imports descriptions of modules in an exam. This step is purely
    optional -- Grady works just fine without these information. If you want to
    distinguish students within one instance or give information about the
    grading type you should provide this info.

    CSV file format: module_reference, total_score, pass_score, pass_only

    Example:
        B.Inf.1801,  90, 45, yes
        B.Mat.31415, 50, 10, no
    ''')

    module_description_csv = i(
        'Where is the file?', 'modules.csv', is_file=True)

    with open(module_description_csv, encoding='utf-8') as tfile:
        csv_rows = [row for row in csv.reader(tfile)]

    for row in csv_rows:
        # Zip each column onto its field name and coerce it to the matching
        # type; 'yes' (exactly) maps pass_only to True.
        data = {
            field : kind(data) for field, kind, data in zip(
                ('module_reference', 'total_score', 'pass_score', 'pass_only'),
                (str, int, int, lambda x: x == 'yes'),
                (col.strip() for col in row)
            )
        }

        _, created = ExamType.objects.update_or_create(
            module_reference=data['module_reference'],
            defaults=data,
        )

        info(f'{"Created" if created else "Updated"} ExamType {data["module_reference"]}')
+
+
def do_preprocess_submissions():
    """Offer to run the util.processing test pipeline over the submissions.

    NOTE(review): actually running the selected tests is not implemented
    yet -- any choice other than 'q' or an empty answer raises
    NotImplementedError.
    """

    print('''
    Preprocessing might take some time depending on the amount of data
    and the complexity of the programs and the corresponding unit tests. You can
    specify what test you want to run.

    Tests do depend on each other. Therefore specifying a test will also
    result in running all its dependencies\n''')

    test_enum = dict(enumerate(util.processing.Test.available_tests()))

    print('The following test are available:\n')
    print('\t[q] Do nothing')
    for j, test in test_enum.items():
        print(f'\t[{j}] {test}')
    print()

    answer = i('Which tests do you want to run?')

    if not answer or answer == 'q':
        return

    raise NotImplementedError
+
+
def do_load_submissions():
    """Import students and their submissions from the converted JSON file."""

    file = i('Get me the file with all the submissions', 'submissions.json')

    # Optionally attach every imported student to one ExamType.
    exam = {}
    if ExamType.objects.all() and i('Do you want to add module/exam information?', NO):
        exam_query_set = ExamType.objects.all()
        print('You have the following choices:\n')
        for j, exam_type in enumerate(exam_query_set):
            print(f'\t[{j}] {exam_type.module_reference}')
        print()

        exam = i('Choose wisely')
        exam = {'exam' : exam_query_set[int(exam)]}

    with open(file) as submission_file:
        submissions = json.JSONDecoder().decode(submission_file.read())

    for username, data in submissions.items():
        # `data` carries name/email/matrikel_no/submissions as produced by
        # util.convert; the submissions list is imported separately below.
        student_obj = add_student(username, **exam, **data)

        for submission_obj in data['submissions']:
            add_submission(student_obj, **submission_obj)
+
+
def do_load_tutors():
    """Import tutor accounts from a plain-text file, one username per line."""
    print('Please import tutor users by providing one name per line')
    tutor_list_file = i('List of tutors', 'tutors', is_file=True)

    with open(tutor_list_file) as tutors_f:
        add_user_list(tutors_f, TUTORS)
+
+
def do_load_reviewer():
    """Import reviewer accounts (marked as staff) from a plain-text file."""
    print('Please import reviewer users by providing one name per line')
    reviewer_list_file = i('List of reviewers', 'reviewers', is_file=True)

    with open(reviewer_list_file) as reviewers_f:
        add_user_list(reviewers_f, REVIEWERS, is_staff=True)
+
+
# All loaders in their intended execution order; running straight through
# this tuple performs a complete import.
call_order = (
    do_convert_xls,
    do_load_submission_types,
    do_load_module_descriptions,
    do_preprocess_submissions,
    do_load_submissions,
    do_load_tutors,
    do_load_reviewer
)
+
+
def start():
    """Interactive entry point: present the loader menu and dispatch.

    Reads/writes the readline history file so answers can be recalled
    between sessions. An empty choice runs all loaders in order; 'q'
    (or EOF / Ctrl-C) exits; any unexpected error is printed but does not
    prevent the history from being saved.
    """

    if os.path.exists(HISTFILE):
        readline.read_history_file(HISTFILE)

    print('''Welcome to the Grady importer!

    This script aims at making the setup of the database as easy as possible. It
    at the same time serves as a documentation on how data is imported in Grady.
    Let\'s dive right in.\n''')

    try:
        print('The following importers are available:\n')
        for fid, func in enumerate(call_order):
            print(f'\t[{fid}] {func.__name__}')
        print('\t[q] exit')
        print()

        fid = i('Choose a number or hit enter to start at the beginning')

        if not fid:
            for func in call_order:
                call_loader(func)
        elif fid in ('q', 'quit', 'exit'):
            return
        # BUG FIX: non-numeric input previously raised an uncaught
        # ValueError from int() and fell into the traceback handler below.
        elif not fid.isdigit() or not 0 <= int(fid) < len(call_order):
            warn('There is no loader with this number')
        else:
            call_loader(call_order[int(fid)])

    except (EOFError, KeyboardInterrupt):
        print()
        return
    except Exception:
        import traceback
        traceback.print_exc()
    finally:
        # Always persist the prompt history, even after errors.
        readline.write_history_file(HISTFILE)
diff --git a/util/messages.py b/util/messages.py
new file mode 100644
index 0000000000000000000000000000000000000000..8f236313b598099954497a5e3a78e93a5d61ae12
--- /dev/null
+++ b/util/messages.py
@@ -0,0 +1,21 @@
+import sys
+
+
def warn(*message):
    """Print a warning-level message, prefixed with [W]."""
    parts = ('[W]',) + message
    print(*parts)
+
def debug(*message):
    """Print a debug-level message, prefixed with [DEBUG]."""
    parts = ('[DEBUG]',) + message
    print(*parts)
+
def info(*message):
    """Print an info-level message, prefixed with [I]."""
    parts = ('[I]',) + message
    print(*parts)
+
def error(*message):
    """Print an error-level message, prefixed with [E]."""
    parts = ('[E]',) + message
    print(*parts)
+
def abort(*message):
    """Print a fatal message prefixed with [FATAL], then terminate the process."""
    parts = ('[FATAL]',) + message
    print(*parts)
    sys.exit('exiting...')
+
def exit(message='exiting...'):
    """Terminate the process, using *message* as the exit status.

    BUG FIX: the original called sys.exit(*message), unpacking the string
    into individual characters -- a TypeError for any message longer than
    one character instead of a clean exit.
    """
    sys.exit(message)
diff --git a/util/processing.py b/util/processing.py
new file mode 100644
index 0000000000000000000000000000000000000000..e83a44eb791ff800ab9dcd1dff54ee20bcde682e
--- /dev/null
+++ b/util/processing.py
@@ -0,0 +1,195 @@
+import abc
+import hashlib
+import json
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+
+try:
+    import testcases
+except ModuleNotFoundError:
+    from util import testcases
+
# Paths to the exam data, relative to this file's directory (only valid when
# util/processing.py is run as a script from within util/).
DESCFILE    = '../data/descfile.txt'  # task descriptions with USAGE lines
BINARIES    = '../data/klausur_zweittermin/bin'  # reference solution binaries
OBJECTS     = '../data/klausur_zweittermin/objects'  # precompiled test objects
SUBMISSIONS = '../data/binf1801_pre.json'  # converted submissions JSON
HEADER      = '../data/klausur_zweittermin/code-testing'  # headers for compiling
+
+
def run_cmd(cmd, stdin=None, check=False, timeout=1):
    """Run *cmd* through a shell with memory and wall-clock limits applied.

    Args:
        cmd: shell command line; it is prefixed with 'ulimit -v 1024;
            gtimeout 0.2', so the child is limited to 1024 KiB of virtual
            memory and 0.2 s of runtime.
        stdin: text piped to the process' standard input.
        check: raise subprocess.CalledProcessError on non-zero exit if True.
        timeout: seconds before subprocess.run raises TimeoutExpired.

    Returns:
        subprocess.CompletedProcess with captured utf-8 stdout/stderr.

    NOTE(review): 'gtimeout' is the coreutils timeout binary under its
    Homebrew/macOS name; on Linux this would be plain 'timeout' -- confirm
    the deployment platform. Also confirm 1024 KiB is the intended limit
    (ulimit -v takes KiB).
    """
    return subprocess.run(
        'ulimit -v 1024; gtimeout 0.2 ' + cmd,
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
        input=stdin,
        shell=True,
        check=check,
        encoding='utf-8',
        timeout=timeout
    )
+
+
def all_subclasses(cls):
    """Return every direct and indirect subclass of *cls* (depth-recursive)."""
    found = list(cls.__subclasses__())
    for direct in cls.__subclasses__():
        found.extend(all_subclasses(direct))
    return found
+
+
def sha1(submission_obj):
    """Return the hex SHA-1 digest of a submission's source code."""
    code_bytes = submission_obj['code'].encode()
    return hashlib.sha1(code_bytes).hexdigest()
+
+
class Test(metaclass=abc.ABCMeta):
    """Abstract base class for one processing step applied to a submission.

    Subclasses declare three class attributes -- `depends` (tuple of
    prerequisite Test subclasses), `label_success` and `label_failure` --
    and implement run_test(). Instantiating a subclass immediately runs it
    (or reuses a cached result) and records the outcome in the
    submission's 'tests' dict.
    """

    @classmethod
    def available_tests(cls):
        # Map every concrete subclass name to its class object.
        return {sub.__name__ : sub for sub in all_subclasses(cls)}

    def __new__(cls, *args, **kwargs):
        # Enforce the subclass contract before any instance is created.
        assert hasattr(cls, 'depends'),         "depends not defined"
        assert hasattr(cls, 'label_success'),   "label_success not defined"
        assert hasattr(cls, 'label_failure'),   "label_failure not defined"
        return super().__new__(cls)

    def __init__(self, submission_obj, **kwargs):
        # Order matters: a failed dependency short-circuits the test, a
        # cached result is reused, and only otherwise is the test executed.

        if not self.dependencies_satisfied(submission_obj):
            self.result     = False
            self.annotation = "TEST DEPENDENCY NOT MET"
            self.serialize(submission_obj)

        elif str(self) in submission_obj['tests']:
            self.deserialize(submission_obj['tests'][str(self)])

        else:
            self.result, self.annotation = self.run_test(
                submission_obj, **kwargs)
            self.serialize(submission_obj)

    def __bool__(self):
        return self.result

    def __str__(self):
        # The class name doubles as the key in the submission's 'tests' dict.
        return self.__class__.__name__

    def dependencies_satisfied(self, submission_obj):
        # Instantiating each dependency runs it (or re-reads its cached
        # result) on the same submission.
        return all(dep(submission_obj).result for dep in self.depends)

    def deserialize(self, test):
        # Restore result/annotation from a previously serialized entry.
        self.result     = test['label'] == self.label_success
        self.annotation = test['annotation']

    def serialize(self, submission_obj):
        # Persist the outcome into the submission's 'tests' dict.
        as_dict = {
            'name'       : str(self),
            'annotation' : self.annotation
        }

        if self.result:
            as_dict['label'] = self.label_success
        else:
            as_dict['label'] = self.label_failure

        submission_obj['tests'][str(self)] = as_dict

    @abc.abstractmethod
    def run_test(self, submission_obj) -> (bool, str):
        """Execute the test; return (passed, annotation)."""
        return NotImplemented
+
+
class EmptyTest(Test):
    """Fails when a submission contains no code apart from whitespace."""

    depends         = ()
    label_success   = 'NOT_EMPTY'
    label_failure   = 'EMPTY'

    def run_test(self, submission_obj):
        code = submission_obj['code']
        return code.strip() != '', ""
+
+
class CompileTest(Test):
    """Compiles the submitted C code; fails on any compiler error."""

    depends         = (EmptyTest, )
    label_success   = 'COMPILATION_SUCCESSFUL'
    label_failure   = 'COMPILATION_FAILED'

    def run_test(self, submission_obj):
        """Pipe the code into gcc-7; return (compiled_ok, compiler stderr)."""

        # -xc with '-' reads C source from stdin; headers are expected in
        # the code-testing/ directory of the working dir.
        ret = run_cmd("gcc-7 -std=c11 -Wall -c -xc -Icode-testing -o code.o -",
                      submission_obj['code'])
        return not ret.returncode, ret.stderr
+
+
class LinkTest(Test):
    """Links the compiled submission against the task's testing object file."""

    depends         = (CompileTest, )
    label_success   = 'LINKING_SUCCESSFUL'
    label_failure   = 'LINKING_FAILED'

    def run_test(self, submission_obj):
        """Link code.o with objects/<task-id>-testing.o; return (ok, stderr)."""

        task_type = submission_obj['type']
        match = re.search(r'(a0\d)', task_type)

        # BUG FIX: re.search returns None when the type string contains no
        # task id; the original crashed with AttributeError on m.group().
        # Report a failed link with a diagnostic instead.
        if match is None:
            return False, f"could not determine task id from type '{task_type}'"

        ret = run_cmd(f"gcc-7 -o code objects/{match.group(0)}-testing.o code.o")
        return not ret.returncode, ret.stderr
+
+
class UnitTestTest(Test):
    """Runs the linked binary on generated cases and compares its output
    against the reference solution's recorded output."""

    depends         = (LinkTest, )
    # NOTE(review): 'SUCCSESSFUL' is misspelled, but the label may already be
    # persisted in data -- renaming it requires a data migration.
    label_success   = 'UNITTEST_SUCCSESSFUL'
    label_failure   = 'UNITTEST_FAILED'

    @staticmethod
    def testcase(i, args, stdout):
        """Run ./code with *args*; return (passed, human-readable message)."""
        try:
            ret = run_cmd("./code %s" % args, check=True, timeout=0.1)
            assert ret.stdout == stdout
        except AssertionError:
            # `ret` is guaranteed to be bound here: the assert only runs
            # after run_cmd returned without raising.
            return False, f"Case #{i:>2}: [ASSERT FAILED] ./program {args} WAS '{ret.stdout.strip()}' SHOULD '{stdout.strip()}'"
        except subprocess.CalledProcessError as err:
            return False, f"Case #{i:>2}: [FAILED] ./program {args} WITH ERROR '{err.stderr.strip()}'"
        except subprocess.TimeoutExpired:
            return False, f"Case #{i:>2}: [TIMEOUT] ./program {args}"
        else:
            return True,  f"Case #{i:>2}: [SUCCESS] ./program {args}"

    def run_test(self, submission_obj):
        """Execute every generated case for this submission's task type.

        Returns (all_cases_passed, newline-joined per-case messages).

        NOTE(review): testcases_dict is a module-level global assigned only
        in the __main__ block below -- calling this through any other entry
        point raises NameError. Confirm the intended usage.
        """

        task = testcases_dict[submission_obj['type']]
        results, messages = zip(*list(self.testcase(i, case, result)
                                      for i, (case, result) in enumerate(zip(task['cases'], task['results']))))

        return all(results), '\n'.join(messages)
+
+
def processing(highest_test):
    """Run *highest_test* (plus its dependency chain) over every submission.

    Reads the SUBMISSIONS json, copies objects/binaries/headers into a
    fresh temporary directory, chdirs into it, and lets the Test
    constructors annotate each submission's 'tests' dict in place.

    Args:
        highest_test: the most demanding Test subclass to run; its
            `depends` chain pulls in all prerequisites.

    Returns:
        dict: the submissions structure enriched with test results.
    """

    with open(SUBMISSIONS) as submission_file:
        submissions = json.JSONDecoder().decode(submission_file.read())

    # Get something disposable
    path = tempfile.mkdtemp()
    run_cmd(f'cp -r {OBJECTS}  {path}')
    run_cmd(f'cp -r {BINARIES} {path}')
    run_cmd(f'cp -r {HEADER} {path}')
    os.chdir(path)

    for username, data in submissions.items():
        for submission_obj in data['submissions']:
            highest_test(submission_obj)
            run_cmd('rm code*')  # drop compile artifacts between submissions

    shutil.rmtree(path)
    return submissions
+
+
if __name__ == '__main__':
    # Standalone mode: generate and evaluate test cases, run the pipeline up
    # to the unit tests, and dump the annotated submissions as JSON.
    testcases_dict = testcases.evaluated_testcases(DESCFILE)
    print(json.dumps(processing(UnitTestTest), sort_keys=True, indent=4))
diff --git a/util/testcases.py b/util/testcases.py
new file mode 100644
index 0000000000000000000000000000000000000000..b94fab230a0c11b6cb850b00e1e83d95431ab705
--- /dev/null
+++ b/util/testcases.py
@@ -0,0 +1,98 @@
+import json
+import os
+import random
+import re
+from string import ascii_letters, digits
+
+try:
+    import processing
+except ModuleNotFoundError:
+    from util import processing
+
# Value generator names; each has a function of the same name below, and a
# '<name>_list' variant is synthesized at runtime by testcases().
types = ('integer', 'unsigned_integer', 'character', 'string')
list_sep = '...'

# Parses one task header: '-- <title>' followed by a USAGE line (with the
# executable and its argument syntax) or the literal 'NO EXECUTABLE'.
re_task = re.compile(
    r'^-- (?P<title>.*)\n(USAGE: (?P<cmd>[\./\w]+) (?P<syntax>.*)|NO EXECUTABLE)', re.MULTILINE)
# Matches '<type>' or '<type_list>' placeholders inside a USAGE syntax string.
re_args = re.compile(rf"<({'|'.join(types)}|{'|'.join(t + '_list' for t in types)})>")
+
+
def call_function(name: str, *args, **kwargs):
    """Look *name* up in this module's globals and invoke it."""
    func = globals()[name]
    return func(*args, **kwargs)
+
+
def integer(bounds=50):
    """Return a random int in the closed interval [-bounds, bounds]."""
    low, high = -bounds, bounds
    return random.randint(low, high)
+
+
def unsigned_integer(upper=50):
    """Return a random non-negative int no greater than *upper*."""
    value = random.randint(0, upper)
    return value
+
+
def character():
    """Pick one random char; letters weighted 10x, digits 2x vs punctuation."""
    pool = 10*ascii_letters + 2*digits + '%*+,-./:?@[]^_{}~'
    return random.choice(pool)
+
+
def string(lenght=31):
    """Return a random string of at most *lenght* characters.

    NOTE: the parameter name 'lenght' is a typo for 'length'; it is kept
    unchanged so keyword callers keep working.
    """
    count = unsigned_integer(lenght)
    return ''.join(character() for _ in range(count))
+
+
def type_list(_type):
    """Return a zero-arg function yielding a space-separated list of *_type* values."""
    def generic_list():
        upper = unsigned_integer(6) * 2
        values = (str(call_function(_type)) for _ in range(2, upper))
        return ' '.join(values)
    return generic_list
+
+
def rubbish():
    """Return one random value drawn from any known scalar or list type."""
    candidates = tuple(t + '_list' for t in types) + types
    picked = random.choice(candidates)
    return str(call_function(picked))
+
+
def argument_generator(syntax):
    """Produce a concrete argument string for a USAGE *syntax* template."""
    # Collapse '<t> <t> ... <t> <t>' into a single '<t_list>' placeholder.
    syntax = re.sub(r'<([\w\s]+)> <\1> \.\.\. <\1> <\1>', r'<\1_list>', syntax)
    # Normalize two-word type names: '<unsigned integer>' -> '<unsigned_integer>'.
    syntax = re.sub(r'<(\w+)\s(\w+)>', r'<\1_\2>', syntax)

    values = [str(call_function(arg)) for arg in re.findall(re_args, syntax)]
    return ' '.join(values)
+
+
def testcases_generator(task, n=10):
    """Yield argument strings for one parsed task.

    Yields nothing when the task has no USAGE syntax. Otherwise yields two
    trivial cases ('' and '0'), n//2 random rubbish cases, and n cases
    conforming to the task's syntax.
    """
    syntax = task.group('syntax')
    if not syntax:
        return

    yield ''
    yield '0'

    for _ in range(n // 2):
        yield rubbish()

    for _ in range(n):
        yield argument_generator(syntax)
+
+
def testcases(description_path):
    """Parse the description file and generate raw test cases per task.

    Returns:
        dict: {title: {'cmd': executable path or None, 'cases': [arg strings]}}
        ('cmd' is None for tasks marked NO EXECUTABLE).
    """
    # Synthesize integer_list, unsigned_integer_list, ... in module globals
    # so call_function() can resolve '<type>_list' placeholders.
    for t in types:
        globals()[t + '_list'] = type_list(t) # dynamic helper definition

    with open(description_path) as description_file:
        description = description_file.read()

    return {
        task['title'] : {
            'cmd' : task['cmd'],
            'cases' : [t for t in testcases_generator(task)]
        } for task in re.finditer(re_task, description)
    }
+
def evaluated_testcases(description_path):
    """Generate test cases and record the reference binary's output for each.

    Args:
        description_path: path to the task description file parsed by
            testcases().

    Returns:
        dict: the testcases() structure with a 'results' list (one stdout
        string per case) added to every task that has an executable.
    """
    task_testcases = testcases(description_path)

    # Only tasks with an executable ('cmd') can be evaluated.
    for task in filter(lambda t: t['cmd'], task_testcases.values()):
        # FIX: the original wrapped os.path.join in a second, redundant
        # os.path.join call; one join is sufficient.
        path_to_binary = os.path.join(processing.BINARIES, os.path.basename(task['cmd']))
        task['results'] = [processing.run_cmd(f"{path_to_binary} {case}").stdout
                           for case in task['cases']]

    return task_testcases
+
if __name__ == '__main__':
    # Standalone mode: print the generated and evaluated test cases as JSON.
    print(json.dumps(evaluated_testcases(processing.DESCFILE), sort_keys=True, indent=4))