diff --git a/convert.py b/convert.py
index e93ba416676f884f407ed08920ae9435027c17c8..6829bf87eacb60f8eb777db62f1715f6f64d5a64 100755
--- a/convert.py
+++ b/convert.py
@@ -43,6 +43,7 @@ from xlrd import open_workbook
 parser = argparse.ArgumentParser()
 parser.add_argument('INFILE', help='Ilias exam data')
 parser.add_argument('OUTFILE', help='Where to write the final file')
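+# the --usernames file is expected to be a flat json object,
+# e.g. {"12345678": "jane.doe@stud.example.de", ...}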
+parser.add_argument('-u', '--usernames', help='path to a json file mapping matriculation number to email')
 parser.add_argument(
     '-n', '--NUMBER_OF_TASKS',
     default=0, # don't check
@@ -104,11 +105,19 @@ if args.NUMBER_OF_TASKS:
     for (user, *task_list) in sorted(root, key=lambda u: u[0].name):
         assert len(task_list) == args.NUMBER_OF_TASKS * 2
 
+if args.usernames:
+    with open(args.usernames) as data:
+        mat_to_email = json.load(data)
+    usernames = {
+        user.name: mat_to_email[name2mat[user.name]].split('@')[0]
+        for (user, *_) in root
+    }
+else: # legacy support / fallback
+    usernames = {
+        user.name: ''.join(filter(str.isupper, user.name)) + name2mat[user.name]
+        for (user, *_) in root
+    }
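+# `usernames` now maps each student's full name to the key used in the json output:
+# the email local part if --usernames was given, otherwise initials + matriculation number.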
 
 # form list to json_like via comprehension
 # the format {userinitials + matrikel_no : {name:, matrikel_no:, tasklist: {id:, ..., id:}}}
 json_dict = {
-    ''.join(filter(str.isupper, user.name)) + name2mat[user.name] : {
+    usernames[user.name] : {
         'name' : user.name,
         'matrikel_no' : name2mat[user.name],
         'submissions' : {
diff --git a/core/views/user_startpages.py b/core/views/user_startpages.py
index 22846e7e8e4f454a5bc31d843c1acb3d96455720..c2d12ba9da44d5be1dfae895b5b2db071af3be70 100644
--- a/core/views/user_startpages.py
+++ b/core/views/user_startpages.py
@@ -66,7 +66,7 @@ def reviewer_view(request):
         'submission_type_list': get_annotated_feedback_count(),
         'tutor_list': User.objects.annotate(Count('corrected_submissions')).filter(groups__name='Tutors'),
         'submission_list': Submission.objects.all(),
-        'feedback_list_manual': Feedback.objects.filter(origin=Feedback.MANUAL).order_by('-status', '-modified'),
+        'feedback_list_manual': Feedback.objects.filter(origin=Feedback.MANUAL).order_by('-status', 'of_submission__student', '-modified'),
         'feedback_list_empty': Feedback.objects.filter(origin=Feedback.WAS_EMPTY),
         'feedback_list_did_not_compile': Feedback.objects.filter(origin=Feedback.DID_NOT_COMPILE),
         'feedback_list_could_not_link': Feedback.objects.filter(origin=Feedback.COULD_NOT_LINK),
diff --git a/scripts/README.rst b/scripts/README.rst
new file mode 100644
index 0000000000000000000000000000000000000000..f07fc7c98533c47e545e02655b1c317d173e884e
--- /dev/null
+++ b/scripts/README.rst
@@ -0,0 +1,11 @@
+What is this directory about?
+=============================
+
+Well, it simply serves as a collection of files that currently live in folders
+that are not part of the git repository, since they contain volatile or test
+data. I include them here for the sake of completeness, but they will be
+removed in later versions, since their work has to be encapsulated in the
+overall process. Once the documentation becomes more accurate, a detailed
+explanation of how to use them will be added.
+
+.. note:: Please keep in mind: these files are of poor quality and are likely to fail if not used correctly.
diff --git a/scripts/compile.py b/scripts/compile.py
new file mode 100755
index 0000000000000000000000000000000000000000..091669a73d967085f2668d1f80a007a0221302df
--- /dev/null
+++ b/scripts/compile.py
@@ -0,0 +1,104 @@
+#!/usr/local/bin/python3
+"""This script adds compiler output to the json output of the convert script
+
+[description]
+"""
+import subprocess
+import json
+import os
+
+TEMP_DIR        = 'temp_code'
+OUTFILE         = 'submissions_compiled.json'
+INFILE          = 'submissions.json'
+OBJECT_DIR      = 'klausur_tag_01/objects'
+
+ALL_OK          = 0
+COMPILE_ERROR   = 1
+LINKER_ERROR    = 2
+WARNINGS        = 3
+
+
+def get_compiler_output(task_id, text):
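+    """Compile one submission with gcc-6 and, if a test object exists, link it.
+
+    The submission source is passed to gcc on stdin and the object file is
+    written to TEMP_DIR/<task_id>.o. Returns (compile_cmd, link_cmd or None,
+    status), where status is ALL_OK, COMPILE_ERROR or LINKER_ERROR.
+    """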
+    dst = open(os.path.join(TEMP_DIR, task_id + '.o'), 'w')
+    try:
+        compile_cmd = subprocess.run(
+            [
+                "gcc-6", "-Wall", "-std=c11", "-c", "-xc",
+                "-o", dst.name,
+                f"-I{TEMP_DIR}",
+                "-"
+            ],
+            stderr=subprocess.PIPE,
+            input=text,
+            encoding='utf-8',
+        )
+
+        if compile_cmd.returncode:
+            return compile_cmd, None, COMPILE_ERROR # compilation failed
+
+        object_code = os.path.join(OBJECT_DIR, f"{task_id}-testing.o")
+        if os.path.exists(object_code): # a test object exists, so link against it
+            link_cmd = subprocess.run(
+                [
+                    "gcc-6",
+                    "-o", "/dev/null",
+                    dst.name,
+                    object_code,
+                ],
+                stderr=subprocess.PIPE,
+                encoding='utf-8',
+            )
+        else: # no test object is shipped for this task
+            if task_id == 'a05': # for day 2, task a05 only needs to compile
+                return compile_cmd, None, compile_cmd.returncode
+            elif task_id == 'a06': # a06 links against the previously compiled a05 object
+                link_cmd = subprocess.run(
+                    [
+                        "gcc-6",
+                        "-o", "/dev/null",
+                        dst.name,
+                        os.path.join(TEMP_DIR, "a05.o"),
+                    ],
+                    stderr=subprocess.PIPE,
+                    encoding='utf-8',
+                )
+            else: # unknown task without a test object: compiling is all we can check
+                return compile_cmd, None, compile_cmd.returncode
+
+        if link_cmd.returncode:
+            return compile_cmd, link_cmd, LINKER_ERROR
+
+        return compile_cmd, link_cmd, ALL_OK
+    finally:
+        dst.close()
+
+
+def main():
+    with open(INFILE, 'r', encoding='utf-8') as submissions:
+        data = json.load(submissions)
+
+    total = len(data)
+    for i, (username, userinfo) in enumerate(data.items()):
+        print(f"\r- {i+1}/{total} done. processing submissions of {username}\t\t\t\t", end='')
+
+        # create new entry
+        co = userinfo['compiler_output'] = {}
+
+        for task_id, submission in userinfo['submissions'].items():
+            if submission:
+                _compile, _link, err = get_compiler_output(task_id, submission)
+                msg = {
+                    COMPILE_ERROR: "\n\n[FAIL] DID NOT COMPILE",
+                    LINKER_ERROR: "\n\n[FAIL] COULD NOT LINK",
+                    ALL_OK: "",
+                }[err]
+                co[task_id] = _compile.stderr + (_link.stderr if _link else "") + msg
+            else:
+                co[task_id] = ""
+
+    print()
+
+    with open(OUTFILE, 'w', encoding='utf-8') as submissions:
+        json.dump(data, submissions)
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/matrikel_to_email.py b/scripts/matrikel_to_email.py
new file mode 100644
index 0000000000000000000000000000000000000000..920a278fbbf3c4adae90750c8936aedb414d1831
--- /dev/null
+++ b/scripts/matrikel_to_email.py
@@ -0,0 +1,10 @@
+"""Build a matriculation number -> email mapping from the flexnow csv exports."""
+from itertools import chain
+import json
+import re
+
+OUTFILE = 'matno2email.json'
+
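+# assumed line format of the csv exports: "<matno>\t<email>" or "<matno>;<email>"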
+with \
+        open('binf1801-flexnow-20170329.csv') as inf, \
+        open('bphy1601-flexnow-20170328.csv') as phy, \
+        open(OUTFILE, "w") as out:
+    matno2email = {
+        matno: email
+        for (matno, email) in (
+            re.split(r'[\t;]', line.strip()) for line in chain(inf, phy) if line.strip()
+        )
+    }
+    json.dump(matno2email, out)