diff --git a/core/models.py b/core/models.py
deleted file mode 100644
index 77ae5c3f3a3e34905609aa1e8ffb697be2be5811..0000000000000000000000000000000000000000
--- a/core/models.py
+++ /dev/null
@@ -1,769 +0,0 @@
-'''
-Grady Model Description
------------------------
-
-See docstring of the individual models for information on the setup of the
-database.
-'''
-
-import logging
-import secrets
-import uuid
-from collections import OrderedDict
-from random import randrange
-from typing import Dict
-
-import constance
-from django.contrib.auth import get_user_model
-from django.contrib.auth.models import AbstractUser, UserManager
-from django.db import models, transaction
-from django.db.models import (BooleanField, Case, Count, F, IntegerField, Q,
-                              QuerySet, Sum, Value, When)
-from django.db.models.functions import Coalesce
-
-log = logging.getLogger(__name__)
-config = constance.config
-
-
-def random_matrikel_no() -> str:
-    """Use as a default value for student's matriculation number.
-
-    Returns:
-        str: an eight digit number
-    """
-    return str(10_000_000 + randrange(90_000_000))
-
-
-def get_annotated_tutor_list() -> QuerySet:
-    """All tutor accounts are annotate with a field that includes the number of
-    feedback that tutor has collaborated in.
-
-    Returns:
-        TYPE: the annotated QuerySet
-    """
-    return get_user_model().objects\
-        .filter(groups__name='Tutors')\
-        .annotate(Count('feedback_list'))\
-        .order_by('-feedback_list__count')
-
-
-def get_random_element_from_queryset(queryset):
-    qs_elements = queryset.all()
-    length = len(qs_elements)
-    index = secrets.choice(range(length))
-    return qs_elements[index]
-
-
-class ExamType(models.Model):
-    """A model that contains information about the module a submission can
-    belong to. The information is not needed and is currently, just used to
-    detect if students already have enough points to pass an exam.
-
-    It is NOT intended to use this for including different exams regarding
-    submissions types.
-
-    Attributes
-    ----------
-    module_reference : CharField
-        a unique reference that identifies a module within the university
-    pass_only : BooleanField
-        True if no grade is given
-    pass_score : PositiveIntegerField
-        minimum score for (just) passing
-    total_score : PositiveIntegerField
-        maximum score for the exam (currently never used anywhere)
-    """
-    class Meta:
-        verbose_name = "ExamType"
-        verbose_name_plural = "ExamTypes"
-
-    def __str__(self) -> str:
-        return self.module_reference
-
-    exam_type_id = models.UUIDField(primary_key=True,
-                                    default=uuid.uuid4,
-                                    editable=False)
-    module_reference = models.CharField(max_length=50, unique=True)
-    total_score = models.PositiveIntegerField()
-    pass_score = models.PositiveIntegerField()
-    pass_only = models.BooleanField(default=False)
-
-
-class SubmissionType(models.Model):
-    """This model mostly holds meta information about the kind of task that was
-    presented to the student. It serves as a foreign key for the submissions
-    that are of this type. This model is currently NOT exposed directly in a
-    view.
-
-    Attributes
-    ----------
-    description : TextField
-        The task description the student had to fulfill. The content may be
-        HTML formatted.
-    full_score : PositiveIntegerField
-        Maximum score one can get on that one
-    name : CharField
-        The original title of the exam. This is wildly used as an identifier by
-        the preprocessing scripts.
-    solution : TextField
-        A sample solution or a correction guideline
-    """
-
-    C = 'c'
-    JAVA = 'java'
-    MIPS = 'mipsasm'
-    HASKELL = 'haskell'
-
-    LANGUAGE_CHOICES = (
-        (C, 'C syntax highlighting'),
-        (JAVA, 'Java syntax highlighting'),
-        (MIPS, 'Mips syntax highlighting'),
-        (HASKELL, 'Haskell syntax highlighting'),
-    )
-
-    submission_type_id = models.UUIDField(primary_key=True,
-                                          default=uuid.uuid4,
-                                          editable=False)
-    name = models.CharField(max_length=100, unique=True)
-    full_score = models.PositiveIntegerField(default=0)
-    description = models.TextField()
-    solution = models.TextField()
-    programming_language = models.CharField(max_length=25,
-                                            choices=LANGUAGE_CHOICES,
-                                            default=C)
-
-    def __str__(self) -> str:
-        return self.name
-
-    class Meta:
-        verbose_name = "SubmissionType"
-        verbose_name_plural = "SubmissionType Set"
-
-    @classmethod
-    def get_annotated_feedback_count(cls) -> QuerySet:
-        """ Annotates submission lists with counts
-
-        The following fields are annotated:
-            * number of submissions per submission type
-            * count of received *accepted* feedback per submission type
-            * and finally the progress on each submission type as percentage
-
-        The QuerySet that is return is ordered by name lexicographically.
-
-        Returns:
-            The annotated QuerySet as described above
-        """
-        return cls.objects\
-            .annotate(  # to display only manual
-                feedback_final=Count(
-                    Case(When(
-                        Q(submissions__meta__has_final_feedback=True),
-                        then=Value(1)), output_field=IntegerField())
-                ),
-                feedback_in_validation=Count(
-                    Case(When(
-                        Q(submissions__meta__done_assignments=1) &
-                        Q(submissions__meta__has_final_feedback=False),
-                        then=Value(1)), output_field=IntegerField())
-                ),
-                feedback_in_conflict=Count(
-                    Case(When(
-                        Q(submissions__meta__done_assignments=2) &
-                        Q(submissions__meta__has_final_feedback=False),
-                        then=Value(1)), output_field=IntegerField())
-                ),
-                submission_count=Count('submissions'),
-            ).order_by('name')
-
-
-class TutorManager(UserManager):
-
-    def get_queryset(self):
-        return super().get_queryset().filter(role=UserAccount.TUTOR)
-
-    def with_feedback_count(self):
-        def _get_counter(stage):
-            return Count(Case(
-                When(
-                    Q(subscriptions__feedback_stage=stage) &
-                    Q(subscriptions__assignments__is_done=True),
-                    then=Value(1))),
-                output_field=IntegerField())
-
-        return self.get_queryset() \
-            .annotate(feedback_created=_get_counter(
-                SubmissionSubscription.FEEDBACK_CREATION)) \
-            .annotate(feedback_validated=_get_counter(
-                SubmissionSubscription.FEEDBACK_VALIDATION))
-
-
-class UserAccount(AbstractUser):
-    """
-    An abstract base class implementing a fully featured User model with
-    admin-compliant permissions.
-
-    Username and password are required. Other fields are optional.
-    """
-
-    STUDENT = 'Student'
-    TUTOR = 'Tutor'
-    REVIEWER = 'Reviewer'
-
-    ROLE_CHOICES = (
-        (STUDENT, 'student'),
-        (TUTOR, 'tutor'),
-        (REVIEWER, 'reviewer')
-    )
-
-    # Fields
-    role = models.CharField(max_length=50, choices=ROLE_CHOICES)
-    user_id = models.UUIDField(primary_key=True,
-                               default=uuid.uuid4,
-                               editable=False)
-
-    fullname = models.CharField('full name', max_length=70, blank=True)
-    is_admin = models.BooleanField(default=False)
-
-    # Managers
-    objects = UserManager()
-    tutors = TutorManager()
-
-    # Helper methods
-    def is_student(self):
-        return self.role == 'Student'
-
-    def is_tutor(self):
-        return self.role == 'Tutor'
-
-    def is_reviewer(self):
-        return self.role == 'Reviewer'
-
-    # All of these methods are deprecated and should be replaced by custom
-    # Managers (see tutor manager)
-    @classmethod
-    def get_students(cls):
-        return cls.objects.filter(role=cls.STUDENT)
-
-    @classmethod
-    def get_tutors(cls):
-        return cls.objects.filter(role=cls.TUTOR)
-
-    @classmethod
-    def get_reviewers(cls):
-        return cls.objects.filter(role=cls.REVIEWER)
-
-
-class StudentInfo(models.Model):
-    """
-    The StudentInfo model includes all information of a student, that we got
-    from the E-Learning output, along with some useful classmethods that
-    provide specially annotated QuerySets.
-
-    Information like email (if given), and the username are stored in the
-    associated user model.
-
-    Attributes:
-        exam (ForeignKey):
-            Which module the student wants to be graded in
-
-        has_logged_in (BooleanField):
-            Login is permitted once. If this is set the user can not log in.
-
-        matrikel_no (CharField):
-            The matriculation number of the student
-    """
-    student_id = models.UUIDField(primary_key=True,
-                                  default=uuid.uuid4,
-                                  editable=False)
-    has_logged_in = models.BooleanField(default=False)
-    matrikel_no = models.CharField(unique=True,
-                                   max_length=30,
-                                   default=random_matrikel_no)
-    exam = models.ForeignKey('ExamType',
-                             on_delete=models.CASCADE,
-                             related_name='students',
-                             null=False)
-    user = models.OneToOneField(get_user_model(),
-                                on_delete=models.CASCADE,
-                                related_name='student')
-
-    # Managed by signals
-    total_score = models.PositiveIntegerField(default=0)
-    passes_exam = models.BooleanField(default=False)
-
-    def update_total_score(self):
-        ''' This helper is invoked after feedback changes '''
-        self.total_score = self.submissions.aggregate(
-            Sum('feedback__score'))['feedback__score__sum'] or 0
-        if self.exam is not None:
-            self.passes_exam = self.total_score >= self.exam.pass_score
-        self.save()
-
-    def score_per_submission(self) -> Dict[str, int]:
-        """ TODO: get rid of it and use an annotation. """
-        if self.submissions.all():
-            return OrderedDict({
-                s.type.name: s.feedback.score if hasattr(s, 'feedback') else 0
-                for s in self.submissions.order_by('type__name')
-            })
-
-        return OrderedDict({
-            t.name: 0 for t in SubmissionType.objects.all()
-        })
-
-    @classmethod
-    def get_annotated_score_submission_list(cls) -> QuerySet:
-        """Can be used to quickly annotate a user with the necessary
-        information on the overall score of a student and if he does not need
-        any more correction.
-
-        A student is done if
-            * module type was pass_only and student has enough points
-            * every submission got accepted feedback
-
-        Returns
-        -------
-        QuerySet
-            the annotated QuerySet as described above.
-        """
-        return cls.objects.annotate(
-            overall_score=Coalesce(Sum('submissions__feedback__score'),
-                                   Value(0)),
-        ).annotate(
-            done=Case(
-                When(exam__pass_score__lt=F('overall_score'), then=Value(1)),
-                default=Value(0),
-                output_field=BooleanField()
-            )
-        ).order_by('user__username')
-
-    def disable(self):
-        """The student won't be able to login in anymore, but his current
-        session can be continued until s/he logs out.
-        """
-        self.has_logged_in = True
-        self.save()
-
-    def __str__(self) -> str:
-        return self.user.username
-
-    class Meta:
-        verbose_name = "Student"
-        verbose_name_plural = "Student Set"
-
-
-class Test(models.Model):
-    """Tests contain information that has been unapproved by automated tests,
-    and directly belongs to a submission. Often certain Feedback was already
-    given by information provided by these tests.
-
-    Attributes
-    ----------
-    annotation : TextField
-        All the output of the test (e.g. compiler output)
-    label : CharField
-        Indicates SUCCES or FAILURE
-    name : CharField
-        The name of the test that was performed
-    submission : ForeignKey
-        The submission the tests where unapproved on
-    """
-    test_id = models.UUIDField(primary_key=True,
-                               default=uuid.uuid4,
-                               editable=False)
-
-    name = models.CharField(max_length=30)
-    label = models.CharField(max_length=50)
-    annotation = models.TextField()
-    submission = models.ForeignKey('submission',
-                                   related_name='tests',
-                                   on_delete=models.CASCADE,)
-
-    class Meta:
-        verbose_name = "Test"
-        verbose_name_plural = "Tests"
-        unique_together = (('submission', 'name'),)
-
-    def __str__(self) -> str:
-        return f'{self.name} {self.label}'
-
-
-class Submission(models.Model):
-    """The answer of a student to a specific question. Holds the answer and
-    very often serves as ForeignKey.
-
-    With the method assign_tutor feedback for a submission can be created and a
-    tutor will be assigned to this feedback permanently (unless deleted by a
-    reviewer or if it gets reassigned). There cannot be more than ONE feedback
-    per Submission.
-
-    Attributes
-    ----------
-    seen_by_student : BooleanField
-        True if the student saw his accepted feedback.
-    student : ForgeignKey
-        The student how cause all of this
-    text : TextField
-        The code/text submitted by the student
-    type : OneToOneField
-        Relation to the type containing meta information
-    """
-    submission_id = models.UUIDField(primary_key=True,
-                                     default=uuid.uuid4,
-                                     editable=False)
-    seen_by_student = models.BooleanField(default=False)
-    text = models.TextField(blank=True)
-    type = models.ForeignKey(
-        SubmissionType,
-        on_delete=models.PROTECT,
-        related_name='submissions')
-    student = models.ForeignKey(
-        StudentInfo,
-        on_delete=models.CASCADE,
-        related_name='submissions')
-
-    class Meta:
-        verbose_name = "Submission"
-        verbose_name_plural = "Submission Set"
-        unique_together = (('type', 'student'),)
-        ordering = ('type__name',)
-
-    def __str__(self) -> str:
-        return "Submission {}".format(self.pk)
-
-
-class Feedback(models.Model):
-    """
-    Attributes
-    ----------
-    score : PositiveIntegerField
-        A score that has been assigned to he submission. Is final if it was
-        accepted.
-    created : DateTimeField
-        When the feedback was initially created
-    of_submission : OneToOneField
-        The submission this feedback belongs to. It finally determines how many
-        points a student receives for his submission.
-    origin : IntegerField
-        Of whom was this feedback originally created. She below for the choices
-    """
-    score = models.DecimalField(max_digits=5, decimal_places=2, default=0)
-    created = models.DateTimeField(auto_now_add=True)
-    is_final = models.BooleanField(default=False)
-
-    of_submission = models.OneToOneField(
-        Submission,
-        on_delete=models.CASCADE,
-        related_name='feedback')
-
-    # the denominators that are allowed for the decimal score interpreted as a fraction
-    ALLOWED_DENOMINATORS = [1, 2]
-
-    # how was this feedback created
-    (
-        WAS_EMPTY,
-        FAILED_UNIT_TESTS,
-        DID_NOT_COMPILE,
-        COULD_NOT_LINK,
-        MANUAL,
-    ) = range(5)
-    ORIGIN = (
-        (WAS_EMPTY, 'was empty'),
-        (FAILED_UNIT_TESTS, 'passed unittests'),
-        (DID_NOT_COMPILE, 'did not compile'),
-        (COULD_NOT_LINK, 'could not link'),
-        (MANUAL, 'created by a human. yak!'),
-    )
-    origin = models.IntegerField(
-        choices=ORIGIN,
-        default=MANUAL,
-    )
-
-    class Meta:
-        verbose_name = "Feedback"
-        verbose_name_plural = "Feedback Set"
-
-    def __str__(self) -> str:
-        return 'Feedback for {}'.format(self.of_submission)
-
-    def is_full_score(self) -> bool:
-        return self.of_submission.type.full_score == self.score
-
-    def get_full_score(self) -> int:
-        return self.of_submission.type.full_score
-
-
-class FeedbackComment(models.Model):
-    """ This Class contains the Feedback for a specific line of a Submission"""
-    comment_id = models.UUIDField(primary_key=True,
-                                  default=uuid.uuid4,
-                                  editable=False)
-    text = models.TextField()
-    created = models.DateTimeField(auto_now_add=True)
-    modified = models.DateTimeField(auto_now=True)
-
-    visible_to_student = models.BooleanField(default=True)
-
-    of_line = models.PositiveIntegerField(default=0)
-    of_tutor = models.ForeignKey(
-        get_user_model(),
-        related_name="comment_list",
-        on_delete=models.PROTECT
-    )
-    of_feedback = models.ForeignKey(
-        Feedback,
-        related_name="feedback_lines",
-        on_delete=models.CASCADE,
-        null=True
-    )
-
-    class Meta:
-        verbose_name = "Feedback Comment"
-        verbose_name_plural = "Feedback Comments"
-        ordering = ('created',)
-        unique_together = ('of_line', 'of_tutor', 'of_feedback')
-
-    def __str__(self):
-        return 'Comment on line {} of tutor {}: "{}"'.format(self.of_line,
-                                                             self.of_tutor,
-                                                             self.text)
-
-
-class SubscriptionEnded(Exception):
-    pass
-
-
-class SubscriptionTemporarilyEnded(Exception):
-    pass
-
-
-class NotMoreThanTwoOpenAssignmentsAllowed(Exception):
-    pass
-
-
-class SubmissionSubscription(models.Model):
-
-    RANDOM = 'random'
-    STUDENT_QUERY = 'student'
-    EXAM_TYPE_QUERY = 'exam'
-    SUBMISSION_TYPE_QUERY = 'submission_type'
-
-    type_query_mapper = {
-        RANDOM: '__any',
-        STUDENT_QUERY: 'student__pk',
-        EXAM_TYPE_QUERY: 'student__exam__pk',
-        SUBMISSION_TYPE_QUERY: 'type__pk',
-    }
-
-    QUERY_CHOICE = (
-        (RANDOM, 'Query for any submission'),
-        (STUDENT_QUERY, 'Query for submissions of student'),
-        (EXAM_TYPE_QUERY, 'Query for submissions of exam type'),
-        (SUBMISSION_TYPE_QUERY, 'Query for submissions of submissions_type'),
-    )
-
-    FEEDBACK_CREATION = 'feedback-creation'
-    FEEDBACK_VALIDATION = 'feedback-validation'
-    FEEDBACK_CONFLICT_RESOLUTION = 'feedback-conflict-resolution'
-
-    assignment_count_on_stage = {
-        FEEDBACK_CREATION: 0,
-        FEEDBACK_VALIDATION: 1,
-        FEEDBACK_CONFLICT_RESOLUTION: 2,
-    }
-
-    stages = (
-        (FEEDBACK_CREATION, 'No feedback was ever assigned'),
-        (FEEDBACK_VALIDATION, 'Feedback exists but is not validated'),
-        (FEEDBACK_CONFLICT_RESOLUTION, 'Previous correctors disagree'),
-    )
-
-    deactivated = models.BooleanField(default=False)
-    subscription_id = models.UUIDField(primary_key=True,
-                                       default=uuid.uuid4,
-                                       editable=False)
-    owner = models.ForeignKey(get_user_model(),
-                              on_delete=models.CASCADE,
-                              related_name='subscriptions')
-    query_key = models.UUIDField(null=True)
-    query_type = models.CharField(max_length=75,
-                                  choices=QUERY_CHOICE,
-                                  default=RANDOM)
-    feedback_stage = models.CharField(choices=stages,
-                                      max_length=40,
-                                      default=FEEDBACK_CREATION)
-
-    class Meta:
-        unique_together = ('owner',
-                           'query_key',
-                           'query_type',
-                           'feedback_stage')
-
-    def _get_submission_base_query(self) -> QuerySet:
-        """ Get all submissions that are filtered by the query key and type,
-        e.g. all submissions of one student or submission type.
-        """
-        if self.query_type == self.RANDOM:
-            return MetaSubmission.objects.all()
-
-        return MetaSubmission.objects.filter(
-            **{'submission__' + self.type_query_mapper[self.query_type]:
-               self.query_key})
-
-    def _get_submissions_that_do_not_have_final_feedback(self) -> QuerySet:
-        """ There are a number of conditions to check for each submission
-
-        1. The submission does not have final feedback
-        2. The submission was not shown to this user before
-        3. The submission is not currently assigned to somebody else
-
-        Returns:
-            QuerySet -- a list of all submissions ready for consumption
-        """
-        return self._get_submission_base_query() \
-            .select_for_update(of=('self',)).exclude(
-            Q(has_final_feedback=True) |
-            Q(has_active_assignment=True) |
-            Q(feedback_authors=self.owner)
-        )
-
-    def _get_available_submissions_in_subscription_stage(self) -> QuerySet:
-        """ Another filter this time it returns all the submissions that
-        are valid in this stage. That means all previous stages have been
-        completed.
-
-        Raises:
-            SubscriptionEnded -- if the subscription will not yield
-                                 subscriptions in the future
-            SubscriptionTemporarilyEnded -- wait until new become available
-        """
-        candidates = self._get_submissions_that_do_not_have_final_feedback()
-
-        if candidates.count() == 0:
-            raise SubscriptionEnded(
-                f'The task which user {self.owner} subscribed to is done')
-
-        done_assignments_count = self.assignment_count_on_stage[self.feedback_stage]  # noqa
-        stage_candidates = candidates.filter(
-            done_assignments=done_assignments_count,
-        )
-
-        if stage_candidates.count() == 0:
-            raise SubscriptionTemporarilyEnded(
-                'Currently unavailable. Please check for more soon. '
-                'Submissions remaining: %s' % stage_candidates.count())
-
-        if (config.STOP_ON_PASS and
-                self.feedback_stage == self.FEEDBACK_CREATION):
-            stage_candidates = stage_candidates.exclude(
-                Q(submission__student__passes_exam=True) &
-                Q(submission__student__exam__pass_only=True)
-            )
-
-        return stage_candidates
-
-    @transaction.atomic
-    def get_remaining_not_final(self) -> int:
-        return self._get_submissions_that_do_not_have_final_feedback().count()
-
-    @transaction.atomic
-    def get_available_in_stage(self) -> int:
-        try:
-            return self._get_available_submissions_in_subscription_stage().count()  # noqa
-        except (SubscriptionTemporarilyEnded, SubscriptionEnded):
-            return 0
-
-    @transaction.atomic
-    def get_or_create_work_assignment(self):
-        taskqueryset = self._get_available_submissions_in_subscription_stage()
-        task = get_random_element_from_queryset(taskqueryset)
-        if self.assignments.filter(is_done=False).count() >= 2:
-            raise NotMoreThanTwoOpenAssignmentsAllowed(
-                'Not more than 2 active assignments allowed.')
-
-        log.info(f'{self.owner} is assigned to {task} ({self.feedback_stage})')
-        return TutorSubmissionAssignment.objects.create(
-            subscription=self,
-            submission=task.submission)
-
-    @transaction.atomic
-    def reserve_all_assignments_for_a_student(self):
-        assert self.query_type == self.STUDENT_QUERY
-
-        meta_submissions = self._get_submissions_that_do_not_have_final_feedback()  # noqa
-
-        for meta in meta_submissions:
-            submission = meta.submission
-            if hasattr(submission, 'assignments'):
-                submission.assignments.filter(is_done=False).delete()
-            TutorSubmissionAssignment.objects.create(
-                subscription=self,
-                submission=submission
-            )
-
-        log.info(f'Loaded all subscriptions of student {self.query_key}')
-
-    @transaction.atomic
-    def delete(self):
-        self.assignments.filter(is_done=False).delete()
-        if self.assignments.count() == 0:
-            super().delete()
-        else:
-            self.deactivated = True
-            self.save()
-
-
-class DeletionOfDoneAssignmentsNotPermitted(Exception):
-    pass
-
-
-class MetaSubmission(models.Model):
-
-    submission = models.OneToOneField('submission',
-                                      related_name='meta',
-                                      on_delete=models.CASCADE)
-    done_assignments = models.PositiveIntegerField(default=0)
-    has_active_assignment = models.BooleanField(default=False)
-
-    has_feedback = models.BooleanField(default=False)
-    has_final_feedback = models.BooleanField(default=False)
-
-    feedback_authors = models.ManyToManyField(get_user_model())
-
-    def __str__(self):
-        return f''' Submission Meta of {self.submission}
-
-        done_assignments      = {self.done_assignments}
-        has_active_assignment = {self.has_active_assignment}
-        has_feedback          = {self.has_feedback}
-        has_final_feedback    = {self.has_final_feedback}
-        feedback_authors      = {self.feedback_authors.values_list('username',
-                                                                   flat=True)}
-        '''
-
-
-class TutorSubmissionAssignment(models.Model):
-
-    assignment_id = models.UUIDField(primary_key=True,
-                                     default=uuid.uuid4,
-                                     editable=False)
-    submission = models.ForeignKey(Submission,
-                                   on_delete=models.CASCADE,
-                                   related_name='assignments')
-    subscription = models.ForeignKey(SubmissionSubscription,
-                                     on_delete=models.CASCADE,
-                                     related_name='assignments')
-    is_done = models.BooleanField(default=False)
-    created = models.DateTimeField(auto_now_add=True)
-
-    def __str__(self):
-        return (f'{self.subscription.owner} assigned to {self.submission}'
-                f' (done={self.is_done})')
-
-    def delete(self, *args, **kwargs):
-        if self.is_done:
-            raise DeletionOfDoneAssignmentsNotPermitted()
-        super().delete(*args, **kwargs)
-
-    class Meta:
-        unique_together = ('submission', 'subscription')
diff --git a/core/models/__init__.py b/core/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..36b8767e32f067b20940f8e2ed04938681652d75
--- /dev/null
+++ b/core/models/__init__.py
@@ -0,0 +1,11 @@
+from .exam_type import ExamType  # noqa
+from .submission_type import SubmissionType  # noqa
+from .user_account import UserAccount, TutorManager  # noqa
+from .student_info import StudentInfo, random_matrikel_no  # noqa
+from .test import Test  # noqa
+from .submission import Submission, MetaSubmission  # noqa
+from .feedback import Feedback, FeedbackComment  # noqa
+from .subscription import (NotMoreThanTwoOpenAssignmentsAllowed, SubmissionSubscription,  # noqa
+                           SubscriptionTemporarilyEnded, SubscriptionEnded)  # noqa
+from .assignment import DeletionOfDoneAssignmentsNotPermitted, TutorSubmissionAssignment  # noqa
+from .label import Label  # noqa
diff --git a/core/models/assignment.py b/core/models/assignment.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc8cfa17a8dbd90fa730178ce72bf5ed16782ea1
--- /dev/null
+++ b/core/models/assignment.py
@@ -0,0 +1,41 @@
+import logging
+import uuid
+
+import constance
+from django.db import models
+
+from core.models.submission import Submission
+
+log = logging.getLogger(__name__)
+config = constance.config
+
+
+class DeletionOfDoneAssignmentsNotPermitted(Exception):
+    pass
+
+
+class TutorSubmissionAssignment(models.Model):
+
+    assignment_id = models.UUIDField(primary_key=True,
+                                     default=uuid.uuid4,
+                                     editable=False)
+    submission = models.ForeignKey(Submission,
+                                   on_delete=models.CASCADE,
+                                   related_name='assignments')
+    subscription = models.ForeignKey('SubmissionSubscription',
+                                     on_delete=models.CASCADE,
+                                     related_name='assignments')
+    is_done = models.BooleanField(default=False)
+    created = models.DateTimeField(auto_now_add=True)
+
+    def __str__(self):
+        return (f'{self.subscription.owner} assigned to {self.submission}'
+                f' (done={self.is_done})')
+
+    def delete(self, *args, **kwargs):
+        if self.is_done:
+            raise DeletionOfDoneAssignmentsNotPermitted()
+        super().delete(*args, **kwargs)
+
+    class Meta:
+        unique_together = ('submission', 'subscription')
diff --git a/core/models/exam_type.py b/core/models/exam_type.py
new file mode 100644
index 0000000000000000000000000000000000000000..9036fbbacd2aa189c3f1c75cc9f699538b4646c8
--- /dev/null
+++ b/core/models/exam_type.py
@@ -0,0 +1,45 @@
+
+import logging
+import uuid
+
+import constance
+from django.db import models
+from django.db.models import BooleanField
+
+log = logging.getLogger(__name__)
+config = constance.config
+
+
+class ExamType(models.Model):
+    """A model that contains information about the module a submission can
+    belong to. The information is not needed and is currently just used to
+    detect if students already have enough points to pass an exam.
+
+    It is NOT intended to use this for including different exams regarding
+    submission types.
+
+    Attributes
+    ----------
+    module_reference : CharField
+        a unique reference that identifies a module within the university
+    pass_only : BooleanField
+        True if no grade is given
+    pass_score : PositiveIntegerField
+        minimum score for (just) passing
+    total_score : PositiveIntegerField
+        maximum score for the exam (currently never used anywhere)
+    """
+    class Meta:
+        verbose_name = "ExamType"
+        verbose_name_plural = "ExamTypes"
+
+    def __str__(self) -> str:
+        return self.module_reference
+
+    exam_type_id = models.UUIDField(primary_key=True,
+                                    default=uuid.uuid4,
+                                    editable=False)
+    module_reference = models.CharField(max_length=50, unique=True)
+    total_score = models.PositiveIntegerField()
+    pass_score = models.PositiveIntegerField()
+    pass_only = models.BooleanField(default=False)
diff --git a/core/models/feedback.py b/core/models/feedback.py
new file mode 100644
index 0000000000000000000000000000000000000000..45e070637a0c9185e4c6bd0594857b230837b30a
--- /dev/null
+++ b/core/models/feedback.py
@@ -0,0 +1,109 @@
+import logging
+import uuid
+
+import constance
+from django.contrib.auth import get_user_model
+from django.db import models
+from django.db.models import (IntegerField,)
+
+from core.models.submission import Submission
+
+log = logging.getLogger(__name__)
+config = constance.config
+
+
+class Feedback(models.Model):
+    """
+    Attributes
+    ----------
+    score : DecimalField
+        A score that has been assigned to the submission. Is final if it was
+        accepted.
+    created : DateTimeField
+        When the feedback was initially created
+    of_submission : OneToOneField
+        The submission this feedback belongs to. It finally determines how many
+        points a student receives for his submission.
+    origin : IntegerField
+        How this feedback was originally created. See below for the choices
+    """
+    score = models.DecimalField(max_digits=5, decimal_places=2, default=0)
+    created = models.DateTimeField(auto_now_add=True)
+    is_final = models.BooleanField(default=False)
+
+    of_submission = models.OneToOneField(
+        Submission,
+        on_delete=models.CASCADE,
+        related_name='feedback')
+
+    # the denominators that are allowed for the decimal score interpreted as a fraction
+    ALLOWED_DENOMINATORS = [1, 2]
+
+    # how was this feedback created
+    (
+        WAS_EMPTY,
+        FAILED_UNIT_TESTS,
+        DID_NOT_COMPILE,
+        COULD_NOT_LINK,
+        MANUAL,
+    ) = range(5)
+    ORIGIN = (
+        (WAS_EMPTY, 'was empty'),
+        (FAILED_UNIT_TESTS, 'failed unittests'),
+        (DID_NOT_COMPILE, 'did not compile'),
+        (COULD_NOT_LINK, 'could not link'),
+        (MANUAL, 'created by a human. yak!'),
+    )
+    origin = models.IntegerField(
+        choices=ORIGIN,
+        default=MANUAL,
+    )
+
+    class Meta:
+        verbose_name = "Feedback"
+        verbose_name_plural = "Feedback Set"
+
+    def __str__(self) -> str:
+        return 'Feedback for {}'.format(self.of_submission)
+
+    def is_full_score(self) -> bool:
+        return self.of_submission.type.full_score == self.score
+
+    def get_full_score(self) -> int:
+        return self.of_submission.type.full_score
+
+
+class FeedbackComment(models.Model):
+    """ This Class contains the Feedback for a specific line of a Submission"""
+    comment_id = models.UUIDField(primary_key=True,
+                                  default=uuid.uuid4,
+                                  editable=False)
+    text = models.TextField()
+    created = models.DateTimeField(auto_now_add=True)
+    modified = models.DateTimeField(auto_now=True)
+
+    visible_to_student = models.BooleanField(default=True)
+
+    of_line = models.PositiveIntegerField(default=0)
+    of_tutor = models.ForeignKey(
+        get_user_model(),
+        related_name="comment_list",
+        on_delete=models.PROTECT
+    )
+    of_feedback = models.ForeignKey(
+        Feedback,
+        related_name="feedback_lines",
+        on_delete=models.CASCADE,
+        null=True
+    )
+
+    class Meta:
+        verbose_name = "Feedback Comment"
+        verbose_name_plural = "Feedback Comments"
+        ordering = ('created',)
+        unique_together = ('of_line', 'of_tutor', 'of_feedback')
+
+    def __str__(self):
+        return 'Comment on line {} of tutor {}: "{}"'.format(self.of_line,
+                                                             self.of_tutor,
+                                                             self.text)
diff --git a/core/models/label.py b/core/models/label.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e83a32fb043e938261368d0939a9902b4b135c2
--- /dev/null
+++ b/core/models/label.py
@@ -0,0 +1,16 @@
+import logging
+
+from django.db import models
+
+from core.models.feedback import Feedback, FeedbackComment
+
+log = logging.getLogger(__name__)
+
+
+class Label(models.Model):
+    name = models.CharField(max_length=50)
+    description = models.TextField()
+    feedback = models.ManyToManyField(Feedback, related_name='labels')
+    feedback_comments = models.ManyToManyField(FeedbackComment, related_name='labels')
+
+
diff --git a/core/models/student_info.py b/core/models/student_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..b69806cf8b60ec499ab7b2fabdae1d1ff04a2974
--- /dev/null
+++ b/core/models/student_info.py
@@ -0,0 +1,125 @@
+import logging
+import uuid
+from collections import OrderedDict
+from random import randrange
+from typing import Dict
+
+import constance
+from django.contrib.auth import get_user_model
+from django.db import models
+from django.db.models import (BooleanField, Case, F,
+                              QuerySet, Sum, Value, When)
+from django.db.models.functions import Coalesce
+
+from core.models.submission_type import SubmissionType
+
+log = logging.getLogger(__name__)
+config = constance.config
+
+
+def random_matrikel_no() -> str:
+    """Use as a default value for student's matriculation number.
+
+    Returns:
+        str: an eight digit number
+    """
+    return str(10_000_000 + randrange(90_000_000))
+
+
+class StudentInfo(models.Model):
+    """
+    The StudentInfo model includes all information of a student, that we got
+    from the E-Learning output, along with some useful classmethods that
+    provide specially annotated QuerySets.
+
+    Information like email (if given), and the username are stored in the
+    associated user model.
+
+    Attributes:
+        exam (ForeignKey):
+            Which module the student wants to be graded in
+
+        has_logged_in (BooleanField):
+            Login is permitted once. If this is set the user can not log in.
+
+        matrikel_no (CharField):
+            The matriculation number of the student
+    """
+    student_id = models.UUIDField(primary_key=True,
+                                  default=uuid.uuid4,
+                                  editable=False)
+    has_logged_in = models.BooleanField(default=False)
+    matrikel_no = models.CharField(unique=True,
+                                   max_length=30,
+                                   default=random_matrikel_no)
+    exam = models.ForeignKey('ExamType',
+                             on_delete=models.CASCADE,
+                             related_name='students',
+                             null=False)
+    user = models.OneToOneField(get_user_model(),
+                                on_delete=models.CASCADE,
+                                related_name='student')
+
+    # Managed by signals
+    total_score = models.PositiveIntegerField(default=0)
+    passes_exam = models.BooleanField(default=False)
+
+    def update_total_score(self):
+        ''' This helper is invoked after feedback changes '''
+        self.total_score = self.submissions.aggregate(
+            Sum('feedback__score'))['feedback__score__sum'] or 0
+        if self.exam is not None:
+            self.passes_exam = self.total_score >= self.exam.pass_score
+        self.save()
+
+    def score_per_submission(self) -> Dict[str, int]:
+        """ TODO: get rid of it and use an annotation. """
+        if self.submissions.all():
+            return OrderedDict({
+                s.type.name: s.feedback.score if hasattr(s, 'feedback') else 0
+                for s in self.submissions.order_by('type__name')
+            })
+
+        return OrderedDict({
+            t.name: 0 for t in SubmissionType.objects.all()
+        })
+
+    @classmethod
+    def get_annotated_score_submission_list(cls) -> QuerySet:
+        """Can be used to quickly annotate a user with the necessary
+        information on the overall score of a student and if he does not need
+        any more correction.
+
+        A student is done if
+            * module type was pass_only and student has enough points
+            * every submission got accepted feedback
+
+        Returns
+        -------
+        QuerySet
+            the annotated QuerySet as described above.
+        """
+        return cls.objects.annotate(
+            overall_score=Coalesce(Sum('submissions__feedback__score'),
+                                   Value(0)),
+        ).annotate(
+            done=Case(
+                When(exam__pass_score__lte=F('overall_score'), then=Value(1)),
+                default=Value(0),
+                output_field=BooleanField()
+            )
+        ).order_by('user__username')
+
+    def disable(self):
+        """The student won't be able to login in anymore, but his current
+        session can be continued until s/he logs out.
+        """
+        self.has_logged_in = True
+        self.save()
+
+    def __str__(self) -> str:
+        return self.user.username
+
+    class Meta:
+        verbose_name = "Student"
+        verbose_name_plural = "Student Set"
diff --git a/core/models/submission.py b/core/models/submission.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2169d08597b8b06a29030e35f7c294209d447ee
--- /dev/null
+++ b/core/models/submission.py
@@ -0,0 +1,81 @@
+import logging
+import uuid
+
+import constance
+from django.contrib.auth import get_user_model
+from django.db import models
+from django.db.models import (BooleanField)
+
+from core.models.submission_type import SubmissionType
+
+log = logging.getLogger(__name__)
+config = constance.config
+
+
+class Submission(models.Model):
+    """The answer of a student to a specific question. Holds the answer and
+    very often serves as ForeignKey.
+
+    With the method assign_tutor feedback for a submission can be created and a
+    tutor will be assigned to this feedback permanently (unless deleted by a
+    reviewer or if it gets reassigned). There cannot be more than ONE feedback
+    per Submission.
+
+    Attributes
+    ----------
+    seen_by_student : BooleanField
+        True if the student saw his accepted feedback.
+    student : ForeignKey
+        The student who caused all of this
+    text : TextField
+        The code/text submitted by the student
+    type : ForeignKey
+        Relation to the type containing meta information
+    """
+    submission_id = models.UUIDField(primary_key=True,
+                                     default=uuid.uuid4,
+                                     editable=False)
+    seen_by_student = models.BooleanField(default=False)
+    text = models.TextField(blank=True)
+    type = models.ForeignKey(
+        SubmissionType,
+        on_delete=models.PROTECT,
+        related_name='submissions')
+    student = models.ForeignKey(
+        'StudentInfo',
+        on_delete=models.CASCADE,
+        related_name='submissions')
+
+    class Meta:
+        verbose_name = "Submission"
+        verbose_name_plural = "Submission Set"
+        unique_together = (('type', 'student'),)
+        ordering = ('type__name',)
+
+    def __str__(self) -> str:
+        return "Submission {}".format(self.pk)
+
+
+class MetaSubmission(models.Model):
+
+    submission = models.OneToOneField('submission',
+                                      related_name='meta',
+                                      on_delete=models.CASCADE)
+    done_assignments = models.PositiveIntegerField(default=0)
+    has_active_assignment = models.BooleanField(default=False)
+
+    has_feedback = models.BooleanField(default=False)
+    has_final_feedback = models.BooleanField(default=False)
+
+    feedback_authors = models.ManyToManyField(get_user_model())
+
+    def __str__(self):
+        return f''' Submission Meta of {self.submission}
+
+        done_assignments      = {self.done_assignments}
+        has_active_assignment = {self.has_active_assignment}
+        has_feedback          = {self.has_feedback}
+        has_final_feedback    = {self.has_final_feedback}
+        feedback_authors      = {self.feedback_authors.values_list('username',
+                                                                   flat=True)}
+        '''
diff --git a/core/models/submission_type.py b/core/models/submission_type.py
new file mode 100644
index 0000000000000000000000000000000000000000..632d48867f05c409fd17c9ffbe3fae1323cf1a14
--- /dev/null
+++ b/core/models/submission_type.py
@@ -0,0 +1,98 @@
+import logging
+import uuid
+
+import constance
+from django.db import models
+from django.db.models import (Case, Count, IntegerField, Q,
+                              Value, When)
+from django.db.models.query import QuerySet
+
+log = logging.getLogger(__name__)
+config = constance.config
+
+
+class SubmissionType(models.Model):
+    """This model mostly holds meta information about the kind of task that was
+    presented to the student. It serves as a foreign key for the submissions
+    that are of this type. This model is currently NOT exposed directly in a
+    view.
+
+    Attributes
+    ----------
+    description : TextField
+        The task description the student had to fulfill. The content may be
+        HTML formatted.
+    full_score : PositiveIntegerField
+        Maximum score one can get on that one
+    name : CharField
+        The original title of the exam. This is wildly used as an identifier by
+        the preprocessing scripts.
+    solution : TextField
+        A sample solution or a correction guideline
+    """
+
+    C = 'c'
+    JAVA = 'java'
+    MIPS = 'mipsasm'
+    HASKELL = 'haskell'
+
+    LANGUAGE_CHOICES = (
+        (C, 'C syntax highlighting'),
+        (JAVA, 'Java syntax highlighting'),
+        (MIPS, 'Mips syntax highlighting'),
+        (HASKELL, 'Haskell syntax highlighting'),
+    )
+
+    submission_type_id = models.UUIDField(primary_key=True,
+                                          default=uuid.uuid4,
+                                          editable=False)
+    name = models.CharField(max_length=100, unique=True)
+    full_score = models.PositiveIntegerField(default=0)
+    description = models.TextField()
+    solution = models.TextField()
+    programming_language = models.CharField(max_length=25,
+                                            choices=LANGUAGE_CHOICES,
+                                            default=C)
+
+    def __str__(self) -> str:
+        return self.name
+
+    class Meta:
+        verbose_name = "SubmissionType"
+        verbose_name_plural = "SubmissionType Set"
+
+    @classmethod
+    def get_annotated_feedback_count(cls) -> QuerySet:
+        """ Annotates submission lists with counts
+
+        The following fields are annotated:
+            * number of submissions per submission type
+            * count of received *accepted* feedback per submission type
+            * and finally the progress on each submission type as percentage
+
+        The QuerySet that is returned is ordered by name lexicographically.
+
+        Returns:
+            The annotated QuerySet as described above
+        """
+        return cls.objects\
+            .annotate(  # to display only manual
+                feedback_final=Count(
+                    Case(When(
+                        Q(submissions__meta__has_final_feedback=True),
+                        then=Value(1)), output_field=IntegerField())
+                ),
+                feedback_in_validation=Count(
+                    Case(When(
+                        Q(submissions__meta__done_assignments=1) &
+                        Q(submissions__meta__has_final_feedback=False),
+                        then=Value(1)), output_field=IntegerField())
+                ),
+                feedback_in_conflict=Count(
+                    Case(When(
+                        Q(submissions__meta__done_assignments=2) &
+                        Q(submissions__meta__has_final_feedback=False),
+                        then=Value(1)), output_field=IntegerField())
+                ),
+                submission_count=Count('submissions'),
+            ).order_by('name')
diff --git a/core/models/subscription.py b/core/models/subscription.py
new file mode 100644
index 0000000000000000000000000000000000000000..6788aad671723d518e0719df3338eb3d50c6e252
--- /dev/null
+++ b/core/models/subscription.py
@@ -0,0 +1,205 @@
+import logging
+import secrets
+import uuid
+
+import constance
+from django.contrib.auth import get_user_model
+from django.db import models, transaction
+from django.db.models import (Q, QuerySet)
+
+from core.models.submission import MetaSubmission
+from core.models.assignment import TutorSubmissionAssignment
+
+log = logging.getLogger(__name__)
+config = constance.config
+
+
+class SubscriptionEnded(Exception):
+    pass
+
+
+class SubscriptionTemporarilyEnded(Exception):
+    pass
+
+
+class NotMoreThanTwoOpenAssignmentsAllowed(Exception):
+    pass
+
+
+def get_random_element_from_queryset(queryset):
+    qs_elements = queryset.all()
+    length = len(qs_elements)
+    index = secrets.choice(range(length))
+    return qs_elements[index]
+
+
+class SubmissionSubscription(models.Model):
+
+    RANDOM = 'random'
+    STUDENT_QUERY = 'student'
+    EXAM_TYPE_QUERY = 'exam'
+    SUBMISSION_TYPE_QUERY = 'submission_type'
+
+    type_query_mapper = {
+        RANDOM: '__any',
+        STUDENT_QUERY: 'student__pk',
+        EXAM_TYPE_QUERY: 'student__exam__pk',
+        SUBMISSION_TYPE_QUERY: 'type__pk',
+    }
+
+    QUERY_CHOICE = (
+        (RANDOM, 'Query for any submission'),
+        (STUDENT_QUERY, 'Query for submissions of student'),
+        (EXAM_TYPE_QUERY, 'Query for submissions of exam type'),
+        (SUBMISSION_TYPE_QUERY, 'Query for submissions of submissions_type'),
+    )
+
+    FEEDBACK_CREATION = 'feedback-creation'
+    FEEDBACK_VALIDATION = 'feedback-validation'
+    FEEDBACK_CONFLICT_RESOLUTION = 'feedback-conflict-resolution'
+
+    assignment_count_on_stage = {
+        FEEDBACK_CREATION: 0,
+        FEEDBACK_VALIDATION: 1,
+        FEEDBACK_CONFLICT_RESOLUTION: 2,
+    }
+
+    stages = (
+        (FEEDBACK_CREATION, 'No feedback was ever assigned'),
+        (FEEDBACK_VALIDATION, 'Feedback exists but is not validated'),
+        (FEEDBACK_CONFLICT_RESOLUTION, 'Previous correctors disagree'),
+    )
+
+    deactivated = models.BooleanField(default=False)
+    subscription_id = models.UUIDField(primary_key=True,
+                                       default=uuid.uuid4,
+                                       editable=False)
+    owner = models.ForeignKey(get_user_model(),
+                              on_delete=models.CASCADE,
+                              related_name='subscriptions')
+    query_key = models.UUIDField(null=True)
+    query_type = models.CharField(max_length=75,
+                                  choices=QUERY_CHOICE,
+                                  default=RANDOM)
+    feedback_stage = models.CharField(choices=stages,
+                                      max_length=40,
+                                      default=FEEDBACK_CREATION)
+
+    class Meta:
+        unique_together = ('owner',
+                           'query_key',
+                           'query_type',
+                           'feedback_stage')
+
+    def _get_submission_base_query(self) -> QuerySet:
+        """ Get all submissions that are filtered by the query key and type,
+        e.g. all submissions of one student or submission type.
+        """
+        if self.query_type == self.RANDOM:
+            return MetaSubmission.objects.all()
+
+        return MetaSubmission.objects.filter(
+            **{'submission__' + self.type_query_mapper[self.query_type]:
+               self.query_key})
+
+    def _get_submissions_that_do_not_have_final_feedback(self) -> QuerySet:
+        """ There are a number of conditions to check for each submission
+
+        1. The submission does not have final feedback
+        2. The submission was not shown to this user before
+        3. The submission is not currently assigned to somebody else
+
+        Returns:
+            QuerySet -- a list of all submissions ready for consumption
+        """
+        return self._get_submission_base_query() \
+            .select_for_update(of=('self',)).exclude(
+            Q(has_final_feedback=True) |
+            Q(has_active_assignment=True) |
+            Q(feedback_authors=self.owner)
+        )
+
+    def _get_available_submissions_in_subscription_stage(self) -> QuerySet:
+        """ Return the submissions that are workable in this stage.
+
+        The STOP_ON_PASS exclusion is applied before the emptiness check
+        so an exhausted stage raises instead of yielding an empty queryset.
+
+        Raises:
+            SubscriptionEnded -- the subscription will not yield any more
+            SubscriptionTemporarilyEnded -- wait until new become available
+        """
+        candidates = self._get_submissions_that_do_not_have_final_feedback()
+
+        if candidates.count() == 0:
+            raise SubscriptionEnded(
+                f'The task which user {self.owner} subscribed to is done')
+
+        done_assignments_count = self.assignment_count_on_stage[self.feedback_stage]  # noqa
+        stage_candidates = candidates.filter(
+            done_assignments=done_assignments_count,
+        )
+
+        if (config.STOP_ON_PASS and
+                self.feedback_stage == self.FEEDBACK_CREATION):
+            stage_candidates = stage_candidates.exclude(
+                Q(submission__student__passes_exam=True) &
+                Q(submission__student__exam__pass_only=True)
+            )
+
+        if stage_candidates.count() == 0:
+            raise SubscriptionTemporarilyEnded(
+                'Currently unavailable. Please check for more soon. '
+                'Submissions remaining: %s' % candidates.count())
+
+        return stage_candidates
+
+    @transaction.atomic
+    def get_remaining_not_final(self) -> int:
+        return self._get_submissions_that_do_not_have_final_feedback().count()
+
+    @transaction.atomic
+    def get_available_in_stage(self) -> int:
+        try:
+            return self._get_available_submissions_in_subscription_stage().count()  # noqa
+        except (SubscriptionTemporarilyEnded, SubscriptionEnded):
+            return 0
+
+    @transaction.atomic
+    def get_or_create_work_assignment(self):
+        taskqueryset = self._get_available_submissions_in_subscription_stage()
+        task = get_random_element_from_queryset(taskqueryset)
+        if self.assignments.filter(is_done=False).count() >= 2:
+            raise NotMoreThanTwoOpenAssignmentsAllowed(
+                'Not more than 2 active assignments allowed.')
+
+        log.info(f'{self.owner} is assigned to {task} ({self.feedback_stage})')
+        return TutorSubmissionAssignment.objects.create(
+            subscription=self,
+            submission=task.submission)
+
+    @transaction.atomic
+    def reserve_all_assignments_for_a_student(self):
+        assert self.query_type == self.STUDENT_QUERY
+
+        meta_submissions = self._get_submissions_that_do_not_have_final_feedback()  # noqa
+
+        for meta in meta_submissions:
+            submission = meta.submission
+            if hasattr(submission, 'assignments'):
+                submission.assignments.filter(is_done=False).delete()
+            TutorSubmissionAssignment.objects.create(
+                subscription=self,
+                submission=submission
+            )
+
+        log.info(f'Loaded all subscriptions of student {self.query_key}')
+
+    @transaction.atomic
+    def delete(self):
+        self.assignments.filter(is_done=False).delete()
+        if self.assignments.count() == 0:
+            super().delete()
+        else:
+            self.deactivated = True
+            self.save()
diff --git a/core/models/test.py b/core/models/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..b43d3b2cd2469d175809c94f5c8972751c864eea
--- /dev/null
+++ b/core/models/test.py
@@ -0,0 +1,44 @@
+import logging
+import uuid
+
+import constance
+from django.db import models
+
+log = logging.getLogger(__name__)
+config = constance.config
+
+
+class Test(models.Model):
+    """Tests contain information that has been unapproved by automated tests,
+    and directly belongs to a submission. Often certain Feedback was already
+    given by information provided by these tests.
+
+    Attributes
+    ----------
+    annotation : TextField
+        All the output of the test (e.g. compiler output)
+    label : CharField
+        Indicates SUCCESS or FAILURE
+    name : CharField
+        The name of the test that was performed
+    submission : ForeignKey
+        The submission the tests were run on
+    """
+    test_id = models.UUIDField(primary_key=True,
+                               default=uuid.uuid4,
+                               editable=False)
+
+    name = models.CharField(max_length=30)
+    label = models.CharField(max_length=50)
+    annotation = models.TextField()
+    submission = models.ForeignKey('submission',
+                                   related_name='tests',
+                                   on_delete=models.CASCADE,)
+
+    class Meta:
+        verbose_name = "Test"
+        verbose_name_plural = "Tests"
+        unique_together = (('submission', 'name'),)
+
+    def __str__(self) -> str:
+        return f'{self.name} {self.label}'
diff --git a/core/models/user_account.py b/core/models/user_account.py
new file mode 100644
index 0000000000000000000000000000000000000000..f218e5cdc9ee7d54026a5c35949a20a2e0981af0
--- /dev/null
+++ b/core/models/user_account.py
@@ -0,0 +1,92 @@
+import logging
+import uuid
+
+import constance
+from django.apps import apps
+from django.contrib.auth.models import AbstractUser, UserManager
+from django.db import models
+from django.db.models import (Case, Count, IntegerField, Q,
+                              Value, When)
+from factory.django import get_model
+
+log = logging.getLogger(__name__)
+config = constance.config
+
+
+class TutorManager(UserManager):
+
+    def get_queryset(self):
+        return super().get_queryset().filter(role=UserAccount.TUTOR)
+
+    def with_feedback_count(self):
+        def _get_counter(stage):
+            return Count(Case(
+                When(
+                    Q(subscriptions__feedback_stage=stage) &
+                    Q(subscriptions__assignments__is_done=True),
+                    then=Value(1))),
+                output_field=IntegerField())
+
+        submission_subscription_model = apps.get_model('core', 'SubmissionSubscription')  # noqa
+
+        return self.get_queryset() \
+            .annotate(feedback_created=_get_counter(
+                submission_subscription_model.FEEDBACK_CREATION)) \
+            .annotate(feedback_validated=_get_counter(
+                submission_subscription_model.FEEDBACK_VALIDATION))
+
+
+class UserAccount(AbstractUser):
+    """
+    A custom, fully featured User model with admin-compliant permissions
+    and an application-specific role.
+
+    Username and password are required. Other fields are optional.
+    """
+
+    STUDENT = 'Student'
+    TUTOR = 'Tutor'
+    REVIEWER = 'Reviewer'
+
+    ROLE_CHOICES = (
+        (STUDENT, 'student'),
+        (TUTOR, 'tutor'),
+        (REVIEWER, 'reviewer')
+    )
+
+    # Fields
+    role = models.CharField(max_length=50, choices=ROLE_CHOICES)
+    user_id = models.UUIDField(primary_key=True,
+                               default=uuid.uuid4,
+                               editable=False)
+
+    fullname = models.CharField('full name', max_length=70, blank=True)
+    is_admin = models.BooleanField(default=False)
+
+    # Managers
+    objects = UserManager()
+    tutors = TutorManager()
+
+    # Helper methods
+    def is_student(self):
+        return self.role == self.STUDENT
+
+    def is_tutor(self):
+        return self.role == self.TUTOR
+
+    def is_reviewer(self):
+        return self.role == self.REVIEWER
+
+    # All of these methods are deprecated and should be replaced by custom
+    # Managers (see tutor manager)
+    @classmethod
+    def get_students(cls):
+        return cls.objects.filter(role=cls.STUDENT)
+
+    @classmethod
+    def get_tutors(cls):
+        return cls.objects.filter(role=cls.TUTOR)
+
+    @classmethod
+    def get_reviewers(cls):
+        return cls.objects.filter(role=cls.REVIEWER)
diff --git a/core/serializers/label.py b/core/serializers/label.py
new file mode 100644
index 0000000000000000000000000000000000000000..36bdb84861d9d8ce2e00c050289ffd48311d88ca
--- /dev/null
+++ b/core/serializers/label.py
@@ -0,0 +1,13 @@
+from rest_framework import serializers
+
+from core.models import Label
+
+
+class LabelSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = Label
+        fields = (
+            'pk',
+            'name',
+            'description'
+        )
diff --git a/core/views/common_views.py b/core/views/common_views.py
index 66bb478ea488b352c8a78619e69ceccd664166a9..c4a84262962a9f32813a6e20ce7886f3fc738f9b 100644
--- a/core/views/common_views.py
+++ b/core/views/common_views.py
@@ -195,7 +195,7 @@ class UserAccountViewSet(viewsets.ReadOnlyModelViewSet):
             and \
             (old_password is None or
                 not check_password(old_password, user.password)):
-                    return Response(status=status.HTTP_401_UNAUTHORIZED)
+            return Response(status=status.HTTP_401_UNAUTHORIZED)
 
         new_password = request.data.get('new_password')
         # validate password