'''
Grady Model Description
-----------------------

See the docstrings of the individual models for information on how the
database is set up.
'''

from collections import OrderedDict
from random import randrange
from typing import Dict, Union

from django.contrib.auth import get_user_model
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.db.models import Value as V
from django.db.models import (BooleanField, Case, Count, F, IntegerField, Q,
                              QuerySet, Sum, When)
from django.db.models.functions import Coalesce


def random_matrikel_no() -> str:
    """Use as a default value for student's matriculation number.

    Returns:
        str: an eight digit number
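
    Example (the value is random, but always eight digits long)::

        >>> len(random_matrikel_no())
        8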
    """
    return str(10_000_000 + randrange(90_000_000))


def get_annotated_tutor_list() -> QuerySet:
    """All tutor accounts are annotate with a field that includes the number of
    feedback that tutor has collaborated in.

    Returns:
        TYPE: the annotated QuerySet
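
    Example (an illustrative sketch; it assumes tutor accounts in the
    'Tutors' group already exist)::

        annotated_tutors = get_annotated_tutor_list()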
    """
    return get_user_model().objects\
        .filter(groups__name='Tutors')\
        .annotate(Count('feedback_list'))\
        .order_by('-feedback_list__count')


class ExamType(models.Model):
    """A model that contains information about the module a submission can
    belong to. The information is not needed and is currently, just used to
    detect if students already have enough points to pass an exam.

    It is NOT intended to use this for including different exams regarding
    submissions types.

    Attributes
    ----------
    module_reference : CharField
        a unique reference that identifies a module within the university
    pass_only : BooleanField
        True if no grade is given
    pass_score : PositiveIntegerField
        minimum score for (just) passing
    total_score : PositiveIntegerField
        maximum score for the exam (currently never used anywhere)
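
    Example (an illustrative sketch; the field values are made up)::

        exam = ExamType.objects.create(
            module_reference='B.Inf.9999',
            total_score=90,
            pass_score=45,
            pass_only=True,
        )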
    """
    class Meta:
        verbose_name = "ExamType"
        verbose_name_plural = "ExamTypes"

    def __str__(self) -> str:
        return self.module_reference

    module_reference = models.CharField(max_length=50, unique=True)
    total_score = models.PositiveIntegerField()
    pass_score = models.PositiveIntegerField()
    pass_only = models.BooleanField(default=False)


class SubmissionType(models.Model):
    """This model mostly holds meta information about the kind of task that was
    presented to the student. It serves as a foreign key for the submissions
    that are of this type. This model is currently NOT exposed directly in a
    view.

    Attributes
    ----------
    description : TextField
        The task description the student had to fulfill. The content may be
        HTML formatted.
    full_score : PositiveIntegerField
        Maximum score one can achieve on this task
    name : CharField
        The original title of the exam. This is widely used as an identifier
        by the preprocessing scripts.
    solution : TextField
        A sample solution or a correction guideline
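
    Example (an illustrative sketch; the field values are made up)::

        task = SubmissionType.objects.create(
            name='Exercise 01',
            full_score=10,
            description='<p>Implement a linked list.</p>',
            solution='A sample solution or correction guideline',
        )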
    """
    name = models.CharField(max_length=50, unique=True)
    full_score = models.PositiveIntegerField(default=0)
    description = models.TextField()
    solution = models.TextField()

    def __str__(self) -> str:
        return self.name

    class Meta:
        verbose_name = "SubmissionType"
        verbose_name_plural = "SubmissionType Set"

    @classmethod
    def get_annotated_feedback_count(cls) -> QuerySet:
        """ Annotates submission lists with counts

        The following fields are annotated:
            * number of submissions per submission type
            * count of received *accepted* feedback per submission type
            * and finally the progress on each submission type as percentage

        The QuerySet that is returned is ordered lexicographically by name.

        Returns:
            The annotated QuerySet as described above
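
        Example (an illustrative sketch of how the annotations can be read)::

            for task in SubmissionType.get_annotated_feedback_count():
                print(task.name, task.feedback_count,
                      task.submission_count, task.percentage)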
        """
        return cls.objects\
            .annotate(  # count only accepted feedback
                feedback_count=Count(
                    Case(
                        When(
                            Q(submissions__feedback__isnull=False) &
                            Q(submissions__feedback__status=Feedback.ACCEPTED),
                            then=V(1)), output_field=IntegerField(),
                    )
                )
            ).annotate(
                submission_count=Count('submissions')
            ).annotate(
                percentage=(F('feedback_count') * 100 / F('submission_count'))
            ).order_by('name')


class UserAccount(AbstractUser):
    """
    A concrete user model based on Django's AbstractUser, providing a fully
    featured User model with admin-compliant permissions.

    Username and password are required. Other fields are optional.
    """

    fullname = models.CharField('full name', max_length=70, blank=True)
    is_admin = models.BooleanField(default=False)

    def get_associated_user(self) -> models.Model:
        """ Returns the user type that is associated with this user obj """
        return \
            (hasattr(self, 'student') and self.student) or \
            (hasattr(self, 'reviewer') and self.reviewer) or \
            (hasattr(self, 'tutor') and self.tutor)


class Tutor(models.Model):
    user = models.OneToOneField(
        get_user_model(), unique=True,
        on_delete=models.CASCADE, related_name='tutor')

    def get_feedback_count(self) -> int:
        return self.feedback_list.count()


class Reviewer(models.Model):
    user = models.OneToOneField(
        get_user_model(), unique=True,
        on_delete=models.CASCADE, related_name='reviewer')


class Student(models.Model):
    """
    The student model includes all information about a student that we got
    from the E-Learning output, along with some useful classmethods that
    provide specially annotated QuerySets.

    Information such as the email address (if given) and the username is
    stored in the associated user model.

    Attributes:
        exam (ForeignKey):
            Which module the student wants to be graded in

        has_logged_in (BooleanField):
            Login is permitted once. If this is set, the user cannot log in.

        matrikel_no (CharField):
            The matriculation number of the student
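
    Example (an illustrative sketch; some_user and exam_type are assumed to
    exist already)::

        student = Student.objects.create(user=some_user, exam=exam_type)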
    """
    has_logged_in = models.BooleanField(default=False)
    matrikel_no = models.CharField(
        unique=True, max_length=8, default=random_matrikel_no)
    exam = models.ForeignKey(
        'ExamType', on_delete=models.SET_NULL,
        related_name='students', null=True)
    user = models.OneToOneField(
        get_user_model(), unique=True,
        on_delete=models.CASCADE, related_name='student')

    def score_per_submission(self) -> Dict[str, int]:
        """ TODO: get rid of it and use an annotation.

        Returns:
            TYPE: Description
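
        Example (an illustrative sketch; student is an existing Student
        instance)::

            scores = student.score_per_submission()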
        """
        if self.submissions.all():
            return OrderedDict({
                s.type: s.feedback.score if hasattr(s, 'feedback') else 0
                for s in self.submissions.all()
            })

        return OrderedDict({
            t.name: 0 for t in SubmissionType.objects.all()
        })

    @classmethod
    def get_overall_score_annotated_submission_list(cls) -> QuerySet:
        """Can be used to quickly annotate a user with the necessary
        information on the overall score of a student and if he does not need
        any more correction.

        A student is done if
            * module type was pass_only and student has enough points
            * every submission got accepted feedback

        Returns
        -------
        QuerySet
            the annotated QuerySet as described above.
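
        Example (an illustrative sketch of how the annotations can be read)::

            annotated = Student.get_overall_score_annotated_submission_list()
            for student in annotated:
                print(student.user.username, student.overall_score,
                      student.done)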
        """
        return cls.objects.annotate(
            overall_score=Coalesce(Sum('submissions__feedback__score'), V(0)),
        ).annotate(
            done=Case(
                When(exam__pass_score__lt=F('overall_score'), then=V(1)),
                default=V(0),
                output_field=BooleanField()
            )
        )

    def disable(self):
        """The student won't be able to login in anymore, but his current
        session can be continued until s/he logs out.
        """
        self.has_logged_in = True
        self.save()

    def __str__(self) -> str:
        return self.user.username

    class Meta:
        verbose_name = "Student"
        verbose_name_plural = "Student Set"


class Test(models.Model):
    """Tests contain information that has been generated by automated tests,
    and directly belongs to a submission. Often certain Feedback was already
    given by information provided by these tests.

    Attributes
    ----------
    annotation : TextField
        All the output of the test (e.g. compiler output)
    label : CharField
        Indicates SUCCESS or FAILURE
    name : CharField
        The name of the test that was performed
    submission : ForeignKey
        The submission the tests were generated for
    """
    name = models.CharField(max_length=30)
    label = models.CharField(max_length=50)
    annotation = models.TextField()
    submission = models.ForeignKey(
        'Submission',
        related_name='tests',
        on_delete=models.CASCADE,
    )

    class Meta:
        verbose_name = "Test"
        verbose_name_plural = "Tests"
        unique_together = (('submission', 'name'),)

    def __str__(self) -> str:
        return f'{self.name} {self.label}'


class Submission(models.Model):
    """The answer of a student to a specific question. Holds the answer and
    very often serves as ForeignKey.

    With the method assign_tutor feedback for a submission can be created and a
    tutor will be assigned to this feedback permanently (unless deleted by a
    reviewer or if it gets reassigned). There cannot be more than ONE feedback
    per Submission.

    Attributes
    ----------
    seen_by_student : BooleanField
        True if the student has seen their accepted feedback.
    student : ForeignKey
        The student who caused all of this
    text : TextField
        The code/text submitted by the student
    type : ForeignKey
        Relation to the SubmissionType containing meta information
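
    Example (an illustrative sketch; student and submission_type are assumed
    to exist already)::

        submission = Submission.objects.create(
            type=submission_type,
            student=student,
            text='print("Hello, world!")',
        )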
    """
    seen_by_student = models.BooleanField(default=False)
    text = models.TextField(blank=True)
    type = models.ForeignKey(
        SubmissionType,
        on_delete=models.PROTECT,
        related_name='submissions')
    student = models.ForeignKey(
        Student,
        on_delete=models.CASCADE,
        related_name='submissions')

    class Meta:
        verbose_name = "Submission"
        verbose_name_plural = "Submission Set"
        unique_together = (('type', 'student'),)
        ordering = ('type__name',)

    def __str__(self) -> str:
        return "Submission of type '{}' from Student '{}'".format(
            self.type,
            self.student
        )

    @classmethod
    def assign_tutor(cls, tutor: Tutor, slug: str = None) -> bool:
        """Assigns a tutor to a submission

        A submission is not assigned to the specified tutor if
            1. the tutor already has feedback in progress, or
            2. there are no more submissions left to correct

        Parameters
        ----------
        tutor : Tutor
            The tutor that a submission should be assigned to.
        slug : str, optional
            If a slug for a submission type is given, the feedback belonging
            to a submission of that type is assigned to the tutor. If this
            submission had feedback before, the tutor that worked on it is
            unassigned.

        Returns
        -------
        bool
            True only if feedback was actually assigned, otherwise False
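
        Example (an illustrative sketch; tutor is an existing Tutor
        instance)::

            if Submission.assign_tutor(tutor):
                feedback = Feedback.tutor_unfinished_feedback(tutor)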

        """

        # a tutor may only work on one piece of feedback at a time
        unfinished = Feedback.tutor_unfinished_feedback(tutor)
        if unfinished:
            return False

        candidates = cls.objects.filter(
            (
                Q(feedback__isnull=True)
                | Q(feedback__origin=Feedback.DID_NOT_COMPILE)
                | Q(feedback__origin=Feedback.COULD_NOT_LINK)
                | Q(feedback__origin=Feedback.FAILED_UNIT_TESTS)
            )
            & ~Q(feedback__of_tutor=tutor)
        )

        # we want a submission of a specific type
        if slug:
            candidates = candidates.filter(type__slug=slug)

        # we couldn't find any submission to correct
        if not candidates:
            return False

        submission = candidates[0]
        feedback = submission.feedback if hasattr(
            submission, 'feedback') else Feedback()
        feedback.origin = Feedback.MANUAL
        feedback.status = Feedback.EDITABLE
        feedback.of_tutor = tutor
        feedback.of_submission = submission
        feedback.save()
        return True


class Feedback(models.Model):
    """
    Attributes
    ----------
    created : DateTimeField
        When the feedback was initially created
    modified : DateTimeField
        The last time this feedback was modified
    of_reviewer : ForeignKey
        The reviewer that accepted/corrected a feedback
    of_submission : OneToOneField
        The submission this feedback belongs to. It finally determines how
        many points a student receives for their submission.
    of_tutor : ForeignKey
        The tutor/reviewer who last edited the feedback
    ORIGIN : tuple
        The choices for the origin field (see below)
    origin : IntegerField
        By whom this feedback was originally created. See below for the
        choices.
    score : PositiveIntegerField
        A score that has been assigned to the submission. It is final once
        the feedback is accepted.
    STATUS : tuple
        The choices for the status field (see below)
    status : IntegerField
        The status roughly determines which state a feedback is in. Newly
        initiated feedback is editable. Based on the status, feedback is
        presented to different types of users. Students may see feedback only
        if it has been accepted, while reviewers have access at any time.
    text : TextField
        Detailed description by the tutor about what went wrong.
        Every line in the feedback should correspond with a line in the
        student's submission, maybe with additional comments appended.
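
    Example of the intended life cycle (an illustrative sketch; feedback and
    reviewer are assumed to exist already)::

        feedback.status = Feedback.NEEDS_REVIEW  # tutor requests a review
        feedback.save()
        feedback.finalize_feedback(reviewer)     # reviewer accepts it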

    """
    text = models.TextField()
    score = models.PositiveIntegerField(default=0)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)

    of_submission = models.OneToOneField(
        Submission,
        on_delete=models.CASCADE,
        related_name='feedback',
        unique=True,
        blank=False,
        null=False)
    of_tutor = models.ForeignKey(
        Tutor,
        on_delete=models.SET_NULL,
        related_name='feedback_list',
        blank=True,
        null=True)
    of_reviewer = models.ForeignKey(
        Reviewer,
        on_delete=models.SET_NULL,
        related_name='reviewed_submissions',
        blank=True,
        null=True)

    # what is the current status of our feedback
    (
        EDITABLE,
        OPEN,
        NEEDS_REVIEW,
        ACCEPTED,
    ) = range(4)  # this order matters
    STATUS = (
        (EDITABLE, 'editable'),
        (OPEN, 'request reassignment'),
        (NEEDS_REVIEW, 'request review'),
        (ACCEPTED, 'accepted'),
    )
    status = models.IntegerField(
        choices=STATUS,
        default=EDITABLE,
    )

    # how was this feedback created
    (
        WAS_EMPTY,
        FAILED_UNIT_TESTS,
        DID_NOT_COMPILE,
        COULD_NOT_LINK,
        MANUAL,
    ) = range(5)
    ORIGIN = (
        (WAS_EMPTY, 'was empty'),
        (FAILED_UNIT_TESTS, 'failed unittests'),
        (DID_NOT_COMPILE, 'did not compile'),
        (COULD_NOT_LINK, 'could not link'),
        (MANUAL, 'created by a human. yak!'),
    )
    origin = models.IntegerField(
        choices=ORIGIN,
        default=MANUAL,
    )

    class Meta:
        verbose_name = "Feedback"
        verbose_name_plural = "Feedback Set"

    def __str__(self) -> str:
        return 'Feedback for {}'.format(self.of_submission)

    def is_full_score(self) -> bool:
        return self.of_submission.type.full_score == self.score

    def get_full_score(self) -> int:
        return self.of_submission.type.full_score

    @classmethod
    def get_open_feedback(cls, user: Union[Tutor, Reviewer]) -> QuerySet:
        """For a user, returns the feedback that is up for reassignment that
        does not belong to the user.

        Parameters
        ----------
        user : User object
            The user for which feedback should not be returned. Often the user
            that is currently searching for a task someone else does not want
            to do.

        Returns
        -------
        QuerySet
            All feedback objects that are open for reassignment that do not
            belong to the user
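
        Example (an illustrative sketch; tutor is an existing Tutor
        instance)::

            reassignable = Feedback.get_open_feedback(tutor)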
        """
        return cls.objects.filter(
            Q(status=Feedback.OPEN) &
            ~Q(of_tutor=user)  # you shall not request your own feedback
        )

    @classmethod
    def tutor_unfinished_feedback(cls, user: Union[Tutor, Reviewer]):
        """Gets only the feedback that is assigned and not accepted. A tutor
        should have only one feedback assigned that is not accepted

        Parameters
        ----------
        user : User object
            The tutor who made the request

        Returns
        -------
            The feedback, or None if no unfinished feedback is assigned
        """
        tutor_feedback = cls.objects.filter(
            Q(of_tutor=user), Q(status=Feedback.EDITABLE),
        )
        return tutor_feedback[0] if tutor_feedback else None

    @classmethod
    def tutor_assigned_feedback(cls, user: Union[Tutor, Reviewer]):
        """Gets all feedback that is assigned to the tutor, covering
        all status cases.

        Parameters
        ----------
        user : User object
            The user for which the feedback should be returned

        Returns
        -------
        QuerySet
            The feedback objects that have been assigned to this tutor
        """
        tutor_feedback = cls.objects.filter(of_tutor=user)
        return tutor_feedback

    def finalize_feedback(self, user: Union[Tutor, Reviewer]):
        """Used to mark feedback as accepted (reviewed).

        Parameters
        ----------
        user : User object
            The tutor/reviewer that marks some feedback as accepted
        """
        self.status = Feedback.ACCEPTED
        self.of_reviewer = user
        self.save()

    def reassign_to_tutor(self, user: Union[Tutor, Reviewer]):
        """When a tutor does not want to correct some task they can pass it
        along to another tutor who will accept the request.

        Parameters
        ----------
        user : Tutor
            The tutor to whom the feedback should be assigned
        """
        assert self.status == Feedback.OPEN
        self.of_tutor = user
        self.status = Feedback.EDITABLE
        self.save()