'''
Grady Model Description
-----------------------

See the docstrings of the individual models for information on the setup of
the database.
'''

import logging
import secrets
import uuid
from collections import OrderedDict
from random import randrange
from typing import Dict

import constance
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AbstractUser, UserManager
from django.db import models, transaction
from django.db.models import (BooleanField, Case, Count, F, IntegerField, Q,
                              QuerySet, Sum, Value, When)
from django.db.models.functions import Coalesce

# Module-level logger for this models module.
log = logging.getLogger(__name__)
# Handle to the live, database-backed constance settings (e.g. STOP_ON_PASS).
config = constance.config


def random_matrikel_no() -> str:
    """Produce a default value for a student's matriculation number.

    Returns:
        str: a uniformly random eight digit number
    """
    return str(randrange(10_000_000, 100_000_000))


def get_annotated_tutor_list() -> QuerySet:
    """Return every tutor account annotated with the number of feedback
    objects that tutor has collaborated in.

    Returns:
        QuerySet: tutors ordered by descending feedback count
    """
    tutors = get_user_model().objects.filter(groups__name='Tutors')
    return (tutors
            .annotate(Count('feedback_list'))
            .order_by('-feedback_list__count'))


def get_random_element_from_queryset(queryset):
    """Pick one element of *queryset* uniformly at random.

    Randomness comes from the ``secrets`` module. Raises ``IndexError``
    when the queryset is empty.
    """
    elements = queryset.all()
    return secrets.choice(elements)


class ExamType(models.Model):
    """A model that contains information about the module a submission can
    belong to. The information is not strictly needed and is currently just
    used to detect if students already have enough points to pass an exam.

    It is NOT intended to use this for including different exams regarding
    submissions types.

    Attributes
    ----------
    module_reference : CharField
        a unique reference that identifies a module within the university
    pass_only : BooleanField
        True if no grade is given
    pass_score : PositiveIntegerField
        minimum score for (just) passing
    total_score : PositiveIntegerField
        maximum score for the exam (currently never used anywhere)
    """
    class Meta:
        verbose_name = "ExamType"
        verbose_name_plural = "ExamTypes"

    def __str__(self) -> str:
        return self.module_reference

    exam_type_id = models.UUIDField(primary_key=True,
                                    default=uuid.uuid4,
                                    editable=False)
    module_reference = models.CharField(max_length=50, unique=True)
    total_score = models.PositiveIntegerField()
    pass_score = models.PositiveIntegerField()
    pass_only = models.BooleanField(default=False)


class SubmissionType(models.Model):
    """This model mostly holds meta information about the kind of task that was
    presented to the student. It serves as a foreign key for the submissions
    that are of this type. This model is currently NOT exposed directly in a
    view.

    Attributes
    ----------
    description : TextField
        The task description the student had to fulfill. The content may be
        HTML formatted.
    full_score : PositiveIntegerField
        Maximum score one can get for a submission of this type
    name : CharField
        The original title of the exam. This is widely used as an identifier
        by the preprocessing scripts.
    solution : TextField
        A sample solution or a correction guideline
    programming_language : CharField
        Which syntax highlighting to use for submissions of this type
    """

    # Syntax-highlighting language identifiers (stored values).
    C = 'c'
    JAVA = 'java'
    MIPS = 'mipsasm'
    HASKELL = 'haskell'

    LANGUAGE_CHOICES = (
        (C, 'C syntax highlighting'),
        (JAVA, 'Java syntax highlighting'),
        (MIPS, 'Mips syntax highlighting'),
        (HASKELL, 'Haskell syntax highlighting'),
    )

    submission_type_id = models.UUIDField(primary_key=True,
                                          default=uuid.uuid4,
                                          editable=False)
    name = models.CharField(max_length=100, unique=True)
    full_score = models.PositiveIntegerField(default=0)
    description = models.TextField()
    solution = models.TextField()
    programming_language = models.CharField(max_length=25,
                                            choices=LANGUAGE_CHOICES,
                                            default=C)

    def __str__(self) -> str:
        return self.name

    class Meta:
        verbose_name = "SubmissionType"
        verbose_name_plural = "SubmissionType Set"

    @classmethod
    def get_annotated_feedback_count(cls) -> QuerySet:
        """ Annotates submission lists with counts

        The following fields are annotated:
            * number of submissions per submission type
            * count of received *accepted* feedback per submission type
            * and finally the progress on each submission type as percentage

        The QuerySet that is returned is ordered by name lexicographically.

        Returns:
            The annotated QuerySet as described above
        """
        return cls.objects\
            .annotate(  # submissions whose feedback has been finalized
                feedback_final=Count(
                    Case(When(
                        Q(submissions__meta__has_final_feedback=True),
                        then=Value(1)), output_field=IntegerField())
                ),
                # exactly one finished assignment, but no final feedback yet
                feedback_in_validation=Count(
                    Case(When(
                        Q(submissions__meta__done_assignments=1) &
                        Q(submissions__meta__has_final_feedback=False),
                        then=Value(1)), output_field=IntegerField())
                ),
                # two finished assignments without final feedback, i.e. the
                # submission is awaiting conflict resolution
                feedback_in_conflict=Count(
                    Case(When(
                        Q(submissions__meta__done_assignments=2) &
                        Q(submissions__meta__has_final_feedback=False),
                        then=Value(1)), output_field=IntegerField())
                ),
                submission_count=Count('submissions'),
            ).order_by('name')


class TutorManager(UserManager):
    """User manager restricted to accounts with the tutor role."""

    def get_queryset(self):
        """Return only accounts whose role is TUTOR."""
        base = super().get_queryset()
        return base.filter(role=UserAccount.TUTOR)

    def with_feedback_count(self):
        """Annotate each tutor with the number of completed assignments in
        the feedback creation and validation stages.
        """
        def count_done_in(stage):
            # Completed assignments that belong to a subscription of the
            # given feedback stage.
            condition = (Q(subscriptions__feedback_stage=stage) &
                         Q(subscriptions__assignments__is_done=True))
            return Count(Case(When(condition, then=Value(1))),
                         output_field=IntegerField())

        tutors = self.get_queryset()
        return tutors.annotate(
            feedback_created=count_done_in(
                SubmissionSubscription.FEEDBACK_CREATION),
            feedback_validated=count_done_in(
                SubmissionSubscription.FEEDBACK_VALIDATION),
        )


class UserAccount(AbstractUser):
    """Concrete user model for Grady, extending Django's AbstractUser with
    a role field.

    Username and password are required. Other fields are optional.

    Attributes
    ----------
    role : CharField
        one of STUDENT, TUTOR or REVIEWER; drives the is_* helpers below
    fullname : CharField
        full display name of the person behind the account
    """

    STUDENT = 'Student'
    TUTOR = 'Tutor'
    REVIEWER = 'Reviewer'

    ROLE_CHOICES = (
        (STUDENT, 'student'),
        (TUTOR, 'tutor'),
        (REVIEWER, 'reviewer')
    )

    # Fields
    role = models.CharField(max_length=50, choices=ROLE_CHOICES)
    user_id = models.UUIDField(primary_key=True,
                               default=uuid.uuid4,
                               editable=False)

    fullname = models.CharField('full name', max_length=70, blank=True)
    is_admin = models.BooleanField(default=False)

    # Managers
    objects = UserManager()
    tutors = TutorManager()

    # Helper methods. Compare against the class constants rather than
    # repeating the string literals, so the role names live in one place.
    def is_student(self):
        return self.role == self.STUDENT

    def is_tutor(self):
        return self.role == self.TUTOR

    def is_reviewer(self):
        return self.role == self.REVIEWER

    # All of these methods are deprecated and should be replaced by custom
    # Managers (see TutorManager above)
    @classmethod
    def get_students(cls):
        return cls.objects.filter(role=cls.STUDENT)

    @classmethod
    def get_tutors(cls):
        return cls.objects.filter(role=cls.TUTOR)

    @classmethod
    def get_reviewers(cls):
        return cls.objects.filter(role=cls.REVIEWER)


class StudentInfo(models.Model):
    """
    The StudentInfo model includes all information of a student, that we got
    from the E-Learning output, along with some useful classmethods that
    provide specially annotated QuerySets.

    Information like email (if given), and the username are stored in the
    associated user model.

    Attributes:
        exam (ForeignKey):
            Which module the student wants to be graded in

        has_logged_in (BooleanField):
            Login is permitted once. If this is set the user can not log in.

        matrikel_no (CharField):
            The matriculation number of the student
    """
    student_id = models.UUIDField(primary_key=True,
                                  default=uuid.uuid4,
                                  editable=False)
    has_logged_in = models.BooleanField(default=False)
    matrikel_no = models.CharField(unique=True,
                                   max_length=30,
                                   default=random_matrikel_no)
    exam = models.ForeignKey('ExamType',
                             on_delete=models.CASCADE,
                             related_name='students',
                             null=False)
    user = models.OneToOneField(get_user_model(),
                                on_delete=models.CASCADE,
                                related_name='student')

    # Managed by signals
    total_score = models.PositiveIntegerField(default=0)
    passes_exam = models.BooleanField(default=False)

    def update_total_score(self):
        ''' This helper is invoked after feedback changes. It recomputes
        total_score from all feedback and updates passes_exam accordingly
        (a student passes with total_score >= exam.pass_score). '''
        self.total_score = self.submissions.aggregate(
            Sum('feedback__score'))['feedback__score__sum'] or 0
        if self.exam is not None:
            self.passes_exam = self.total_score >= self.exam.pass_score
        self.save()

    def score_per_submission(self) -> Dict[str, int]:
        """ TODO: get rid of it and use an annotation.

        Returns a mapping of submission type name to achieved score. When
        the student has no submissions at all, every known submission type
        is reported with a score of zero.
        """
        if self.submissions.all():
            return OrderedDict({
                s.type.name: s.feedback.score if hasattr(s, 'feedback') else 0
                for s in self.submissions.order_by('type__name')
            })

        return OrderedDict({
            t.name: 0 for t in SubmissionType.objects.all()
        })

    @classmethod
    def get_annotated_score_submission_list(cls) -> QuerySet:
        """Can be used to quickly annotate a user with the necessary
        information on the overall score of a student and if he does not need
        any more correction.

        A student is done if
            * module type was pass_only and student has enough points
            * every submission got accepted feedback

        Returns
        -------
        QuerySet
            the annotated QuerySet as described above.
        """
        return cls.objects.annotate(
            overall_score=Coalesce(Sum('submissions__feedback__score'),
                                   Value(0)),
        ).annotate(
            done=Case(
                # __lte (not __lt): a student with exactly pass_score points
                # passes, mirroring the `>=` comparison in
                # update_total_score above.
                When(exam__pass_score__lte=F('overall_score'), then=Value(1)),
                default=Value(0),
                output_field=BooleanField()
            )
        ).order_by('user__username')

    def disable(self):
        """The student won't be able to log in anymore, but his current
        session can be continued until s/he logs out.
        """
        self.has_logged_in = True
        self.save()

    def __str__(self) -> str:
        return self.user.username

    class Meta:
        verbose_name = "Student"
        verbose_name_plural = "Student Set"


class Test(models.Model):
    """Tests contain information produced by automated checks, and directly
    belong to a submission. Often certain Feedback was already given based
    on the information provided by these tests.

    Attributes
    ----------
    annotation : TextField
        All the output of the test (e.g. compiler output)
    label : CharField
        Indicates SUCCESS or FAILURE
    name : CharField
        The name of the test that was performed
    submission : ForeignKey
        The submission the tests were run on
    """
    test_id = models.UUIDField(primary_key=True,
                               default=uuid.uuid4,
                               editable=False)

    name = models.CharField(max_length=30)
    label = models.CharField(max_length=50)
    annotation = models.TextField()
    submission = models.ForeignKey('submission',
                                   related_name='tests',
                                   on_delete=models.CASCADE,)

    class Meta:
        verbose_name = "Test"
        verbose_name_plural = "Tests"
        # Each test name may occur at most once per submission.
        unique_together = (('submission', 'name'),)

    def __str__(self) -> str:
        return f'{self.name} {self.label}'


class Submission(models.Model):
    """The answer of a student to a specific question. Holds the answer and
    very often serves as ForeignKey.

    With the method assign_tutor feedback for a submission can be created and a
    tutor will be assigned to this feedback permanently (unless deleted by a
    reviewer or if it gets reassigned). There cannot be more than ONE feedback
    per Submission.

    Attributes
    ----------
    seen_by_student : BooleanField
        True if the student saw his accepted feedback.
    student : ForeignKey
        The student who caused all of this
    text : TextField
        The code/text submitted by the student
    type : ForeignKey
        Relation to the type containing meta information
    """
    submission_id = models.UUIDField(primary_key=True,
                                     default=uuid.uuid4,
                                     editable=False)
    seen_by_student = models.BooleanField(default=False)
    text = models.TextField(blank=True)
    # PROTECT: a submission type cannot be deleted while submissions exist.
    type = models.ForeignKey(
        SubmissionType,
        on_delete=models.PROTECT,
        related_name='submissions')
    student = models.ForeignKey(
        StudentInfo,
        on_delete=models.CASCADE,
        related_name='submissions')

    class Meta:
        verbose_name = "Submission"
        verbose_name_plural = "Submission Set"
        # A student hands in at most one submission per submission type.
        unique_together = (('type', 'student'),)
        ordering = ('type__name',)

    def __str__(self) -> str:
        return "Submission {}".format(self.pk)


class Feedback(models.Model):
    """A score (plus per-line comments, see FeedbackComment) for exactly one
    submission.

    Attributes
    ----------
    score : PositiveIntegerField
        A score that has been assigned to the submission. Is final if it was
        accepted.
    created : DateTimeField
        When the feedback was initially created
    is_final : BooleanField
        True once the feedback has been accepted
    of_submission : OneToOneField
        The submission this feedback belongs to. It finally determines how many
        points a student receives for his submission.
    origin : IntegerField
        Of whom was this feedback originally created. See below for the choices
    """
    score = models.PositiveIntegerField(default=0)
    created = models.DateTimeField(auto_now_add=True)
    is_final = models.BooleanField(default=False)

    of_submission = models.OneToOneField(
        Submission,
        on_delete=models.CASCADE,
        related_name='feedback')

    # how was this feedback created
    (
        WAS_EMPTY,
        FAILED_UNIT_TESTS,
        DID_NOT_COMPILE,
        COULD_NOT_LINK,
        MANUAL,
    ) = range(5)
    ORIGIN = (
        (WAS_EMPTY, 'was empty'),
        # Label fixed: it previously read 'passed unittests', which
        # contradicted the FAILED_UNIT_TESTS constant it belongs to.
        (FAILED_UNIT_TESTS, 'failed unittests'),
        (DID_NOT_COMPILE, 'did not compile'),
        (COULD_NOT_LINK, 'could not link'),
        (MANUAL, 'created by a human. yak!'),
    )
    origin = models.IntegerField(
        choices=ORIGIN,
        default=MANUAL,
    )

    class Meta:
        verbose_name = "Feedback"
        verbose_name_plural = "Feedback Set"

    def __str__(self) -> str:
        return 'Feedback for {}'.format(self.of_submission)

    def is_full_score(self) -> bool:
        """True when the score equals the submission type's full score."""
        return self.of_submission.type.full_score == self.score

    def get_full_score(self) -> int:
        """Maximum attainable score for the associated submission type."""
        return self.of_submission.type.full_score


class FeedbackComment(models.Model):
    """ This Class contains the Feedback for a specific line of a Submission"""
    comment_id = models.UUIDField(primary_key=True,
                                  default=uuid.uuid4,
                                  editable=False)
    text = models.TextField()
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)

    # If False, the comment is an internal note hidden from the student.
    visible_to_student = models.BooleanField(default=True)

    # The line of the submission text this comment refers to.
    of_line = models.PositiveIntegerField(default=0)
    # PROTECT: a user with comments cannot be deleted.
    of_tutor = models.ForeignKey(
        get_user_model(),
        related_name="comment_list",
        on_delete=models.PROTECT
    )
    of_feedback = models.ForeignKey(
        Feedback,
        related_name="feedback_lines",
        on_delete=models.CASCADE,
        null=True
    )

    class Meta:
        verbose_name = "Feedback Comment"
        verbose_name_plural = "Feedback Comments"
        ordering = ('created',)
        # One comment per tutor per line of a given feedback.
        unique_together = ('of_line', 'of_tutor', 'of_feedback')

    def __str__(self):
        return 'Comment on line {} of tutor {}: "{}"'.format(self.of_line,
                                                             self.of_tutor,
                                                             self.text)


class SubscriptionEnded(Exception):
    """Raised when a subscription is completely done and will never yield
    submissions again."""
    pass


class SubscriptionTemporarilyEnded(Exception):
    """Raised when no submission is currently available in the requested
    stage; more may become available later."""
    pass


class NotMoreThanTwoOpenAssignmentsAllowed(Exception):
    """Raised when a subscription owner requests new work while already
    holding two unfinished assignments."""
    pass


class SubmissionSubscription(models.Model):
    """A tutor's standing request to be served submissions for correction.

    A subscription selects submissions via a query type (any, per student,
    per exam type, or per submission type) together with an optional query
    key, and targets exactly one feedback stage. Concrete work items are
    handed out as TutorSubmissionAssignment objects via
    get_or_create_work_assignment().
    """

    # Query types: which subset of submissions this subscription draws from.
    RANDOM = 'random'
    STUDENT_QUERY = 'student'
    EXAM_TYPE_QUERY = 'exam'
    SUBMISSION_TYPE_QUERY = 'submission_type'

    # Maps a query type to the ORM lookup used to filter submissions.
    # The RANDOM entry is never consulted: _get_submission_base_query
    # returns early for RANDOM before using this mapping.
    type_query_mapper = {
        RANDOM: '__any',
        STUDENT_QUERY: 'student__pk',
        EXAM_TYPE_QUERY: 'student__exam__pk',
        SUBMISSION_TYPE_QUERY: 'type__pk',
    }

    QUERY_CHOICE = (
        (RANDOM, 'Query for any submission'),
        (STUDENT_QUERY, 'Query for submissions of student'),
        (EXAM_TYPE_QUERY, 'Query for submissions of exam type'),
        (SUBMISSION_TYPE_QUERY, 'Query for submissions of submissions_type'),
    )

    # The three correction stages a subscription can serve.
    FEEDBACK_CREATION = 'feedback-creation'
    FEEDBACK_VALIDATION = 'feedback-validation'
    FEEDBACK_CONFLICT_RESOLUTION = 'feedback-conflict-resolution'

    # How many *done* assignments a submission must already have to be
    # eligible for the given stage.
    assignment_count_on_stage = {
        FEEDBACK_CREATION: 0,
        FEEDBACK_VALIDATION: 1,
        FEEDBACK_CONFLICT_RESOLUTION: 2,
    }

    stages = (
        (FEEDBACK_CREATION, 'No feedback was ever assigned'),
        (FEEDBACK_VALIDATION, 'Feedback exists but is not validated'),
        (FEEDBACK_CONFLICT_RESOLUTION, 'Previous correctors disagree'),
    )

    # Soft-delete flag; set by delete() when done assignments must be kept.
    deactivated = models.BooleanField(default=False)
    subscription_id = models.UUIDField(primary_key=True,
                                       default=uuid.uuid4,
                                       editable=False)
    owner = models.ForeignKey(get_user_model(),
                              on_delete=models.CASCADE,
                              related_name='subscriptions')
    # Primary key of the student / exam type / submission type filtered on;
    # unused (null) for RANDOM subscriptions.
    query_key = models.UUIDField(null=True)
    query_type = models.CharField(max_length=75,
                                  choices=QUERY_CHOICE,
                                  default=RANDOM)
    feedback_stage = models.CharField(choices=stages,
                                      max_length=40,
                                      default=FEEDBACK_CREATION)

    class Meta:
        unique_together = ('owner',
                           'query_key',
                           'query_type',
                           'feedback_stage')

    def _get_submission_base_query(self) -> QuerySet:
        """ Get all submissions that are filtered by the query key and type,
        e.g. all submissions of one student or submission type.

        Note: this operates on MetaSubmission (not Submission) because the
        correction bookkeeping fields live on the meta object.
        """
        if self.query_type == self.RANDOM:
            return MetaSubmission.objects.all()

        return MetaSubmission.objects.filter(
            **{'submission__' + self.type_query_mapper[self.query_type]:
               self.query_key})

    def _get_submissions_that_do_not_have_final_feedback(self) -> QuerySet:
        """ There are a number of conditions to check for each submission

        1. The submission does not have final feedback
        2. The submission was not shown to this user before
        3. The submission is not currently assigned to somebody else

        The rows are locked via select_for_update, so callers must run
        inside a transaction (see the @transaction.atomic wrappers below).

        Returns:
            QuerySet -- a list of all submissions ready for consumption
        """
        return self._get_submission_base_query() \
            .select_for_update(of=('self',)).exclude(
            Q(has_final_feedback=True) |
            Q(has_active_assignment=True) |
            Q(feedback_authors=self.owner)
        )

    def _get_available_submissions_in_subscription_stage(self) -> QuerySet:
        """ Another filter this time it returns all the submissions that
        are valid in this stage. That means all previous stages have been
        completed.

        Raises:
            SubscriptionEnded -- if the subscription will not yield
                                 subscriptions in the future
            SubscriptionTemporarilyEnded -- wait until new become available
        """
        candidates = self._get_submissions_that_do_not_have_final_feedback()

        if candidates.count() == 0:
            raise SubscriptionEnded(
                f'The task which user {self.owner} subscribed to is done')

        # A submission belongs to this stage when exactly the expected
        # number of assignments has been completed for it.
        done_assignments_count = self.assignment_count_on_stage[self.feedback_stage]  # noqa
        stage_candidates = candidates.filter(
            done_assignments=done_assignments_count,
        )

        if stage_candidates.count() == 0:
            raise SubscriptionTemporarilyEnded(
                'Currently unavailable. Please check for more soon. '
                'Submissions remaining: %s' % stage_candidates.count())

        # Optionally skip students who already pass a pass-only exam, so no
        # unnecessary first-stage feedback is created for them.
        if (config.STOP_ON_PASS and
                self.feedback_stage == self.FEEDBACK_CREATION):
            stage_candidates = stage_candidates.exclude(
                Q(submission__student__passes_exam=True) &
                Q(submission__student__exam__pass_only=True)
            )

        return stage_candidates

    @transaction.atomic
    def get_remaining_not_final(self) -> int:
        """Number of matching submissions still lacking final feedback."""
        return self._get_submissions_that_do_not_have_final_feedback().count()

    @transaction.atomic
    def get_available_in_stage(self) -> int:
        """Number of submissions currently available in this subscription's
        stage; zero when the subscription is (temporarily) exhausted."""
        try:
            return self._get_available_submissions_in_subscription_stage().count()  # noqa
        except (SubscriptionTemporarilyEnded, SubscriptionEnded):
            return 0

    @transaction.atomic
    def get_or_create_work_assignment(self):
        """Hand out a randomly chosen eligible submission as an assignment.

        Raises:
            NotMoreThanTwoOpenAssignmentsAllowed -- if the owner already
                has two unfinished assignments
            SubscriptionEnded / SubscriptionTemporarilyEnded -- propagated
                from the stage query
        """
        taskqueryset = self._get_available_submissions_in_subscription_stage()
        task = get_random_element_from_queryset(taskqueryset)
        if self.assignments.filter(is_done=False).count() >= 2:
            raise NotMoreThanTwoOpenAssignmentsAllowed(
                'Not more than 2 active assignments allowed.')

        log.info(f'{self.owner} is assigned to {task} ({self.feedback_stage})')
        return TutorSubmissionAssignment.objects.create(
            subscription=self,
            submission=task.submission)

    @transaction.atomic
    def reserve_all_assignments_for_a_student(self):
        """Assign every open submission of one student to this subscription,
        dropping any unfinished assignments held by others. Only valid for
        STUDENT_QUERY subscriptions."""
        assert self.query_type == self.STUDENT_QUERY

        meta_submissions = self._get_submissions_that_do_not_have_final_feedback()  # noqa

        for meta in meta_submissions:
            submission = meta.submission
            if hasattr(submission, 'assignments'):
                submission.assignments.filter(is_done=False).delete()
            TutorSubmissionAssignment.objects.create(
                subscription=self,
                submission=submission
            )

        log.info(f'Loaded all subscriptions of student {self.query_key}')

    @transaction.atomic
    def delete(self):
        """Delete the subscription, but keep a deactivated record whenever
        completed assignments must be preserved."""
        self.assignments.filter(is_done=False).delete()
        if self.assignments.count() == 0:
            super().delete()
        else:
            self.deactivated = True
            self.save()


class DeletionOfDoneAssignmentsNotPermitted(Exception):
    """Raised when attempting to delete an assignment that has already been
    marked as done."""
    pass


class MetaSubmission(models.Model):
    """Per-submission correction bookkeeping used by SubmissionSubscription
    to decide which submissions may be handed out next.

    NOTE(review): nothing in this file updates these fields; presumably
    they are maintained by signal handlers — confirm.
    """

    submission = models.OneToOneField('submission',
                                      related_name='meta',
                                      on_delete=models.CASCADE)
    # Number of completed assignments; determines the feedback stage.
    done_assignments = models.PositiveIntegerField(default=0)
    has_active_assignment = models.BooleanField(default=False)

    has_feedback = models.BooleanField(default=False)
    has_final_feedback = models.BooleanField(default=False)

    # Users who already worked on this submission and are therefore
    # excluded from future assignments of it.
    feedback_authors = models.ManyToManyField(get_user_model())

    def __str__(self):
        return f''' Submission Meta of {self.submission}

        done_assignments      = {self.done_assignments}
        has_active_assignment = {self.has_active_assignment}
        has_feedback          = {self.has_feedback}
        has_final_feedback    = {self.has_final_feedback}
        feedback_authors      = {self.feedback_authors.values_list('username',
                                                                   flat=True)}
        '''


class TutorSubmissionAssignment(models.Model):
    """Links a subscription (and thereby a tutor) to one submission the
    tutor is expected to work on; is_done marks finished work."""

    assignment_id = models.UUIDField(primary_key=True,
                                     default=uuid.uuid4,
                                     editable=False)
    submission = models.ForeignKey(Submission,
                                   on_delete=models.CASCADE,
                                   related_name='assignments')
    subscription = models.ForeignKey(SubmissionSubscription,
                                     on_delete=models.CASCADE,
                                     related_name='assignments')
    is_done = models.BooleanField(default=False)
    created = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return (f'{self.subscription.owner} assigned to {self.submission}'
                f' (done={self.is_done})')

    def delete(self, *args, **kwargs):
        """Delete the assignment.

        Raises:
            DeletionOfDoneAssignmentsNotPermitted: if the assignment has
                already been completed.
        """
        if self.is_done:
            raise DeletionOfDoneAssignmentsNotPermitted()
        super().delete(*args, **kwargs)

    class Meta:
        # A submission is handed to a given subscription at most once.
        unique_together = ('submission', 'subscription')