Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • j.michal/grady
1 result
Show changes
Commits on Source (15)
Showing
with 958 additions and 773 deletions
......@@ -4,6 +4,8 @@
*.pot
*.py[co]
.tox/
*.ipynb
.ipynb_checkpoints/
__pycache__
MANIFEST
.coverage
......
......@@ -16,7 +16,7 @@ class Migration(migrations.Migration):
name='useraccount',
managers=[
('objects', django.contrib.auth.models.UserManager()),
('tutors', core.models.TutorManager()),
('tutors', core.models.TutorReviewerManager()),
],
),
]
# Generated by Django 2.1.4 on 2019-04-25 15:28
# Auto-generated schema migration: introduces the FeedbackLabel model.
# Do not edit applied migrations by hand.
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0013_auto_20190308_1448'),
    ]

    operations = [
        migrations.CreateModel(
            name='FeedbackLabel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('description', models.TextField()),
                # Labels can be attached both to whole Feedback objects and
                # to individual FeedbackComment rows.
                ('feedback', models.ManyToManyField(related_name='labels', to='core.Feedback')),
                ('feedback_comments', models.ManyToManyField(related_name='labels', to='core.FeedbackComment')),
            ],
        ),
    ]
# Generated by Django 2.1.4 on 2019-04-30 17:01
# Auto-generated schema migration: adds a colour field to FeedbackLabel.
# Do not edit applied migrations by hand.
import django.core.validators
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0014_feedbacklabel'),
    ]

    operations = [
        migrations.AddField(
            model_name='feedbacklabel',
            name='colour',
            # NOTE(review): the validator message mentions "{7}" while the
            # regex enforces exactly 6 hex digits — the message text appears
            # to be wrong, but this frozen snapshot must stay as generated.
            field=models.CharField(default='#b0b0b0', max_length=7, validators=[django.core.validators.RegexValidator(code='nomatch', message='Colour must be in format: #[0-9A-F]{7}', regex='^#[0-9A-F]{6}$')]),
        ),
    ]
# Generated by Django 2.2 on 2019-05-21 18:03
# Auto-generated schema migration: allows FeedbackComment.text to be blank.
# Do not edit applied migrations by hand.
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0015_feedbacklabel_colour'),
    ]

    operations = [
        migrations.AlterField(
            model_name='feedbackcomment',
            name='text',
            field=models.TextField(blank=True),
        ),
    ]
# Generated by Django 2.1.4 on 2019-06-04 16:31
# Auto-generated migration: renames the custom manager slot from 'tutors'
# to 'corrector', now backed by TutorReviewerManager.
# Do not edit applied migrations by hand.
import core.models.user_account
import django.contrib.auth.models
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0016_auto_20190521_1803'),
    ]

    operations = [
        migrations.AlterModelManagers(
            name='useraccount',
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
                ('corrector', core.models.user_account.TutorReviewerManager()),
            ],
        ),
    ]
This diff is collapsed.
from .exam_type import ExamType # noqa
from .submission_type import SubmissionType # noqa
from .user_account import UserAccount, TutorReviewerManager # noqa
from .student_info import StudentInfo, random_matrikel_no # noqa
from .test import Test # noqa
from .submission import Submission, MetaSubmission # noqa
from .feedback import Feedback, FeedbackComment # noqa
from .subscription import (NotMoreThanTwoOpenAssignmentsAllowed, SubmissionSubscription, # noqa
SubscriptionTemporarilyEnded, SubscriptionEnded) # noqa
from .assignment import DeletionOfDoneAssignmentsNotPermitted, TutorSubmissionAssignment # noqa
from .label import FeedbackLabel # noqa
import logging
import uuid
import constance
from django.db import models
from core.models.submission import Submission
log = logging.getLogger(__name__)
config = constance.config
class DeletionOfDoneAssignmentsNotPermitted(Exception):
    """Raised by TutorSubmissionAssignment.delete() when the assignment
    has already been marked as done and must therefore be kept."""
    pass
class TutorSubmissionAssignment(models.Model):
    """Links a submission to a subscription (and thereby to its owner)
    for one round of correction work.

    Attributes
    ----------
    submission : ForeignKey
        The submission that is being corrected.
    subscription : ForeignKey
        The SubmissionSubscription this assignment was created for.
    is_done : BooleanField
        Set once the corrector finished this assignment; done assignments
        can no longer be deleted (see delete()).
    created : DateTimeField
        Set automatically when the assignment is created.
    """
    assignment_id = models.UUIDField(primary_key=True,
                                     default=uuid.uuid4,
                                     editable=False)
    submission = models.ForeignKey(Submission,
                                   on_delete=models.CASCADE,
                                   related_name='assignments')
    subscription = models.ForeignKey('SubmissionSubscription',
                                     on_delete=models.CASCADE,
                                     related_name='assignments')
    is_done = models.BooleanField(default=False)
    created = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return (f'{self.subscription.owner} assigned to {self.submission}'
                f' (done={self.is_done})')

    def delete(self, *args, **kwargs):
        """Delete the assignment unless it is done.

        Raises:
            DeletionOfDoneAssignmentsNotPermitted: if is_done is set.
        """
        if self.is_done:
            raise DeletionOfDoneAssignmentsNotPermitted()
        super().delete(*args, **kwargs)

    class Meta:
        # A submission may be assigned at most once per subscription.
        unique_together = ('submission', 'subscription')
import logging
import uuid
import constance
from django.db import models
log = logging.getLogger(__name__)
config = constance.config
class ExamType(models.Model):
    """A model that contains information about the module a submission can
    belong to. The information is not needed and is currently just used to
    detect if students already have enough points to pass an exam.

    It is NOT intended to use this for including different exams regarding
    submissions types.

    Attributes
    ----------
    module_reference : CharField
        a unique reference that identifies a module within the university
    pass_only : BooleanField
        True if no grade is given
    pass_score : PositiveIntegerField
        minimum score for (just) passing
    total_score : PositiveIntegerField
        maximum score for the exam (currently never used anywhere)
    """
    class Meta:
        verbose_name = "ExamType"
        verbose_name_plural = "ExamTypes"

    def __str__(self) -> str:
        return self.module_reference

    exam_type_id = models.UUIDField(primary_key=True,
                                    default=uuid.uuid4,
                                    editable=False)
    module_reference = models.CharField(max_length=50, unique=True)
    total_score = models.PositiveIntegerField()
    pass_score = models.PositiveIntegerField()
    pass_only = models.BooleanField(default=False)
import logging
import uuid
import constance
from django.contrib.auth import get_user_model
from django.db import models
from core.models.submission import Submission
log = logging.getLogger(__name__)
config = constance.config
class Feedback(models.Model):
    """
    Attributes
    ----------
    score : DecimalField
        A score that has been assigned to the submission. Is final if it was
        accepted.
    created : DateTimeField
        When the feedback was initially created
    of_submission : OneToOneField
        The submission this feedback belongs to. It finally determines how many
        points a student receives for his submission.
    origin : IntegerField
        Of whom was this feedback originally created. See below for the choices
    """
    score = models.DecimalField(max_digits=5, decimal_places=2, default=0)
    created = models.DateTimeField(auto_now_add=True)
    is_final = models.BooleanField(default=False)
    of_submission = models.OneToOneField(
        Submission,
        on_delete=models.CASCADE,
        related_name='feedback')

    # the denominators that are allowed for the decimal score interpreted as a fraction
    ALLOWED_DENOMINATORS = [1, 2]

    # how was this feedback created
    (
        WAS_EMPTY,
        FAILED_UNIT_TESTS,
        DID_NOT_COMPILE,
        COULD_NOT_LINK,
        MANUAL,
    ) = range(5)
    # NOTE(review): the display text for FAILED_UNIT_TESTS reads
    # 'passed unittests', which contradicts the constant's name — confirm
    # which one is intended before changing either.
    ORIGIN = (
        (WAS_EMPTY, 'was empty'),
        (FAILED_UNIT_TESTS, 'passed unittests'),
        (DID_NOT_COMPILE, 'did not compile'),
        (COULD_NOT_LINK, 'could not link'),
        (MANUAL, 'created by a human. yak!'),
    )
    origin = models.IntegerField(
        choices=ORIGIN,
        default=MANUAL,
    )

    class Meta:
        verbose_name = "Feedback"
        verbose_name_plural = "Feedback Set"

    def __str__(self) -> str:
        return 'Feedback for {}'.format(self.of_submission)

    def is_full_score(self) -> bool:
        # Compares against the submission type's configured maximum.
        return self.of_submission.type.full_score == self.score

    def get_full_score(self) -> int:
        return self.of_submission.type.full_score
class FeedbackComment(models.Model):
    """ This Class contains the Feedback for a specific line of a Submission

    Attributes
    ----------
    text : TextField
        The comment body (may be empty).
    of_line : PositiveIntegerField
        The line of the submission this comment refers to.
    of_tutor : ForeignKey
        The author of the comment; PROTECT prevents deleting a user who
        still owns comments.
    of_feedback : ForeignKey
        The Feedback object this comment belongs to (nullable).
    visible_to_student : BooleanField
        Whether the student may see this comment.
    """
    comment_id = models.UUIDField(primary_key=True,
                                  default=uuid.uuid4,
                                  editable=False)
    text = models.TextField(blank=True)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    visible_to_student = models.BooleanField(default=True)
    of_line = models.PositiveIntegerField(default=0)
    of_tutor = models.ForeignKey(
        get_user_model(),
        related_name="comment_list",
        on_delete=models.PROTECT
    )
    of_feedback = models.ForeignKey(
        Feedback,
        related_name="feedback_lines",
        on_delete=models.CASCADE,
        null=True
    )

    class Meta:
        verbose_name = "Feedback Comment"
        verbose_name_plural = "Feedback Comments"
        ordering = ('created',)
        # One comment per tutor per line of a given feedback.
        unique_together = ('of_line', 'of_tutor', 'of_feedback')

    def __str__(self):
        return 'Comment on line {} of tutor {}: "{}"'.format(self.of_line,
                                                             self.of_tutor,
                                                             self.text)
import logging
from django.core.validators import RegexValidator
from django.db import models
from core.models.feedback import Feedback, FeedbackComment
log = logging.getLogger(__name__)
# Accepts colours like '#B0B0B0': a hash sign followed by exactly six
# uppercase hexadecimal digits.
# Bug fix: the message previously claimed the format was #[0-9A-F]{7},
# but the regex enforces exactly six hex digits.
HexColourValidator = RegexValidator(
    regex='^#[0-9A-F]{6}$',
    message='Colour must be in format: #[0-9A-F]{6}',
    code='nomatch')
class FeedbackLabel(models.Model):
    """A reusable tag (name + colour) that tutors can attach to Feedback
    objects or to individual FeedbackComments.

    Attributes
    ----------
    name : CharField
        Unique human-readable label name.
    colour : CharField
        Display colour in '#RRGGBB' form, validated by HexColourValidator.
    """
    # NOTE(review): unique=True on `name` is not reflected in migration
    # 0014_feedbacklabel — a follow-up migration may be missing; verify
    # with `makemigrations --check`.
    name = models.CharField(max_length=50, unique=True)
    description = models.TextField()
    colour = models.CharField(validators=[HexColourValidator], max_length=7, default='#b0b0b0')
    feedback = models.ManyToManyField(Feedback, related_name='labels')
    feedback_comments = models.ManyToManyField(FeedbackComment, related_name='labels')
import logging
import uuid
from collections import OrderedDict
from random import randrange
from typing import Dict
import constance
from django.contrib.auth import get_user_model
from django.db import models
from django.db.models import (BooleanField, Case, F,
QuerySet, Sum, Value, When)
from django.db.models.functions import Coalesce
from core.models.submission_type import SubmissionType
log = logging.getLogger(__name__)
config = constance.config
def random_matrikel_no() -> str:
    """Default value factory for a student's matriculation number.

    Returns:
        str: a pseudo-random eight digit number
    """
    lowest_eight_digit = 10_000_000
    return str(lowest_eight_digit + randrange(90_000_000))
class StudentInfo(models.Model):
    """
    The StudentInfo model includes all information of a student, that we got
    from the E-Learning output, along with some useful classmethods that
    provide specially annotated QuerySets.

    Information like email (if given), and the username are stored in the
    associated user model.

    Attributes:
        exam (ForeignKey):
            Which module the student wants to be graded in
        has_logged_in (BooleanField):
            Login is permitted once. If this is set the user can not log in.
        matrikel_no (CharField):
            The matriculation number of the student
    """
    student_id = models.UUIDField(primary_key=True,
                                  default=uuid.uuid4,
                                  editable=False)
    has_logged_in = models.BooleanField(default=False)
    matrikel_no = models.CharField(unique=True,
                                   max_length=30,
                                   default=random_matrikel_no)
    exam = models.ForeignKey('ExamType',
                             on_delete=models.CASCADE,
                             related_name='students',
                             null=False)
    user = models.OneToOneField(get_user_model(),
                                on_delete=models.CASCADE,
                                related_name='student')

    # Managed by signals
    total_score = models.PositiveIntegerField(default=0)
    passes_exam = models.BooleanField(default=False)

    def update_total_score(self):
        ''' This helper is invoked after feedback changes.

        Recomputes the denormalised total_score / passes_exam fields and
        saves the instance.
        '''
        self.total_score = self.submissions.aggregate(
            Sum('feedback__score'))['feedback__score__sum'] or 0
        if self.exam is not None:
            # Passing means reaching at least the exam's pass score.
            self.passes_exam = self.total_score >= self.exam.pass_score
        self.save()

    def score_per_submission(self) -> Dict[str, int]:
        """ TODO: get rid of it and use an annotation.

        Returns an OrderedDict mapping submission type name to the score of
        this student's feedback (0 when no feedback exists). Falls back to
        an all-zero mapping over every SubmissionType when the student has
        no submissions at all.
        """
        if self.submissions.all():
            return OrderedDict({
                s.type.name: s.feedback.score if hasattr(s, 'feedback') else 0
                for s in self.submissions.order_by('type__name')
            })
        return OrderedDict({
            t.name: 0 for t in SubmissionType.objects.all()
        })

    @classmethod
    def get_annotated_score_submission_list(cls) -> QuerySet:
        """Can be used to quickly annotate a user with the necessary
        information on the overall score of a student and if he does not need
        any more correction.

        A student is marked done when the overall score reaches the exam's
        pass score.

        Returns
        -------
        QuerySet
            the annotated QuerySet as described above.
        """
        return cls.objects.annotate(
            overall_score=Coalesce(Sum('submissions__feedback__score'),
                                   Value(0)),
        ).annotate(
            done=Case(
                # Bug fix: was __lt, which excluded students with exactly
                # pass_score points; update_total_score uses >=, so a
                # student passing with the exact score must count as done.
                When(exam__pass_score__lte=F('overall_score'), then=Value(1)),
                default=Value(0),
                output_field=BooleanField()
            )
        ).order_by('user__username')

    def disable(self):
        """The student won't be able to login in anymore, but his current
        session can be continued until s/he logs out.
        """
        self.has_logged_in = True
        self.save()

    def __str__(self) -> str:
        return self.user.username

    class Meta:
        verbose_name = "Student"
        verbose_name_plural = "Student Set"
import logging
import uuid
import constance
from django.contrib.auth import get_user_model
from django.db import models
from core.models.submission_type import SubmissionType
log = logging.getLogger(__name__)
config = constance.config
class Submission(models.Model):
    """The answer of a student to a specific question. Holds the answer and
    very often serves as ForeignKey.

    With the method assign_tutor feedback for a submission can be created and a
    tutor will be assigned to this feedback permanently (unless deleted by a
    reviewer or if it gets reassigned). There cannot be more than ONE feedback
    per Submission.

    Attributes
    ----------
    seen_by_student : BooleanField
        True if the student saw his accepted feedback.
    student : ForeignKey
        The student who caused all of this
    text : TextField
        The code/text submitted by the student
    type : ForeignKey
        Relation to the type containing meta information
    """
    submission_id = models.UUIDField(primary_key=True,
                                     default=uuid.uuid4,
                                     editable=False)
    seen_by_student = models.BooleanField(default=False)
    text = models.TextField(blank=True)
    type = models.ForeignKey(
        SubmissionType,
        on_delete=models.PROTECT,
        related_name='submissions')
    student = models.ForeignKey(
        'StudentInfo',
        on_delete=models.CASCADE,
        related_name='submissions')

    class Meta:
        verbose_name = "Submission"
        verbose_name_plural = "Submission Set"
        # Each student submits at most once per submission type.
        unique_together = (('type', 'student'),)
        ordering = ('type__name',)

    def __str__(self) -> str:
        return "Submission {}".format(self.pk)
class MetaSubmission(models.Model):
    """Denormalised bookkeeping for one Submission: assignment/feedback
    progress flags used by the subscription query machinery.

    Attributes
    ----------
    done_assignments : PositiveIntegerField
        How many correction assignments were completed for the submission.
    has_active_assignment : BooleanField
        True while somebody is currently working on the submission.
    has_feedback / has_final_feedback : BooleanField
        Feedback progress flags.
    feedback_authors : ManyToManyField
        Users who already gave feedback (used to avoid re-assigning them).
    """
    submission = models.OneToOneField('submission',
                                      related_name='meta',
                                      on_delete=models.CASCADE)
    done_assignments = models.PositiveIntegerField(default=0)
    has_active_assignment = models.BooleanField(default=False)
    has_feedback = models.BooleanField(default=False)
    has_final_feedback = models.BooleanField(default=False)
    feedback_authors = models.ManyToManyField(get_user_model())

    def __str__(self):
        return f''' Submission Meta of {self.submission}
        done_assignments      = {self.done_assignments}
        has_active_assignment = {self.has_active_assignment}
        has_feedback          = {self.has_feedback}
        has_final_feedback    = {self.has_final_feedback}
        feedback_authors      = {self.feedback_authors.values_list('username',
                                                                   flat=True)}
        '''
import logging
import uuid
import constance
from django.db import models
from django.db.models import (Case, Count, IntegerField, Q,
Value, When)
from django.db.models.query import QuerySet
log = logging.getLogger(__name__)
config = constance.config
class SubmissionType(models.Model):
    """This model mostly holds meta information about the kind of task that was
    presented to the student. It serves as a foreign key for the submissions
    that are of this type. This model is currently NOT exposed directly in a
    view.

    Attributes
    ----------
    description : TextField
        The task description the student had to fulfill. The content may be
        HTML formatted.
    full_score : PositiveIntegerField
        Maximum score one can get on that one
    name : CharField
        The original title of the exam. This is widely used as an identifier by
        the preprocessing scripts.
    solution : TextField
        A sample solution or a correction guideline
    """
    # Syntax highlighting choices for the submitted code.
    C = 'c'
    JAVA = 'java'
    MIPS = 'mipsasm'
    HASKELL = 'haskell'
    LANGUAGE_CHOICES = (
        (C, 'C syntax highlighting'),
        (JAVA, 'Java syntax highlighting'),
        (MIPS, 'Mips syntax highlighting'),
        (HASKELL, 'Haskell syntax highlighting'),
    )

    submission_type_id = models.UUIDField(primary_key=True,
                                          default=uuid.uuid4,
                                          editable=False)
    name = models.CharField(max_length=100, unique=True)
    full_score = models.PositiveIntegerField(default=0)
    description = models.TextField()
    solution = models.TextField()
    programming_language = models.CharField(max_length=25,
                                            choices=LANGUAGE_CHOICES,
                                            default=C)

    def __str__(self) -> str:
        return self.name

    class Meta:
        verbose_name = "SubmissionType"
        verbose_name_plural = "SubmissionType Set"

    @classmethod
    def get_annotated_feedback_count(cls) -> QuerySet:
        """ Annotates submission lists with counts

        The following fields are annotated:
            * number of submissions per submission type
            * count of received *accepted* feedback per submission type
            * and finally the progress on each submission type as percentage

        The QuerySet that is returned is ordered by name lexicographically.

        Returns:
            The annotated QuerySet as described above
        """
        return cls.objects\
            .annotate(  # to display only manual
                feedback_final=Count(
                    Case(When(
                        Q(submissions__meta__has_final_feedback=True),
                        then=Value(1)), output_field=IntegerField())
                ),
                # one done assignment and no final feedback yet -> the
                # submission is waiting for validation
                feedback_in_validation=Count(
                    Case(When(
                        Q(submissions__meta__done_assignments=1) &
                        Q(submissions__meta__has_final_feedback=False),
                        then=Value(1)), output_field=IntegerField())
                ),
                # two done assignments without final feedback -> the two
                # correctors disagree
                feedback_in_conflict=Count(
                    Case(When(
                        Q(submissions__meta__done_assignments=2) &
                        Q(submissions__meta__has_final_feedback=False),
                        then=Value(1)), output_field=IntegerField())
                ),
                submission_count=Count('submissions'),
            ).order_by('name')
import logging
import secrets
import uuid
import constance
from django.contrib.auth import get_user_model
from django.db import models, transaction
from django.db.models import (Q, QuerySet)
from core.models.submission import MetaSubmission
from core.models.assignment import TutorSubmissionAssignment
log = logging.getLogger(__name__)
config = constance.config
class SubscriptionEnded(Exception):
    """Raised when a subscription will never yield submissions again."""
    pass
class SubscriptionTemporarilyEnded(Exception):
    """Raised when no submission is available right now, but more may
    become available later (e.g. after other correctors finish)."""
    pass
class NotMoreThanTwoOpenAssignmentsAllowed(Exception):
    """Raised when a corrector requests work while already holding two
    unfinished assignments."""
    pass
def get_random_element_from_queryset(queryset):
    """Return one element of *queryset*, chosen uniformly at random.

    Uses the ``secrets`` module as the randomness source. Raises
    ``IndexError`` when the queryset is empty (``secrets.choice`` on an
    empty range).
    """
    elements = queryset.all()
    position = secrets.choice(range(len(elements)))
    return elements[position]
class SubmissionSubscription(models.Model):
    """A corrector's standing request to be handed submissions matching a
    query (all, per student, per exam type, or per submission type) in a
    particular feedback stage.

    Work is handed out as TutorSubmissionAssignment objects; the queries
    operate on MetaSubmission bookkeeping rows, not on Submission directly.
    """
    # Query types: what the subscription filters on.
    RANDOM = 'random'
    STUDENT_QUERY = 'student'
    EXAM_TYPE_QUERY = 'exam'
    SUBMISSION_TYPE_QUERY = 'submission_type'

    # Maps query type -> ORM lookup path (relative to the submission)
    # that query_key is matched against.
    type_query_mapper = {
        RANDOM: '__any',
        STUDENT_QUERY: 'student__pk',
        EXAM_TYPE_QUERY: 'student__exam__pk',
        SUBMISSION_TYPE_QUERY: 'type__pk',
    }

    QUERY_CHOICE = (
        (RANDOM, 'Query for any submission'),
        (STUDENT_QUERY, 'Query for submissions of student'),
        (EXAM_TYPE_QUERY, 'Query for submissions of exam type'),
        (SUBMISSION_TYPE_QUERY, 'Query for submissions of submissions_type'),
    )

    # Feedback stages and how many done assignments a submission has in
    # each stage (0 = untouched, 1 = awaiting validation, 2 = conflict).
    FEEDBACK_CREATION = 'feedback-creation'
    FEEDBACK_VALIDATION = 'feedback-validation'
    FEEDBACK_CONFLICT_RESOLUTION = 'feedback-conflict-resolution'

    assignment_count_on_stage = {
        FEEDBACK_CREATION: 0,
        FEEDBACK_VALIDATION: 1,
        FEEDBACK_CONFLICT_RESOLUTION: 2,
    }

    stages = (
        (FEEDBACK_CREATION, 'No feedback was ever assigned'),
        (FEEDBACK_VALIDATION, 'Feedback exists but is not validated'),
        (FEEDBACK_CONFLICT_RESOLUTION, 'Previous correctors disagree'),
    )

    # Kept instead of deleting when done assignments still reference it;
    # see delete().
    deactivated = models.BooleanField(default=False)
    subscription_id = models.UUIDField(primary_key=True,
                                       default=uuid.uuid4,
                                       editable=False)
    owner = models.ForeignKey(get_user_model(),
                              on_delete=models.CASCADE,
                              related_name='subscriptions')
    query_key = models.UUIDField(null=True)
    query_type = models.CharField(max_length=75,
                                  choices=QUERY_CHOICE,
                                  default=RANDOM)
    feedback_stage = models.CharField(choices=stages,
                                      max_length=40,
                                      default=FEEDBACK_CREATION)

    class Meta:
        unique_together = ('owner',
                           'query_key',
                           'query_type',
                           'feedback_stage')

    def _get_submission_base_query(self) -> QuerySet:
        """ Get all submissions that are filtered by the query key and type,
        e.g. all submissions of one student or submission type.

        Returns a MetaSubmission queryset (each row wraps one submission).
        """
        if self.query_type == self.RANDOM:
            return MetaSubmission.objects.all()
        return MetaSubmission.objects.filter(
            **{'submission__' + self.type_query_mapper[self.query_type]:
               self.query_key})

    def _get_submissions_that_do_not_have_final_feedback(self) -> QuerySet:
        """ There are a number of conditions to check for each submission

        1. The submission does not have final feedback
        2. The submission was not shown to this user before
        3. The submission is not currently assigned to somebody else

        Returns:
            QuerySet -- a list of all submissions ready for consumption
        """
        # select_for_update row-locks the MetaSubmission rows so two
        # concurrent correctors cannot grab the same submission.
        return self._get_submission_base_query() \
            .select_for_update(of=('self',)).exclude(
                Q(has_final_feedback=True) |
                Q(has_active_assignment=True) |
                Q(feedback_authors=self.owner)
            )

    def _get_available_submissions_in_subscription_stage(self) -> QuerySet:
        """ Another filter this time it returns all the submissions that
        are valid in this stage. That means all previous stages have been
        completed.

        Raises:
            SubscriptionEnded -- if the subscription will not yield
                                 subscriptions in the future
            SubscriptionTemporarilyEnded -- wait until new become available
        """
        candidates = self._get_submissions_that_do_not_have_final_feedback()
        if candidates.count() == 0:
            raise SubscriptionEnded(
                f'The task which user {self.owner} subscribed to is done')

        done_assignments_count = self.assignment_count_on_stage[self.feedback_stage]  # noqa
        stage_candidates = candidates.filter(
            done_assignments=done_assignments_count,
        )
        if stage_candidates.count() == 0:
            raise SubscriptionTemporarilyEnded(
                'Currently unavailable. Please check for more soon. '
                'Submissions remaining: %s' % stage_candidates.count())
        # Optionally skip students who already pass a pass-only exam.
        if (config.STOP_ON_PASS and
                self.feedback_stage == self.FEEDBACK_CREATION):
            stage_candidates = stage_candidates.exclude(
                Q(submission__student__passes_exam=True) &
                Q(submission__student__exam__pass_only=True)
            )
        return stage_candidates

    @transaction.atomic
    def get_remaining_not_final(self) -> int:
        # Count of submissions still lacking final feedback for this query.
        return self._get_submissions_that_do_not_have_final_feedback().count()

    @transaction.atomic
    def get_available_in_stage(self) -> int:
        # Like above but restricted to the current stage; 0 when ended.
        try:
            return self._get_available_submissions_in_subscription_stage().count()  # noqa
        except (SubscriptionTemporarilyEnded, SubscriptionEnded):
            return 0

    @transaction.atomic
    def get_or_create_work_assignment(self):
        """Hand out one random available submission as a new assignment.

        Raises:
            NotMoreThanTwoOpenAssignmentsAllowed -- when the owner already
                has two unfinished assignments on this subscription.
        """
        taskqueryset = self._get_available_submissions_in_subscription_stage()
        task = get_random_element_from_queryset(taskqueryset)

        if self.assignments.filter(is_done=False).count() >= 2:
            raise NotMoreThanTwoOpenAssignmentsAllowed(
                'Not more than 2 active assignments allowed.')

        log.info(f'{self.owner} is assigned to {task} ({self.feedback_stage})')
        return TutorSubmissionAssignment.objects.create(
            subscription=self,
            submission=task.submission)

    @transaction.atomic
    def reserve_all_assignments_for_a_student(self):
        """Claim every open submission of one student for this owner,
        deleting other correctors' unfinished assignments on them.
        Only valid for STUDENT_QUERY subscriptions."""
        assert self.query_type == self.STUDENT_QUERY
        meta_submissions = self._get_submissions_that_do_not_have_final_feedback()  # noqa
        for meta in meta_submissions:
            submission = meta.submission
            if hasattr(submission, 'assignments'):
                submission.assignments.filter(is_done=False).delete()
            TutorSubmissionAssignment.objects.create(
                subscription=self,
                submission=submission
            )
        log.info(f'Loaded all subscriptions of student {self.query_key}')

    @transaction.atomic
    def delete(self):
        """Remove unfinished assignments; delete the subscription outright
        only if no done assignments remain, otherwise just deactivate it
        to preserve the correction history."""
        self.assignments.filter(is_done=False).delete()
        if self.assignments.count() == 0:
            super().delete()
        else:
            self.deactivated = True
            self.save()
import logging
import uuid
import constance
from django.db import models
log = logging.getLogger(__name__)
config = constance.config
class Test(models.Model):
    """Tests contain information that has been unapproved by automated tests,
    and directly belongs to a submission. Often certain Feedback was already
    given by information provided by these tests.

    NOTE(review): "unapproved" above likely means "produced" — confirm the
    intended wording with the original author.

    Attributes
    ----------
    annotation : TextField
        All the output of the test (e.g. compiler output)
    label : CharField
        Indicates SUCCESS or FAILURE
    name : CharField
        The name of the test that was performed
    submission : ForeignKey
        The submission the tests were run on
    """
    test_id = models.UUIDField(primary_key=True,
                               default=uuid.uuid4,
                               editable=False)
    name = models.CharField(max_length=30)
    label = models.CharField(max_length=50)
    annotation = models.TextField()
    submission = models.ForeignKey('submission',
                                   related_name='tests',
                                   on_delete=models.CASCADE,)

    class Meta:
        verbose_name = "Test"
        verbose_name_plural = "Tests"
        # At most one result per test name per submission.
        unique_together = (('submission', 'name'),)

    def __str__(self) -> str:
        return f'{self.name} {self.label}'
import logging
import uuid
import constance
from django.contrib.auth.models import AbstractUser, UserManager
from django.db import models
from django.db.models import (Case, Count, IntegerField, Q,
Value, When)
from django.apps import apps
log = logging.getLogger(__name__)
config = constance.config
class TutorReviewerManager(UserManager):
    """Manager that restricts the queryset to accounts with the Tutor or
    Reviewer role, i.e. everybody who corrects submissions."""

    def get_queryset(self):
        return super().get_queryset().filter(
            Q(role=UserAccount.TUTOR) | Q(role=UserAccount.REVIEWER))

    def with_feedback_count(self):
        """Annotate each corrector with `feedback_created` and
        `feedback_validated`: counts of done assignments in the
        corresponding subscription stages."""
        def _get_counter(stage):
            # Counts done assignments belonging to subscriptions of the
            # given feedback stage.
            return Count(Case(
                When(
                    Q(subscriptions__feedback_stage=stage) &
                    Q(subscriptions__assignments__is_done=True),
                    then=Value(1))),
                output_field=IntegerField())

        # apps.get_model avoids a circular import with the subscription
        # module at class-definition time.
        submission_subscription_model = apps.get_model('core', 'SubmissionSubscription')  # noqa
        return self.get_queryset() \
            .annotate(feedback_created=_get_counter(
                submission_subscription_model.FEEDBACK_CREATION)) \
            .annotate(feedback_validated=_get_counter(
                submission_subscription_model.FEEDBACK_VALIDATION))
class UserAccount(AbstractUser):
    """
    An abstract base class implementing a fully featured User model with
    admin-compliant permissions.

    Username and password are required. Other fields are optional.
    """
    STUDENT = 'Student'
    TUTOR = 'Tutor'
    REVIEWER = 'Reviewer'
    ROLE_CHOICES = (
        (STUDENT, 'student'),
        (TUTOR, 'tutor'),
        (REVIEWER, 'reviewer')
    )

    # Fields
    role = models.CharField(max_length=50, choices=ROLE_CHOICES)
    user_id = models.UUIDField(primary_key=True,
                               default=uuid.uuid4,
                               editable=False)
    fullname = models.CharField('full name', max_length=70, blank=True)
    is_admin = models.BooleanField(default=False)

    # Managers
    objects = UserManager()
    corrector = TutorReviewerManager()

    # Helper methods
    # Consistency fix: compare against the role constants declared above
    # instead of repeating the string literals.
    def is_student(self) -> bool:
        """True if this account has the student role."""
        return self.role == self.STUDENT

    def is_tutor(self) -> bool:
        """True if this account has the tutor role."""
        return self.role == self.TUTOR

    def is_reviewer(self) -> bool:
        """True if this account has the reviewer role."""
        return self.role == self.REVIEWER

    # All of these methods are deprecated and should be replaced by custom
    # Managers (see tutor manager)
    @classmethod
    def get_students(cls):
        return cls.objects.filter(role=cls.STUDENT)

    @classmethod
    def get_tutors(cls):
        return cls.objects.filter(role=cls.TUTOR)

    @classmethod
    def get_reviewers(cls):
        return cls.objects.filter(role=cls.REVIEWER)
......@@ -24,8 +24,8 @@ class IsUserRoleGenericPermission(permissions.BasePermission):
user.role in self.roles)
if not is_authorized:
log.warn('User "%s" has no permission to view %s',
user.username, view.__class__.__name__)
log.warning('User "%s" has no permission to view %s',
user.username, view.__class__.__name__)
return is_authorized
......
......@@ -4,4 +4,5 @@ from .feedback import (FeedbackSerializer, FeedbackCommentSerializer, # noqa
from .subscription import * # noqa
from .student import * # noqa
from .submission import * # noqa
from .tutor import TutorSerializer # noqa
from .tutor import CorrectorSerializer # noqa
from .label import LabelSerializer # noqa