# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def _get_class_models(self, dataset_id):
    """Bundle every class-related queryset for *dataset_id* into one GroupDeletion.

    The querysets are listed dependents-first (assignments before the
    exams/lessons they point at, groups before the classroom) — presumably so
    GroupDeletion can delete them in order without FK violations; confirm
    against GroupDeletion's semantics.

    BUGFIX: the original fragment never closed the ``GroupDeletion(`` call
    (the ``return`` expression was left unbalanced); the closing paren is
    restored here.
    """
    dataset_id_filter = Q(dataset_id=dataset_id)
    return GroupDeletion(
        "Class models",
        querysets=[
            ExamAssignment.objects.filter(dataset_id_filter),
            Exam.objects.filter(dataset_id_filter),
            LessonAssignment.objects.filter(dataset_id_filter),
            Lesson.objects.filter(dataset_id_filter),
            AdHocGroup.objects.filter(dataset_id_filter),
            LearnerGroup.objects.filter(dataset_id_filter),
            Classroom.objects.filter(dataset_id_filter),
        ],
    )
def get_delete(self, options, keeping_users, update_classes):
    """Compute which users to delete and which classes to clear of members.

    Returns a tuple ``(users_to_delete, classes_to_clear)``. When the
    ``delete`` option is off, both are empty lists. Users explicitly kept,
    facility admins, and the requesting user (``options["userid"]``) are
    never deleted. Classes absent from *update_classes* are only cleared,
    not deleted, so lessons/quizzes created for them survive.
    """
    if not options["delete"]:
        return ([], [])

    # Build the protected-user id list: explicitly kept users, every
    # facility admin, and (when given) the user running the command.
    keep_ids = [user.id for user in keeping_users]
    keep_ids.extend(
        self.default_facility.get_admins().values_list("id", flat=True)
    )
    if options["userid"]:
        keep_ids.append(options["userid"])

    users_to_delete = FacilityUser.objects.filter(
        facility=self.default_facility
    ).exclude(id__in=keep_ids)

    # Classes not included in the csv will be cleared of users,
    # but not deleted to keep possible lessons and quizzes created for them:
    kept_class_ids = [cls.id for cls in update_classes]
    classes_to_clear = (
        Classroom.objects.filter(parent=self.default_facility)
        .exclude(id__in=kept_class_ids)
        .values_list("id", flat=True)
    )

    return (users_to_delete, classes_to_clear)
# users_with_logs
"uwl": users_with_logs,
# anon_visitors_with_logs
"vwl": anon_visitors_with_logs,
# first
"f" : first_interaction_timestamp("%Y-%m-%d") if first_interaction_timestamp else None,
# last
"l": last_interaction_timestamp("%Y-%m-%d") if last_interaction_timestamp else None,
# summ_started
"ss": summarylogs.count(),
# summ_complete
"sc": summarylogs.exclude(completion_timestamp=None).count(),
# sess_kinds
"sk": sesslogs_by_kind,
# class_count
"crc": Classroom.objects.filter(dataset_id=dataset_id).count(),
# group_count
"grc": LearnerGroup.objects.filter(dataset_id=dataset_id).count(),
# lesson_count
"lec": Lesson.objects.filter(dataset_id=dataset_id).count(),
# exam_count
"ec": Exam.objects.filter(dataset_id=dataset_id).count(),
# exam_log_count
"elc": ExamLog.objects.filter(dataset_id=dataset_id).count(),
# att_log_count
"alc": AttemptLog.objects.filter(dataset_id=dataset_id).count(),
# exam_att_log_count
"ealc": ExamAttemptLog.objects.filter(dataset_id=dataset_id).count(),
# sess_user_count
"suc": contsessions_user.count(),
# sess_anon_count
"sac": contsessions_anon.count(),
def annotate_queryset(self, queryset):
    """Annotate each facility row with user/classroom counts and sync info.

    Adds three annotations:
    - ``num_users``: count of FacilityUser rows whose facility is this row.
    - ``num_classrooms``: count of Classroom rows whose parent is this row.
    - ``last_synced``: the most recent TransferSession
      ``last_activity_timestamp`` for sessions whose ``filter`` equals this
      facility's dataset id (cast to text for comparison).

    BUGFIX: the original fragment never closed the final ``.annotate(`` or
    the ``return (`` expression; the closing parens are restored here. A
    ``[:1]`` slice is also added so the correlated Subquery yields a single
    row, as Django requires — confirm against the original upstream source.
    """
    return (
        queryset.annotate(
            num_users=SQCount(
                FacilityUser.objects.filter(facility=OuterRef("id")), field="id"
            )
        )
        .annotate(
            num_classrooms=SQCount(
                Classroom.objects.filter(parent=OuterRef("id")), field="id"
            )
        )
        .annotate(
            last_synced=Subquery(
                TransferSession.objects.filter(
                    filter=Cast(OuterRef("dataset"), TextField())
                )
                .order_by("-last_activity_timestamp")
                .values("last_activity_timestamp")[:1]
            )
        )
    )
# NOTE(review): fragment — the enclosing function's def is outside this
# chunk; logger, filepath, writer, queryset and facility are bound there.
# BUGFIX: the log format string contained no placeholder ("(unknown)" is
# metadata-join residue), so the .format(filename=...) argument was
# silently ignored; restore the {filename} placeholder.
logger.info("Creating csv file {filename}".format(filename=filepath))
writer.writeheader()
usernames = set()
query = (
    queryset.values("pk")
    # The user's Role kind within this facility (None for plain learners).
    .annotate(
        kind=Subquery(
            Role.objects.filter(collection_id=facility.id)
            .values("kind")
            .filter(user_id=OuterRef("id"))
        )
    )
    # Names of classrooms the user is enrolled in, concatenated by the
    # project's GroupConcatSubquery helper (presumably comma-joined —
    # confirm against its implementation).
    .annotate(
        enrolled=GroupConcatSubquery(
            Classroom.objects.filter(membership__user_id=OuterRef("id")).values(
                "name"
            ),
            field="name",
        )
    )
    # Names of classrooms the user coaches, concatenated the same way.
    .annotate(
        assigned=GroupConcatSubquery(
            Classroom.objects.filter(
                role__kind="coach", role__user=OuterRef("id")
            ).values("name"),
            field="name",
        )
    )
)
for item in query.values(*db_columns):
def get_num_classrooms(self, instance):
    """Return how many Classroom collections have *instance* as their parent."""
    classrooms = Classroom.objects.filter(parent=instance)
    return classrooms.count()