From 8e95bb374c107e407ddf45d4f2cfe89e9acd1e30 Mon Sep 17 00:00:00 2001 From: ubuntu Date: Thu, 17 Feb 2022 17:53:27 -0500 Subject: [PATCH 001/100] initial 2.0 conversions --- adl_lrs/settings.py | 4 +- lrs/managers/ActivityManager.py | 12 +-- lrs/managers/ActivityProfileManager.py | 4 +- lrs/managers/ActivityStateManager.py | 2 +- lrs/managers/AgentProfileManager.py | 4 +- lrs/models.py | 78 ++++++++++++++-- lrs/tasks.py | 14 ++- lrs/tests/test_ActivityProfile.py | 10 +- lrs/tests/test_StatementFilter.py | 2 +- lrs/utils/StatementValidator.py | 65 +++++++++++-- lrs/utils/XAPIVersionHeaderMiddleware.py | 3 + lrs/utils/__init__.py | 42 ++++++++- lrs/utils/etag.py | 4 +- lrs/utils/req_parse.py | 13 ++- lrs/utils/req_process.py | 21 ++++- lrs/utils/req_validate.py | 111 ++++++++++------------- 16 files changed, 278 insertions(+), 111 deletions(-) diff --git a/adl_lrs/settings.py b/adl_lrs/settings.py index 1d2c2dfd..4409bc1f 100644 --- a/adl_lrs/settings.py +++ b/adl_lrs/settings.py @@ -103,9 +103,9 @@ ) # Current xAPI version -XAPI_VERSION = '1.0.3' +XAPI_VERSION = '2.0.0' -XAPI_VERSIONS = ['1.0.0', '1.0.1', '1.0.2', XAPI_VERSION] +XAPI_VERSIONS = ['1.0.0', '1.0.1', '1.0.2', '1.0.3', XAPI_VERSION] # Where to be redirected after logging in LOGIN_REDIRECT_URL = '/me' diff --git a/lrs/managers/ActivityManager.py b/lrs/managers/ActivityManager.py index 567bbc5f..20811305 100644 --- a/lrs/managers/ActivityManager.py +++ b/lrs/managers/ActivityManager.py @@ -82,21 +82,17 @@ def populate(self, data): can_define = True try: # Using get or create inside try for racing issue - self.activity, act_created = Activity.objects.get_or_create( - activity_id=activity_id, authority=self.auth) + self.activity, act_created = Activity.objects.get_or_create(activity_id=activity_id, authority=self.auth) except IntegrityError: - self.activity = Activity.objects.get( - activity_id=activity_id) + self.activity = Activity.objects.get(activity_id=activity_id) act_created = False # If activity DNE 
and cannot define - create activity without auth else: try: # Using get or create inside try for racing issue - self.activity, act_created = Activity.objects.get_or_create( - activity_id=activity_id) + self.activity, act_created = Activity.objects.get_or_create(activity_id=activity_id) except IntegrityError: - self.activity = Activity.objects.get( - activity_id=activity_id) + self.activity = Activity.objects.get(activity_id=activity_id) act_created = False # If you retrieved an activity that has no auth but user has define # permissions, user becomes authority over activity diff --git a/lrs/managers/ActivityProfileManager.py b/lrs/managers/ActivityProfileManager.py index 01985553..608a9fcb 100644 --- a/lrs/managers/ActivityProfileManager.py +++ b/lrs/managers/ActivityProfileManager.py @@ -40,6 +40,9 @@ def post_profile(self, request_dict): # If incoming profile is application/json and if a profile didn't # already exist with the same activityId and profileId if created: + # xAPI 2.0 Addition: + etag.check_preconditions(request_dict, p, created) + p.json_profile = post_profile p.content_type = "application/json" p.etag = etag.create_tag(post_profile) @@ -75,7 +78,6 @@ def put_profile(self, request_dict): except: profile = ContentFile(str(request_dict['profile'])) - etag.check_preconditions(request_dict, p, created) # If a profile already existed with the profileId and activityId if not created: if p.profile: diff --git a/lrs/managers/ActivityStateManager.py b/lrs/managers/ActivityStateManager.py index 15624056..f3655218 100644 --- a/lrs/managers/ActivityStateManager.py +++ b/lrs/managers/ActivityStateManager.py @@ -66,6 +66,7 @@ def post_state(self, request_dict): # already exist with the same agent, stateId, actId, and/or # registration if created: + etag.check_preconditions(request_dict, s, created, False) s.json_state = post_state s.content_type = "application/json" s.etag = etag.create_tag(post_state) @@ -107,7 +108,6 @@ def put_state(self, request_dict): 
except: post_state = ContentFile(str(request_dict['state'])) - etag.check_preconditions(request_dict, s, created, False) # If a state already existed with the profileId and activityId if not created: if s.state: diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index 326d7d86..1eb9b891 100644 --- a/lrs/managers/AgentProfileManager.py +++ b/lrs/managers/AgentProfileManager.py @@ -38,6 +38,7 @@ def post_profile(self, request_dict): # If incoming profile is application/json and if a profile didn't # already exist with the same agent and profileId if created: + etag.check_preconditions(request_dict, p, created) p.json_profile = post_profile p.content_type = "application/json" p.etag = etag.create_tag(post_profile) @@ -73,7 +74,6 @@ def put_profile(self, request_dict): except: profile = ContentFile(str(request_dict['profile'])) - etag.check_preconditions(request_dict, p, created) # If it already exists delete it if p.profile: try: @@ -85,7 +85,7 @@ def put_profile(self, request_dict): # Profile being PUT is json else: # (overwrite existing profile data) - etag.check_preconditions(request_dict, p, created) + etag.parse_datetime(request_dict, p, created) the_profile = request_dict['profile'] p.json_profile = the_profile p.content_type = request_dict['headers']['CONTENT_TYPE'] diff --git a/lrs/models.py b/lrs/models.py index 7ebea6dd..dc47dd74 100644 --- a/lrs/models.py +++ b/lrs/models.py @@ -22,8 +22,6 @@ STATEMENT_ATTACHMENT_UPLOAD_TO = "attachment_payloads" # Called when a user is created, saved, or logging in - - def attach_user(sender, **kwargs): user = kwargs["instance"] if kwargs["created"]: @@ -31,8 +29,8 @@ def attach_user(sender, **kwargs): **{'name': user.username, 'mbox': 'mailto:%s' % user.email, 'objectType': 'Agent'})[0] agent.user = user agent.save() -post_save.connect(attach_user, sender=User) +post_save.connect(attach_user, sender=User) class Verb(models.Model): verb_id = models.CharField( @@ -293,9 +291,47 @@ def 
__unicode__(self): return json.dumps(self.to_dict(), sort_keys=False) +class RelevantType(models.Model): + relevantType = models.CharField(max_length=MAX_URL_LENGTH, blank=True, db_index=True) + + +class ContextAgent(models.Model): + objectType = models.CharField(max_length=14, blank=True, default="contextAgent") + agent = models.ForeignKey( + Agent, related_name="conag_agent", on_delete=models.SET_NULL, blank=True, db_index=True, null=True) + relevantType = models.ManyToManyField(RelevantType, blank=True, related_name="conag_relevantType") + + def to_dict(self, ids_only=False): + ret = OrderedDict() + if self.agent: + ret['agent'] = self.agent.to_dict(ids_only) + if self.relevantType.all(): + ret['relevantType'] = [relType.relevantType for relType in self.relevantType.all()] + + ret['objectType'] = self.objectType + + return ret + + +class ContextGroup(models.Model): + objectType = models.CharField(max_length=14, blank=True, default="contextGroup") + group = models.ForeignKey( + Agent, related_name="congrp_group", on_delete=models.SET_NULL, blank=True, db_index=True, null=True) + relevantType = models.ManyToManyField(RelevantType, related_name="congrp_relevantType") + + def to_dict(self, ids_only=False): + ret = OrderedDict() + if self.group: + ret['group'] = self.group.to_dict(ids_only) + if self.relevantType.all(): + ret['relevantType'] = [relType.relevantType for relType in self.relevantType.all()] + + ret['objectType'] = self.objectType + + return ret + class Activity(models.Model): - activity_id = models.CharField( - max_length=MAX_URL_LENGTH, db_index=True, unique=True) + activity_id = models.CharField(max_length=MAX_URL_LENGTH, db_index=True, unique=True) canonical_data = JSONField(default=dict) authority = models.ForeignKey(Agent, null=True, on_delete=models.CASCADE) @@ -381,6 +417,10 @@ class SubStatement(models.Model): Activity, related_name="sub_context_ca_category") context_ca_other = models.ManyToManyField( Activity, 
related_name="sub_context_ca_other") + + context_contextAgents = JSONField(default=list, blank=True) + context_contextGroups = JSONField(default=list, blank=True) + # context also has a stmt field which is a statementref context_statement = models.CharField(max_length=40, blank=True) @@ -461,6 +501,13 @@ def to_dict(self, lang=None, ids_only=False): ret['context']['extensions'] = self.context_extensions if not ret['context']['contextActivities']: del ret['context']['contextActivities'] + + + if self.context_contextAgents: + ret['context']['contextAgents'] = self.context_contextAgents + if self.context_contextGroups: + ret['context']['contextGroups'] = self.context_contextGroups + if not ret['context']: del ret['context'] @@ -538,6 +585,10 @@ class Statement(models.Model): Activity, related_name="stmt_context_ca_category") context_ca_other = models.ManyToManyField( Activity, related_name="stmt_context_ca_other") + + context_contextAgents = JSONField(default=list, blank=True) + context_contextGroups = JSONField(default=list, blank=True) + # context also has a stmt field which is a statementref context_statement = models.CharField(max_length=40, blank=True) version = models.CharField(max_length=7) @@ -613,33 +664,48 @@ def to_dict(self, lang=None, ret_format='exact'): 'id': self.context_statement, 'objectType': 'StatementRef'} ret['context']['contextActivities'] = OrderedDict() + if self.context_ca_parent.all(): ret['context']['contextActivities']['parent'] = [cap.return_activity_with_lang_format( lang, ids_only) for cap in self.context_ca_parent.all()] + if self.context_ca_grouping.all(): ret['context']['contextActivities']['grouping'] = [cag.return_activity_with_lang_format( lang, ids_only) for cag in self.context_ca_grouping.all()] + if self.context_ca_category.all(): ret['context']['contextActivities']['category'] = [cac.return_activity_with_lang_format( lang, ids_only) for cac in self.context_ca_category.all()] + if self.context_ca_other.all(): 
ret['context']['contextActivities']['other'] = [cao.return_activity_with_lang_format( lang, ids_only) for cao in self.context_ca_other.all()] + if self.context_extensions: ret['context']['extensions'] = self.context_extensions + if not ret['context']['contextActivities']: del ret['context']['contextActivities'] + + if self.context_contextAgents: + ret['context']['contextAgents'] = self.context_contextAgents + if self.context_contextGroups: + ret['context']['contextGroups'] = self.context_contextGroups + if not ret['context']: del ret['context'] ret['timestamp'] = self.timestamp.isoformat() ret['stored'] = self.stored.isoformat() + ret['version'] = self.version + if self.authority is not None: ret['authority'] = self.authority.to_dict(ids_only) - ret['version'] = self.version + if self.stmt_attachments.all(): ret['attachments'] = [a.return_attachment_with_lang( lang) for a in self.stmt_attachments.all()] + return ret def get_a_name(self): diff --git a/lrs/tasks.py b/lrs/tasks.py index 2d77f483..962d8fdc 100644 --- a/lrs/tasks.py +++ b/lrs/tasks.py @@ -6,6 +6,7 @@ import requests import uuid from hashlib import sha1 +from datetime import datetime from celery import shared_task from celery.exceptions import SoftTimeLimitExceeded @@ -14,6 +15,7 @@ from django.conf import settings from django.db import transaction from django.db.models import Q +from django.utils.timezone import utc from .utils.StatementValidator import StatementValidator @@ -66,7 +68,7 @@ def check_statement_hooks(stmt_ids): celery_logger.exception("Could not send statements to hook %s: %s" % ( str(config['endpoint']), str(e))) except SoftTimeLimitExceeded: - celery_logger.exception("Statement hook task timed out") + celery_logger.exception("Statement hook task timed out.") def parse_filter(filters, filterQ): @@ -173,17 +175,23 @@ def set_object_agent_query(q, agent, or_operand): if or_operand: return q | (Q(actor=agent) | Q(object_agent=agent) | Q(authority=agent) | Q(context_instructor=agent) | 
Q(context_team=agent) | + Q(context_contextAgents=agent) | Q(context_contextGroups=agent) | Q(object_substatement__actor=agent) | Q(object_substatement__object_agent=agent) | Q(object_substatement__context_instructor=agent) | - Q(object_substatement__context_team=agent)) + Q(object_substatement__context_team=agent) | + Q(object_substatement__context_contextAgents=agent) | + Q(object_substatement__context_contextGroups=agent)) return q & (Q(actor=agent) | Q(object_agent=agent) | Q(authority=agent) | Q(context_instructor=agent) | Q(context_team=agent) | + Q(context_contextAgents=agent) | Q(context_contextGroups=agent) | Q(object_substatement__actor=agent) | Q(object_substatement__object_agent=agent) | Q(object_substatement__context_instructor=agent) | - Q(object_substatement__context_team=agent)) + Q(object_substatement__context_team=agent) | + Q(object_substatement__context_contextAgents=agent) | + Q(object_substatement__context_contextGroups=agent)) # Retrieve JSON data from ID diff --git a/lrs/tests/test_ActivityProfile.py b/lrs/tests/test_ActivityProfile.py index 3d389cdc..d3578968 100644 --- a/lrs/tests/test_ActivityProfile.py +++ b/lrs/tests/test_ActivityProfile.py @@ -140,13 +140,13 @@ def test_put_no_params(self): put = self.client.put(reverse('lrs:activity_profile'), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION) self.assertEqual( - put.content, 'Error -- activity_profile - method = PUT, but activityId parameter missing..') + put.content, 'Error -- activity_profile - method = PUT, but activityId parameter missing.') def test_put_no_activityId(self): put = self.client.put(reverse('lrs:activity_profile'), { 'profileId': '10'}, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION) self.assertEqual( - put.content, 'Error -- activity_profile - method = PUT, but activityId parameter missing..') + put.content, 'Error -- activity_profile - method = PUT, but 
activityId parameter missing.') def test_put_no_profileId(self): testparams = {'activityId': 'act:act:act'} @@ -155,7 +155,7 @@ def test_put_no_profileId(self): put = self.client.put(path, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION) self.assertEqual( - put.content, 'Error -- activity_profile - method = PUT, but profileId parameter missing..') + put.content, 'Error -- activity_profile - method = PUT, but profileId parameter missing.') def test_put_etag_missing_on_change(self): path = '%s?%s' % (reverse('lrs:activity_profile'), @@ -390,7 +390,7 @@ def test_get_no_activityId_with_profileId(self): 'profileId': self.testprofileId3}, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth) self.assertEqual(response.status_code, 400) self.assertEqual( - response.content, 'Error -- activity_profile - method = GET, but activityId parameter missing..') + response.content, 'Error -- activity_profile - method = GET, but activityId parameter missing.') def test_get_no_activityId_with_since(self): since = "2012-07-01T13:30:00+04:00" @@ -398,7 +398,7 @@ def test_get_no_activityId_with_since(self): 'since': since}, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth) self.assertEqual(response.status_code, 400) self.assertEqual( - response.content, 'Error -- activity_profile - method = GET, but activityId parameter missing..') + response.content, 'Error -- activity_profile - method = GET, but activityId parameter missing.') def test_delete(self): response = self.client.delete(reverse('lrs:activity_profile'), { diff --git a/lrs/tests/test_StatementFilter.py b/lrs/tests/test_StatementFilter.py index 3411a355..be9d85a5 100644 --- a/lrs/tests/test_StatementFilter.py +++ b/lrs/tests/test_StatementFilter.py @@ -43,7 +43,7 @@ def setUp(self): form = {"username": self.username, "email": self.email, "password": self.password, "password2": self.password} self.client.post(reverse(register), form, 
- X_Experience_API_Version="1.0") + X_Experience_API_Version="2.0") def tearDown(self): settings.SERVER_STMT_LIMIT = 100 diff --git a/lrs/utils/StatementValidator.py b/lrs/utils/StatementValidator.py index 653641cc..9b652f92 100644 --- a/lrs/utils/StatementValidator.py +++ b/lrs/utils/StatementValidator.py @@ -1,9 +1,12 @@ import re -from isodate.isodatetime import parse_datetime + from rfc3987 import parse as iriparse from uuid import UUID -from . import convert_to_datatype +from django.core.exceptions import ValidationError +from django.utils.timezone import utc + +from . import convert_to_datatype, validate_timestamp from ..exceptions import ParamError statement_allowed_fields = ['id', 'actor', 'verb', 'object', 'result', 'stored', @@ -44,6 +47,10 @@ 'revision', 'platform', 'language', 'statement', 'extensions'] +context_agent_allowed_fields = ['objectType', 'agent', 'relevantTypes'] + +context_group_allowed_fields = ['objectType', 'group', 'relevantTypes'] + class StatementValidator(): @@ -68,6 +75,16 @@ def __init__(self, data=None): def validate(self): # If list, validate each stmt inside if isinstance(self.data, list): + + # First check if list has duplicate stmt IDs. 
+ set_ids = set() + for st in self.data: + if 'id' in st: + if st['id'] in set_ids: + self.return_error("Statement batch contains duplicate IDs") + else: + set_ids.add(st['id']) + for st in self.data: self.validate_statement(st) return "All Statements are valid" @@ -181,7 +198,7 @@ def validate_statement(self, stmt): # is 1.0.0 + if 'version' in stmt: if isinstance(stmt['version'], str): - version_regex = re.compile("^1\.0(\.\d+)?$") + version_regex = re.compile("^(1|2)\.0(\.\d+)?$") if not version_regex.match(stmt['version']): self.return_error( "%s is not a supported version" % stmt['version']) @@ -196,7 +213,7 @@ def validate_statement(self, stmt): if 'timestamp' in stmt: timestamp = stmt['timestamp'] try: - parse_datetime(timestamp) + validate_timestamp(timestamp) # Reject statements that don't comply with ISO 8601 offsets if timestamp.endswith("-00") or timestamp.endswith("-0000") or timestamp.endswith("-00:00"): @@ -211,7 +228,7 @@ def validate_statement(self, stmt): if 'stored' in stmt: stored = stmt['stored'] try: - parse_datetime(stored) + validate_timestamp(stored) except Exception as e: self.return_error( "Stored error - There was an error while parsing the date from %s -- Error: %s" % (stored, str(e))) @@ -645,7 +662,7 @@ def validate_substatement(self, substmt): if 'timestamp' in substmt: timestamp = substmt['timestamp'] try: - parse_datetime(timestamp) + validate_timestamp(timestamp) # Reject statements that don't comply with ISO 8601 offsets if timestamp.endswith("-00") or timestamp.endswith("-0000") or timestamp.endswith("-00:00"): @@ -811,6 +828,14 @@ def validate_context(self, context, stmt_object): if 'contextActivities' in context: self.validate_context_activities(context['contextActivities']) + # # If contextAgents given, ensure they are valid contextAgents. + if 'contextAgents' in context: + self.validate_context_agents(context['contextAgents']) + + # # If contextGroups given, ensure they are valid contextGroups. 
+ if 'contextGroups' in context: + self.validate_context_groups(context['contextGroups']) + # If extensions, validate if 'extensions' in context: self.validate_extensions(context['extensions'], 'context extensions') @@ -832,4 +857,30 @@ def validate_context_activities(self, conacts): self.validate_activity(conact[1]) else: self.return_error( - "contextActivities is not formatted correctly") \ No newline at end of file + "contextActivities is not formatted correctly") + + def validate_context_agents(self, conags): + + self.check_if_list(congrps, "Context Agents") + + for sub in conags: + if sub["objectType"] != "contextAgent": + raise ValidationError("[objectType] for Context Agent entries must be 'contextAgent'") + + if not isinstance(sub["relevantTypes"], list): + raise ValidationError("[relevantTypes] for Context Agent entries must be a list") + + self.validate_agent(sub["agent"], 'Context agent') + + def validate_context_groups(self, congrps): + + self.check_if_list(congrps, "Context Groups") + + for sub in congrps: + if sub["objectType"] != "contextGroup": + raise ValidationError("[objectType] for Context Group entries must be 'contextGroup'") + + if not isinstance(sub["relevantTypes"], list): + raise ValidationError("[relevantTypes] for Context Group entries must be a list") + + self.validate_agent(sub["group"], 'Context group') \ No newline at end of file diff --git a/lrs/utils/XAPIVersionHeaderMiddleware.py b/lrs/utils/XAPIVersionHeaderMiddleware.py index af53f5e7..be660027 100644 --- a/lrs/utils/XAPIVersionHeaderMiddleware.py +++ b/lrs/utils/XAPIVersionHeaderMiddleware.py @@ -42,6 +42,9 @@ def process_request(self, request): if version == '1.0' or (version.startswith('1.0') and \ version in settings.XAPI_VERSIONS): return None + elif version == '2.0' or (version.startswith('2.0') and \ + version in settings.XAPI_VERSIONS): + return None else: resp = HttpResponse("X-Experience-API-Version is not supported", status=400) resp['X-Experience-API-Version'] = 
settings.XAPI_VERSION diff --git a/lrs/utils/__init__.py b/lrs/utils/__init__.py index 682089cd..57a42ea1 100644 --- a/lrs/utils/__init__.py +++ b/lrs/utils/__init__.py @@ -1,15 +1,51 @@ import ast import json -import urllib.request, urllib.parse, urllib.error -import urllib.parse +import urllib +import urlparse + +from datetime import datetime +from isodate.isodates import parse_date from isodate.isodatetime import parse_datetime +from isodate.isoerror import ISO8601Error +from isodate.isotime import parse_time + from django.conf import settings +from django.utils.timezone import utc from ..exceptions import ParamError agent_ifps_can_only_be_one = ['mbox', 'mbox_sha1sum', 'openid', 'account'] +# Exception type to accommodate RFC 3339 timestamp validation. +class RFC3339Error(ValueError): + pass + +def validate_timestamp(time_str): + time_ret = None + + try: + time_ret = parse_datetime(time_str) + except (Exception, ISO8601Error): + try: + date_out, time_out = time_str.split(" ") + except ValueError: + raise RFC3339Error("Time designators 'T' or ' ' missing. Unable to parse datetime string %r." 
% time_str) + else: + date_temp = parse_date(date_out) + time_temp = parse_time(time_out) + time_ret = datetime.combine(date_temp, time_temp) + + if time_ret is not None: + rfc_ret = None + try: + rfc_ret = time_ret.replace(tzinfo=utc) + except ValueError: + rfc_ret = time_ret + + return rfc_ret + + def get_agent_ifp(data): ifp_sent = [ a for a in agent_ifps_can_only_be_one if data.get(a, None) is not None] @@ -32,7 +68,7 @@ def get_agent_ifp(data): def convert_to_datetime_object(timestr): try: - date_object = parse_datetime(timestr) + date_object = validate_timestamp(timestr) except ValueError as e: raise ParamError( "There was an error while parsing the date from %s -- Error: %s" % (timestr, str(e))) diff --git a/lrs/utils/etag.py b/lrs/utils/etag.py index ab876a2b..62ac276d 100644 --- a/lrs/utils/etag.py +++ b/lrs/utils/etag.py @@ -49,8 +49,8 @@ def check_preconditions(request, contents, created, required=True): else: # If there are both, if none match takes precendence if request_etag[IF_NONE_MATCH]: - # only check if the content already exists. if it did not - # already exist it should pass + # Only check if the content already exists. if it did not + # already exist it should pass. if exists: if request_etag[IF_NONE_MATCH] == "*": raise EtagPreconditionFail("Resource detected") diff --git a/lrs/utils/req_parse.py b/lrs/utils/req_parse.py index b6864720..14c180d1 100644 --- a/lrs/utils/req_parse.py +++ b/lrs/utils/req_parse.py @@ -3,8 +3,8 @@ import email import hashlib import json + from isodate.isoerror import ISO8601Error -from isodate.isodatetime import parse_datetime from Crypto.PublicKey import RSA from jose import jws @@ -13,7 +13,7 @@ from django.urls import reverse from django.http import QueryDict -from . import convert_to_datatype, convert_post_body_to_dict +from . 
import convert_to_datatype, convert_post_body_to_dict, validate_timestamp from .etag import get_etag_info from ..exceptions import OauthUnauthorized, OauthBadRequest, ParamError, BadRequest @@ -198,7 +198,7 @@ def parse_cors_request(request, r_dict): # treat these form params as headers header_list = ['X-Experience-API-Version', 'Content-Type', 'If-Match', \ - 'If-None-Match', 'Authorization', 'Content-Length'] + 'If-None-Match', 'Authorization', 'Content-Length', 'Last-Modified'] header_dict = {k:body[k] for k in body if k in header_list} r_dict['headers'].update(header_dict) if 'If-Match' in r_dict['headers']: @@ -472,14 +472,14 @@ def get_headers(headers): # Get updated header if 'HTTP_UPDATED' in headers: try: - header_dict['updated'] = parse_datetime( + header_dict['updated'] = validate_timestamp( headers.pop('HTTP_UPDATED')) except (Exception, ISO8601Error): raise ParamError( "Updated header was not a valid ISO8601 timestamp") elif 'updated' in headers: try: - header_dict['updated'] = parse_datetime(headers.pop('updated')) + header_dict['updated'] = validate_timestamp(headers.pop('updated')) except (Exception, ISO8601Error): raise ParamError( "Updated header was not a valid ISO8601 timestamp") @@ -493,8 +493,7 @@ def get_headers(headers): # FireFox automatically adds ;charset=foo to the end of headers. 
This # will strip it out if ';' in header_dict['CONTENT_TYPE'] and 'boundary' not in header_dict['CONTENT_TYPE']: - header_dict['CONTENT_TYPE'] = header_dict['CONTENT_TYPE'].split(';')[ - 0] + header_dict['CONTENT_TYPE'] = header_dict['CONTENT_TYPE'].split(';')[0] # Get etag header_dict['ETAG'] = get_etag_info(headers) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 202f008b..dc42ad17 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -1,6 +1,9 @@ +import copy import json +import re +import unicodedata import uuid -import copy + from datetime import datetime from django.http import HttpResponse, HttpResponseNotFound, JsonResponse @@ -26,6 +29,22 @@ def process_statement(stmt, auth, payload_sha2s): if 'version' not in stmt: stmt['version'] = settings.XAPI_VERSIONS[0] + # Check for result -> duration and truncate seconds if needed. + if 'result' in stmt: + if 'duration' in stmt['result']: + stmt_dur = stmt['result']['duration'] + sec_split = re.findall("\d+(?:\.\d+)?S", stmt_dur) + if sec_split: + sec_as_str = sec_split[0] + sec_as_num = float(sec_as_str.replace('S', '')) + + if not sec_as_num.is_integer(): + sec_trunc = round(sec_as_num, 2) + else: + sec_trunc = int(sec_as_num) + + stmt['result']['duration'] = unicodedata.normalize("NFKD", stmt_dur.replace(sec_as_str, str(sec_trunc) + 'S')) + # Convert context activities to list if dict if 'context' in stmt and 'contextActivities' in stmt['context']: for k, v in list(stmt['context']['contextActivities'].items()): diff --git a/lrs/utils/req_validate.py b/lrs/utils/req_validate.py index 92a6a3fb..9684ee69 100644 --- a/lrs/utils/req_validate.py +++ b/lrs/utils/req_validate.py @@ -1,8 +1,6 @@ -from isodate.isodatetime import parse_datetime -from isodate.isoerror import ISO8601Error import uuid -from . import get_agent_ifp, convert_to_datatype +from . 
import get_agent_ifp, convert_to_datatype, validate_timestamp, RFC3339Error from .authorization import auth from .StatementValidator import StatementValidator @@ -194,17 +192,17 @@ def statements_get(req_dict): if 'since' in req_dict['params']: try: - parse_datetime(req_dict['params']['since']) - except (Exception, ISO8601Error): + validate_timestamp(req_dict['params']['since']) + except (Exception, RFC3339Error): raise ParamError( - "since parameter was not a valid ISO8601 timestamp") + "since parameter was not a valid RFC3339 timestamp") if 'until' in req_dict['params']: try: - parse_datetime(req_dict['params']['until']) - except (Exception, ISO8601Error): + validate_timestamp(req_dict['params']['until']) + except (Exception, RFC3339Error): raise ParamError( - "until parameter was not a valid ISO8601 timestamp") + "until parameter was not a valid RFC3339 timestamp") if 'ascending' in req_dict['params']: if req_dict['params']['ascending'].lower() == 'true': @@ -334,24 +332,20 @@ def validate_attachments(attachment_data, content_type): @auth def activity_state_post(req_dict): - rogueparams = set(req_dict['params']) - \ - set(["activityId", "agent", "stateId", "registration"]) + rogueparams = set(req_dict['params']) - set(["activityId", "agent", "stateId", "registration"]) if rogueparams: - raise ParamError( - "The post activity state request contained unexpected parameters: %s" % ", ".join(rogueparams)) + raise ParamError("The post activity state request contained unexpected parameters: " + ", ".join(rogueparams)) validator = StatementValidator() if 'activityId' in req_dict['params']: validator.validate_iri( req_dict['params']['activityId'], "activityId param for activity state") else: - err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing.." % req_dict[ - 'method'] + err_msg = f"Error -- activity_state - method = {req_dict['method']}, but activityId parameter is missing." 
raise ParamError(err_msg) if 'stateId' not in req_dict['params']: - err_msg = "Error -- activity_state - method = %s, but stateId parameter is missing.." % req_dict[ - 'method'] + err_msg = f"Error -- activity_state - method = {req_dict['method']}, but stateId parameter is missing." raise ParamError(err_msg) if 'registration' in req_dict['params']: @@ -363,12 +357,11 @@ def activity_state_post(req_dict): agent = convert_to_datatype(req_dict['params']['agent']) req_dict['params']['agent'] = agent except Exception: - raise ParamError("agent param %s is not valid" % \ - req_dict['params']['agent']) + raise ParamError(f"agent param {req_dict['params']['agent']} is not valid") + validator.validate_agent(agent, "Agent param") else: - err_msg = "Error -- activity_state - method = %s, but agent parameter is missing.." % req_dict[ - 'method'] + err_msg = f"Error -- activity_state - method = {req_dict['method']}, but agent parameter is missing." raise ParamError(err_msg) # Must have body included for state @@ -432,12 +425,12 @@ def activity_state_put(req_dict): validator.validate_iri( req_dict['params']['activityId'], "activityId param for activity state") else: - err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing.." % req_dict[ + err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing." % req_dict[ 'method'] raise ParamError(err_msg) if 'stateId' not in req_dict['params']: - err_msg = "Error -- activity_state - method = %s, but stateId parameter is missing.." % req_dict[ + err_msg = "Error -- activity_state - method = %s, but stateId parameter is missing." % req_dict[ 'method'] raise ParamError(err_msg) @@ -454,7 +447,7 @@ def activity_state_put(req_dict): req_dict['params']['agent']) validator.validate_agent(agent, "Agent param") else: - err_msg = "Error -- activity_state - method = %s, but agent parameter is missing.." 
% req_dict[ + err_msg = "Error -- activity_state - method = %s, but agent parameter is missing." % req_dict[ 'method'] raise ParamError(err_msg) @@ -485,7 +478,7 @@ def activity_state_get(req_dict): validator.validate_iri( req_dict['params']['activityId'], "activityId param for activity state") else: - err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing.." % req_dict[ + err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing." % req_dict[ 'method'] raise ParamError(err_msg) @@ -502,16 +495,16 @@ def activity_state_get(req_dict): req_dict['params']['agent']) validator.validate_agent(agent, "Agent param") else: - err_msg = "Error -- activity_state - method = %s, but agent parameter is missing.." % req_dict[ + err_msg = "Error -- activity_state - method = %s, but agent parameter is missing." % req_dict[ 'method'] raise ParamError(err_msg) if 'since' in req_dict['params']: try: - parse_datetime(req_dict['params']['since']) - except (Exception, ISO8601Error): + validate_timestamp(req_dict['params']['since']) + except (Exception, RFC3339Error): raise ParamError( - "Since parameter was not a valid ISO8601 timestamp") + "Since parameter was not a valid RFC3339 timestamp") # Extra validation if oauth if req_dict['auth']['type'] == 'oauth': @@ -532,7 +525,7 @@ def activity_state_delete(req_dict): validator.validate_iri( req_dict['params']['activityId'], "activityId param for activity state") else: - err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing.." % req_dict[ + err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing." % req_dict[ 'method'] raise ParamError(err_msg) @@ -549,7 +542,7 @@ def activity_state_delete(req_dict): req_dict['params']['agent']) validator.validate_agent(agent, "Agent param") else: - err_msg = "Error -- activity_state - method = %s, but agent parameter is missing.." 
% req_dict[ + err_msg = "Error -- activity_state - method = %s, but agent parameter is missing." % req_dict[ 'method'] raise ParamError(err_msg) @@ -571,12 +564,12 @@ def activity_profile_post(req_dict): validator.validate_iri( req_dict['params']['activityId'], "activityId param for activity profile") else: - err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing.." % req_dict[ + err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing." % req_dict[ 'method'] raise ParamError(err_msg) if 'profileId' not in req_dict['params']: - err_msg = "Error -- activity_profile - method = %s, but profileId parameter missing.." % req_dict[ + err_msg = "Error -- activity_profile - method = %s, but profileId parameter missing." % req_dict[ 'method'] raise ParamError(err_msg) @@ -625,12 +618,12 @@ def activity_profile_put(req_dict): validator.validate_iri( req_dict['params']['activityId'], "activityId param for activity profile") else: - err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing.." % req_dict[ + err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing." % req_dict[ 'method'] raise ParamError(err_msg) if 'profileId' not in req_dict['params']: - err_msg = "Error -- activity_profile - method = %s, but profileId parameter missing.." % req_dict[ + err_msg = "Error -- activity_profile - method = %s, but profileId parameter missing." 
% req_dict[ 'method'] raise ParamError(err_msg) @@ -646,27 +639,24 @@ def activity_profile_put(req_dict): @auth def activity_profile_get(req_dict): - rogueparams = set(req_dict['params']) - \ - set(["activityId", "profileId", "since"]) + rogueparams = set(req_dict['params']) - set(["activityId", "profileId", "since"]) if rogueparams: raise ParamError( "The get activity profile request contained unexpected parameters: %s" % ", ".join(rogueparams)) validator = StatementValidator() if 'activityId' in req_dict['params']: - validator.validate_iri( - req_dict['params']['activityId'], "activityId param for activity profile") + validator.validate_iri(req_dict['params']['activityId'], "activityId param for activity profile") else: - err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing.." % req_dict[ - 'method'] + err_msg = f"Error -- activity_profile - method = {req_dict['method']}, but activityId parameter missing." raise ParamError(err_msg) if 'since' in req_dict['params']: try: - parse_datetime(req_dict['params']['since']) - except (Exception, ISO8601Error): + validate_timestamp(req_dict['params']['since']) + except (Exception, RFC3339Error): raise ParamError( - "Since parameter was not a valid ISO8601 timestamp") + "Since parameter was not a valid RFC3339 timestamp") return req_dict @@ -683,12 +673,12 @@ def activity_profile_delete(req_dict): validator.validate_iri( req_dict['params']['activityId'], "activityId param for activity profile") else: - err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing.." % req_dict[ + err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing." % req_dict[ 'method'] raise ParamError(err_msg) if 'profileId' not in req_dict['params']: - err_msg = "Error -- activity_profile - method = %s, but profileId parameter missing.." % req_dict[ + err_msg = "Error -- activity_profile - method = %s, but profileId parameter missing." 
% req_dict[ 'method'] raise ParamError(err_msg) @@ -699,19 +689,16 @@ def activity_profile_delete(req_dict): def activities_get(req_dict): rogueparams = set(req_dict['params']) - set(["activityId"]) if rogueparams: - raise ParamError( - "The get activities request contained unexpected parameters: %s" % ", ".join(rogueparams)) + raise ParamError("The get activities request contained unexpected parameters: " + ", ".join(rogueparams)) validator = StatementValidator() try: activity_id = req_dict['params']['activityId'] except KeyError: - err_msg = "Error -- activities - method = %s, but activityId parameter is missing" % req_dict[ - 'method'] + err_msg = f"Error -- activities - method = {eq_dict['method']}, but activityId parameter is missing" raise ParamError(err_msg) else: - validator.validate_iri( - activity_id, "activityId param") + validator.validate_iri(activity_id, "activityId param") # Try to retrieve activity, if DNE then return empty else return activity # info @@ -741,12 +728,12 @@ def agent_profile_post(req_dict): req_dict['params']['agent']) validator.validate_agent(agent, "Agent param") else: - err_msg = "Error -- agent_profile - method = %s, but agent parameter missing.." % req_dict[ + err_msg = "Error -- agent_profile - method = %s, but agent parameter missing." % req_dict[ 'method'] raise ParamError(err_msg) if 'profileId' not in req_dict['params']: - err_msg = "Error -- agent_profile - method = %s, but profileId parameter missing.." % req_dict[ + err_msg = "Error -- agent_profile - method = %s, but profileId parameter missing." % req_dict[ 'method'] raise ParamError(err_msg) @@ -806,12 +793,12 @@ def agent_profile_put(req_dict): req_dict['params']['agent']) validator.validate_agent(agent, "Agent param") else: - err_msg = "Error -- agent_profile - method = %s, but agent parameter missing.." % req_dict[ + err_msg = "Error -- agent_profile - method = %s, but agent parameter missing." 
% req_dict[ 'method'] raise ParamError(err_msg) if 'profileId' not in req_dict['params']: - err_msg = "Error -- agent_profile - method = %s, but profileId parameter missing.." % req_dict[ + err_msg = "Error -- agent_profile - method = %s, but profileId parameter missing." % req_dict[ 'method'] raise ParamError(err_msg) @@ -845,16 +832,16 @@ def agent_profile_get(req_dict): req_dict['params']['agent']) validator.validate_agent(agent, "Agent param") else: - err_msg = "Error -- agent_profile - method = %s, but agent parameter missing.." % req_dict[ + err_msg = "Error -- agent_profile - method = %s, but agent parameter missing." % req_dict[ 'method'] raise ParamError(err_msg) if 'since' in req_dict['params']: try: - parse_datetime(req_dict['params']['since']) - except (Exception, ISO8601Error): + validate_timestamp(req_dict['params']['since']) + except (Exception, RFC3339Error): raise ParamError( - "Since parameter was not a valid ISO8601 timestamp") + "Since parameter was not a valid RFC3339 timestamp") # Extra validation if oauth if req_dict['auth']['type'] == 'oauth': @@ -879,12 +866,12 @@ def agent_profile_delete(req_dict): req_dict['params']['agent']) validator.validate_agent(agent, "Agent param") else: - err_msg = "Error -- agent_profile - method = %s, but agent parameter missing.." % req_dict[ + err_msg = "Error -- agent_profile - method = %s, but agent parameter missing." % req_dict[ 'method'] raise ParamError(err_msg) if 'profileId' not in req_dict['params']: - err_msg = "Error -- agent_profile - method = %s, but profileId parameter missing.." % req_dict[ + err_msg = "Error -- agent_profile - method = %s, but profileId parameter missing." 
% req_dict[ 'method'] raise ParamError(err_msg) From e7d0761c14f971f7083a0ad84242bd5f6f881a9c Mon Sep 17 00:00:00 2001 From: ubuntu Date: Thu, 17 Feb 2022 18:09:32 -0500 Subject: [PATCH 002/100] fixing urlparse python2 issue --- lrs/utils/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lrs/utils/__init__.py b/lrs/utils/__init__.py index 57a42ea1..2b54d0e0 100644 --- a/lrs/utils/__init__.py +++ b/lrs/utils/__init__.py @@ -1,7 +1,8 @@ import ast import json import urllib -import urlparse + +from urllib.parse import parse_qs, urlparse from datetime import datetime from isodate.isodates import parse_date From 8ef4981ac681dac24f1aad72567f19ba653231df Mon Sep 17 00:00:00 2001 From: Trey Hayden <33755714+vbhayden@users.noreply.github.com> Date: Thu, 17 Feb 2022 18:12:32 -0500 Subject: [PATCH 003/100] Update docker-compose.yml --- docker/docker-compose.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 7272f0ac..563f9a32 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -49,6 +49,8 @@ services: - amqp networks: - public + volumes: + - ./data/lrs/logs:/opt/lrs/logs # nginx proxy used to handle LRS traffic nginx: @@ -86,4 +88,4 @@ services: networks: public: - driver: bridge \ No newline at end of file + driver: bridge From 345b60544ffd2f2141800c2cc5db7ad0f2591bba Mon Sep 17 00:00:00 2001 From: ubuntu Date: Fri, 18 Feb 2022 09:08:18 -0500 Subject: [PATCH 004/100] adjusting etag behavior for misc resources --- lrs/managers/ActivityProfileManager.py | 4 ++-- lrs/managers/ActivityStateManager.py | 4 ++-- lrs/managers/AgentProfileManager.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lrs/managers/ActivityProfileManager.py b/lrs/managers/ActivityProfileManager.py index 608a9fcb..64b4db3b 100644 --- a/lrs/managers/ActivityProfileManager.py +++ b/lrs/managers/ActivityProfileManager.py @@ -41,7 +41,7 @@ def 
post_profile(self, request_dict): # already exist with the same activityId and profileId if created: # xAPI 2.0 Addition: - etag.check_preconditions(request_dict, p, created) + etag.check_preconditions(request_dict, p, created, required=False) p.json_profile = post_profile p.content_type = "application/json" @@ -90,7 +90,7 @@ def put_profile(self, request_dict): self.save_non_json_profile(p, created, profile, request_dict) # Profile being PUT is json else: - etag.check_preconditions(request_dict, p, created) + etag.check_preconditions(request_dict, p, created, required=False) # If a profile already existed with the profileId and activityId # (overwrite existing profile data) the_profile = request_dict['profile'] diff --git a/lrs/managers/ActivityStateManager.py b/lrs/managers/ActivityStateManager.py index f3655218..15685ebc 100644 --- a/lrs/managers/ActivityStateManager.py +++ b/lrs/managers/ActivityStateManager.py @@ -66,7 +66,7 @@ def post_state(self, request_dict): # already exist with the same agent, stateId, actId, and/or # registration if created: - etag.check_preconditions(request_dict, s, created, False) + etag.check_preconditions(request_dict, s, created, required=False) s.json_state = post_state s.content_type = "application/json" s.etag = etag.create_tag(post_state) @@ -119,7 +119,7 @@ def put_state(self, request_dict): self.save_non_json_state(s, post_state, request_dict) # State being PUT is json else: - etag.check_preconditions(request_dict, s, created, False) + etag.check_preconditions(request_dict, s, created, required=False) the_state = request_dict['state'] s.json_state = the_state s.content_type = request_dict['headers']['CONTENT_TYPE'] diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index 1eb9b891..7678b768 100644 --- a/lrs/managers/AgentProfileManager.py +++ b/lrs/managers/AgentProfileManager.py @@ -38,7 +38,7 @@ def post_profile(self, request_dict): # If incoming profile is application/json and if a 
profile didn't # already exist with the same agent and profileId if created: - etag.check_preconditions(request_dict, p, created) + etag.check_preconditions(request_dict, s, created, required=False) p.json_profile = post_profile p.content_type = "application/json" p.etag = etag.create_tag(post_profile) From 80fded399d27464b05bd8398a25b65b3a5e7604f Mon Sep 17 00:00:00 2001 From: ubuntu Date: Fri, 18 Feb 2022 09:27:44 -0500 Subject: [PATCH 005/100] typo on etag content --- lrs/managers/AgentProfileManager.py | 2 +- lrs/utils/req_process.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index 7678b768..5843db5a 100644 --- a/lrs/managers/AgentProfileManager.py +++ b/lrs/managers/AgentProfileManager.py @@ -38,7 +38,7 @@ def post_profile(self, request_dict): # If incoming profile is application/json and if a profile didn't # already exist with the same agent and profileId if created: - etag.check_preconditions(request_dict, s, created, required=False) + etag.check_preconditions(request_dict, p, created, required=False) p.json_profile = post_profile p.content_type = "application/json" p.etag = etag.create_tag(post_profile) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index dc42ad17..449bba10 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -469,8 +469,7 @@ def agent_profile_get(req_dict): else: ap = AgentProfileManager(a) - profile_id = req_dict['params'].get( - 'profileId', None) if 'params' in req_dict else None + profile_id = req_dict['params'].get('profileId', None) if 'params' in req_dict else None if profile_id: resource = ap.get_profile(profile_id) if resource.profile: From 039ee4bb061fab4d16d816ea8a015837a269ca31 Mon Sep 17 00:00:00 2001 From: Trey Date: Wed, 12 Apr 2023 14:18:47 -0400 Subject: [PATCH 006/100] updating nginx config for 80->443 --- docker/nginx/default.conf | 38 +++++++++++++++++++------------------- 1 file 
changed, 19 insertions(+), 19 deletions(-) diff --git a/docker/nginx/default.conf b/docker/nginx/default.conf index 235c52df..84aa8a31 100644 --- a/docker/nginx/default.conf +++ b/docker/nginx/default.conf @@ -43,30 +43,30 @@ http { sendfile on; + server { + listen 80; + server_name _; + return 444; + } + server { + listen 443; + server_name _; + + ssl_certificate /usr/share/keys/live/$HOSTNAME/fullchain.pem; + ssl_certificate_key /usr/share/keys/live/$HOSTNAME/privkey.pem; + + ssl_protocols TLSv1 TLSv1.1 TLSv1.2; + ssl_ciphers HIGH:!aNULL:!MD5; + + return 444; + } + server { listen 80; server_name $HOSTNAME; location / { - include proxy_headers.conf; - proxy_pass http://lrs:8000; - } - - # Static assets for the LRS - location /static { - autoindex on; - expires 1w; - alias /opt/lrs/lrs-static; - } - location /static/el-pagination { - autoindex on; - expires 1w; - alias /opt/lrs/ep-static; - } - location /static/admin { - autoindex on; - expires 1w; - alias /opt/lrs/admin-static; + return 301 https://$server_name$request_uri; } location ~ /.well-known/acme-challenge { From 829b0e4a885b69212bd55a6a823f6c0a47712411 Mon Sep 17 00:00:00 2001 From: Trey Date: Wed, 12 Apr 2023 14:29:43 -0400 Subject: [PATCH 007/100] cleaning up nginx formatting --- docker/nginx/Dockerfile | 2 -- docker/nginx/default.conf | 42 ++++++++++++--------------------------- 2 files changed, 13 insertions(+), 31 deletions(-) diff --git a/docker/nginx/Dockerfile b/docker/nginx/Dockerfile index 575abc91..69c783a4 100644 --- a/docker/nginx/Dockerfile +++ b/docker/nginx/Dockerfile @@ -1,8 +1,6 @@ FROM nginx:alpine -ARG LRS_ROOT ARG HOSTNAME -ARG PORT # Move our configuration into place # diff --git a/docker/nginx/default.conf b/docker/nginx/default.conf index 84aa8a31..d796fccf 100644 --- a/docker/nginx/default.conf +++ b/docker/nginx/default.conf @@ -12,36 +12,20 @@ events { http { include mime.types; default_type application/octet-stream; - keepalive_timeout 1200s; - keepalive_requests 100000; + 
sendfile on; + keepalive_timeout 65; + + proxy_buffer_size 128k; + proxy_buffers 4 256k; + proxy_busy_buffers_size 256k; + + client_body_in_file_only clean; + client_body_buffer_size 32; - # reduce the data that needs to be sent over network -- for testing environment - gzip on; - # gzip_static on; - gzip_min_length 10240; - gzip_comp_level 1; - gzip_vary on; - gzip_disable msie6; - gzip_proxied expired no-cache no-store private auth; - gzip_types - # text/html is always compressed by HttpGzipModule - text/css - text/javascript - text/xml - text/plain - text/x-component - application/javascript - application/x-javascript - application/json - application/xml - application/rss+xml - application/atom+xml - font/truetype - font/opentype - application/vnd.ms-fontobject - image/svg+xml; + client_max_body_size 300M; - sendfile on; + access_log /var/log/nginx/access.log; + error_log /var/log/nginx/access.log debug; server { listen 80; @@ -132,7 +116,6 @@ http { proxy_read_timeout 300; location / { - include proxy_headers.conf; proxy_pass http://lrs:8000; } @@ -160,3 +143,4 @@ http { } } } + From 0e5a97051618558f18fcaa048e8c6bb56b8159ee Mon Sep 17 00:00:00 2001 From: Trey Date: Wed, 12 Apr 2023 14:51:01 -0400 Subject: [PATCH 008/100] cannot believe it was on 90 --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 9871ebb6..8bc2eaed 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -71,7 +71,7 @@ services: args: HOSTNAME: "${HOSTNAME}" ports: - - "90:90" + - "80:80" - "443:443" container_name: docker_nginx volumes: From 3f19c0713bb9b5537ae950540e2b0b82bc368e9f Mon Sep 17 00:00:00 2001 From: Trey Date: Wed, 12 Apr 2023 14:55:13 -0400 Subject: [PATCH 009/100] updating certbot generate path --- docker/certbot/generate.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/certbot/generate.sh b/docker/certbot/generate.sh index c964ee9e..70d3968e 100755 --- 
a/docker/certbot/generate.sh +++ b/docker/certbot/generate.sh @@ -1,6 +1,6 @@ #!/bin/bash -rm -rf ./certbot/etc +rm -rf ./docker/certbot/etc docker-compose run certbot \ certonly --webroot \ From 93380641706f220799a602e0933978d402bf6ea5 Mon Sep 17 00:00:00 2001 From: ubuntu Date: Wed, 12 Apr 2023 17:06:53 -0400 Subject: [PATCH 010/100] variable name fixes --- lrs/utils/req_validate.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/lrs/utils/req_validate.py b/lrs/utils/req_validate.py index 276d9378..157b9bdb 100644 --- a/lrs/utils/req_validate.py +++ b/lrs/utils/req_validate.py @@ -62,7 +62,7 @@ def validate_body(body, auth, content_type): for statement in body: server_validate_statement(statement, auth, content_type) except ValueError: - raise ValueError(f"'id' not iterable within statement: {stmt}, {type(stmt)}), {auth}, {content_type}") + raise ValueError(f"'id' not iterable within statement: {statement}, {type(statement)}), {auth}, {content_type}") def server_validate_statement(stmt, auth, content_type): try: @@ -704,7 +704,7 @@ def activities_get(req_dict): try: activity_id = req_dict['params']['activityId'] except KeyError: - err_msg = f"Error -- activities - method = {eq_dict['method']}, but activityId parameter is missing" + err_msg = f"Error -- activities - method = {req_dict['method']}, but activityId parameter is missing" raise ParamError(err_msg) else: validator.validate_iri(activity_id, "activityId param") @@ -773,8 +773,7 @@ def agent_profile_post(req_dict): agent = req_dict['params']['agent'] a = Agent.objects.retrieve_or_create(**agent)[0] try: - p = AgentProfile.objects.get( - profile_id=req_dict['params']['profileId'], agent=a) + p = AgentProfile.objects.get(profile_id=req_dict['params']['profileId'], agent=a) exists = True except AgentProfile.DoesNotExist: pass From 47d08ace528d27343d79bdc08c3fad8d1a237983 Mon Sep 17 00:00:00 2001 From: ubuntu Date: Wed, 12 Apr 2023 19:47:18 -0400 Subject: [PATCH 011/100] fixing etag 
typo --- lrs/managers/AgentProfileManager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index 5843db5a..f4838614 100644 --- a/lrs/managers/AgentProfileManager.py +++ b/lrs/managers/AgentProfileManager.py @@ -85,7 +85,7 @@ def put_profile(self, request_dict): # Profile being PUT is json else: # (overwrite existing profile data) - etag.parse_datetime(request_dict, p, created) + etag.check_preconditions(request_dict, p, created) the_profile = request_dict['profile'] p.json_profile = the_profile p.content_type = request_dict['headers']['CONTENT_TYPE'] From 69662f5ca98af1e5080747b3b79b1046ca0d755b Mon Sep 17 00:00:00 2001 From: Trey Date: Mon, 17 Apr 2023 17:11:13 -0400 Subject: [PATCH 012/100] adding contextAgents/Groups to validator --- lrs/utils/StatementValidator.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lrs/utils/StatementValidator.py b/lrs/utils/StatementValidator.py index 9b652f92..8bb90e07 100644 --- a/lrs/utils/StatementValidator.py +++ b/lrs/utils/StatementValidator.py @@ -44,7 +44,8 @@ score_allowed_fields = ['scaled', 'raw', 'min', 'max'] context_allowed_fields = ['registration', 'instructor', 'team', 'contextActivities', - 'revision', 'platform', 'language', 'statement', 'extensions'] + 'revision', 'platform', 'language', 'statement', 'extensions' + 'contextAgents', 'contextGroups'] context_agent_allowed_fields = ['objectType', 'agent', 'relevantTypes'] @@ -159,8 +160,8 @@ def validate_uuid(self, uuid, field): try: val = UUID(uuid, version=4) except ValueError: - self.return_error( - "%s - %s is not a valid UUID" % (field, uuid)) + self.return_error(f"{field} - {uuid} is not a valid UUID") + return val.hex == uuid else: self.return_error("%s must be a string type" % field) @@ -178,8 +179,7 @@ def check_allowed_fields(self, allowed, obj, obj_name): # Check for fields that aren't in spec failed_list = [x for x in 
list(obj.keys()) if x not in allowed] if failed_list: - self.return_error("Invalid field(s) found in %s - %s" % - (obj_name, ', '.join(failed_list))) + self.return_error(f"Invalid field(s) found in {obj_name} - {', '.join(failed_list)}") def check_required_fields(self, required, obj, obj_name): for field in required: From 6aba2728cce7135c58c621cc8e6780740de00e79 Mon Sep 17 00:00:00 2001 From: Trey Date: Tue, 18 Apr 2023 13:44:24 -0400 Subject: [PATCH 013/100] cleanup to validator --- lrs/utils/StatementValidator.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lrs/utils/StatementValidator.py b/lrs/utils/StatementValidator.py index 8bb90e07..5a229851 100644 --- a/lrs/utils/StatementValidator.py +++ b/lrs/utils/StatementValidator.py @@ -44,7 +44,7 @@ score_allowed_fields = ['scaled', 'raw', 'min', 'max'] context_allowed_fields = ['registration', 'instructor', 'team', 'contextActivities', - 'revision', 'platform', 'language', 'statement', 'extensions' + 'revision', 'platform', 'language', 'statement', 'extensions', 'contextAgents', 'contextGroups'] @@ -436,7 +436,7 @@ def validate_verb(self, verb, stmt_object=None): self.validate_iri(verb['id'], 'Verb id') if verb['id'] == "http://adlnet.gov/expapi/verbs/voided": - if stmt_object['objectType']: + if stmt_object is not None and stmt_object['objectType']: if stmt_object['objectType'] != "StatementRef": raise ParamError( "Statement with voided verb must have StatementRef as objectType") @@ -861,7 +861,7 @@ def validate_context_activities(self, conacts): def validate_context_agents(self, conags): - self.check_if_list(congrps, "Context Agents") + self.check_if_list(conags, "Context Agents") for sub in conags: if sub["objectType"] != "contextAgent": From 227b969f0c6b621b8d86a0ae4422217379e98325 Mon Sep 17 00:00:00 2001 From: Trey Date: Tue, 18 Apr 2023 15:01:27 -0400 Subject: [PATCH 014/100] requiring etags for resources --- lrs/managers/ActivityProfileManager.py | 10 ++++++---- 
lrs/managers/ActivityStateManager.py | 4 ++-- lrs/managers/AgentProfileManager.py | 2 +- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/lrs/managers/ActivityProfileManager.py b/lrs/managers/ActivityProfileManager.py index 64b4db3b..246b57d5 100644 --- a/lrs/managers/ActivityProfileManager.py +++ b/lrs/managers/ActivityProfileManager.py @@ -41,7 +41,7 @@ def post_profile(self, request_dict): # already exist with the same activityId and profileId if created: # xAPI 2.0 Addition: - etag.check_preconditions(request_dict, p, created, required=False) + etag.check_preconditions(request_dict, p, created, required=True) p.json_profile = post_profile p.content_type = "application/json" @@ -65,8 +65,10 @@ def post_profile(self, request_dict): def put_profile(self, request_dict): # Get the profile, or if not already created, create one - p, created = ActivityProfile.objects.get_or_create(profile_id=request_dict[ - 'params']['profileId'], activity_id=request_dict['params']['activityId']) + p, created = ActivityProfile.objects.get_or_create( + profile_id=request_dict['params']['profileId'], + activity_id=request_dict['params']['activityId'] + ) # Profile being PUT is not json if "application/json" not in request_dict['headers']['CONTENT_TYPE']: @@ -90,7 +92,7 @@ def put_profile(self, request_dict): self.save_non_json_profile(p, created, profile, request_dict) # Profile being PUT is json else: - etag.check_preconditions(request_dict, p, created, required=False) + etag.check_preconditions(request_dict, p, created, required=True) # If a profile already existed with the profileId and activityId # (overwrite existing profile data) the_profile = request_dict['profile'] diff --git a/lrs/managers/ActivityStateManager.py b/lrs/managers/ActivityStateManager.py index 15685ebc..1a703c9b 100644 --- a/lrs/managers/ActivityStateManager.py +++ b/lrs/managers/ActivityStateManager.py @@ -66,7 +66,7 @@ def post_state(self, request_dict): # already exist with the same agent, stateId, 
actId, and/or # registration if created: - etag.check_preconditions(request_dict, s, created, required=False) + etag.check_preconditions(request_dict, s, created, required=True) s.json_state = post_state s.content_type = "application/json" s.etag = etag.create_tag(post_state) @@ -119,7 +119,7 @@ def put_state(self, request_dict): self.save_non_json_state(s, post_state, request_dict) # State being PUT is json else: - etag.check_preconditions(request_dict, s, created, required=False) + etag.check_preconditions(request_dict, s, created, required=True) the_state = request_dict['state'] s.json_state = the_state s.content_type = request_dict['headers']['CONTENT_TYPE'] diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index f4838614..8439fc9f 100644 --- a/lrs/managers/AgentProfileManager.py +++ b/lrs/managers/AgentProfileManager.py @@ -38,7 +38,7 @@ def post_profile(self, request_dict): # If incoming profile is application/json and if a profile didn't # already exist with the same agent and profileId if created: - etag.check_preconditions(request_dict, p, created, required=False) + etag.check_preconditions(request_dict, p, created, required=True) p.json_profile = post_profile p.content_type = "application/json" p.etag = etag.create_tag(post_profile) From 88cfe299548849f8f25c2c502a7468730c04d5f4 Mon Sep 17 00:00:00 2001 From: Trey Date: Tue, 18 Apr 2023 15:23:19 -0400 Subject: [PATCH 015/100] misread requirements, reverting required tag --- lrs/managers/ActivityProfileManager.py | 4 ++-- lrs/managers/ActivityStateManager.py | 4 ++-- lrs/managers/AgentProfileManager.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lrs/managers/ActivityProfileManager.py b/lrs/managers/ActivityProfileManager.py index 246b57d5..459b7234 100644 --- a/lrs/managers/ActivityProfileManager.py +++ b/lrs/managers/ActivityProfileManager.py @@ -41,7 +41,7 @@ def post_profile(self, request_dict): # already exist with the same activityId and 
profileId if created: # xAPI 2.0 Addition: - etag.check_preconditions(request_dict, p, created, required=True) + etag.check_preconditions(request_dict, p, created, required=False) p.json_profile = post_profile p.content_type = "application/json" @@ -92,7 +92,7 @@ def put_profile(self, request_dict): self.save_non_json_profile(p, created, profile, request_dict) # Profile being PUT is json else: - etag.check_preconditions(request_dict, p, created, required=True) + etag.check_preconditions(request_dict, p, created, required=False) # If a profile already existed with the profileId and activityId # (overwrite existing profile data) the_profile = request_dict['profile'] diff --git a/lrs/managers/ActivityStateManager.py b/lrs/managers/ActivityStateManager.py index 1a703c9b..15685ebc 100644 --- a/lrs/managers/ActivityStateManager.py +++ b/lrs/managers/ActivityStateManager.py @@ -66,7 +66,7 @@ def post_state(self, request_dict): # already exist with the same agent, stateId, actId, and/or # registration if created: - etag.check_preconditions(request_dict, s, created, required=True) + etag.check_preconditions(request_dict, s, created, required=False) s.json_state = post_state s.content_type = "application/json" s.etag = etag.create_tag(post_state) @@ -119,7 +119,7 @@ def put_state(self, request_dict): self.save_non_json_state(s, post_state, request_dict) # State being PUT is json else: - etag.check_preconditions(request_dict, s, created, required=True) + etag.check_preconditions(request_dict, s, created, required=False) the_state = request_dict['state'] s.json_state = the_state s.content_type = request_dict['headers']['CONTENT_TYPE'] diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index 8439fc9f..f4838614 100644 --- a/lrs/managers/AgentProfileManager.py +++ b/lrs/managers/AgentProfileManager.py @@ -38,7 +38,7 @@ def post_profile(self, request_dict): # If incoming profile is application/json and if a profile didn't # already exist with 
the same agent and profileId if created: - etag.check_preconditions(request_dict, p, created, required=True) + etag.check_preconditions(request_dict, p, created, required=False) p.json_profile = post_profile p.content_type = "application/json" p.etag = etag.create_tag(post_profile) From 8ff9e5966587e8ef1f630564145556c28bfd76b4 Mon Sep 17 00:00:00 2001 From: Trey Date: Tue, 18 Apr 2023 15:25:21 -0400 Subject: [PATCH 016/100] updating agent profile --- lrs/managers/AgentProfileManager.py | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index f4838614..17620216 100644 --- a/lrs/managers/AgentProfileManager.py +++ b/lrs/managers/AgentProfileManager.py @@ -45,18 +45,34 @@ def post_profile(self, request_dict): # If incoming profile is application/json and if a profile already # existed with the same agent and profileId else: - orig_prof = json.loads(p.json_profile) - post_profile = json.loads(post_profile) - merged = json.dumps( - dict(list(orig_prof.items()) + list(post_profile.items()))) - p.json_profile = merged - p.etag = etag.create_tag(merged) + + ## + ## 1.0.3 Behaviour + ## + # orig_prof = json.loads(p.json_profile) + # post_profile = json.loads(post_profile) + # merged = json.dumps( + # dict(list(orig_prof.items()) + list(post_profile.items()))) + # p.json_profile = merged + # p.etag = etag.create_tag(merged) + + + ## + ## 2.0 Behaviour to mirror PUT behaviour. 
+ ## + # (overwrite existing profile data) + etag.check_preconditions(request_dict, p, created) + the_profile = request_dict['profile'] + p.json_profile = the_profile + p.content_type = request_dict['headers']['CONTENT_TYPE'] + p.etag = etag.create_tag(the_profile) # Set updated if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: p.updated = request_dict['headers']['updated'] else: p.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + p.save() def put_profile(self, request_dict): @@ -96,6 +112,7 @@ def put_profile(self, request_dict): p.updated = request_dict['headers']['updated'] else: p.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + p.save() def get_profile(self, profile_id): From 25d8c30d47ff936bf96eb2a9a3699c6c115dadfd Mon Sep 17 00:00:00 2001 From: Trey Date: Tue, 18 Apr 2023 16:56:02 -0400 Subject: [PATCH 017/100] adjusting activity profile --- lrs/managers/ActivityProfileManager.py | 9 ++-- lrs/utils/etag.py | 68 +++++++++++++------------- 2 files changed, 41 insertions(+), 36 deletions(-) diff --git a/lrs/managers/ActivityProfileManager.py b/lrs/managers/ActivityProfileManager.py index 459b7234..85dba4ce 100644 --- a/lrs/managers/ActivityProfileManager.py +++ b/lrs/managers/ActivityProfileManager.py @@ -34,18 +34,21 @@ def save_non_json_profile(self, p, created, profile, request_dict): def post_profile(self, request_dict): # get/create profile - p, created = ActivityProfile.objects.get_or_create(activity_id=request_dict['params']['activityId'], - profile_id=request_dict['params']['profileId']) + p, created = ActivityProfile.objects.get_or_create( + activity_id=request_dict['params']['activityId'], + profile_id=request_dict['params']['profileId'] + ) post_profile = request_dict['profile'] # If incoming profile is application/json and if a profile didn't # already exist with the same activityId and profileId if created: # xAPI 2.0 Addition: - etag.check_preconditions(request_dict, p, created, required=False) + 
etag.check_preconditions(request_dict, p, created, required=True) p.json_profile = post_profile p.content_type = "application/json" p.etag = etag.create_tag(post_profile) + # If incoming profile is application/json and if a profile already # existed with the same activityId and profileId else: diff --git a/lrs/utils/etag.py b/lrs/utils/etag.py index 62ac276d..d122bb2c 100644 --- a/lrs/utils/etag.py +++ b/lrs/utils/etag.py @@ -27,46 +27,48 @@ def get_etag_info(headers): def check_preconditions(request, contents, created, required=True): - if required: - exists = False - if not created: - exists = True - - try: - request_etag = request['headers']['ETAG'] - if not request_etag[IF_MATCH] and not request_etag[IF_NONE_MATCH]: - if exists: - raise MissingEtagInfoExists( - "If-Match and If-None-Match headers were missing. One of these headers is required for this request.") - raise MissingEtagInfo( - "If-Match and If-None-Match headers were missing. One of these headers is required for this request.") - except KeyError: + if not required: + return + + exists = False + if not created: + exists = True + + try: + request_etag = request['headers']['ETAG'] + if not request_etag[IF_MATCH] and not request_etag[IF_NONE_MATCH]: if exists: raise MissingEtagInfoExists( "If-Match and If-None-Match headers were missing. One of these headers is required for this request.") raise MissingEtagInfo( "If-Match and If-None-Match headers were missing. One of these headers is required for this request.") - else: - # If there are both, if none match takes precendence - if request_etag[IF_NONE_MATCH]: - # Only check if the content already exists. if it did not - # already exist it should pass. - if exists: - if request_etag[IF_NONE_MATCH] == "*": + except KeyError: + if exists: + raise MissingEtagInfoExists( + "If-Match and If-None-Match headers were missing. One of these headers is required for this request.") + raise MissingEtagInfo( + "If-Match and If-None-Match headers were missing. 
One of these headers is required for this request.") + else: + # If there are both, if none match takes precendence + if request_etag[IF_NONE_MATCH]: + # Only check if the content already exists. if it did not + # already exist it should pass. + if exists: + if request_etag[IF_NONE_MATCH] == "*": + raise EtagPreconditionFail("Resource detected") + else: + if '"%s"' % contents.etag in request_etag[IF_NONE_MATCH]: raise EtagPreconditionFail("Resource detected") - else: - if '"%s"' % contents.etag in request_etag[IF_NONE_MATCH]: - raise EtagPreconditionFail("Resource detected") + else: + if not exists: + contents.delete() + raise EtagPreconditionFail( + "Resource does not exist") else: - if not exists: - contents.delete() - raise EtagPreconditionFail( - "Resource does not exist") - else: - if request_etag[IF_MATCH] != "*": - if '"%s"' % contents.etag not in request_etag[IF_MATCH]: - raise EtagPreconditionFail( - "No resources matched your etag precondition") + if request_etag[IF_MATCH] != "*": + if '"%s"' % contents.etag not in request_etag[IF_MATCH]: + raise EtagPreconditionFail( + "No resources matched your etag precondition") From 28b8b364e110aa1f1e59d42de82eb9d6f865dd15 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Thu, 20 Apr 2023 13:40:47 +0000 Subject: [PATCH 018/100] push test --- test | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 test diff --git a/test b/test new file mode 100644 index 00000000..e69de29b From 800c4c40dd19c3c14a861de9663df1327bb2d7d5 Mon Sep 17 00:00:00 2001 From: Godloveet <129637691+Godloveet@users.noreply.github.com> Date: Tue, 25 Apr 2023 08:04:32 -0500 Subject: [PATCH 019/100] webhook test --- just a test | 1 + 1 file changed, 1 insertion(+) create mode 100644 just a test diff --git a/just a test b/just a test new file mode 100644 index 00000000..5afdde0f --- /dev/null +++ b/just a test @@ -0,0 +1 @@ +Webhook push test with jenkins server. 
From 6056412d577342a7e8774eb6ca9ac44890f2eae8 Mon Sep 17 00:00:00 2001 From: Godloveet <129637691+Godloveet@users.noreply.github.com> Date: Mon, 1 May 2023 04:47:03 -0500 Subject: [PATCH 020/100] Add files via upload --- deployment.yml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 deployment.yml diff --git a/deployment.yml b/deployment.yml new file mode 100644 index 00000000..d58968ee --- /dev/null +++ b/deployment.yml @@ -0,0 +1,19 @@ +name: Deploy +on: + push: + branches: + - master + +jobs: + deploy: + name: 🚀 Deploy + runs-on: ubuntu-latest + steps: + - name: executing remote ssh commands using password + uses: appleboy/ssh-action@v0.1.10 + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.USERNAME }} + key: ${{ secrets.KEY }} + script: | + sudo /home/ubuntu/workflow/script.sh From bf469d289503524bb98ff4c64aafb51eb9e09c56 Mon Sep 17 00:00:00 2001 From: Godloveet <129637691+Godloveet@users.noreply.github.com> Date: Mon, 1 May 2023 04:58:25 -0500 Subject: [PATCH 021/100] Add files via upload --- script.sh | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 script.sh diff --git a/script.sh b/script.sh new file mode 100644 index 00000000..e28252ae --- /dev/null +++ b/script.sh @@ -0,0 +1,22 @@ +# if ADL_LRS directory does exist then pull the latest code or else clone the repo +if [ -d "ADL_LRS" ]; then + echo "Directory ADL_LRS exists." + cd ADL_TEST + git pull + cd .. +else + echo "Directory ADL_LRS does not exists." 
+ git clone https://github.com/adlnet/ADL_LRS.git +fi + +sudo cp /home/ubuntu/workflow/settings.ini /home/ubuntu/ADL_LRS/settings.ini +sudo cp /home/ubuntu/workflow/docker/settings.ini /home/ubuntu/ADL_LRS/docker/lrs/settings.ini +sudo cp /home/ubuntu/workflow/.env /home/ubuntu/ADL_LRS/.env + +cd ADL_LRS +sudo usermod -aG docker $USER +sudo ./init-ssl.sh localhost +sudo docker-compose stop + +sudo docker-compose build --no-cache +docker-compose up -d \ No newline at end of file From 60d9531eeb1941dceadc08cb2c9ee6e4372a203c Mon Sep 17 00:00:00 2001 From: root Date: Tue, 2 May 2023 11:33:43 +0000 Subject: [PATCH 022/100] made changes --- just a test | 1 - script.sh | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) delete mode 100644 just a test diff --git a/just a test b/just a test deleted file mode 100644 index 5afdde0f..00000000 --- a/just a test +++ /dev/null @@ -1 +0,0 @@ -Webhook push test with jenkins server. diff --git a/script.sh b/script.sh index e28252ae..995b2fd7 100644 --- a/script.sh +++ b/script.sh @@ -1,4 +1,4 @@ -# if ADL_LRS directory does exist then pull the latest code or else clone the repo +# if ADL_LRS directory does exist then, pull the latest code or else clone the repo if [ -d "ADL_LRS" ]; then echo "Directory ADL_LRS exists." 
cd ADL_TEST @@ -19,4 +19,4 @@ sudo ./init-ssl.sh localhost sudo docker-compose stop sudo docker-compose build --no-cache -docker-compose up -d \ No newline at end of file +docker-compose up -d From 78250c21336234f6f38711909d6344af3e46832c Mon Sep 17 00:00:00 2001 From: Godloveet <129637691+Godloveet@users.noreply.github.com> Date: Tue, 2 May 2023 07:04:41 -0500 Subject: [PATCH 023/100] Change port in the nginx --- docker-compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 8bc2eaed..31faa338 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -71,8 +71,8 @@ services: args: HOSTNAME: "${HOSTNAME}" ports: - - "80:80" - - "443:443" + - "90:90" + - "441:443" container_name: docker_nginx volumes: - ./docker/nginx/letsencrypt:/usr/share/nginx/html From c16ec6ac1ba96963e614827e3ac6000bc9cbccdf Mon Sep 17 00:00:00 2001 From: Godloveet <129637691+Godloveet@users.noreply.github.com> Date: Tue, 2 May 2023 07:09:01 -0500 Subject: [PATCH 024/100] Update docker-compose.yml --- docker-compose.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 31faa338..42a9a002 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -84,19 +84,19 @@ services: # Certbot for SSL automation # - certbot: - container_name: 'docker_certbot' - image: certbot/certbot - volumes: + #certbot: + #container_name: 'docker_certbot' + #image: certbot/certbot + #volumes: - ./docker/keys:/var/lib/letsencrypt - ./docker/nginx/letsencrypt:/data/letsencrypt - ./docker/certbot/etc:/etc/letsencrypt - ./docker/certbot/log:/var/log/letsencrypt - depends_on: + #depends_on: - nginx - networks: + #networks: - public -networks: - public: - driver: bridge +#networks: + #public: + #driver: bridge From 9a3f9e25ae9619bb4d978fb70761224d0b1bdab0 Mon Sep 17 00:00:00 2001 From: Godloveet <129637691+Godloveet@users.noreply.github.com> Date: Tue, 2 May 2023 07:12:35 
-0500 Subject: [PATCH 025/100] Committed docker-compose --- docker-compose.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 42a9a002..31faa338 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -84,19 +84,19 @@ services: # Certbot for SSL automation # - #certbot: - #container_name: 'docker_certbot' - #image: certbot/certbot - #volumes: + certbot: + container_name: 'docker_certbot' + image: certbot/certbot + volumes: - ./docker/keys:/var/lib/letsencrypt - ./docker/nginx/letsencrypt:/data/letsencrypt - ./docker/certbot/etc:/etc/letsencrypt - ./docker/certbot/log:/var/log/letsencrypt - #depends_on: + depends_on: - nginx - #networks: + networks: - public -#networks: - #public: - #driver: bridge +networks: + public: + driver: bridge From e68fcd31d0e6e5a4df87ce0b33593e4c700de150 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Fri, 5 May 2023 14:40:48 +0000 Subject: [PATCH 026/100] adjusted docker-compose config --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 9871ebb6..31faa338 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -72,7 +72,7 @@ services: HOSTNAME: "${HOSTNAME}" ports: - "90:90" - - "443:443" + - "441:443" container_name: docker_nginx volumes: - ./docker/nginx/letsencrypt:/usr/share/nginx/html From a7ee55d6ea82a58d048fb23f7840bae26a28000c Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Fri, 5 May 2023 15:06:10 +0000 Subject: [PATCH 027/100] done --- .github/workflow/deployment.yml | 19 +++++++++++++++++++ deployment.yml | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 .github/workflow/deployment.yml diff --git a/.github/workflow/deployment.yml b/.github/workflow/deployment.yml new file mode 100644 index 00000000..b7872976 --- /dev/null +++ b/.github/workflow/deployment.yml @@ -0,0 +1,19 @@ +name: Deploy +on: + push: + branches: + - python3-xapi-2.0 + 
+jobs: + deploy: + name: 🚀 Deploy + runs-on: ubuntu-latest + steps: + - name: executing remote ssh commands using password + uses: appleboy/ssh-action@v0.1.10 + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.USERNAME }} + key: ${{ secrets.KEY }} + script: | + sudo /home/ubuntu/workflow/script.sh diff --git a/deployment.yml b/deployment.yml index d58968ee..b7872976 100644 --- a/deployment.yml +++ b/deployment.yml @@ -2,7 +2,7 @@ name: Deploy on: push: branches: - - master + - python3-xapi-2.0 jobs: deploy: From b49abd2e77080800f15e60da984da5d5baba5dbd Mon Sep 17 00:00:00 2001 From: Trey Date: Thu, 18 May 2023 11:24:39 -0400 Subject: [PATCH 028/100] adding redis to example ini --- .gitignore | 2 ++ settings.ini.example | 3 +++ 2 files changed, 5 insertions(+) diff --git a/.gitignore b/.gitignore index 2692c543..cf194344 100644 --- a/.gitignore +++ b/.gitignore @@ -13,6 +13,8 @@ federated-analytics.js lrs/celery.py .vscode/ +docker-compose.dev.yml + docker/lrs/settings.ini docker/settings.ini settings.ini diff --git a/settings.ini.example b/settings.ini.example index 28be61f2..fc525a28 100644 --- a/settings.ini.example +++ b/settings.ini.example @@ -47,3 +47,6 @@ PASSWORD: rabbitmq HOST: amqp PORT: 5672 VHOST: / + +[redis] +URL: redis://redis:6379/0 From 6b7dd45fc428d5b104548f59c69e2db86cc7560f Mon Sep 17 00:00:00 2001 From: Trey Date: Thu, 18 May 2023 11:59:58 -0400 Subject: [PATCH 029/100] using truncation for duration accuracy --- lrs/utils/req_process.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 449bba10..db1466b5 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -3,6 +3,7 @@ import re import unicodedata import uuid +import math from datetime import datetime @@ -39,7 +40,9 @@ def process_statement(stmt, auth, payload_sha2s): sec_as_num = float(sec_as_str.replace('S', '')) if not sec_as_num.is_integer(): - sec_trunc = round(sec_as_num, 2) + ### 
xAPI 2.0: Truncation required for comparison, not rounding etc. + # sec_trunc = round(sec_as_num, 2) + sec_trunc = math.floor(sec_as_num * 100) / 100 else: sec_trunc = int(sec_as_num) From b019cb05927fff011fce2f909eef4fc1a109b184 Mon Sep 17 00:00:00 2001 From: Trey Date: Fri, 19 May 2023 10:43:24 -0400 Subject: [PATCH 030/100] typing / intellisense cleanup --- lrs/managers/StatementManager.py | 24 +++++++-- lrs/models.py | 28 +++++++---- lrs/utils/__init__.py | 7 +-- lrs/utils/req_process.py | 74 +++++++++++++-------------- lrs/utils/req_validate.py | 86 +++++++++++++++++++------------- 5 files changed, 128 insertions(+), 91 deletions(-) diff --git a/lrs/managers/StatementManager.py b/lrs/managers/StatementManager.py index 20c991c4..5cb146d0 100644 --- a/lrs/managers/StatementManager.py +++ b/lrs/managers/StatementManager.py @@ -12,11 +12,14 @@ class StatementManager(): + model_object: Statement + def __init__(self, stmt_data, auth_info, payload_sha2s): # auth_info contains define, endpoint, user, and request authority if self.__class__.__name__ == 'StatementManager': # Full statement is for a statement only, same with authority self.set_authority(auth_info, stmt_data) + self.populate(auth_info, stmt_data, payload_sha2s) def set_authority(self, auth_info, stmt_data): @@ -176,6 +179,9 @@ def build_statement_object(self, auth_info, stmt_data): statement_object_data['id']) del stmt_data['object'] + def build_model_object(self, auth_info, stmt_data) -> Statement: + return self.build_statement(auth_info, stmt_data) + def populate(self, auth_info, stmt_data, payload_sha2s): if self.__class__.__name__ == 'StatementManager': stmt_data['voided'] = False @@ -192,16 +198,24 @@ def populate(self, auth_info, stmt_data, payload_sha2s): 'timestamp']) attachment_data = stmt_data.pop('attachments', None) - if self.__class__.__name__ == 'StatementManager': - # Save statement/substatement - self.model_object = self.build_statement(auth_info, stmt_data) - else: - self.model_object = 
self.build_substatement(auth_info, stmt_data) + # if self.__class__.__name__ == 'StatementManager': + # # Save statement/substatement + # self.build_statement(auth_info, stmt_data) + # else: + # self.model_object = self.build_substatement(auth_info, stmt_data) + + self.model_object = self.build_model_object(auth_info, stmt_data) + if attachment_data: self.build_attachments(auth_info, attachment_data, payload_sha2s) class SubStatementManager(StatementManager): + model_object: SubStatement + def __init__(self, substmt_data, auth_info): StatementManager.__init__(self, substmt_data, auth_info, None) + + def build_model_object(self, auth_info, stmt_data) -> SubStatement: + return self.build_substatement(auth_info, stmt_data) \ No newline at end of file diff --git a/lrs/models.py b/lrs/models.py index dc47dd74..9376336c 100644 --- a/lrs/models.py +++ b/lrs/models.py @@ -1,6 +1,8 @@ import ast import json import uuid + +from typing import List from collections import OrderedDict from django.db import models, IntegrityError @@ -88,12 +90,10 @@ def retrieve_or_create(self, **kwargs): ifp_sent = [ a for a in agent_ifps_can_only_be_one if kwargs.get(a, None) is not None] is_group = kwargs.get('objectType', None) == "Group" - has_member = False + member = None # Set member if incoming group if is_group: member = kwargs.pop('member', None) - if member: - has_member = True # Create agent based on IFP if ifp_sent: # Get IFP @@ -126,7 +126,7 @@ def retrieve_or_create(self, **kwargs): created = False # For identified groups with members - if is_group and has_member: + if is_group and (member is not None): # If newly created identified group add all of the incoming # members if created: @@ -135,8 +135,7 @@ def retrieve_or_create(self, **kwargs): agent.save() # Only way it doesn't have IFP is if anonymous group else: - agent, created = self.retrieve_or_create_anonymous_group( - member, kwargs) + agent, created = self.retrieve_or_create_anonymous_group(member, kwargs) return agent, 
created def retrieve_or_create_anonymous_group(self, member, kwargs): @@ -426,6 +425,10 @@ class SubStatement(models.Model): def to_dict(self, lang=None, ids_only=False): ret = OrderedDict() + + assert isinstance(self.actor, Agent) + assert isinstance(self.verb, Verb) + ret['actor'] = self.actor.to_dict(ids_only) ret['verb'] = self.verb.return_verb_with_lang(lang, ids_only) @@ -604,6 +607,10 @@ def to_dict(self, lang=None, ret_format='exact'): ids_only = True if ret_format == 'ids' else False ret['id'] = str(self.statement_id) + + assert isinstance(self.actor, Agent) + assert isinstance(self.verb, Verb) + ret['actor'] = self.actor.to_dict(ids_only) ret['verb'] = self.verb.return_verb_with_lang(lang, ids_only) @@ -702,9 +709,10 @@ def to_dict(self, lang=None, ret_format='exact'): if self.authority is not None: ret['authority'] = self.authority.to_dict(ids_only) - if self.stmt_attachments.all(): - ret['attachments'] = [a.return_attachment_with_lang( - lang) for a in self.stmt_attachments.all()] + attachments_relation = getattr(self, "stmt_attachments", None) + if (attachments_relation is not None): + attachments: List[StatementAttachment] = attachments_relation.all() + ret['attachments'] = [a.return_attachment_with_lang(lang) for a in attachments] return ret @@ -737,7 +745,7 @@ def _save(self, name, content, max_length=None): # if the file exists, do not call the superclasses _save method return name # if the file is new, DO call it - return super(AttachmentFileSystemStorage, self)._save(name, content) + return super(AttachmentFileSystemStorage, self).save(name, content, max_length=max_length) class StatementAttachment(models.Model): diff --git a/lrs/utils/__init__.py b/lrs/utils/__init__.py index 2b54d0e0..6a617b2a 100644 --- a/lrs/utils/__init__.py +++ b/lrs/utils/__init__.py @@ -2,7 +2,7 @@ import json import urllib -from urllib.parse import parse_qs, urlparse +from urllib.parse import parse_qs, parse_qsl, urlparse, unquote_plus from datetime import datetime from 
isodate.isodates import parse_date @@ -24,6 +24,7 @@ class RFC3339Error(ValueError): def validate_timestamp(time_str): time_ret = None + rfc_ret = None try: time_ret = parse_datetime(time_str) @@ -111,10 +112,10 @@ def convert_post_body_to_dict(incoming_data): for p in pairs: # this is checked for cors requests if p.startswith('content='): - if p == urllib.parse.unquote_plus(p): + if p == unquote_plus(p): encoded = False break - qs = urllib.parse.parse_qsl(decoded) + qs = parse_qsl(decoded) return dict((k, v) for k, v in qs), encoded diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index db1466b5..f60a4e8d 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -33,20 +33,20 @@ def process_statement(stmt, auth, payload_sha2s): # Check for result -> duration and truncate seconds if needed. if 'result' in stmt: if 'duration' in stmt['result']: - stmt_dur = stmt['result']['duration'] - sec_split = re.findall("\d+(?:\.\d+)?S", stmt_dur) + duration = stmt['result']['duration'] + sec_split = re.findall(r"\d+(?:\.\d+)?S", duration) if sec_split: - sec_as_str = sec_split[0] - sec_as_num = float(sec_as_str.replace('S', '')) + seconds_str = sec_split[0] + seconds = float(seconds_str.replace('S', '')) - if not sec_as_num.is_integer(): + if not seconds.is_integer(): ### xAPI 2.0: Truncation required for comparison, not rounding etc. 
# sec_trunc = round(sec_as_num, 2) - sec_trunc = math.floor(sec_as_num * 100) / 100 + seconds_truncated = math.floor(seconds * 100) / 100 else: - sec_trunc = int(sec_as_num) + seconds_truncated = int(seconds) - stmt['result']['duration'] = unicodedata.normalize("NFKD", stmt_dur.replace(sec_as_str, str(sec_trunc) + 'S')) + stmt['result']['duration'] = unicodedata.normalize("NFKD", duration.replace(seconds_str, str(seconds_truncated) + 'S')) # Convert context activities to list if dict if 'context' in stmt and 'contextActivities' in stmt['context']: @@ -74,6 +74,7 @@ def process_statement(stmt, auth, payload_sha2s): if stmt['verb'].verb_id == 'http://adlnet.gov/expapi/verbs/voided': return st.statement_id, st.object_statementref + return st.statement_id, None @@ -253,16 +254,16 @@ def build_response(stmt_result, single=False): # Iterate through each attachment in each statement for stmt in statements: if 'attachments' in stmt: - st_atts = Statement.objects.get( - statement_id=stmt['id']).stmt_attachments - if st_atts: - for att in st_atts.all(): - if att.payload: + statement_db_obj = Statement.objects.get(statement_id=stmt['id']) + attachments = getattr(statement_db_obj, "stmt_attachments", None) + if attachments: + for attachment in attachments.all(): + if attachment.payload: sha2s.append({ - "sha2": att.canonical_data['sha2'], - "payload": att.payload, - "contentType": att.canonical_data['contentType'] + "sha2": attachment.canonical_data['sha2'], + "payload": attachment.payload, + "contentType": attachment.canonical_data['contentType'] }) # sha2s.append( @@ -391,36 +392,32 @@ def activity_profile_get(req_dict): # Instantiate ActivityProfile ap = ActivityProfileManager() # Get profileId and activityId - profile_id = req_dict['params'].get( - 'profileId', None) if 'params' in req_dict else None - activity_id = req_dict['params'].get( - 'activityId', None) if 'params' in req_dict else None + profile_id = req_dict['params'].get('profileId', None) if 'params' in 
req_dict else None + activity_id = req_dict['params'].get('activityId', None) if 'params' in req_dict else None + since = req_dict['params'].get('since', None) if 'params' in req_dict else None # If the profileId exists, get the profile and return it in the response if profile_id: resource = ap.get_profile(profile_id, activity_id) if resource.profile: try: - response = HttpResponse( - resource.profile.read(), content_type=resource.content_type) + response = HttpResponse(resource.profile.read(), content_type=resource.content_type) except IOError: - response = HttpResponseNotFound( - "Error reading file, could not find: %s" % profile_id) + response = HttpResponseNotFound("Error reading file, could not find: %s" % profile_id) else: - response = HttpResponse( - resource.json_profile, content_type=resource.content_type) + response = HttpResponse(resource.json_profile, content_type=resource.content_type) + response['ETag'] = '"%s"' % resource.etag return response # Return IDs of profiles stored since profileId was not submitted - since = req_dict['params'].get( - 'since', None) if 'params' in req_dict else None - resource = ap.get_profile_ids(activity_id, since) - response = JsonResponse([k for k in resource], safe=False) - response['since'] = since + elif since is not None: + resource = ap.get_profile_ids(activity_id, since) - return response + response = JsonResponse([k for k in resource], safe=False) + response['since'] = since + return response def activity_profile_delete(req_dict): # Instantiate activity profile @@ -473,6 +470,8 @@ def agent_profile_get(req_dict): ap = AgentProfileManager(a) profile_id = req_dict['params'].get('profileId', None) if 'params' in req_dict else None + since = req_dict['params'].get('since', None) if 'params' in req_dict else None + if profile_id: resource = ap.get_profile(profile_id) if resource.profile: @@ -483,14 +482,13 @@ def agent_profile_get(req_dict): resource.json_profile, content_type=resource.content_type) response['ETag'] = 
'"%s"' % resource.etag return response + + elif since is not None: - since = req_dict['params'].get( - 'since', None) if 'params' in req_dict else None - resource = ap.get_profile_ids(since) - response = JsonResponse([k for k in resource], safe=False) - - return response + resource = ap.get_profile_ids(since) + response = JsonResponse([k for k in resource], safe=False) + return response def agent_profile_delete(req_dict): agent = req_dict['params']['agent'] diff --git a/lrs/utils/req_validate.py b/lrs/utils/req_validate.py index 157b9bdb..c7652eb6 100644 --- a/lrs/utils/req_validate.py +++ b/lrs/utils/req_validate.py @@ -8,7 +8,7 @@ from .authorization import auth from .StatementValidator import StatementValidator -from ..models import Statement, Agent, Activity, ActivityState, ActivityProfile, AgentProfile +from ..models import Statement, Agent, Activity, ActivityState, ActivityProfile, AgentProfile, Verb from ..exceptions import ParamConflict, ParamError, Forbidden, BadRequest, IDNotFoundError @@ -50,19 +50,24 @@ def validate_void_statement(void_id): raise IDNotFoundError( "Something went wrong. %s statements found with id %s" % (len(stmts), void_id)) elif len(stmts) == 1: - if stmts[0].voided: + target_statement = stmts[0] + if target_statement.voided: err_msg = "Statement with ID: %s is already voided, cannot unvoid. Please re-issue the statement under a new ID." % void_id raise BadRequest(err_msg) - if stmts[0].verb.verb_id == "http://adlnet.gov/expapi/verbs/voided": + + assert isinstance(target_statement.verb, Verb) + if target_statement.verb.verb_id == "http://adlnet.gov/expapi/verbs/voided": err_msg = "Statement with ID: %s is a voiding statement and cannot be voided." 
% void_id raise BadRequest(err_msg) def validate_body(body, auth, content_type): + statement_being_checked = None try: for statement in body: + statement_being_checked = statement server_validate_statement(statement, auth, content_type) except ValueError: - raise ValueError(f"'id' not iterable within statement: {statement}, {type(statement)}), {auth}, {content_type}") + raise ValueError(f"'id' not iterable within statement: {statement_being_checked}, {type(statement_being_checked)}), {auth}, {content_type}") def server_validate_statement(stmt, auth, content_type): try: @@ -94,8 +99,8 @@ def statements_post(req_dict): validator.validate() except Exception as e: raise BadRequest(str(e)) - except ParamError as e: - raise ParamError(str(e)) + # except ParamError as e: + # raise ParamError(str(e)) if isinstance(req_dict['body'], dict): body = [req_dict['body']] @@ -149,7 +154,7 @@ def validate_statementId(req_dict): mine_only = auth and 'statements_mine_only' in auth if auth['agent']: - if mine_only and st.authority.id != auth['agent'].id: + if mine_only and getattr(st.authority, "id", "") != auth['agent'].id: err_msg = "Incorrect permissions to view statements" raise Forbidden(err_msg) @@ -316,8 +321,8 @@ def statements_put(req_dict): validator.validate() except Exception as e: raise BadRequest(str(e)) - except ParamError as e: - raise ParamError(str(e)) + # except ParamError as e: + # raise ParamError(str(e)) validate_body([req_dict['body']], req_dict['auth'], req_dict['headers']['CONTENT_TYPE']) return req_dict @@ -398,26 +403,32 @@ def activity_state_post(req_dict): agent = req_dict['params']['agent'] a = Agent.objects.retrieve_or_create(**agent)[0] exists = False + previous_state = None if registration: try: - s = ActivityState.objects.get(state_id=req_dict['params']['stateId'], agent=a, - activity_id=req_dict['params']['activityId'], registration_id=req_dict['params']['registration']) - exists = True + previous_state = ActivityState.objects.get( + 
state_id=req_dict['params']['stateId'], + agent=a, + activity_id=req_dict['params']['activityId'], registration_id=req_dict['params']['registration']) + except ActivityState.DoesNotExist: pass else: try: - s = ActivityState.objects.get(state_id=req_dict['params']['stateId'], agent=a, - activity_id=req_dict['params']['activityId']) - exists = True + previous_state = ActivityState.objects.get( + state_id=req_dict['params']['stateId'], + agent=a, + activity_id=req_dict['params']['activityId']) + except ActivityState.DoesNotExist: pass - if exists and str(s.content_type) != "application/json": - raise ParamError("Activity state already exists but is not JSON, cannot update it with new JSON document") + if previous_state is not None: + if str(previous_state.content_type) != "application/json": + raise ParamError("Activity state already exists but is not JSON, cannot update it with new JSON document") + return req_dict - @auth def activity_state_put(req_dict): rogueparams = set(req_dict['params']) - \ @@ -598,17 +609,20 @@ def activity_profile_post(req_dict): req_dict['profile'] = raw_profile # Check the content type if the document already exists - exists = False + previous_profile = None try: - p = ActivityProfile.objects.get(activity_id=req_dict['params']['activityId'], - profile_id=req_dict['params']['profileId']) - exists = True + previous_profile = ActivityProfile.objects.get( + activity_id=req_dict['params']['activityId'], + profile_id=req_dict['params']['profileId']) + except ActivityProfile.DoesNotExist: pass # Since document to be POSTed has to be json, so does the existing document - if exists and str(p.content_type) != "application/json": - raise ParamError("Activity profile already exists but is not JSON, cannot update it with new JSON document") + if previous_profile is not None: + if str(previous_profile.content_type) != "application/json": + raise ParamError("Activity profile already exists but is not JSON, cannot update it with new JSON document") + 
return req_dict @@ -769,18 +783,19 @@ def agent_profile_post(req_dict): req_dict['profile'] = raw_profile # Check the content type if the document already exists - exists = False agent = req_dict['params']['agent'] a = Agent.objects.retrieve_or_create(**agent)[0] + + previous_profile = None try: - p = AgentProfile.objects.get(profile_id=req_dict['params']['profileId'], agent=a) - exists = True + previous_profile = AgentProfile.objects.get(profile_id=req_dict['params']['profileId'], agent=a) except AgentProfile.DoesNotExist: pass # Since document to be POSTed has to be json, so does the existing document - if exists and str(p.content_type) != "application/json": + if (previous_profile is not None) and str(previous_profile.content_type) != "application/json": raise ParamError("Agent profile already exists but is not JSON, cannot update it with new JSON document") + return req_dict @@ -903,13 +918,14 @@ def agents_get(req_dict): raise ParamError(err_msg) validator = StatementValidator() - if 'agent' in req_dict['params']: - try: - agent = convert_to_datatype(req_dict['params']['agent']) - except Exception: - raise ParamError("agent param %s is not valid" % \ - req_dict['params']['agent']) - validator.validate_agent(agent, "Agent param") + + agent = None + try: + agent = convert_to_datatype(req_dict['params']['agent']) + except Exception: + raise ParamError(f"agent param {req_dict['params']['agent']} is not valid") + + validator.validate_agent(agent, "Agent param") params = get_agent_ifp(agent) if not Agent.objects.filter(**params).exists(): From 879e170ec331e5832ea847134ae19c1e74695dc4 Mon Sep 17 00:00:00 2001 From: Trey Date: Fri, 19 May 2023 13:50:57 -0400 Subject: [PATCH 031/100] adding result truncation for signature comparison --- lrs/utils/__init__.py | 20 ++++++ lrs/utils/req_parse.py | 139 +++++++++++++++++++++++++++------------ lrs/utils/req_process.py | 17 +---- 3 files changed, 120 insertions(+), 56 deletions(-) diff --git a/lrs/utils/__init__.py 
b/lrs/utils/__init__.py index 6a617b2a..9387cb52 100644 --- a/lrs/utils/__init__.py +++ b/lrs/utils/__init__.py @@ -1,6 +1,9 @@ import ast import json import urllib +import re +import unicodedata +import math from urllib.parse import parse_qs, parse_qsl, urlparse, unquote_plus @@ -146,3 +149,20 @@ def get_lang(langdict, lang): pass first = next(iter(langdict.items())) return {first[0]: first[1]} + +def truncate_duration(duration): + sec_split = re.findall(r"\d+(?:\.\d+)?S", duration) + if sec_split: + seconds_str = sec_split[0] + seconds = float(seconds_str.replace('S', '')) + + if not seconds.is_integer(): + ### xAPI 2.0: Truncation required for comparison, not rounding etc. + # sec_trunc = round(sec_as_num, 2) + seconds_truncated = math.floor(seconds * 100) / 100 + else: + seconds_truncated = int(seconds) + + return unicodedata.normalize("NFKD", duration.replace(seconds_str, str(seconds_truncated) + 'S')) + else: + return duration diff --git a/lrs/utils/req_parse.py b/lrs/utils/req_parse.py index 14c180d1..6aeba0a3 100644 --- a/lrs/utils/req_parse.py +++ b/lrs/utils/req_parse.py @@ -4,6 +4,7 @@ import hashlib import json +from isodate import parse_duration from isodate.isoerror import ISO8601Error from Crypto.PublicKey import RSA from jose import jws @@ -13,7 +14,7 @@ from django.urls import reverse from django.http import QueryDict -from . import convert_to_datatype, convert_post_body_to_dict, validate_timestamp +from . 
import convert_to_datatype, convert_post_body_to_dict, validate_timestamp, truncate_duration from .etag import get_etag_info from ..exceptions import OauthUnauthorized, OauthBadRequest, ParamError, BadRequest @@ -85,10 +86,11 @@ def set_normal_authorization(request, r_dict): r_dict['auth']['endpoint'] = get_endpoint(request) if auth_params[:6] == 'OAuth ': oauth_request = get_oauth_request(request) - # Returns HttpBadRequest if missing any params + + # Returns HttpBadResponse if missing any params missing = require_params(oauth_request) if missing: - raise missing + raise ParamError("OAuth request was missing required parameters.") check = CheckOauth() e_type, error = check.check_access_token(request) @@ -303,76 +305,99 @@ def validate_hash(part_hash, part): "Hash header %s did not match calculated hash" \ % part_hash) +def is_a_signature(attachment): + usage_type = getattr(attachment, 'usageType', None) + return usage_type == "http://adlnet.gov/expapi/attachments/signature" + +def get_signature(attachment): + return getattr(attachment, "sha2", None) + def parse_signature_attachments(r_dict, part_dict): # Find the signature sha2 from the list attachment values in the # statements (there should only be one) signed_stmts = [] unsigned_stmts = [] stmt_attachment_pairs = [] + if isinstance(r_dict['body'], list): - for stmt in r_dict['body']: - if 'attachments' in stmt: - stmt_attachment_pairs.append((stmt, [a.get('sha2', None) for a in stmt['attachments'] - if a.get('usageType', None) == "http://adlnet.gov/expapi/attachments/signature"])) + for statement in r_dict['body']: + if 'attachments' in statement: + attachments = statement["attachments"] + signatures = [get_signature(a) for a in attachments if is_a_signature(a)] + stmt_attachment_pairs.append((statement, signatures)) else: if 'attachments' in r_dict['body']: - stmt_attachment_pairs = [(r_dict['body'], [a.get('sha2', None) for a in r_dict['body']['attachments'] - if a.get('usageType', None) == 
"http://adlnet.gov/expapi/attachments/signature"])] - signed_stmts = [sap for sap in stmt_attachment_pairs if sap[1]] - unsigned_stmts = [sap for sap in stmt_attachment_pairs if not sap[1]] + statement = r_dict["body"] + attachments = statement["attachments"] + signatures = [get_signature(a) for a in attachments if is_a_signature(a)] + stmt_attachment_pairs.append((statement, signatures)) + + signed_stmts = [sap for sap in stmt_attachment_pairs if len(sap[1]) >= 1] + unsigned_stmts = [sap for sap in stmt_attachment_pairs if len(sap[1]) == 0] if unsigned_stmts: - for tup in unsigned_stmts: - validate_non_signature_attachment(unsigned_stmts, r_dict['payload_sha2s'], part_dict) + validate_non_signature_attachment(unsigned_stmts, r_dict['payload_sha2s'], part_dict) if signed_stmts: handle_signatures(signed_stmts, r_dict['payload_sha2s'], part_dict) -def validate_non_signature_attachment(unsigned_stmts, sha2s, part_dict): - for tup in unsigned_stmts: - atts = tup[0]['attachments'] - for att in atts: - sha2 = att.get('sha2') +def validate_non_signature_attachment(unsigned_stmts, sha2s_on_request, part_dict): + for statement_signature_tuple in unsigned_stmts: + + statement, _ = statement_signature_tuple + attachments = statement['attachments'] + + for attachment in attachments: + sha2 = attachment.get('sha2') # If there isn't a fileUrl, the sha field must match # a received attachment payload - if 'fileUrl' not in att: + if 'fileUrl' not in attachment: # Should be listed in sha2s - sha2s couldn't not match - if sha2 not in sha2s: - raise BadRequest( - "Could not find attachment payload with sha: %s" % sha2) + if sha2 not in sha2s_on_request: + raise BadRequest(f"Could not find attachment payload with sha: {sha2}") + +def handle_signatures(stmt_tuples, sha2s_on_request, part_dict): + for statement_signature_tuple in stmt_tuples: -def handle_signatures(stmt_tuples, sha2s, part_dict): - for tup in stmt_tuples: - for sha2 in tup[1]: + _, signatures = statement_signature_tuple 
+ + for sha2 in signatures: # Should be listed in sha2s - sha2s couldn't not match - if sha2 not in sha2s: - raise BadRequest( - "Could not find attachment payload with sha: %s" % sha2) + if sha2 not in sha2s_on_request: + raise BadRequest(f"Could not find attachment payload with sha: {sha2}") + part = part_dict[sha2] + # Content type must be set to octet/stream if part['Content-Type'] != 'application/octet-stream': - raise BadRequest( - "Signature attachment must have Content-Type of "\ - "'application/octet-stream'") - validate_signature(tup, part) + raise BadRequest("Signature attachment must have Content-Type of 'application/octet-stream'") + + validate_signature(statement_signature_tuple, part) + +def validate_signature(statement_signature_tuple, part): -def validate_signature(tup, part): - sha2_key = tup[1][0] + statement, signatures = statement_signature_tuple + + sha2_key = signatures[0] signature = get_part_payload(part) algorithm = jws.get_unverified_headers(signature).get('alg', None) + if not algorithm: raise BadRequest( "No signing algorithm found for JWS signature") + if algorithm != 'RS256' and algorithm != 'RS384' and algorithm != 'RS512': raise BadRequest( "JWS signature must be calculated with SHA-256, SHA-384 or" \ "SHA-512 algorithms") + x5c = jws.get_unverified_headers(signature).get('x5c', None) jws_payload = jws.get_unverified_claims(signature) - body_payload = tup[0] + body_payload = statement + # If x.509 was used to sign, the public key should be in the x5c header and you need to verify it # If using RS256, RS384, or RS512 some JWS libs require a real private key to create JWS - xAPI spec # only has SHOULD - need to look into. 
If x.509 is necessary then @@ -382,22 +407,48 @@ def validate_signature(tup, part): try: verified = jws.verify( signature, cert_to_key(x5c[0]), algorithm) + except Exception as e: - raise BadRequest("The JWS is not valid: %s" % str(e)) + raise BadRequest(f"The JWS is not valid: {str(e)}") + else: if not verified: - raise BadRequest( - "The JWS is not valid - could not verify signature") + raise BadRequest("The JWS is not valid - could not verify signature") + # Compare statements if not compare_payloads(jws_payload, body_payload, sha2_key): - raise BadRequest( - "The JWS is not valid - payload and body statements do not match") + raise BadRequest("The JWS is not valid - payload and body statements do not match") + else: # Compare statements if not compare_payloads(jws_payload, body_payload, sha2_key): - raise BadRequest( - "The JWS is not valid - payload and body statements do not match") + raise BadRequest("The JWS is not valid - payload and body statements do not match") + +def prepare_result_for_equivalence_check(statement_dict): + """ + The Duration property of a result must only be compared against the first + two decimal places of its Seconds property. Anything beyond that must + be truncated, per the 2.0 spec. 
+ """ + if "result" in statement_dict and "duration" in statement_dict["result"]: + duration = statement_dict["result"]["duration"] + statement_dict["result"]["duration"] = truncate_duration(duration) + return statement_dict + +def prepare_statement_for_equivalence_check(statement_dict, is_substatement=False) -> dict: + + prepare_result_for_equivalence_check(statement_dict) + + if not is_substatement: + return statement_dict + + if "object" in statement_dict and "objectType" in statement_dict["object"]: + object_type = statement_dict["object"]["objectType"] + if object_type == "SubStatement": + prepare_statement_for_equivalence_check(statement_dict["object"], is_substatement=True) + + return statement_dict def compare_payloads(jws_payload, body_payload, sha2_key): # Need to copy the dict so use dict() @@ -413,6 +464,7 @@ def compare_payloads(jws_payload, body_payload, sha2_key): jws_placeholder.pop("timestamp", None) jws_placeholder.pop("version", None) jws_placeholder.pop("attachments", None) + # JWT specific standard fields jws_placeholder.pop("iss", None) jws_placeholder.pop("sub", None) @@ -430,6 +482,9 @@ def compare_payloads(jws_payload, body_payload, sha2_key): body_placeholder.pop("version", None) body_placeholder.pop("attachments", None) + prepare_statement_for_equivalence_check(body_placeholder) + prepare_statement_for_equivalence_check(jws_placeholder) + return json.dumps(jws_placeholder, sort_keys=True) == json.dumps(body_placeholder, sort_keys=True) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index f60a4e8d..5fe1dca5 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -11,6 +11,7 @@ from django.conf import settings from django.utils.timezone import utc +from . 
import truncate_duration from .retrieve_statement import complex_get, parse_more_request from ..exceptions import NotFound from ..models import Statement, Agent, Activity @@ -34,20 +35,8 @@ def process_statement(stmt, auth, payload_sha2s): if 'result' in stmt: if 'duration' in stmt['result']: duration = stmt['result']['duration'] - sec_split = re.findall(r"\d+(?:\.\d+)?S", duration) - if sec_split: - seconds_str = sec_split[0] - seconds = float(seconds_str.replace('S', '')) - - if not seconds.is_integer(): - ### xAPI 2.0: Truncation required for comparison, not rounding etc. - # sec_trunc = round(sec_as_num, 2) - seconds_truncated = math.floor(seconds * 100) / 100 - else: - seconds_truncated = int(seconds) - - stmt['result']['duration'] = unicodedata.normalize("NFKD", duration.replace(seconds_str, str(seconds_truncated) + 'S')) - + stmt['result']['duration'] = truncate_duration(duration) + # Convert context activities to list if dict if 'context' in stmt and 'contextActivities' in stmt['context']: for k, v in list(stmt['context']['contextActivities'].items()): From 7cff6c44844399ba2ae7d2a6a43121842d1cb390 Mon Sep 17 00:00:00 2001 From: Trey Date: Fri, 19 May 2023 13:51:18 -0400 Subject: [PATCH 032/100] cleanup / intellisense fixes --- lrs/models.py | 12 ++++-------- lrs/utils/StatementValidator.py | 6 ++++-- oauth_provider/utils.py | 7 +++++-- oauth_provider/views.py | 25 +++++++++++++------------ requirements.txt | 2 +- 5 files changed, 27 insertions(+), 25 deletions(-) diff --git a/lrs/models.py b/lrs/models.py index 9376336c..66e7c4e8 100644 --- a/lrs/models.py +++ b/lrs/models.py @@ -580,14 +580,10 @@ class Statement(models.Model): context_platform = models.CharField(max_length=50, blank=True) context_language = models.CharField(max_length=50, blank=True) context_extensions = JSONField(default=dict, blank=True) - context_ca_parent = models.ManyToManyField( - Activity, related_name="stmt_context_ca_parent") - context_ca_grouping = models.ManyToManyField( - 
Activity, related_name="stmt_context_ca_grouping") - context_ca_category = models.ManyToManyField( - Activity, related_name="stmt_context_ca_category") - context_ca_other = models.ManyToManyField( - Activity, related_name="stmt_context_ca_other") + context_ca_parent = models.ManyToManyField(Activity, related_name="stmt_context_ca_parent") + context_ca_grouping = models.ManyToManyField(Activity, related_name="stmt_context_ca_grouping") + context_ca_category = models.ManyToManyField(Activity, related_name="stmt_context_ca_category") + context_ca_other = models.ManyToManyField(Activity, related_name="stmt_context_ca_other") context_contextAgents = JSONField(default=list, blank=True) context_contextGroups = JSONField(default=list, blank=True) diff --git a/lrs/utils/StatementValidator.py b/lrs/utils/StatementValidator.py index 5a229851..3e2ba557 100644 --- a/lrs/utils/StatementValidator.py +++ b/lrs/utils/StatementValidator.py @@ -89,9 +89,11 @@ def validate(self): for st in self.data: self.validate_statement(st) return "All Statements are valid" + elif isinstance(self.data, dict): self.validate_statement(self.data) return "Statement is valid" + else: self.return_error(f"There are no statements to validate, payload: {self.data}") @@ -157,12 +159,12 @@ def validate_iri(self, iri_value, field): def validate_uuid(self, uuid, field): if isinstance(uuid, str): + val = None try: val = UUID(uuid, version=4) + return val.hex == uuid except ValueError: self.return_error(f"{field} - {uuid} is not a valid UUID") - - return val.hex == uuid else: self.return_error("%s must be a string type" % field) diff --git a/oauth_provider/utils.py b/oauth_provider/utils.py index 60b8afc0..c90d2708 100644 --- a/oauth_provider/utils.py +++ b/oauth_provider/utils.py @@ -2,6 +2,9 @@ import binascii import urllib.request, urllib.parse, urllib.error import oauth2 as oauth + +from typing import Union + from urllib.parse import urlparse, urlunparse from Crypto.PublicKey import RSA @@ -149,7 +152,7 @@ 
def verify_xauth_request(request, oauth_request): return user -def require_params(oauth_request, parameters=None): +def require_params(oauth_request, parameters=None) -> Union[HttpResponse, None]: """ Ensures that the request contains all required parameters. """ params = [ 'oauth_consumer_key', @@ -163,7 +166,7 @@ def require_params(oauth_request, parameters=None): missing = list(param for param in params if param not in oauth_request) if missing: - return HttpResponseBadRequest('Missing OAuth parameters: %s' % (', '.join(missing))) + return HttpResponseBadRequest('Missing OAuth parameters: ' + (', '.join(missing))) return None diff --git a/oauth_provider/views.py b/oauth_provider/views.py index a43a0be7..47be33e6 100644 --- a/oauth_provider/views.py +++ b/oauth_provider/views.py @@ -68,6 +68,7 @@ def request_token(request): @login_required(login_url="/accounts/login") def user_authorization(request, form_class=AuthorizeRequestTokenForm): + incoming_token = None if request.method.lower() == 'get': if 'oauth_token' not in request.GET: return HttpResponseBadRequest('No request token specified.') @@ -80,8 +81,7 @@ def user_authorization(request, form_class=AuthorizeRequestTokenForm): oauth_request = get_oauth_request(request) try: - request_token = store.get_request_token( - request, oauth_request, incoming_token) + request_token = store.get_request_token(request, oauth_request, incoming_token) except InvalidTokenError: return HttpResponse('Invalid request token: %s' % incoming_token, status=401) @@ -172,15 +172,13 @@ def access_token(request): if not is_xauth: # Check Parameters - missing_params = require_params( - oauth_request, ('oauth_token', 'oauth_verifier')) + missing_params = require_params(oauth_request, ('oauth_token', 'oauth_verifier')) if missing_params is not None: return missing_params # Check Request Token try: - request_token = store.get_request_token( - request, oauth_request, oauth_request['oauth_token']) + request_token = 
store.get_request_token(request, oauth_request, oauth_request['oauth_token']) except InvalidTokenError: return HttpResponse('Invalid request token: %s' % oauth_request['oauth_token'], status=401) if not request_token.is_approved: @@ -246,9 +244,11 @@ def access_token(request): @login_required(login_url="/accounts/login") def authorize_client(request, token=None, callback=None, params=None, form=None): + + assert isinstance(token, Token) if not form: - form = AuthorizeRequestTokenForm(initial={'scopes': token.scope_to_list(), - 'obj_id': token.pk}) + form = AuthorizeRequestTokenForm(initial={'scopes': token.scope_to_list(), 'obj_id': token.pk}) + d = {} d['oauth_scopes'] = settings.OAUTH_SCOPES d['scopes'] = json.dumps(token.scope_to_list()) @@ -257,6 +257,7 @@ def authorize_client(request, token=None, callback=None, params=None, form=None) d['description'] = token.consumer.description d['params'] = params d['oauth_token'] = token.key + return render(request, 'oauth_authorize_client.html', d) @@ -269,10 +270,10 @@ def callback_view(request, **args): try: oauth_token = Token.objects.get(key=args['oauth_token']) except AttributeError as e: - send_oauth_error('https' if request.is_secure() else 'http', - get_current_site(request).domain, e) + return send_oauth_error('https' if request.is_secure() else 'http', get_current_site(request).domain, e) except Token.DoesNotExist as e: - send_oauth_error('https' if request.is_secure() else 'http', - get_current_site(request).domain, e) + return send_oauth_error('https' if request.is_secure() else 'http', get_current_site(request).domain, e) + d['verifier'] = oauth_token.verifier + return render(request, 'oauth_verifier_pin.html', d) diff --git a/requirements.txt b/requirements.txt index daf209e8..c6ecbdd4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,4 +14,4 @@ python-jose==3.3.0 pytz==2021.3 requests==2.26.0 rfc3987==1.3.8 -supervisor==4.2.3 \ No newline at end of file +supervisor==4.2.3 From 
81193dee3daba2ace0db68e3b611f0475f591190 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Fri, 19 May 2023 19:46:30 -0400 Subject: [PATCH 033/100] removing placeholder gitlab files --- deployment.yml | 19 ------------------- test | 0 2 files changed, 19 deletions(-) delete mode 100644 deployment.yml delete mode 100644 test diff --git a/deployment.yml b/deployment.yml deleted file mode 100644 index b7872976..00000000 --- a/deployment.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Deploy -on: - push: - branches: - - python3-xapi-2.0 - -jobs: - deploy: - name: 🚀 Deploy - runs-on: ubuntu-latest - steps: - - name: executing remote ssh commands using password - uses: appleboy/ssh-action@v0.1.10 - with: - host: ${{ secrets.HOST }} - username: ${{ secrets.USERNAME }} - key: ${{ secrets.KEY }} - script: | - sudo /home/ubuntu/workflow/script.sh diff --git a/test b/test deleted file mode 100644 index e69de29b..00000000 From 34d883839f1f12c66b077f34b57652e1623b0002 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Fri, 19 May 2023 22:28:36 -0400 Subject: [PATCH 034/100] updating requirements --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index c6ecbdd4..5d3f3874 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==3.2.16 +Django==3.2.19 amqp==5.0.9 bcoding==1.5 celery==5.2.3 @@ -8,7 +8,7 @@ django-jsonify==0.3.0 django-recaptcha==3.0.0 isodate==0.6.1 oauth2==1.9.0.post1 -psycopg2==2.9.3 +psycopg2-binary==2.9.3 pycryptodome==3.12.0 python-jose==3.3.0 pytz==2021.3 From c3084d472754e106ad5e6be2f01234680ad2c075 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Sat, 20 May 2023 08:15:44 -0400 Subject: [PATCH 035/100] more cleanup, cert typing --- lrs/utils/XAPIVersionHeaderMiddleware.py | 6 ++---- lrs/utils/req_parse.py | 23 +++++++++++------------ 2 files changed, 13 insertions(+), 16 deletions(-) diff --git a/lrs/utils/XAPIVersionHeaderMiddleware.py 
b/lrs/utils/XAPIVersionHeaderMiddleware.py index be660027..c599d792 100644 --- a/lrs/utils/XAPIVersionHeaderMiddleware.py +++ b/lrs/utils/XAPIVersionHeaderMiddleware.py @@ -39,11 +39,9 @@ def process_request(self, request): break if version: - if version == '1.0' or (version.startswith('1.0') and \ - version in settings.XAPI_VERSIONS): + if version == '1.0' or (version.startswith('1.0') and version in settings.XAPI_VERSIONS): return None - elif version == '2.0' or (version.startswith('2.0') and \ - version in settings.XAPI_VERSIONS): + elif version == '2.0' or (version.startswith('2.0') and version in settings.XAPI_VERSIONS): return None else: resp = HttpResponse("X-Experience-API-Version is not supported", status=400) diff --git a/lrs/utils/req_parse.py b/lrs/utils/req_parse.py index 6aeba0a3..3fd63237 100644 --- a/lrs/utils/req_parse.py +++ b/lrs/utils/req_parse.py @@ -86,6 +86,9 @@ def set_normal_authorization(request, r_dict): r_dict['auth']['endpoint'] = get_endpoint(request) if auth_params[:6] == 'OAuth ': oauth_request = get_oauth_request(request) + + if oauth_request is None: + raise ParamError("OAuth config could not be determined from request.") # Returns HttpBadResponse if missing any params missing = require_params(oauth_request) @@ -94,16 +97,16 @@ def set_normal_authorization(request, r_dict): check = CheckOauth() e_type, error = check.check_access_token(request) + if e_type and error: if e_type == 'auth': raise OauthUnauthorized(error) else: raise OauthBadRequest(error) + # Consumer and token should be clean by now - consumer = store.get_consumer( - request, oauth_request, oauth_request['oauth_consumer_key']) - token = store.get_access_token( - request, oauth_request, consumer, oauth_request.get_parameter('oauth_token')) + consumer = store.get_consumer(request, oauth_request, oauth_request['oauth_consumer_key']) + token = store.get_access_token(request, oauth_request, consumer, oauth_request.get_parameter('oauth_token')) # Set consumer and token for 
authentication piece r_dict['auth']['oauth_consumer'] = consumer @@ -386,13 +389,10 @@ def validate_signature(statement_signature_tuple, part): algorithm = jws.get_unverified_headers(signature).get('alg', None) if not algorithm: - raise BadRequest( - "No signing algorithm found for JWS signature") + raise BadRequest("No signing algorithm found for JWS signature") if algorithm != 'RS256' and algorithm != 'RS384' and algorithm != 'RS512': - raise BadRequest( - "JWS signature must be calculated with SHA-256, SHA-384 or" \ - "SHA-512 algorithms") + raise BadRequest("JWS signature must be calculated with SHA-256, SHA-384 or SHA-512 algorithms") x5c = jws.get_unverified_headers(signature).get('x5c', None) jws_payload = jws.get_unverified_claims(signature) @@ -405,8 +405,8 @@ def validate_signature(statement_signature_tuple, part): if x5c: verified = False try: - verified = jws.verify( - signature, cert_to_key(x5c[0]), algorithm) + key = cert_to_key(x5c[0]).exportKey() + verified = jws.verify(signature, key, algorithm) except Exception as e: raise BadRequest(f"The JWS is not valid: {str(e)}") @@ -510,7 +510,6 @@ def get_part_payload(part): def cert_to_key(cert): return RSA.importKey(base64.b64decode(cert)) - def get_endpoint(request): # Used for OAuth scope parts = request.path.split("/") From 80727e8d79651fcfb50168372961318f042816c8 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Sun, 21 May 2023 15:09:49 -0400 Subject: [PATCH 036/100] initial etag cleanup --- lrs/managers/ActivityProfileManager.py | 4 +- lrs/managers/ActivityStateManager.py | 5 +- lrs/managers/AgentProfileManager.py | 6 +- lrs/utils/etag.py | 78 +++++++++++++------------- lrs/utils/req_process.py | 12 ++-- 5 files changed, 53 insertions(+), 52 deletions(-) diff --git a/lrs/managers/ActivityProfileManager.py b/lrs/managers/ActivityProfileManager.py index 85dba4ce..b2558dd0 100644 --- a/lrs/managers/ActivityProfileManager.py +++ b/lrs/managers/ActivityProfileManager.py @@ -43,7 +43,7 @@ def 
post_profile(self, request_dict): # already exist with the same activityId and profileId if created: # xAPI 2.0 Addition: - etag.check_preconditions(request_dict, p, created, required=True) + etag.check_modification_conditions(request_dict, p, created, required=True) p.json_profile = post_profile p.content_type = "application/json" @@ -95,7 +95,7 @@ def put_profile(self, request_dict): self.save_non_json_profile(p, created, profile, request_dict) # Profile being PUT is json else: - etag.check_preconditions(request_dict, p, created, required=False) + etag.check_modification_conditions(request_dict, p, created, required=False) # If a profile already existed with the profileId and activityId # (overwrite existing profile data) the_profile = request_dict['profile'] diff --git a/lrs/managers/ActivityStateManager.py b/lrs/managers/ActivityStateManager.py index 15685ebc..37f4d151 100644 --- a/lrs/managers/ActivityStateManager.py +++ b/lrs/managers/ActivityStateManager.py @@ -66,7 +66,7 @@ def post_state(self, request_dict): # already exist with the same agent, stateId, actId, and/or # registration if created: - etag.check_preconditions(request_dict, s, created, required=False) + etag.check_modification_conditions(request_dict, s, created, required=False) s.json_state = post_state s.content_type = "application/json" s.etag = etag.create_tag(post_state) @@ -117,9 +117,10 @@ def put_state(self, request_dict): # probably was json before s.json_state = {} self.save_non_json_state(s, post_state, request_dict) + # State being PUT is json else: - etag.check_preconditions(request_dict, s, created, required=False) + etag.check_modification_conditions(request_dict, s, created, required=False) the_state = request_dict['state'] s.json_state = the_state s.content_type = request_dict['headers']['CONTENT_TYPE'] diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index 17620216..450e1e58 100644 --- a/lrs/managers/AgentProfileManager.py +++ 
b/lrs/managers/AgentProfileManager.py @@ -38,7 +38,7 @@ def post_profile(self, request_dict): # If incoming profile is application/json and if a profile didn't # already exist with the same agent and profileId if created: - etag.check_preconditions(request_dict, p, created, required=False) + etag.check_modification_conditions(request_dict, p, created, required=False) p.json_profile = post_profile p.content_type = "application/json" p.etag = etag.create_tag(post_profile) @@ -61,7 +61,7 @@ def post_profile(self, request_dict): ## 2.0 Behaviour to mirror PUT behaviour. ## # (overwrite existing profile data) - etag.check_preconditions(request_dict, p, created) + etag.check_modification_conditions(request_dict, p, created) the_profile = request_dict['profile'] p.json_profile = the_profile p.content_type = request_dict['headers']['CONTENT_TYPE'] @@ -101,7 +101,7 @@ def put_profile(self, request_dict): # Profile being PUT is json else: # (overwrite existing profile data) - etag.check_preconditions(request_dict, p, created) + etag.check_modification_conditions(request_dict, p, created) the_profile = request_dict['profile'] p.json_profile = the_profile p.content_type = request_dict['headers']['CONTENT_TYPE'] diff --git a/lrs/utils/etag.py b/lrs/utils/etag.py index d122bb2c..3e72cfb1 100644 --- a/lrs/utils/etag.py +++ b/lrs/utils/etag.py @@ -13,6 +13,7 @@ def create_tag(resource): def get_etag_info(headers): etag = {} etag[IF_MATCH] = headers.get(IF_MATCH, None) + if not etag[IF_MATCH]: etag[IF_MATCH] = headers.get('If_Match', None) if not etag[IF_MATCH]: @@ -23,55 +24,52 @@ def get_etag_info(headers): etag[IF_NONE_MATCH] = headers.get('If_None_Match', None) if not etag[IF_NONE_MATCH]: etag[IF_NONE_MATCH] = headers.get('If-None-Match', None) + return etag -def check_preconditions(request, contents, created, required=True): +def check_modification_conditions(request, record, created, required=True): if not required: return - exists = False - if not created: - exists = True - 
- try: - request_etag = request['headers']['ETAG'] - if not request_etag[IF_MATCH] and not request_etag[IF_NONE_MATCH]: - if exists: - raise MissingEtagInfoExists( - "If-Match and If-None-Match headers were missing. One of these headers is required for this request.") - raise MissingEtagInfo( - "If-Match and If-None-Match headers were missing. One of these headers is required for this request.") - except KeyError: - if exists: - raise MissingEtagInfoExists( - "If-Match and If-None-Match headers were missing. One of these headers is required for this request.") - raise MissingEtagInfo( - "If-Match and If-None-Match headers were missing. One of these headers is required for this request.") - else: - # If there are both, if none match takes precendence - if request_etag[IF_NONE_MATCH]: - # Only check if the content already exists. if it did not - # already exist it should pass. - if exists: - if request_etag[IF_NONE_MATCH] == "*": + record_already_exists = not created + etag_headers = request['headers'].get('ETAG') + + if etag_headers is None: + raise MissingEtagInfo("Could not determine etag headers for this request.") + + header_if_match = etag_headers.get(IF_MATCH) + header_if_none_match = etag_headers.get(IF_NONE_MATCH) + + has_if_match = header_if_match is not None + has_if_none_match = header_if_none_match is not None + + missing_if_match = not has_if_match + missing_if_none_match = not has_if_none_match + + if missing_if_match and missing_if_none_match: + raise MissingEtagInfo("If-Match and If-None-Match headers were missing. One of these headers is required for this request.") + + # If there are both, if none match takes precendence + if has_if_none_match: + # Only check if the content already exists. if it did not + # already exist it should pass. 
+ if record_already_exists: + if etag_headers[IF_NONE_MATCH] == "*": + raise EtagPreconditionFail("Resource detected") + else: + if f'"{record.etag}"' in etag_headers[IF_NONE_MATCH]: raise EtagPreconditionFail("Resource detected") - else: - if '"%s"' % contents.etag in request_etag[IF_NONE_MATCH]: - raise EtagPreconditionFail("Resource detected") + + if has_if_match: + if created: + record.delete() + raise EtagPreconditionFail("Resource does not exist") else: - if not exists: - contents.delete() - raise EtagPreconditionFail( - "Resource does not exist") - else: - if request_etag[IF_MATCH] != "*": - if '"%s"' % contents.etag not in request_etag[IF_MATCH]: - raise EtagPreconditionFail( - "No resources matched your etag precondition") + if etag_headers[IF_MATCH] != "*": + if f'"{record.etag}"' not in etag_headers[IF_MATCH]: + raise EtagPreconditionFail("No resources matched your etag precondition") - - class MissingEtagInfo(BadRequest): def __init__(self, msg): diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 5fe1dca5..09a667ea 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -330,16 +330,18 @@ def activity_state_get(req_dict): else: registration = req_dict['params'].get('registration', None) actstate = ActivityStateManager(a) + # state id means we want only 1 item if state_id: resource = actstate.get_state(activity_id, registration, state_id) + if resource.state: - response = HttpResponse( - resource.state.read(), content_type=resource.content_type) + response = HttpResponse(resource.state.read(), content_type=resource.content_type) else: - response = HttpResponse( - resource.json_state, content_type=resource.content_type) - response['ETag'] = '"%s"' % resource.etag + response = HttpResponse(resource.json_state, content_type=resource.content_type) + + response['ETag'] = f'"{resource.etag}"' + # no state id means we want an array of state ids else: since = req_dict['params'].get('since', None) From 
f7c66fc4040c59ed045d8dc604bde8168dc7f4f2 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Sun, 21 May 2023 15:10:12 -0400 Subject: [PATCH 037/100] f-string cleanup --- lrs/models.py | 3 +-- lrs/utils/req_validate.py | 22 ++++++++-------------- 2 files changed, 9 insertions(+), 16 deletions(-) diff --git a/lrs/models.py b/lrs/models.py index 66e7c4e8..9b3cb1ae 100644 --- a/lrs/models.py +++ b/lrs/models.py @@ -766,8 +766,7 @@ def __unicode__(self): class ActivityState(models.Model): state_id = models.CharField(max_length=MAX_URL_LENGTH) - updated = models.DateTimeField( - auto_now_add=True, blank=True, db_index=True) + updated = models.DateTimeField(auto_now_add=True, blank=True, db_index=True) activity_id = models.CharField(max_length=MAX_URL_LENGTH, db_index=True) registration_id = models.CharField(max_length=40, db_index=True) content_type = models.CharField(max_length=255, blank=True) diff --git a/lrs/utils/req_validate.py b/lrs/utils/req_validate.py index c7652eb6..2569b84d 100644 --- a/lrs/utils/req_validate.py +++ b/lrs/utils/req_validate.py @@ -484,44 +484,38 @@ def activity_state_put(req_dict): @auth def activity_state_get(req_dict): - rogueparams = set(req_dict['params']) - set(["activityId", - "agent", "stateId", "registration", "since"]) + rogueparams = set(req_dict['params']) - set(["activityId", "agent", "stateId", "registration", "since"]) if rogueparams: - raise ParamError( - "The get activity state request contained unexpected parameters: %s" % ", ".join(escape(param) for param in rogueparams)) + raise ParamError("The get activity state request contained unexpected parameters: %s" % ", ".join(escape(param) for param in rogueparams)) validator = StatementValidator() if 'activityId' in req_dict['params']: validator.validate_iri( req_dict['params']['activityId'], "activityId param for activity state") else: - err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing." 
% req_dict[ - 'method'] + err_msg = f"Error -- activity_state - method = {req_dict['method']}, but activityId parameter is missing." raise ParamError(err_msg) if 'registration' in req_dict['params']: - validator.validate_uuid( - req_dict['params']['registration'], "registration param for activity state") + validator.validate_uuid(req_dict['params']['registration'], "registration param for activity state") if 'agent' in req_dict['params']: try: agent = convert_to_datatype(req_dict['params']['agent']) req_dict['params']['agent'] = agent except Exception: - raise ParamError("agent param %s is not valid" % \ - req_dict['params']['agent']) + raise ParamError("agent param %s is not valid" % req_dict['params']['agent']) + validator.validate_agent(agent, "Agent param") else: - err_msg = "Error -- activity_state - method = %s, but agent parameter is missing." % req_dict[ - 'method'] + err_msg = f"Error -- activity_state - method = {req_dict['method']}, but agent parameter is missing." raise ParamError(err_msg) if 'since' in req_dict['params']: try: validate_timestamp(req_dict['params']['since']) except (Exception, RFC3339Error): - raise ParamError( - "Since parameter was not a valid RFC3339 timestamp") + raise ParamError("Since parameter was not a valid RFC3339 timestamp") # Extra validation if oauth if req_dict['auth']['type'] == 'oauth': From 71e7364725c293dc3c73c36c1ac10de3c186af20 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 09:03:11 -0400 Subject: [PATCH 038/100] fixing compose ports --- docker-compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 31faa338..8bc2eaed 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -71,8 +71,8 @@ services: args: HOSTNAME: "${HOSTNAME}" ports: - - "90:90" - - "441:443" + - "80:80" + - "443:443" container_name: docker_nginx volumes: - ./docker/nginx/letsencrypt:/usr/share/nginx/html From 797fe347ee0b07d514cefbeeb3aa751c2c687dfc Mon 
Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 14:05:39 -0400 Subject: [PATCH 039/100] small cleanup for ActivityManager --- lrs/managers/ActivityManager.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/lrs/managers/ActivityManager.py b/lrs/managers/ActivityManager.py index 20811305..1d102c2a 100644 --- a/lrs/managers/ActivityManager.py +++ b/lrs/managers/ActivityManager.py @@ -12,10 +12,15 @@ def __init__(self, data, auth=None, define=True): self.populate(data) def update_language_maps(self, incoming_act_def): + + if self.activity is None: + return + # If there was no definition in the canonical data, and there is an # incoming one, set it to incoming data if 'definition' not in self.activity.canonical_data and incoming_act_def: self.activity.canonical_data['definition'] = incoming_act_def + # Else there was existing canonical data, and there in an incoming one, # only update lang maps (name, desc, interaction activities) elif 'definition' in self.activity.canonical_data and incoming_act_def: @@ -28,10 +33,12 @@ def update_language_maps(self, incoming_act_def): if 'description' not in self.activity.canonical_data['definition']: self.activity.canonical_data['definition']['description'] = {} - self.activity.canonical_data['definition']['name'] = dict(list(self.activity.canonical_data['definition']['name'].items()) + - list(incoming_act_def['name'].items())) - self.activity.canonical_data['definition']['description'] = dict(list(self.activity.canonical_data['definition']['description'].items()) + - list(incoming_act_def['description'].items())) + updated_name = dict(list(self.activity.canonical_data['definition']['name'].items()) + list(incoming_act_def['name'].items())) + updated_desc = dict(list(self.activity.canonical_data['definition']['description'].items()) + list(incoming_act_def['description'].items())) + + self.activity.canonical_data['definition']['name'] = updated_name + 
self.activity.canonical_data['definition']['description'] = updated_desc + if 'scale' in incoming_act_def and 'scale' in self.activity.canonical_data['definition']: trans = {x['id']: x['description'] for x in incoming_act_def['scale']} From ee962a8ed3bfe128463e268c0efd46ae72678a42 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 14:06:04 -0400 Subject: [PATCH 040/100] etag updates for State Resource --- lrs/managers/ActivityStateManager.py | 136 ++++++++++++++++----------- lrs/utils/etag.py | 54 ++++++++--- 2 files changed, 119 insertions(+), 71 deletions(-) diff --git a/lrs/managers/ActivityStateManager.py b/lrs/managers/ActivityStateManager.py index 37f4d151..09b56120 100644 --- a/lrs/managers/ActivityStateManager.py +++ b/lrs/managers/ActivityStateManager.py @@ -1,11 +1,13 @@ import datetime import json +from typing import Tuple + from django.core.files.base import ContentFile from django.utils.timezone import utc from ..models import ActivityState -from ..exceptions import IDNotFoundError +from ..exceptions import IDNotFoundError, BadRequest from ..utils import etag @@ -31,6 +33,14 @@ def save_non_json_state(self, s, state, request_dict): s.save() + def get_record(self, **kwargs) -> Tuple[ActivityState, bool]: + + if "registration" in kwargs and kwargs.get("registration", None) is None: + del kwargs["registration"] + + return ActivityState.objects.get_or_create(**kwargs) + + def get_state_set(self, activity_id, registration, since): if registration: # Registration and since @@ -39,107 +49,121 @@ def get_state_set(self, activity_id, registration, since): activity_id=activity_id, registration_id=registration, updated__gt=since) # Registration else: - state_set = self.Agent.activitystate_set.filter( - activity_id=activity_id, registration_id=registration) + state_set = self.Agent.activitystate_set.filter(activity_id=activity_id, registration_id=registration) else: # Since if since: - state_set = self.Agent.activitystate_set.filter( - 
activity_id=activity_id, updated__gt=since) + state_set = self.Agent.activitystate_set.filter(activity_id=activity_id, updated__gt=since) # Neither else: - state_set = self.Agent.activitystate_set.filter( - activity_id=activity_id) + state_set = self.Agent.activitystate_set.filter(activity_id=activity_id) return state_set def post_state(self, request_dict): - registration = request_dict['params'].get('registration', None) - if registration: - s, created = ActivityState.objects.get_or_create(state_id=request_dict['params']['stateId'], agent=self.Agent, - activity_id=request_dict['params']['activityId'], - registration_id=request_dict['params']['registration']) - else: - s, created = ActivityState.objects.get_or_create(state_id=request_dict['params']['stateId'], agent=self.Agent, - activity_id=request_dict['params']['activityId']) - post_state = request_dict['state'] + + state_record, created = self.get_record( + state_id=request_dict['params']['stateId'], + agent=self.Agent, + activity_id=request_dict['params']['activityId'], + registration_id=request_dict['params'].get('registration', None) + ) + + state_document_contents = request_dict['state'] + + etag.check_modification_conditions(request_dict, state_record, created, required=True) + # If incoming state is application/json and if a state didn't # already exist with the same agent, stateId, actId, and/or # registration if created: - etag.check_modification_conditions(request_dict, s, created, required=False) - s.json_state = post_state - s.content_type = "application/json" - s.etag = etag.create_tag(post_state) + state_record.json_state = state_document_contents + state_record.content_type = "application/json" + state_record.etag = etag.create_tag(state_document_contents) + + elif state_record.content_type != "application/json": + raise BadRequest("A matching non-JSON document already exists and cannot be merged or replaced.") + + elif "application/json" not in request_dict['headers']['CONTENT_TYPE']: + raise 
BadRequest("A non-JSON document cannot be used to update an existing JSON document.") + # If incoming state is application/json and if a state already # existed with the same agent, stateId, actId, and/or registration else: - orig_state = json.loads(s.json_state) - post_state = json.loads(post_state) - merged = json.dumps( - dict(list(orig_state.items()) + list(post_state.items()))) - s.json_state = merged - s.etag = etag.create_tag(merged) - - # Set updated + previous_state_document = json.loads(state_record.json_state) + updated_state_document = json.loads(state_document_contents) + + previous_properties = list(previous_state_document.items()) + updated_properties = list(updated_state_document.items()) + + merged = json.dumps(dict(previous_properties + updated_properties)) + + state_record.json_state = merged + state_record.etag = etag.create_tag(merged) + + # Set updated if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: - s.updated = request_dict['headers']['updated'] + state_record.updated = request_dict['headers']['updated'] else: - s.updated = datetime.datetime.utcnow().replace(tzinfo=utc) - s.save() + state_record.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + + state_record.save() def put_state(self, request_dict): - registration = request_dict['params'].get('registration', None) - if registration: - s, created = ActivityState.objects.get_or_create(state_id=request_dict['params']['stateId'], - agent=self.Agent, - activity_id=request_dict['params']['activityId'], - registration_id=request_dict['params']['registration']) - else: - s, created = ActivityState.objects.get_or_create(state_id=request_dict['params']['stateId'], - agent=self.Agent, - activity_id=request_dict['params']['activityId']) + + state_record, created = self.get_record( + state_id=request_dict['params']['stateId'], + agent=self.Agent, + activity_id=request_dict['params']['activityId'], + registration_id=request_dict['params'].get('registration', None) 
+ ) + + state_document_contents = request_dict['state'] + + etag.check_modification_conditions(request_dict, state_record, created, required=True) if "application/json" not in request_dict['headers']['CONTENT_TYPE']: try: - post_state = ContentFile(request_dict['state'].read()) + state_document_contents = ContentFile(state_document_contents.read()) except: try: - post_state = ContentFile(request_dict['state']) + state_document_contents = ContentFile(state_document_contents) except: - post_state = ContentFile(str(request_dict['state'])) + state_document_contents = ContentFile(str(state_document_contents)) # If a state already existed with the profileId and activityId if not created: - if s.state: + if state_record.state: try: - s.state.delete() + state_record.state.delete() except OSError: # probably was json before - s.json_state = {} - self.save_non_json_state(s, post_state, request_dict) + state_record.json_state = {} + + self.save_non_json_state(state_record, state_document_contents, request_dict) # State being PUT is json else: - etag.check_modification_conditions(request_dict, s, created, required=False) the_state = request_dict['state'] - s.json_state = the_state - s.content_type = request_dict['headers']['CONTENT_TYPE'] - s.etag = etag.create_tag(the_state) + state_record.json_state = the_state + state_record.content_type = request_dict['headers']['CONTENT_TYPE'] + state_record.etag = etag.create_tag(the_state) # Set updated if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: - s.updated = request_dict['headers']['updated'] + state_record.updated = request_dict['headers']['updated'] else: - s.updated = datetime.datetime.utcnow().replace(tzinfo=utc) - s.save() + state_record.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + + state_record.save() def get_state(self, activity_id, registration, state_id): try: if registration: return self.Agent.activitystate_set.get(state_id=state_id, activity_id=activity_id, 
registration_id=registration) return self.Agent.activitystate_set.get(state_id=state_id, activity_id=activity_id) + except ActivityState.DoesNotExist: - err_msg = 'There is no activity state associated with the id: %s' % state_id + err_msg = f'There is no activity state associated with the id: {state_id}' raise IDNotFoundError(err_msg) def get_state_ids(self, activity_id, registration, since): diff --git a/lrs/utils/etag.py b/lrs/utils/etag.py index 3e72cfb1..06eae641 100644 --- a/lrs/utils/etag.py +++ b/lrs/utils/etag.py @@ -31,7 +31,7 @@ def check_modification_conditions(request, record, created, required=True): if not required: return - + record_already_exists = not created etag_headers = request['headers'].get('ETAG') @@ -47,29 +47,53 @@ def check_modification_conditions(request, record, created, required=True): missing_if_match = not has_if_match missing_if_none_match = not has_if_none_match - if missing_if_match and missing_if_none_match: - raise MissingEtagInfo("If-Match and If-None-Match headers were missing. One of these headers is required for this request.") + # There are additional checks for PUT + was_put_request = request['method'] == "PUT" + + if was_put_request and record_already_exists and missing_if_match and missing_if_none_match: + error_message = f"A document matching your query already exists, but the request did not include ETag headers. " \
 + + f"If you would like to overwrite the document, provide the following header: " \
 + + f"If-Match: \"{record.etag}\"" + + raise Conflict(error_message) - # If there are both, if none match takes precendence - if has_if_none_match: + # Check against the If-None-Match condition. + # + # We should only perform this check if the request has provided a header + # here and if the record itself already exists. + # + # If the record doesn't exist, then there's no match and this check is satisfied etc.
+ if has_if_none_match and record_already_exists: + # Only check if the content already exists. if it did not # already exist it should pass. - if record_already_exists: - if etag_headers[IF_NONE_MATCH] == "*": + wildcard_provided = etag_headers[IF_NONE_MATCH] == "*" + if wildcard_provided: + raise EtagPreconditionFail("Resource detected") + + else: + if f'"{record.etag}"' in etag_headers[IF_NONE_MATCH]: raise EtagPreconditionFail("Resource detected") - else: - if f'"{record.etag}"' in etag_headers[IF_NONE_MATCH]: - raise EtagPreconditionFail("Resource detected") + # Check against the If-Match condition. + # + # It's unlikely that this will be checked along with the If-None-Match condition, + # but we should still honor that weird use case. if has_if_match: + + # We only created a record if the provided query didn't match anything if created: record.delete() raise EtagPreconditionFail("Resource does not exist") - else: - if etag_headers[IF_MATCH] != "*": - if f'"{record.etag}"' not in etag_headers[IF_MATCH]: - raise EtagPreconditionFail("No resources matched your etag precondition") - + + wildcard_provided = etag_headers[IF_MATCH] == "*" + matched_inclusively = f'"{record.etag}"' in etag_headers[IF_MATCH] + + etag_header_matches_record = matched_inclusively or wildcard_provided + + if not etag_header_matches_record: + raise EtagPreconditionFail("No resources matched your etag precondition") + class MissingEtagInfo(BadRequest): def __init__(self, msg): From af949cf9bd8f6215f713856719553dbdc797d2aa Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 14:15:43 -0400 Subject: [PATCH 041/100] matching structure of agent profile comments --- lrs/managers/ActivityStateManager.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lrs/managers/ActivityStateManager.py b/lrs/managers/ActivityStateManager.py index 09b56120..ecb34341 100644 --- a/lrs/managers/ActivityStateManager.py +++ b/lrs/managers/ActivityStateManager.py @@ -121,6 +121,7 @@ def put_state(self, 
request_dict): etag.check_modification_conditions(request_dict, state_record, created, required=True) + # State being PUT is not json if "application/json" not in request_dict['headers']['CONTENT_TYPE']: try: state_document_contents = ContentFile(state_document_contents.read()) From 007adec13e8d914732889f2f2df43db6080a9a38 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 14:15:58 -0400 Subject: [PATCH 042/100] etag updates for Agent Profile Resource --- lrs/managers/AgentProfileManager.py | 100 +++++++++++++++------------- 1 file changed, 52 insertions(+), 48 deletions(-) diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index 450e1e58..2c659c8a 100644 --- a/lrs/managers/AgentProfileManager.py +++ b/lrs/managers/AgentProfileManager.py @@ -6,7 +6,7 @@ from django.utils.timezone import utc from ..models import AgentProfile -from ..exceptions import IDNotFoundError, ParamError +from ..exceptions import IDNotFoundError, ParamError, BadRequest from ..utils import etag @@ -30,90 +30,94 @@ def save_non_json_profile(self, p, profile, request_dict): p.save() def post_profile(self, request_dict): + # get/create profile - p, created = AgentProfile.objects.get_or_create( - profile_id=request_dict['params']['profileId'], agent=self.Agent) + profile_record, created = AgentProfile.objects.get_or_create(profile_id=request_dict['params']['profileId'], agent=self.Agent) + profile_document_contents = request_dict['profile'] - post_profile = request_dict['profile'] + etag.check_modification_conditions(request_dict, profile_record, created, required=True) + # If incoming profile is application/json and if a profile didn't # already exist with the same agent and profileId if created: - etag.check_modification_conditions(request_dict, p, created, required=False) - p.json_profile = post_profile - p.content_type = "application/json" - p.etag = etag.create_tag(post_profile) + profile_record.json_profile = profile_document_contents + 
profile_record.content_type = "application/json" + profile_record.etag = etag.create_tag(profile_document_contents) + + elif profile_record.content_type != "application/json": + raise BadRequest("A matching non-JSON document already exists and cannot be merged or replaced.") + + elif "application/json" not in request_dict['headers']['CONTENT_TYPE']: + raise BadRequest("A non-JSON document cannot be used to update an existing JSON document.") + # If incoming profile is application/json and if a profile already # existed with the same agent and profileId else: - ## - ## 1.0.3 Behaviour - ## - # orig_prof = json.loads(p.json_profile) - # post_profile = json.loads(post_profile) - # merged = json.dumps( - # dict(list(orig_prof.items()) + list(post_profile.items()))) - # p.json_profile = merged - # p.etag = etag.create_tag(merged) + previous_profile = json.loads(profile_record.json_profile) + updated_profile = json.loads(profile_document_contents) + previous_profile_properties = list(previous_profile.items()) + updated_profile_properties = list(updated_profile.items()) - ## - ## 2.0 Behaviour to mirror PUT behaviour. 
- ## - # (overwrite existing profile data) - etag.check_modification_conditions(request_dict, p, created) - the_profile = request_dict['profile'] - p.json_profile = the_profile - p.content_type = request_dict['headers']['CONTENT_TYPE'] - p.etag = etag.create_tag(the_profile) + merged = json.dumps(dict(previous_profile_properties + updated_profile_properties)) + profile_record.json_profile = merged + profile_record.content_type = request_dict['headers']['CONTENT_TYPE'] + profile_record.etag = etag.create_tag(merged) + # Set updated if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: - p.updated = request_dict['headers']['updated'] + profile_record.updated = request_dict['headers']['updated'] else: - p.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + profile_record.updated = datetime.datetime.utcnow().replace(tzinfo=utc) - p.save() + profile_record.save() def put_profile(self, request_dict): # get/create profile - p, created = AgentProfile.objects.get_or_create( - profile_id=request_dict['params']['profileId'], agent=self.Agent) + profile_record, created = AgentProfile.objects.get_or_create(profile_id=request_dict['params']['profileId'], agent=self.Agent) + + profile_document_contents = request_dict['state'] + + etag.check_modification_conditions(request_dict, profile_record, created, required=True) # Profile being PUT is not json if "application/json" not in request_dict['headers']['CONTENT_TYPE']: try: - profile = ContentFile(request_dict['profile'].read()) + profile = ContentFile(profile_document_contents.read()) except: try: - profile = ContentFile(request_dict['profile']) + profile = ContentFile(profile_document_contents) except: - profile = ContentFile(str(request_dict['profile'])) + profile = ContentFile(str(profile_document_contents)) # If it already exists delete it - if p.profile: - try: - p.profile.delete() - except OSError: - # probably was json before - p.json_profile = {} - self.save_non_json_profile(p, profile, 
request_dict) + if not created: + if profile_record.profile: + try: + profile_record.profile.delete() + except OSError: + # probably was json before + profile_record.json_profile = {} + + self.save_non_json_profile(profile_record, profile, request_dict) + # Profile being PUT is json else: # (overwrite existing profile data) - etag.check_modification_conditions(request_dict, p, created) the_profile = request_dict['profile'] - p.json_profile = the_profile - p.content_type = request_dict['headers']['CONTENT_TYPE'] - p.etag = etag.create_tag(the_profile) + profile_record.json_profile = the_profile + profile_record.content_type = request_dict['headers']['CONTENT_TYPE'] + profile_record.etag = etag.create_tag(the_profile) # Set updated if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: - p.updated = request_dict['headers']['updated'] + profile_record.updated = request_dict['headers']['updated'] else: - p.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + profile_record.updated = datetime.datetime.utcnow().replace(tzinfo=utc) - p.save() + profile_record.save() def get_profile(self, profile_id): try: From 4e4259c89cf76d13ee581069f007f6631a4fad24 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 14:53:17 -0400 Subject: [PATCH 043/100] etag updates for Activity Profile Resource --- lrs/managers/ActivityProfileManager.py | 94 ++++++++++++++++---------- 1 file changed, 59 insertions(+), 35 deletions(-) diff --git a/lrs/managers/ActivityProfileManager.py b/lrs/managers/ActivityProfileManager.py index b2558dd0..1cad8223 100644 --- a/lrs/managers/ActivityProfileManager.py +++ b/lrs/managers/ActivityProfileManager.py @@ -6,7 +6,7 @@ from django.utils.timezone import utc from ..models import ActivityProfile -from ..exceptions import IDNotFoundError, ParamError +from ..exceptions import IDNotFoundError, ParamError, BadRequest from ..utils import etag @@ -34,81 +34,98 @@ def save_non_json_profile(self, p, created, profile, 
request_dict): def post_profile(self, request_dict): # get/create profile - p, created = ActivityProfile.objects.get_or_create( + profile_record, created = ActivityProfile.objects.get_or_create( activity_id=request_dict['params']['activityId'], profile_id=request_dict['params']['profileId'] ) - post_profile = request_dict['profile'] + profile_document_contents = request_dict['profile'] + + etag.check_modification_conditions(request_dict, profile_record, created, required=True) + # If incoming profile is application/json and if a profile didn't # already exist with the same activityId and profileId if created: - # xAPI 2.0 Addition: - etag.check_modification_conditions(request_dict, p, created, required=True) - - p.json_profile = post_profile - p.content_type = "application/json" - p.etag = etag.create_tag(post_profile) + profile_record.json_profile = profile_document_contents + profile_record.content_type = "application/json" + profile_record.etag = etag.create_tag(profile_document_contents) + elif profile_record.content_type != "application/json": + raise BadRequest("A matching non-JSON document already exists and cannot be merged or replaced.") + + elif "application/json" not in request_dict['headers']['CONTENT_TYPE']: + raise BadRequest("A non-JSON document cannot be used to update an existing JSON document.") + # If incoming profile is application/json and if a profile already # existed with the same activityId and profileId else: - orig_prof = json.loads(p.json_profile) - post_profile = json.loads(request_dict['profile']) - merged = json.dumps( - dict(list(orig_prof.items()) + list(post_profile.items()))) - p.json_profile = merged - p.etag = etag.create_tag(merged) + + previous_profile = json.loads(profile_record.json_profile) + updated_profile = json.loads(profile_document_contents) + + previous_profile_properties = list(previous_profile.items()) + updated_profile_properties = list(updated_profile.items()) + + merged = 
json.dumps(dict(previous_profile_properties + updated_profile_properties)) + + profile_record.json_profile = merged + profile_record.content_type = request_dict['headers']['CONTENT_TYPE'] + profile_record.etag = etag.create_tag(merged) # Set updated if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: - p.updated = request_dict['headers']['updated'] + profile_record.updated = request_dict['headers']['updated'] else: - p.updated = datetime.datetime.utcnow().replace(tzinfo=utc) - p.save() + profile_record.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + + profile_record.save() def put_profile(self, request_dict): # Get the profile, or if not already created, create one - p, created = ActivityProfile.objects.get_or_create( + profile_record, created = ActivityProfile.objects.get_or_create( profile_id=request_dict['params']['profileId'], activity_id=request_dict['params']['activityId'] ) + profile_document_contents = request_dict['profile'] + + etag.check_modification_conditions(request_dict, profile_record, created, required=True) # Profile being PUT is not json if "application/json" not in request_dict['headers']['CONTENT_TYPE']: try: - profile = ContentFile(request_dict['profile'].read()) + profile = ContentFile(profile_document_contents.read()) except: try: - profile = ContentFile(request_dict['profile']) + profile = ContentFile(profile_document_contents) except: - profile = ContentFile(str(request_dict['profile'])) + profile = ContentFile(str(profile_document_contents)) # If a profile already existed with the profileId and activityId if not created: - if p.profile: + if profile_record.profile: try: - p.profile.delete() + profile_record.profile.delete() except OSError: # probably was json before - p.json_profile = {} + profile_record.json_profile = {} - self.save_non_json_profile(p, created, profile, request_dict) + self.save_non_json_profile(profile_record, created, profile, request_dict) + # Profile being PUT is json else: - 
etag.check_modification_conditions(request_dict, p, created, required=False) + etag.check_modification_conditions(request_dict, profile_record, created, required=False) # If a profile already existed with the profileId and activityId # (overwrite existing profile data) the_profile = request_dict['profile'] - p.json_profile = the_profile - p.content_type = request_dict['headers']['CONTENT_TYPE'] - p.etag = etag.create_tag(the_profile) + profile_record.json_profile = the_profile + profile_record.content_type = request_dict['headers']['CONTENT_TYPE'] + profile_record.etag = etag.create_tag(the_profile) # Set updated if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: - p.updated = request_dict['headers']['updated'] + profile_record.updated = request_dict['headers']['updated'] else: - p.updated = datetime.datetime.utcnow().replace(tzinfo=utc) - p.save() + profile_record.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + profile_record.save() def get_profile(self, profile_id, activity_id): # Retrieve the profile with the given profileId and activity @@ -142,8 +159,15 @@ def get_profile_ids(self, activity_id, since=None): def delete_profile(self, request_dict): # Get profile and delete it try: - self.get_profile(request_dict['params']['profileId'], request_dict[ - 'params']['activityId']).delete() + profile_record = self.get_profile( + request_dict['params']['profileId'], + request_dict['params']['activityId'] + ) + + etag.check_modification_conditions(request_dict, profile_record, False, required=True) + + profile_record.delete() + # we don't want it anyway except ActivityProfile.DoesNotExist: pass From 9acd8a5fad4dc8c35c31dcb315ed99aa4f3aa0ef Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 14:53:44 -0400 Subject: [PATCH 044/100] etag updates for document deletion --- lrs/managers/ActivityStateManager.py | 17 ++++++++++++++--- lrs/managers/AgentProfileManager.py | 17 +++++++++++++---- lrs/utils/req_process.py | 2 +- 
3 files changed, 28 insertions(+), 8 deletions(-) diff --git a/lrs/managers/ActivityStateManager.py b/lrs/managers/ActivityStateManager.py index ecb34341..8605afe8 100644 --- a/lrs/managers/ActivityStateManager.py +++ b/lrs/managers/ActivityStateManager.py @@ -178,15 +178,26 @@ def delete_state(self, request_dict): state_id = request_dict['params'].get('stateId', None) activity_id = request_dict['params']['activityId'] registration = request_dict['params'].get('registration', None) + try: # Bulk delete if stateId is not in params if not state_id: states = self.get_state_set(activity_id, registration, None) - for s in states: - s.delete() # bulk delete skips the custom delete function + for state_record in states: + assert isinstance(state_record, ActivityState) + etag.check_modification_conditions(request_dict, state_record, False, required=True) + + for state_record in states: + assert isinstance(state_record, ActivityState) + state_record.delete() + # Single delete else: - self.get_state(activity_id, registration, state_id).delete() + state_record = self.get_state(activity_id, registration, state_id) + etag.check_modification_conditions(request_dict, state_record, False, required=True) + + state_record.delete() + except ActivityState.DoesNotExist: pass except IDNotFoundError: diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index 2c659c8a..af9ea197 100644 --- a/lrs/managers/AgentProfileManager.py +++ b/lrs/managers/AgentProfileManager.py @@ -76,8 +76,10 @@ def post_profile(self, request_dict): def put_profile(self, request_dict): # get/create profile - profile_record, created = AgentProfile.objects.get_or_create(profile_id=request_dict['params']['profileId'], agent=self.Agent) - + profile_record, created = AgentProfile.objects.get_or_create( + profile_id=request_dict['params']['profileId'], + agent=self.Agent + ) profile_document_contents = request_dict['state'] etag.check_modification_conditions(request_dict, profile_record, 
created, required=True) @@ -142,9 +144,16 @@ def get_profile_ids(self, since=None): 'profile_id', flat=True) return ids - def delete_profile(self, profile_id): + def delete_profile(self, request_dict): try: - self.get_profile(profile_id).delete() + profile_record = self.get_profile( + request_dict['params']['profileId'] + ) + + etag.check_modification_conditions(request_dict, profile_record, False, required=True) + + profile_record.delete() + # we don't want it anyway except AgentProfile.DoesNotExist: pass diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 09a667ea..8f9b84b7 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -488,7 +488,7 @@ def agent_profile_delete(req_dict): return HttpResponse('', status=204) profile_id = req_dict['params']['profileId'] ap = AgentProfileManager(a) - ap.delete_profile(profile_id) + ap.delete_profile(req_dict) return HttpResponse('', status=204) From 76dd36c7acbabd97c5f126141effdbf44374935a Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 15:56:33 -0400 Subject: [PATCH 045/100] fixing recursion typo --- lrs/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lrs/models.py b/lrs/models.py index 9b3cb1ae..40b9dafe 100644 --- a/lrs/models.py +++ b/lrs/models.py @@ -741,7 +741,7 @@ def _save(self, name, content, max_length=None): # if the file exists, do not call the superclasses _save method return name # if the file is new, DO call it - return super(AttachmentFileSystemStorage, self).save(name, content, max_length=max_length) + return super(FileSystemStorage, self).save(name, content, max_length=max_length) class StatementAttachment(models.Model): From 9c7bf601a38a1d68d5387cf34de5c588589e51a6 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 16:01:45 -0400 Subject: [PATCH 046/100] removing internal overwrite of attachment saving --- lrs/models.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lrs/models.py 
b/lrs/models.py index 40b9dafe..77fec514 100644 --- a/lrs/models.py +++ b/lrs/models.py @@ -736,10 +736,11 @@ class AttachmentFileSystemStorage(FileSystemStorage): def get_available_name(self, name, max_length=None): return name - def _save(self, name, content, max_length=None): + def save(self, name: str, content, max_length=None): if self.exists(name): # if the file exists, do not call the superclasses _save method return name + # if the file is new, DO call it return super(FileSystemStorage, self).save(name, content, max_length=max_length) From 68a93080683b63679fe256b72cb61a0d6c5f1e09 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 16:50:57 -0400 Subject: [PATCH 047/100] req_parse cleanup + attachment fix --- lrs/utils/req_parse.py | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/lrs/utils/req_parse.py b/lrs/utils/req_parse.py index 3fd63237..f4ae365d 100644 --- a/lrs/utils/req_parse.py +++ b/lrs/utils/req_parse.py @@ -124,6 +124,7 @@ def parse_post_put_body(request, r_dict): # signed statements) if 'multipart/mixed' in r_dict['headers']['CONTENT_TYPE']: parse_attachment(request, r_dict) + # If it's any other content-type try parsing it out else: body_str = request.body.decode("utf-8") if isinstance(request.body, bytes) else request.body @@ -241,11 +242,9 @@ def parse_attachment(request, r_dict): lines = message.splitlines() if not lines[0].startswith('Content-Type: multipart/mixed; boundary='): if 'boundary' in r_dict['headers']['CONTENT_TYPE']: - message = "Content-Type:" + \ - r_dict['headers']['CONTENT_TYPE'] + "\r\n" + message + message = "Content-Type:" + r_dict['headers']['CONTENT_TYPE'] + "\r\n" + message else: - raise BadRequest( - "Could not find the boundary for the multipart content") + raise BadRequest("Could not find the boundary for the multipart content") # end workaround msg = email.message_from_string(message) @@ -262,8 +261,10 @@ def parse_attachment(request, r_dict): except 
Exception: raise ParamError("Statement was not valid JSON") + stmt_sha2s = [] if isinstance(r_dict['body'], dict): - stmt_sha2s = [a['sha2'] for a in r_dict['body']['attachments'] if 'attachments' in r_dict['body']] + if "attachments" in r_dict['body']: + stmt_sha2s = [a['sha2'] for a in r_dict['body']['attachments']] else: stmt_sha2s = [a['sha2'] for s in r_dict['body'] if 'attachments' in s for a in s['attachments']] @@ -274,29 +275,26 @@ def parse_attachment(request, r_dict): encoding = part.get('Content-Transfer-Encoding', None) if encoding != "binary": - raise BadRequest( - "Each attachment part should have 'binary' as Content-Transfer-Encoding") + raise BadRequest("Each attachment part should have 'binary' as Content-Transfer-Encoding") if 'X-Experience-API-Hash' not in part: - raise BadRequest( - "X-Experience-API-Hash header was missing from attachment") + raise BadRequest("X-Experience-API-Hash header was missing from attachment") part_hash = part.get('X-Experience-API-Hash') validate_hash(part_hash, part) part_dict[part_hash] = part - r_dict['payload_sha2s'] = [ - p['X-Experience-API-Hash'] for p in msg.get_payload() - ] + r_dict['payload_sha2s'] = [p['X-Experience-API-Hash'] for p in msg.get_payload()] if not set(r_dict['payload_sha2s']).issubset(set(stmt_sha2s)): raise BadRequest("Not all attachments match with statement payload") parse_signature_attachments(r_dict, part_dict) + else: - raise ParamError( - "This content was not multipart for the multipart request.") + raise ParamError("This content was not multipart for the multipart request.") + # Saves all attachments (including signatures) to memory temporarily # for further processing temp_save_attachments(msg) @@ -328,6 +326,7 @@ def parse_signature_attachments(r_dict, part_dict): attachments = statement["attachments"] signatures = [get_signature(a) for a in attachments if is_a_signature(a)] stmt_attachment_pairs.append((statement, signatures)) + else: if 'attachments' in r_dict['body']: statement 
= r_dict["body"] From 258eb42797e6a780487fe96f8a3460d18affd985 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 17:09:32 -0400 Subject: [PATCH 048/100] updating b64 function --- lrs/utils/authorization.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index 1e457e58..facf26f5 100755 --- a/lrs/utils/authorization.py +++ b/lrs/utils/authorization.py @@ -101,10 +101,10 @@ def inner(request, *args, **kwargs): return func(request, *args, **kwargs) return inner -def decode_base64_string(base64_message): - base64_bytes = base64_message.encode("ascii") - message_bytes = base64.b64decode(base64_bytes) - message = message_bytes.decode("ascii") +def decode_base64_string(base64_message: str): + base64_bytes = base64_message.encode("utf-8") + message_bytes = base64.b64decode(base64_bytes + b"====") + message = message_bytes.decode("utf-8") return message From e86885921879721c95fe6f68a2c47683d27fd063 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 17:21:13 -0400 Subject: [PATCH 049/100] adding catch for b64 parser --- lrs/utils/authorization.py | 120 +++++++++++++++++++++---------------- 1 file changed, 69 insertions(+), 51 deletions(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index facf26f5..d0a6bdda 100755 --- a/lrs/utils/authorization.py +++ b/lrs/utils/authorization.py @@ -74,7 +74,8 @@ def inner(request, *args, **kwargs): if len(auth) == 2: if auth[0].lower() == 'basic': - auth_parsed = decode_base64_string(auth[1]) + auth_parsed, success = decode_base64_string(auth[1]) + [uname, passwd] = auth_parsed.split(':') if uname and passwd: @@ -84,6 +85,10 @@ def inner(request, *args, **kwargs): request.META[ 'lrs-user'] = (False, "Unauthorized: Authorization failed, please verify your username and password") request.META['lrs-user'] = (True, user) + + elif not success + request.META[ + 'lrs-user'] = (False, f"Unauthorized: Could not 
determine base 64 auth from: {auth[1]}") else: request.META[ 'lrs-user'] = (False, "Unauthorized: The format of the HTTP Basic Authorization Header value is incorrect") @@ -102,11 +107,15 @@ def inner(request, *args, **kwargs): return inner def decode_base64_string(base64_message: str): - base64_bytes = base64_message.encode("utf-8") - message_bytes = base64.b64decode(base64_bytes + b"====") - message = message_bytes.decode("utf-8") + try: + base64_bytes = base64_message.encode("utf-8") + message_bytes = base64.b64decode(base64_bytes + b"====") + message = message_bytes.decode("utf-8") - return message + return message, True + + except UnicodeDecodeError: + return "", False def get_user_from_auth(auth): if not auth: @@ -185,54 +194,63 @@ def validate_oauth_scope(req_dict): def http_auth_helper(request): - if 'Authorization' in request['headers']: - auth = request['headers']['Authorization'].split() - if len(auth) == 2: - if auth[0].lower() == 'basic': - # Currently, only basic http auth is used. - auth_parsed = decode_base64_string(auth[1]) - try: - auth_parsed = decode_base64_string(auth[1]) - [uname, passwd] = auth_parsed.split(':') - except Exception as e: - raise BadRequest(f"Authorization failure: {e}, {auth[1]} was type {type(auth[1])} -> {auth_parsed}") - # Sent in empty auth - now allowed when not allowing empty auth - # in settings - if not uname and not passwd and not settings.ALLOW_EMPTY_HTTP_AUTH: - raise BadRequest('Must supply auth credentials') - elif not uname and not passwd and settings.ALLOW_EMPTY_HTTP_AUTH: - request['auth']['user'] = None - request['auth']['agent'] = None - elif uname or passwd: - user = authenticate(username=uname, password=passwd) - if user: - # If the user successfully logged in, then add/overwrite - # the user object of this request. 
- request['auth']['user'] = user - try: - request['auth']['agent'] = user.agent - except Exception: - # Gets here if for some reason the agent is deleted - agent = Agent.objects.retrieve_or_create( - **{'name': user.username, 'mbox': 'mailto:%s' % user.email, \ - 'objectType': 'Agent'})[0] - agent.user = user - agent.save() - request['auth']['agent'] = user.agent - else: - raise Unauthorized( - "Authorization failed, please verify your username and password") - request['auth']['define'] = True - else: - raise Unauthorized( - "HTTP Basic Authorization Header must start with Basic") - else: - raise Unauthorized( - "The format of the HTTP Basic Authorization Header value is incorrect") - else: - # The username/password combo was incorrect, or not provided. + if not 'Authorization' in request['headers']: raise Unauthorized("Authorization header missing") + auth = request['headers']['Authorization'].split() + + if len(auth) != 2: + raise Unauthorized("The format of the HTTP Basic Authorization Header value is incorrect") + + if auth[0].lower() != 'basic': + raise Unauthorized("HTTP Basic Authorization Header must start with Basic") + + # Currently, only basic http auth is used. 
+ auth_parsed, success = decode_base64_string(auth[1]) + + if not success: + raise Unauthorized(f"Unable to parse credentials as base 64: {auth[1]}") + + try: + auth_parsed, success = decode_base64_string(auth[1]) + + if not success: + raise Unauthorized(f"Unable to parse credentials as base 64: {auth[1]}") + + [uname, passwd] = auth_parsed.split(':') + except Exception as e: + raise BadRequest(f"Authorization failure: {e}, {auth[1]} was type {type(auth[1])} -> {auth_parsed}") + + # Sent in empty auth - now allowed when not allowing empty auth + # in settings + if not uname and not passwd and not settings.ALLOW_EMPTY_HTTP_AUTH: + raise BadRequest('Must supply auth credentials') + + elif not uname and not passwd and settings.ALLOW_EMPTY_HTTP_AUTH: + request['auth']['user'] = None + request['auth']['agent'] = None + + elif uname or passwd: + user = authenticate(username=uname, password=passwd) + if user: + # If the user successfully logged in, then add/overwrite + # the user object of this request. 
+ request['auth']['user'] = user + try: + request['auth']['agent'] = user.agent + except Exception: + # Gets here if for some reason the agent is deleted + agent = Agent.objects.retrieve_or_create( + **{'name': user.username, 'mbox': 'mailto:%s' % user.email, \ + 'objectType': 'Agent'})[0] + agent.user = user + agent.save() + request['auth']['agent'] = user.agent + else: + raise Unauthorized( + "Authorization failed, please verify your username and password") + + request['auth']['define'] = True def oauth_helper(request): token = request['auth']['oauth_token'] From a05e03f6b8cb4986c5985556be479d7eaa06af01 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 17:27:15 -0400 Subject: [PATCH 050/100] reverting catch syntax --- lrs/utils/authorization.py | 23 +++++------------------ 1 file changed, 5 insertions(+), 18 deletions(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index d0a6bdda..0eef63cd 100755 --- a/lrs/utils/authorization.py +++ b/lrs/utils/authorization.py @@ -74,8 +74,7 @@ def inner(request, *args, **kwargs): if len(auth) == 2: if auth[0].lower() == 'basic': - auth_parsed, success = decode_base64_string(auth[1]) - + auth_parsed = decode_base64_string(auth[1]) [uname, passwd] = auth_parsed.split(':') if uname and passwd: @@ -85,10 +84,6 @@ def inner(request, *args, **kwargs): request.META[ 'lrs-user'] = (False, "Unauthorized: Authorization failed, please verify your username and password") request.META['lrs-user'] = (True, user) - - elif not success - request.META[ - 'lrs-user'] = (False, f"Unauthorized: Could not determine base 64 auth from: {auth[1]}") else: request.META[ 'lrs-user'] = (False, "Unauthorized: The format of the HTTP Basic Authorization Header value is incorrect") @@ -112,10 +107,10 @@ def decode_base64_string(base64_message: str): message_bytes = base64.b64decode(base64_bytes + b"====") message = message_bytes.decode("utf-8") - return message, True + return message except UnicodeDecodeError: - return 
"", False + return "" def get_user_from_auth(auth): if not auth: @@ -206,17 +201,9 @@ def http_auth_helper(request): raise Unauthorized("HTTP Basic Authorization Header must start with Basic") # Currently, only basic http auth is used. - auth_parsed, success = decode_base64_string(auth[1]) - - if not success: - raise Unauthorized(f"Unable to parse credentials as base 64: {auth[1]}") - + auth_parsed = decode_base64_string(auth[1]) try: - auth_parsed, success = decode_base64_string(auth[1]) - - if not success: - raise Unauthorized(f"Unable to parse credentials as base 64: {auth[1]}") - + auth_parsed = decode_base64_string(auth[1]) [uname, passwd] = auth_parsed.split(':') except Exception as e: raise BadRequest(f"Authorization failure: {e}, {auth[1]} was type {type(auth[1])} -> {auth_parsed}") From f7c3b337fa33adfcf013d5ff2c9be4d965b8edd0 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 17:34:11 -0400 Subject: [PATCH 051/100] reverting to ascii --- lrs/utils/authorization.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index 0eef63cd..f562de0a 100755 --- a/lrs/utils/authorization.py +++ b/lrs/utils/authorization.py @@ -103,9 +103,9 @@ def inner(request, *args, **kwargs): def decode_base64_string(base64_message: str): try: - base64_bytes = base64_message.encode("utf-8") + base64_bytes = base64_message.encode("ascii") message_bytes = base64.b64decode(base64_bytes + b"====") - message = message_bytes.decode("utf-8") + message = message_bytes.decode("ascii") return message From 98b9fb62b8cf935b0aa859012cd434ce9f579890 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 17:44:15 -0400 Subject: [PATCH 052/100] shortening b64 function --- lrs/utils/authorization.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index f562de0a..c2810ff4 100755 --- a/lrs/utils/authorization.py +++ 
b/lrs/utils/authorization.py @@ -103,13 +103,9 @@ def inner(request, *args, **kwargs): def decode_base64_string(base64_message: str): try: - base64_bytes = base64_message.encode("ascii") - message_bytes = base64.b64decode(base64_bytes + b"====") - message = message_bytes.decode("ascii") - - return message + return base64.b64decode(base64_message).decode("utf-8") - except UnicodeDecodeError: + except Exception: return "" def get_user_from_auth(auth): @@ -203,7 +199,6 @@ def http_auth_helper(request): # Currently, only basic http auth is used. auth_parsed = decode_base64_string(auth[1]) try: - auth_parsed = decode_base64_string(auth[1]) [uname, passwd] = auth_parsed.split(':') except Exception as e: raise BadRequest(f"Authorization failure: {e}, {auth[1]} was type {type(auth[1])} -> {auth_parsed}") From 4a5561aa2d4c2863182b9c8a33def552aefed5cb Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 17:58:34 -0400 Subject: [PATCH 053/100] accounting for multiple colons --- lrs/utils/authorization.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index c2810ff4..62e3ba4c 100755 --- a/lrs/utils/authorization.py +++ b/lrs/utils/authorization.py @@ -73,13 +73,11 @@ def inner(request, *args, **kwargs): auth = auth.split() if len(auth) == 2: if auth[0].lower() == 'basic': - - auth_parsed = decode_base64_string(auth[1]) - [uname, passwd] = auth_parsed.split(':') + + uname, passwd = decode_basic_auth_string(auth[1]) if uname and passwd: - user = authenticate( - username=uname, password=passwd) + user = authenticate(username=uname, password=passwd) if not user: request.META[ 'lrs-user'] = (False, "Unauthorized: Authorization failed, please verify your username and password") @@ -101,12 +99,22 @@ def inner(request, *args, **kwargs): return func(request, *args, **kwargs) return inner -def decode_base64_string(base64_message: str): +def 
decode_basic_auth_string(base64_message: str): + try: - return base64.b64decode(base64_message).decode("utf-8") + decoded = base64.b64decode(base64_message).decode("utf-8") + except UnicodeDecodeError: + raise Unauthorized(f"Could not parse the provided auth string into base 64 -- received: {base64_message}") + + if ":" not in decoded: + raise Unauthorized(f"Improper Credential format, encoded value must have the form username:password -- parsed as: {decoded}") - except Exception: - return "" + parts = decoded.split(":") + + username = parts[0] + password = "".join(parts[1:]) + + return username, password def get_user_from_auth(auth): if not auth: @@ -197,7 +205,8 @@ def http_auth_helper(request): raise Unauthorized("HTTP Basic Authorization Header must start with Basic") # Currently, only basic http auth is used. - auth_parsed = decode_base64_string(auth[1]) + username, password = decode_basic_auth_string(auth[1]) + try: [uname, passwd] = auth_parsed.split(':') except Exception as e: From 096c0f7c48ec284776114cf558e8d15f6cce0164 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 18:03:48 -0400 Subject: [PATCH 054/100] fixing rename --- lrs/utils/authorization.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index 62e3ba4c..669f4da8 100755 --- a/lrs/utils/authorization.py +++ b/lrs/utils/authorization.py @@ -205,12 +205,12 @@ def http_auth_helper(request): raise Unauthorized("HTTP Basic Authorization Header must start with Basic") # Currently, only basic http auth is used. 
- username, password = decode_basic_auth_string(auth[1]) + uname, passwd = decode_basic_auth_string(auth[1]) - try: - [uname, passwd] = auth_parsed.split(':') - except Exception as e: - raise BadRequest(f"Authorization failure: {e}, {auth[1]} was type {type(auth[1])} -> {auth_parsed}") + # try: + # [uname, passwd] = auth_parsed.split(':') + # except Exception as e: + # raise BadRequest(f"Authorization failure: {e}, {auth[1]} was type {type(auth[1])} -> {auth_parsed}") # Sent in empty auth - now allowed when not allowing empty auth # in settings From 989520bddbc8f3011c6e38f1ddd14c86c84e7ea4 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 18:20:29 -0400 Subject: [PATCH 055/100] more b64 fixes --- lrs/utils/authorization.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index 669f4da8..7c88c222 100755 --- a/lrs/utils/authorization.py +++ b/lrs/utils/authorization.py @@ -1,4 +1,5 @@ import base64 +import binascii from functools import wraps from django.conf import settings @@ -102,10 +103,14 @@ def inner(request, *args, **kwargs): def decode_basic_auth_string(base64_message: str): try: - decoded = base64.b64decode(base64_message).decode("utf-8") + decoded = base64.b64decode(base64_message + "==").decode("utf-8") + except UnicodeDecodeError: raise Unauthorized(f"Could not parse the provided auth string into base 64 -- received: {base64_message}") + except binascii.Error: + raise Unauthorized(f"Could not parse the provided auth string into base 64 -- received: {base64_message}") + if ":" not in decoded: raise Unauthorized(f"Improper Credential format, encoded value must have the form username:password -- parsed as: {decoded}") From d03c23306f6fa8af7ad294a27b0207d2993731ad Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 18:24:37 -0400 Subject: [PATCH 056/100] full auth revert --- lrs/utils/authorization.py | 126 +++++++++++++++++-------------------- 1 file 
changed, 56 insertions(+), 70 deletions(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index 7c88c222..1e457e58 100755 --- a/lrs/utils/authorization.py +++ b/lrs/utils/authorization.py @@ -1,5 +1,4 @@ import base64 -import binascii from functools import wraps from django.conf import settings @@ -74,11 +73,13 @@ def inner(request, *args, **kwargs): auth = auth.split() if len(auth) == 2: if auth[0].lower() == 'basic': - - uname, passwd = decode_basic_auth_string(auth[1]) + + auth_parsed = decode_base64_string(auth[1]) + [uname, passwd] = auth_parsed.split(':') if uname and passwd: - user = authenticate(username=uname, password=passwd) + user = authenticate( + username=uname, password=passwd) if not user: request.META[ 'lrs-user'] = (False, "Unauthorized: Authorization failed, please verify your username and password") @@ -100,26 +101,12 @@ def inner(request, *args, **kwargs): return func(request, *args, **kwargs) return inner -def decode_basic_auth_string(base64_message: str): - - try: - decoded = base64.b64decode(base64_message + "==").decode("utf-8") - - except UnicodeDecodeError: - raise Unauthorized(f"Could not parse the provided auth string into base 64 -- received: {base64_message}") - - except binascii.Error: - raise Unauthorized(f"Could not parse the provided auth string into base 64 -- received: {base64_message}") - - if ":" not in decoded: - raise Unauthorized(f"Improper Credential format, encoded value must have the form username:password -- parsed as: {decoded}") - - parts = decoded.split(":") - - username = parts[0] - password = "".join(parts[1:]) +def decode_base64_string(base64_message): + base64_bytes = base64_message.encode("ascii") + message_bytes = base64.b64decode(base64_bytes) + message = message_bytes.decode("ascii") - return username, password + return message def get_user_from_auth(auth): if not auth: @@ -198,55 +185,54 @@ def validate_oauth_scope(req_dict): def http_auth_helper(request): - if not 'Authorization' in 
request['headers']: - raise Unauthorized("Authorization header missing") - - auth = request['headers']['Authorization'].split() - - if len(auth) != 2: - raise Unauthorized("The format of the HTTP Basic Authorization Header value is incorrect") - - if auth[0].lower() != 'basic': - raise Unauthorized("HTTP Basic Authorization Header must start with Basic") - - # Currently, only basic http auth is used. - uname, passwd = decode_basic_auth_string(auth[1]) - - # try: - # [uname, passwd] = auth_parsed.split(':') - # except Exception as e: - # raise BadRequest(f"Authorization failure: {e}, {auth[1]} was type {type(auth[1])} -> {auth_parsed}") - - # Sent in empty auth - now allowed when not allowing empty auth - # in settings - if not uname and not passwd and not settings.ALLOW_EMPTY_HTTP_AUTH: - raise BadRequest('Must supply auth credentials') - - elif not uname and not passwd and settings.ALLOW_EMPTY_HTTP_AUTH: - request['auth']['user'] = None - request['auth']['agent'] = None - - elif uname or passwd: - user = authenticate(username=uname, password=passwd) - if user: - # If the user successfully logged in, then add/overwrite - # the user object of this request. - request['auth']['user'] = user - try: - request['auth']['agent'] = user.agent - except Exception: - # Gets here if for some reason the agent is deleted - agent = Agent.objects.retrieve_or_create( - **{'name': user.username, 'mbox': 'mailto:%s' % user.email, \ - 'objectType': 'Agent'})[0] - agent.user = user - agent.save() - request['auth']['agent'] = user.agent + if 'Authorization' in request['headers']: + auth = request['headers']['Authorization'].split() + if len(auth) == 2: + if auth[0].lower() == 'basic': + # Currently, only basic http auth is used. 
+ auth_parsed = decode_base64_string(auth[1]) + try: + auth_parsed = decode_base64_string(auth[1]) + [uname, passwd] = auth_parsed.split(':') + except Exception as e: + raise BadRequest(f"Authorization failure: {e}, {auth[1]} was type {type(auth[1])} -> {auth_parsed}") + # Sent in empty auth - now allowed when not allowing empty auth + # in settings + if not uname and not passwd and not settings.ALLOW_EMPTY_HTTP_AUTH: + raise BadRequest('Must supply auth credentials') + elif not uname and not passwd and settings.ALLOW_EMPTY_HTTP_AUTH: + request['auth']['user'] = None + request['auth']['agent'] = None + elif uname or passwd: + user = authenticate(username=uname, password=passwd) + if user: + # If the user successfully logged in, then add/overwrite + # the user object of this request. + request['auth']['user'] = user + try: + request['auth']['agent'] = user.agent + except Exception: + # Gets here if for some reason the agent is deleted + agent = Agent.objects.retrieve_or_create( + **{'name': user.username, 'mbox': 'mailto:%s' % user.email, \ + 'objectType': 'Agent'})[0] + agent.user = user + agent.save() + request['auth']['agent'] = user.agent + else: + raise Unauthorized( + "Authorization failed, please verify your username and password") + request['auth']['define'] = True + else: + raise Unauthorized( + "HTTP Basic Authorization Header must start with Basic") else: raise Unauthorized( - "Authorization failed, please verify your username and password") - - request['auth']['define'] = True + "The format of the HTTP Basic Authorization Header value is incorrect") + else: + # The username/password combo was incorrect, or not provided. 
+ raise Unauthorized("Authorization header missing") + def oauth_helper(request): token = request['auth']['oauth_token'] From fef69800ca0a5f36261b34eaf76bf631c516ad78 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 18:41:47 -0400 Subject: [PATCH 057/100] adding debug exception to b64 --- lrs/utils/authorization.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index 1e457e58..09587773 100755 --- a/lrs/utils/authorization.py +++ b/lrs/utils/authorization.py @@ -101,12 +101,13 @@ def inner(request, *args, **kwargs): return func(request, *args, **kwargs) return inner -def decode_base64_string(base64_message): - base64_bytes = base64_message.encode("ascii") - message_bytes = base64.b64decode(base64_bytes) - message = message_bytes.decode("ascii") +def decode_base64_string(message: str) -> str: - return message + try: + return base64.b64decode(message).decode("utf-8") + + except Exception as e: + raise Exception(f"Unable to decode base 64 auth:: Received:: {message}") def get_user_from_auth(auth): if not auth: From bdfd861b9dc12fdb1a64133e98e80f9d53417727 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Mon, 22 May 2023 18:51:05 -0400 Subject: [PATCH 058/100] removing log --- lrs/utils/authorization.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lrs/utils/authorization.py b/lrs/utils/authorization.py index 09587773..d988bbc7 100755 --- a/lrs/utils/authorization.py +++ b/lrs/utils/authorization.py @@ -102,12 +102,11 @@ def inner(request, *args, **kwargs): return inner def decode_base64_string(message: str) -> str: - try: return base64.b64decode(message).decode("utf-8") except Exception as e: - raise Exception(f"Unable to decode base 64 auth:: Received:: {message}") + raise Exception(f"Unable to decode base 64 auth.") def get_user_from_auth(auth): if not auth: From a8d156737033fb3c2b10acec47aa36d2994e4acb Mon Sep 17 00:00:00 2001 From: Trey 
Hayden Date: Tue, 23 May 2023 07:57:53 -0400 Subject: [PATCH 059/100] fixing kwarg name --- lrs/managers/ActivityStateManager.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/lrs/managers/ActivityStateManager.py b/lrs/managers/ActivityStateManager.py index 8605afe8..ec28bdc2 100644 --- a/lrs/managers/ActivityStateManager.py +++ b/lrs/managers/ActivityStateManager.py @@ -35,8 +35,8 @@ def save_non_json_state(self, s, state, request_dict): def get_record(self, **kwargs) -> Tuple[ActivityState, bool]: - if "registration" in kwargs and kwargs.get("registration", None) is None: - del kwargs["registration"] + if "registration_id" in kwargs and kwargs.get("registration_id", None) is None: + del kwargs["registration_id"] return ActivityState.objects.get_or_create(**kwargs) @@ -45,8 +45,7 @@ def get_state_set(self, activity_id, registration, since): if registration: # Registration and since if since: - state_set = self.Agent.activitystate_set.filter( - activity_id=activity_id, registration_id=registration, updated__gt=since) + state_set = self.Agent.activitystate_set.filter(activity_id=activity_id, registration_id=registration, updated__gt=since) # Registration else: state_set = self.Agent.activitystate_set.filter(activity_id=activity_id, registration_id=registration) From f802c74068005530202b9fd2ae5fe96556aa3045 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 08:05:52 -0400 Subject: [PATCH 060/100] fixing key for profile docs --- lrs/managers/ActivityProfileManager.py | 2 +- lrs/managers/AgentProfileManager.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lrs/managers/ActivityProfileManager.py b/lrs/managers/ActivityProfileManager.py index 1cad8223..d1dac305 100644 --- a/lrs/managers/ActivityProfileManager.py +++ b/lrs/managers/ActivityProfileManager.py @@ -85,7 +85,7 @@ def put_profile(self, request_dict): profile_id=request_dict['params']['profileId'], activity_id=request_dict['params']['activityId'] ) - 
profile_document_contents = request_dict['state'] + profile_document_contents = request_dict['profile'] etag.check_modification_conditions(request_dict, profile_record, created, required=True) diff --git a/lrs/managers/AgentProfileManager.py b/lrs/managers/AgentProfileManager.py index af9ea197..a88fffd1 100644 --- a/lrs/managers/AgentProfileManager.py +++ b/lrs/managers/AgentProfileManager.py @@ -80,7 +80,7 @@ def put_profile(self, request_dict): profile_id=request_dict['params']['profileId'], agent=self.Agent ) - profile_document_contents = request_dict['state'] + profile_document_contents = request_dict['profile'] etag.check_modification_conditions(request_dict, profile_record, created, required=True) From 99f889540066590d00955e0691fb4ecf9fb16f27 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 08:31:39 -0400 Subject: [PATCH 061/100] adding null check to version middleware --- lrs/utils/XAPIVersionHeaderMiddleware.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lrs/utils/XAPIVersionHeaderMiddleware.py b/lrs/utils/XAPIVersionHeaderMiddleware.py index c599d792..e472e5d5 100644 --- a/lrs/utils/XAPIVersionHeaderMiddleware.py +++ b/lrs/utils/XAPIVersionHeaderMiddleware.py @@ -52,7 +52,8 @@ def process_request(self, request): resp['X-Experience-API-Version'] = settings.XAPI_VERSION return resp - def process_response(self, request, response): - response['X-Experience-API-Version'] = settings.XAPI_VERSION + if response is not None: + response['X-Experience-API-Version'] = settings.XAPI_VERSION + return response From fea82dca9c8bc2e73fd388ec932d9593163c152b Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 08:38:28 -0400 Subject: [PATCH 062/100] request processing cleanup --- lrs/utils/req_process.py | 11 +++++------ lrs/utils/req_validate.py | 11 ++++++----- lrs/views.py | 6 +++++- 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 
8f9b84b7..98f5671f 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -217,14 +217,11 @@ def statements_get(req_dict): st = Statement.objects.get(statement_id=req_dict['statementId']) stmt_dict = st.to_dict(ret_format=req_dict['params']['format']) if req_dict['params']['attachments']: - stmt_result, mime_type, content_length = build_response( - stmt_dict, True) - resp = HttpResponse(stmt_result, content_type=mime_type, - status=200) + stmt_result, mime_type, content_length = build_response(stmt_dict, True) + resp = HttpResponse(stmt_result, content_type=mime_type, status=200) else: stmt_result = json.dumps(stmt_dict, sort_keys=False) - resp = HttpResponse( - stmt_result, content_type=mime_type, status=200) + resp = HttpResponse(stmt_result, content_type=mime_type, status=200) content_length = len(stmt_result) # Complex GET else: @@ -457,6 +454,7 @@ def agent_profile_get(req_dict): a = Agent.objects.retrieve(**agent) if not a: response = HttpResponseNotFound("No agent found for agent profile get") + return response else: ap = AgentProfileManager(a) @@ -471,6 +469,7 @@ def agent_profile_get(req_dict): else: response = HttpResponse( resource.json_profile, content_type=resource.content_type) + response['ETag'] = '"%s"' % resource.etag return response diff --git a/lrs/utils/req_validate.py b/lrs/utils/req_validate.py index 2569b84d..110c52a7 100644 --- a/lrs/utils/req_validate.py +++ b/lrs/utils/req_validate.py @@ -409,7 +409,9 @@ def activity_state_post(req_dict): previous_state = ActivityState.objects.get( state_id=req_dict['params']['stateId'], agent=a, - activity_id=req_dict['params']['activityId'], registration_id=req_dict['params']['registration']) + activity_id=req_dict['params']['activityId'], + registration_id=req_dict['params']['registration'] + ) except ActivityState.DoesNotExist: pass @@ -845,8 +847,7 @@ def agent_profile_get(req_dict): agent = convert_to_datatype(req_dict['params']['agent']) req_dict['params']['agent'] = agent except 
Exception: - raise ParamError("agent param %s is not valid" % \ - req_dict['params']['agent']) + raise ParamError("agent param %s is not valid" % req_dict['params']['agent']) validator.validate_agent(agent, "Agent param") else: err_msg = "Error -- agent_profile - method = %s, but agent parameter missing." % req_dict[ @@ -857,12 +858,12 @@ def agent_profile_get(req_dict): try: validate_timestamp(req_dict['params']['since']) except (Exception, RFC3339Error): - raise ParamError( - "Since parameter was not a valid RFC3339 timestamp") + raise ParamError("Since parameter was not a valid RFC3339 timestamp") # Extra validation if oauth if req_dict['auth']['type'] == 'oauth': validate_oauth_for_documents(req_dict, "agent profile") + return req_dict diff --git a/lrs/views.py b/lrs/views.py index 5266c6cb..24ccb996 100644 --- a/lrs/views.py +++ b/lrs/views.py @@ -207,13 +207,17 @@ def handle_request(request, more_id=None): try: r_dict = req_parse.parse(request, more_id) path = request.path.lower() + if path.endswith('/'): path = path.rstrip('/') + # Cutoff more_id if 'more' in path: path = reverse('lrs:statements').lower() + "/" + "more" + req_dict = validators[path][r_dict['method']](r_dict) return processors[path][req_dict['method']](req_dict) + except (BadRequest, OauthBadRequest, SuspiciousOperation) as err: status = 400 log_exception(status, request.path) @@ -226,7 +230,7 @@ def handle_request(request, more_id=None): except Forbidden as forb: status = 403 log_exception(status, request.path) - response = HttpResponse(str(msg), status=status) + response = HttpResponse(str(forb), status=status) except NotFound as nf: status = 404 log_exception(status, request.path) From 664b35b31f9ac2f2fe08dd5dc6fce95d4930c0a3 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 08:48:54 -0400 Subject: [PATCH 063/100] allowing since to be none for profile get --- lrs/utils/req_process.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git 
a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 98f5671f..b07bc7bb 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -464,16 +464,14 @@ def agent_profile_get(req_dict): if profile_id: resource = ap.get_profile(profile_id) if resource.profile: - response = HttpResponse( - resource.profile.read(), content_type=resource.content_type) + response = HttpResponse(resource.profile.read(), content_type=resource.content_type) else: - response = HttpResponse( - resource.json_profile, content_type=resource.content_type) + response = HttpResponse(resource.json_profile, content_type=resource.content_type) response['ETag'] = '"%s"' % resource.etag return response - elif since is not None: + else: resource = ap.get_profile_ids(since) response = JsonResponse([k for k in resource], safe=False) @@ -485,7 +483,7 @@ def agent_profile_delete(req_dict): a = Agent.objects.retrieve(**agent) if not a: return HttpResponse('', status=204) - profile_id = req_dict['params']['profileId'] + ap = AgentProfileManager(a) ap.delete_profile(req_dict) @@ -495,8 +493,7 @@ def agent_profile_delete(req_dict): def agents_get(req_dict): a = Agent.objects.get(**req_dict['agent_ifp']) agent_data = json.dumps(a.to_dict_person(), sort_keys=False) - resp = HttpResponse( - agent_data, content_type="application/json", status=200) + resp = HttpResponse(agent_data, content_type="application/json", status=200) resp['Content-Length'] = str(len(agent_data)) return resp From 3f3d36151cbcaff1a6dc6fb392380174d95fb990 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 10:45:01 -0400 Subject: [PATCH 064/100] adding empty activity request --- lrs/models.py | 2 ++ lrs/utils/req_process.py | 17 +++++++++++------ lrs/utils/req_validate.py | 5 +---- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/lrs/models.py b/lrs/models.py index 77fec514..94ee75cb 100644 --- a/lrs/models.py +++ b/lrs/models.py @@ -337,7 +337,9 @@ class Activity(models.Model): def 
return_activity_with_lang_format(self, lang=None, ids_only=False): if ids_only: return {'id': self.activity_id} + ret = self.canonical_data + if 'objectType' not in self.canonical_data: ret['objectType'] = 'Activity' if 'definition' in self.canonical_data: diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index b07bc7bb..57684632 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -8,6 +8,7 @@ from datetime import datetime from django.http import HttpResponse, HttpResponseNotFound, JsonResponse +from django.core.exceptions import ObjectDoesNotExist from django.conf import settings from django.utils.timezone import utc @@ -417,12 +418,16 @@ def activity_profile_delete(req_dict): def activities_get(req_dict): activity_id = req_dict['params']['activityId'] - act = Activity.objects.get( - activity_id=activity_id, authority__isnull=False) - return_act = json.dumps( - act.return_activity_with_lang_format(['all']), sort_keys=False) - resp = HttpResponse( - return_act, content_type="application/json", status=200) + + try : + activity_record = Activity.objects.get(activity_id=activity_id, authority__isnull=False) + return_act = json.dumps(activity_record.return_activity_with_lang_format(['all']), sort_keys=False) + + except ObjectDoesNotExist: + activity_stub = {"activityId": activity_id} + return_act = json.dumps(activity_stub) + + resp = HttpResponse(return_act, content_type="application/json", status=200) resp['Content-Length'] = str(len(return_act)) return resp diff --git a/lrs/utils/req_validate.py b/lrs/utils/req_validate.py index 110c52a7..da51442e 100644 --- a/lrs/utils/req_validate.py +++ b/lrs/utils/req_validate.py @@ -705,10 +705,7 @@ def activity_profile_delete(req_dict): def activities_get(req_dict): rogueparams = set(req_dict['params']) - set(["activityId"]) if rogueparams: - - raise ParamError( - "The get activities request contained unexpected parameters: %s" % ", ".join(escape(param) for param in rogueparams)) - + raise 
ParamError("The get activities request contained unexpected parameters: %s" % ", ".join(escape(param) for param in rogueparams)) validator = StatementValidator() try: From 3815ec27767bf685de7df1ed80dd5846ca1411de Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 10:50:06 -0400 Subject: [PATCH 065/100] swapping to specific dne --- lrs/utils/req_process.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 57684632..ceaf7319 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -8,7 +8,6 @@ from datetime import datetime from django.http import HttpResponse, HttpResponseNotFound, JsonResponse -from django.core.exceptions import ObjectDoesNotExist from django.conf import settings from django.utils.timezone import utc @@ -423,7 +422,7 @@ def activities_get(req_dict): activity_record = Activity.objects.get(activity_id=activity_id, authority__isnull=False) return_act = json.dumps(activity_record.return_activity_with_lang_format(['all']), sort_keys=False) - except ObjectDoesNotExist: + except Activity.DoesNotExist: activity_stub = {"activityId": activity_id} return_act = json.dumps(activity_stub) From 497c064d24499737c8719359757bdcde0941e377 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 10:53:25 -0400 Subject: [PATCH 066/100] removing 1.0.3 validation for activity dne --- lrs/utils/req_validate.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/lrs/utils/req_validate.py b/lrs/utils/req_validate.py index da51442e..3b3b1432 100644 --- a/lrs/utils/req_validate.py +++ b/lrs/utils/req_validate.py @@ -716,14 +716,6 @@ def activities_get(req_dict): else: validator.validate_iri(activity_id, "activityId param") - # Try to retrieve activity, if DNE then return empty else return activity - # info - try: - Activity.objects.get(activity_id=activity_id, authority__isnull=False) - except Activity.DoesNotExist: - err_msg = "No activity found with ID %s" 
% activity_id - raise IDNotFoundError(err_msg) - return req_dict From 0b810434c68aa85a60b0b3e9654ea8e1a9cd33ed Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 11:00:39 -0400 Subject: [PATCH 067/100] fixing activity dne return format --- lrs/utils/req_process.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index ceaf7319..3a37345e 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -423,7 +423,10 @@ def activities_get(req_dict): return_act = json.dumps(activity_record.return_activity_with_lang_format(['all']), sort_keys=False) except Activity.DoesNotExist: - activity_stub = {"activityId": activity_id} + activity_stub = { + "id": activity_id, + "objectType": "Activity" + } return_act = json.dumps(activity_stub) resp = HttpResponse(return_act, content_type="application/json", status=200) From bc1dc5cd05f860ba0ffc7f452a37994ad9165ee6 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 11:22:48 -0400 Subject: [PATCH 068/100] removing subscript notation in etag check --- lrs/utils/etag.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lrs/utils/etag.py b/lrs/utils/etag.py index 06eae641..e0001a9a 100644 --- a/lrs/utils/etag.py +++ b/lrs/utils/etag.py @@ -51,9 +51,10 @@ def check_modification_conditions(request, record, created, required=True): was_put_request = request['method'] == "PUT" if was_put_request and record_already_exists and missing_if_match and missing_if_none_match: + proper_etag = getattr(record, "etag", None); error_message = f"A document matching your query already exists, but the request did not include ETag headers. 
" \ + f"If you would like to override the document, provide the following header:: " \ - + f"If-Match: \"{record['etag']}\"" + + f"If-Match: \"{proper_etag}\"" raise Conflict(error_message) From 92715616bb299381e4575dcf0643ecb930480ec9 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 11:43:05 -0400 Subject: [PATCH 069/100] fixing response path for activity profile --- lrs/utils/req_process.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 3a37345e..53163083 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -399,11 +399,13 @@ def activity_profile_get(req_dict): return response # Return IDs of profiles stored since profileId was not submitted - elif since is not None: + else: resource = ap.get_profile_ids(activity_id, since) response = JsonResponse([k for k in resource], safe=False) - response['since'] = since + + if since is not None: + response['since'] = since return response From 2e2fc995bfc6e3a4cad3e3e56981fe6664d95241 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 21:17:32 -0400 Subject: [PATCH 070/100] adding utc timestamp conversion --- lrs/utils/req_process.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 53163083..ce9ed3bd 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -5,7 +5,7 @@ import uuid import math -from datetime import datetime +from datetime import datetime, timezone from django.http import HttpResponse, HttpResponseNotFound, JsonResponse from django.conf import settings @@ -51,7 +51,13 @@ def process_statement(stmt, auth, payload_sha2s): stmt['object']['context']['contextActivities'][k] = [v] # Add stored time - stmt['stored'] = datetime.utcnow().replace(tzinfo=utc).isoformat() + stmt['stored'] = datetime.now(utc).utcnow().replace(tzinfo=utc).isoformat() + + # Check if timestamp uses UTC, 
replace otherwise + if "timestamp" in stmt: + timestamp_original = stmt["timestamp"] + timestamp_utc = datetime.fromisoformat(timestamp_original).astimezone(tz=timezone.utc).isoformat() + stmt["timestamp"] = timestamp_utc # Add stored as timestamp if timestamp not present if 'timestamp' not in stmt: @@ -149,6 +155,7 @@ def process_complex_get(req_dict): def statements_post(req_dict): auth = req_dict['auth'] + # If single statement, put in list if isinstance(req_dict['body'], dict): body = [req_dict['body']] @@ -157,13 +164,16 @@ def statements_post(req_dict): stmt_responses = process_body(body, auth, req_dict.get('payload_sha2s', None)) stmt_ids = [stmt_tup[0] for stmt_tup in stmt_responses] - stmts_to_void = [str(stmt_tup[1]) - for stmt_tup in stmt_responses if stmt_tup[1]] + stmts_to_void = [str(stmt_tup[1]) for stmt_tup in stmt_responses if stmt_tup[1]] + check_activity_metadata.delay(stmt_ids) + if stmts_to_void: Statement.objects.filter(statement_id__in=stmts_to_void).update(voided=True) + if settings.USE_HOOKS: check_statement_hooks.delay(stmt_ids) + return JsonResponse([st for st in stmt_ids], safe=False) From 78635812217f0b86f0e43c3f3ad3beb519c2c4dc Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 22:56:37 -0400 Subject: [PATCH 071/100] updating timestamp for states --- lrs/managers/ActivityStateManager.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lrs/managers/ActivityStateManager.py b/lrs/managers/ActivityStateManager.py index ec28bdc2..01f67890 100644 --- a/lrs/managers/ActivityStateManager.py +++ b/lrs/managers/ActivityStateManager.py @@ -1,7 +1,7 @@ -import datetime import json from typing import Tuple +from datetime import datetime, timezone from django.core.files.base import ContentFile from django.utils.timezone import utc @@ -23,7 +23,7 @@ def save_non_json_state(self, s, state, request_dict): if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: s.updated = 
request_dict['headers']['updated'] else: - s.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + s.updated = datetime.utcnow().replace(tzinfo=timezone.utc) # Go to beginning of file state.seek(0) @@ -103,7 +103,7 @@ def post_state(self, request_dict): if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: state_record.updated = request_dict['headers']['updated'] else: - state_record.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + state_record.updated = datetime.utcnow().replace(tzinfo=timezone.utc) state_record.save() @@ -152,7 +152,7 @@ def put_state(self, request_dict): if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: state_record.updated = request_dict['headers']['updated'] else: - state_record.updated = datetime.datetime.utcnow().replace(tzinfo=utc) + state_record.updated = datetime.utcnow().replace(tzinfo=timezone.utc) state_record.save() From 295fdc4bd4b9745deba7051f9a3b1c96dfbae992 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 23 May 2023 22:56:58 -0400 Subject: [PATCH 072/100] fixing signature processing --- lrs/utils/req_parse.py | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/lrs/utils/req_parse.py b/lrs/utils/req_parse.py index f4ae365d..abd8fc6f 100644 --- a/lrs/utils/req_parse.py +++ b/lrs/utils/req_parse.py @@ -307,11 +307,11 @@ def validate_hash(part_hash, part): % part_hash) def is_a_signature(attachment): - usage_type = getattr(attachment, 'usageType', None) + usage_type = attachment.get('usageType', None) return usage_type == "http://adlnet.gov/expapi/attachments/signature" def get_signature(attachment): - return getattr(attachment, "sha2", None) + return attachment.get("sha2", None) def parse_signature_attachments(r_dict, part_dict): # Find the signature sha2 from the list attachment values in the @@ -328,14 +328,14 @@ def parse_signature_attachments(r_dict, part_dict): stmt_attachment_pairs.append((statement, signatures)) 
else: - if 'attachments' in r_dict['body']: - statement = r_dict["body"] + statement = r_dict["body"] + if 'attachments' in statement: attachments = statement["attachments"] signatures = [get_signature(a) for a in attachments if is_a_signature(a)] stmt_attachment_pairs.append((statement, signatures)) - signed_stmts = [sap for sap in stmt_attachment_pairs if len(sap[1]) >= 1] - unsigned_stmts = [sap for sap in stmt_attachment_pairs if len(sap[1]) == 0] + signed_stmts = [(statement, signatures) for (statement, signatures) in stmt_attachment_pairs if signatures] + unsigned_stmts = [(statement, signatures) for (statement, signatures) in stmt_attachment_pairs if not signatures] if unsigned_stmts: validate_non_signature_attachment(unsigned_stmts, r_dict['payload_sha2s'], part_dict) @@ -361,9 +361,7 @@ def validate_non_signature_attachment(unsigned_stmts, sha2s_on_request, part_dic def handle_signatures(stmt_tuples, sha2s_on_request, part_dict): - for statement_signature_tuple in stmt_tuples: - - _, signatures = statement_signature_tuple + for statement, signatures in stmt_tuples: for sha2 in signatures: # Should be listed in sha2s - sha2s couldn't not match @@ -376,12 +374,10 @@ def handle_signatures(stmt_tuples, sha2s_on_request, part_dict): if part['Content-Type'] != 'application/octet-stream': raise BadRequest("Signature attachment must have Content-Type of 'application/octet-stream'") - validate_signature(statement_signature_tuple, part) - + validate_signature(statement, signatures, part) -def validate_signature(statement_signature_tuple, part): - statement, signatures = statement_signature_tuple +def validate_signature(statement, signatures, part): sha2_key = signatures[0] signature = get_part_payload(part) From ed709c221932d5d17731c572a9f4f36a6f006662 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Wed, 24 May 2023 08:52:22 -0400 Subject: [PATCH 073/100] swapping time parser for utc change --- lrs/utils/req_process.py | 3 ++- 1 file changed, 2 insertions(+), 1 
deletion(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index ce9ed3bd..39a0e025 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -5,6 +5,7 @@ import uuid import math +from isodate.isodatetime import parse_datetime from datetime import datetime, timezone from django.http import HttpResponse, HttpResponseNotFound, JsonResponse @@ -56,7 +57,7 @@ def process_statement(stmt, auth, payload_sha2s): # Check if timestamp uses UTC, replace otherwise if "timestamp" in stmt: timestamp_original = stmt["timestamp"] - timestamp_utc = datetime.fromisoformat(timestamp_original).astimezone(tz=timezone.utc).isoformat() + timestamp_utc = parse_datetime(timestamp_original).astimezone(tz=timezone.utc).isoformat() stmt["timestamp"] = timestamp_utc # Add stored as timestamp if timestamp not present From 4276857f5d60746a31bef2a2f74f5b0da4c30b8a Mon Sep 17 00:00:00 2001 From: ADLMeganBohland Date: Wed, 24 May 2023 09:25:13 -0400 Subject: [PATCH 074/100] Updated for line 39 in xAPI changelog list. Trey - I know we mentioned making it human readable, but where it is stored it is placed in isoformat, so it SHOULD be good. 
--- lrs/tests/test_Statement.py | 23 +++++++++++++++++++++++ lrs/utils/req_process.py | 12 +++++++++++- 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/lrs/tests/test_Statement.py b/lrs/tests/test_Statement.py index 03d5b121..35f17542 100644 --- a/lrs/tests/test_Statement.py +++ b/lrs/tests/test_Statement.py @@ -584,6 +584,29 @@ def test_list_post(self): self.assertEqual(list(lang_map2.keys())[0], "en-GB") self.assertEqual(list(lang_map2.values())[0], "failed") + #The LRS shall include a "last modified" header which matches the "stored" Timestamp of the statment + def test_last_modified_header(self): + self.bunchostmts() + getResponse = self.client.get(reverse( + 'lrs:statements'), X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth) + self.assertEqual(response.status_code, 200) + #assert it has/included correct header + self.assertIn('Last-Modified', getResponse._headers) + #And that it equals the stored timestamp + lastModified = getResponse._headers('Last-Modified') + timeStamp = self.stored + self.assertEqual(lastModified, timestamp) + + +self.bunchostmts() + getResponse = self.client.get(reverse( + 'lrs:statements'), X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth) + self.assertEqual(getResponse.status_code, 200) + jsn = json.loads(getResponse.content) + self.assertEqual(len(jsn["statements"]), 11) + self.assertIn('content-length', getResponse._headers) + + def test_put(self): guid = uuid.uuid4() diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 449bba10..3a34e09d 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -210,6 +210,10 @@ def statements_more_get(req_dict): else: resp = HttpResponse(json.dumps(stmt_result), content_type=mime_type, status=200) +##Create the response header + +##last modified is in humane readable info like dddec 25, so there is a conversion needed +#for now iso format andresolved later , update to return statment result 
resp['Content-Length'] = str(content_length) return resp @@ -230,13 +234,19 @@ def statements_get(req_dict): status=200) else: stmt_result = json.dumps(stmt_dict, sort_keys=False) + resp = HttpResponse( stmt_result, content_type=mime_type, status=200) + + resp['Last-Modified'] = st['stored'] + content_length = len(stmt_result) # Complex GET else: resp, content_length = process_complex_get(req_dict) - resp['Content-Length'] = str(content_length) + resp['Content-Length'] = str(content_length) + + resp['Last-Modified'] = st['stored'] return resp From 009d4ece9347936af1de531669c16d203b67cdc0 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Wed, 24 May 2023 09:25:30 -0400 Subject: [PATCH 075/100] resolving null case for digest --- lrs/utils/retrieve_statement.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/lrs/utils/retrieve_statement.py b/lrs/utils/retrieve_statement.py index c546b0f3..86513f29 100644 --- a/lrs/utils/retrieve_statement.py +++ b/lrs/utils/retrieve_statement.py @@ -166,17 +166,19 @@ def create_under_limit_stmt_result(stmt_set, stored, language, stmt_format): stmt_result['more'] = "" return stmt_result - def create_cache_key(): # Create unique hash data to use for the cache key - hash_data = [] - hash_data.append(str(datetime.now())) - hash_data.append(str(uuid.uuid4())) + hash_data = [ + str(datetime.now()), + str(uuid.uuid4()) + ] # Create cache key from hashed data (always 32 digits) - key = hashlib.md5(bcoding.bencode(hash_data)).hexdigest() - return key + bcode = bcoding.bencode(hash_data) + assert bcode is not None + key = hashlib.md5(bcode).hexdigest() + return key def create_over_limit_stmt_result(stmt_list, stored, limit, language, stmt_format, attachments): # First time someone queries POST/GET From a2a61f889cd7c0b3ade5cfc874469a7de6770a77 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Wed, 24 May 2023 11:42:36 -0400 Subject: [PATCH 076/100] blocking alternate request syntax --- lrs/utils/req_parse.py | 3 
++- lrs/utils/req_validate.py | 7 +++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lrs/utils/req_parse.py b/lrs/utils/req_parse.py index abd8fc6f..8a145d8e 100644 --- a/lrs/utils/req_parse.py +++ b/lrs/utils/req_parse.py @@ -51,7 +51,8 @@ def parse(request, more_id=None): # lookin for weird IE CORS stuff.. it'll be a post with a 'method' url # param if request.method == 'POST' and 'method' in request.GET: - parse_cors_request(request, r_dict) + raise BadRequest("Alternate Request Syntax is no longer allowed.") + # Just parse body for all non IE CORS stuff else: parse_normal_request(request, r_dict) diff --git a/lrs/utils/req_validate.py b/lrs/utils/req_validate.py index 3b3b1432..73bbec0a 100644 --- a/lrs/utils/req_validate.py +++ b/lrs/utils/req_validate.py @@ -91,8 +91,7 @@ def server_validate_statement(stmt, auth, content_type): @auth def statements_post(req_dict): if list(req_dict['params'].keys()): - raise ParamError("The post statements request contained unexpected parameters: %s" % ", ".join( - list(req_dict['params'].keys()))) + raise ParamError("The post statements request cannot contain query parameters.") try: validator = StatementValidator(req_dict['body']) @@ -284,8 +283,8 @@ def statements_put(req_dict): # Find any unexpected parameters rogueparams = set(req_dict['params']) - set(["statementId"]) if rogueparams: - raise ParamError( - "The put statements request contained unexpected parameters: %s" % ", ".join(escape(param) for param in rogueparams)) + rogueparams_str = ", ".join(escape(param) for param in rogueparams) + raise ParamError(f"The put statements request cannot contain parameters other than 'statementId' -- found: {rogueparams_str}") # Statement id can must be supplied in query param. 
If in the body too, it # must be the same From 5f8852e0949ce75db7b78791ccef6278e056ab8b Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Wed, 24 May 2023 12:02:43 -0400 Subject: [PATCH 077/100] accounting for rfc+utc conversion --- lrs/utils/req_process.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 39a0e025..b3fbc8e5 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -56,9 +56,15 @@ def process_statement(stmt, auth, payload_sha2s): # Check if timestamp uses UTC, replace otherwise if "timestamp" in stmt: - timestamp_original = stmt["timestamp"] - timestamp_utc = parse_datetime(timestamp_original).astimezone(tz=timezone.utc).isoformat() - stmt["timestamp"] = timestamp_utc + rfc_timestamp = stmt["timestamp"] + rfc_timestamp_had_space = " " in rfc_timestamp + + iso_timestamp = rfc_timestamp.replace(" ", "T") + + timestamp_utc = parse_datetime(iso_timestamp).astimezone(tz=timezone.utc).isoformat() + timestamp_final = timestamp_utc.replace("T", " ") if rfc_timestamp_had_space else timestamp_utc + + stmt["timestamp"] = timestamp_final # Add stored as timestamp if timestamp not present if 'timestamp' not in stmt: From becd4a86417e5d356b5b7191a5298140662c49f2 Mon Sep 17 00:00:00 2001 From: ADLMeganBohland Date: Thu, 25 May 2023 09:31:09 -0400 Subject: [PATCH 078/100] Fixed LRS with Trey --- lrs/utils/req_process.py | 50 +++++++++++++++++++++++++++------------- 1 file changed, 34 insertions(+), 16 deletions(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 3a34e09d..fbf3c879 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -151,7 +151,8 @@ def process_complex_get(req_dict): if isinstance(stmt_result, dict): stmt_result = json.dumps(stmt_result) resp = HttpResponse(stmt_result, content_type=mime_type, status=200) - return resp, content_length + + return resp, content_length, stmt_result def statements_post(req_dict): 
@@ -210,43 +211,60 @@ def statements_more_get(req_dict): else: resp = HttpResponse(json.dumps(stmt_result), content_type=mime_type, status=200) -##Create the response header -##last modified is in humane readable info like dddec 25, so there is a conversion needed -#for now iso format andresolved later , update to return statment result resp['Content-Length'] = str(content_length) + latest_stored = datetime.min + for stmt in stmt_result["statements"]: + stored = datetime.fromisoformat(stmt['stored']) + if stored > latest_stored: + latest_stored = stored + + resp['Last-Modified'] = latest_stored.strftime("%a, %d-%b-%Y %H:%M:%S %Z") + return resp def statements_get(req_dict): stmt_result = {} mime_type = "application/json" + + # If statementId is in req_dict then it is a single get - can still include attachments # or have a different format if 'statementId' in req_dict: st = Statement.objects.get(statement_id=req_dict['statementId']) stmt_dict = st.to_dict(ret_format=req_dict['params']['format']) + + #Grab datetime where it is stored as string, convert to datetime and format + last_stored = datetime.fromisoformat(stmt_dict['stored']).strftime("%a, %d-%b-%Y %H:%M:%S %Z") + if req_dict['params']['attachments']: - stmt_result, mime_type, content_length = build_response( - stmt_dict, True) - resp = HttpResponse(stmt_result, content_type=mime_type, - status=200) + stmt_result, mime_type, content_length = build_response(stmt_dict, True) + resp = HttpResponse(stmt_result, content_type=mime_type, status=200) else: stmt_result = json.dumps(stmt_dict, sort_keys=False) - resp = HttpResponse( - stmt_result, content_type=mime_type, status=200) - - resp['Last-Modified'] = st['stored'] - + resp = HttpResponse(stmt_result, content_type=mime_type, status=200) + content_length = len(stmt_result) + + #stored is in iso we need it like this "Last-Modified: , :: UTC" + resp['Last-Modified'] = last_stored + # Complex GET else: - resp, content_length = process_complex_get(req_dict) - 
resp['Content-Length'] = str(content_length) + resp, content_length, stmt_result = process_complex_get(req_dict) - resp['Last-Modified'] = st['stored'] + latest_stored = datetime.min + for stmt in stmt_result["statements"]: + stored = datetime.fromisoformat(stmt['stored']) + if stored > latest_stored: + latest_stored = stored + + resp['Content-Length'] = str(content_length) + + resp['Last-Modified'] = latest_stored.strftime("%a, %d-%b-%Y %H:%M:%S %Z") return resp From 08383c787a599f38db171767a6d87b5967b5392c Mon Sep 17 00:00:00 2001 From: ADLMeganBohland Date: Thu, 25 May 2023 13:27:54 -0400 Subject: [PATCH 079/100] Updated ActivityState resource to send lastmodifie --- lrs/utils/req_process.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index fbf3c879..2d3ecbdf 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -368,6 +368,7 @@ def activity_state_get(req_dict): # state id means we want only 1 item if state_id: resource = actstate.get_state(activity_id, registration, state_id) + #MB Resource is a model of AgentProfile, it has an 'updated' property if resource.state: response = HttpResponse( resource.state.read(), content_type=resource.content_type) @@ -375,12 +376,18 @@ def activity_state_get(req_dict): response = HttpResponse( resource.json_state, content_type=resource.content_type) response['ETag'] = '"%s"' % resource.etag + + #MB place our header (updated is saved as a datetime field, so it doesn't need to be pulled from isoformat) + response['Last-Modified'] = resource.updated.strftime("%a, %d-%b-%Y %H:%M:%S %Z") + # no state id means we want an array of state ids else: since = req_dict['params'].get('since', None) resource = actstate.get_state_ids(activity_id, registration, since) response = JsonResponse([k for k in resource], safe=False) - + #MB place our header (updated is saved as a datetime field, so it doesn't need to be pulled from isoformat) + 
response['Last-Modified'] = resource.updated.strftime("%a, %d-%b-%Y %H:%M:%S %Z") + return response From 1612a6947d08daf0e2247af87ba358dc930e8c7b Mon Sep 17 00:00:00 2001 From: ADLMeganBohland Date: Thu, 25 May 2023 14:14:07 -0400 Subject: [PATCH 080/100] Updated for changelog checklist --- lrs/models.py | 3 +-- lrs/utils/req_process.py | 9 ++++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/lrs/models.py b/lrs/models.py index dc47dd74..d8b6f400 100644 --- a/lrs/models.py +++ b/lrs/models.py @@ -762,8 +762,7 @@ def __unicode__(self): class ActivityState(models.Model): state_id = models.CharField(max_length=MAX_URL_LENGTH) - updated = models.DateTimeField( - auto_now_add=True, blank=True, db_index=True) + updated = models.DateTimeField(auto_now_add=True, blank=True, db_index=True) activity_id = models.CharField(max_length=MAX_URL_LENGTH, db_index=True) registration_id = models.CharField(max_length=40, db_index=True) content_type = models.CharField(max_length=255, blank=True) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 2d3ecbdf..b86fd5d7 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -385,9 +385,7 @@ def activity_state_get(req_dict): since = req_dict['params'].get('since', None) resource = actstate.get_state_ids(activity_id, registration, since) response = JsonResponse([k for k in resource], safe=False) - #MB place our header (updated is saved as a datetime field, so it doesn't need to be pulled from isoformat) - response['Last-Modified'] = resource.updated.strftime("%a, %d-%b-%Y %H:%M:%S %Z") - + return response @@ -442,6 +440,9 @@ def activity_profile_get(req_dict): response = HttpResponse( resource.json_profile, content_type=resource.content_type) response['ETag'] = '"%s"' % resource.etag + + #MB place our header (updated is saved as a datetime field, so it doesn't need to be pulled from isoformat) + response['Last-Modified'] = resource.updated.strftime("%a, %d-%b-%Y %H:%M:%S %Z") return 
response # Return IDs of profiles stored since profileId was not submitted @@ -514,6 +515,8 @@ def agent_profile_get(req_dict): response = HttpResponse( resource.json_profile, content_type=resource.content_type) response['ETag'] = '"%s"' % resource.etag + #place our header (updated is saved as a datetime field, so it doesn't need to be pulled from isoformat) + response['Last-Modified'] = resource.updated.strftime("%a, %d-%b-%Y %H:%M:%S %Z") return response since = req_dict['params'].get( From d932a365b0d81432d96d741c57ee2ad2fd560f13 Mon Sep 17 00:00:00 2001 From: hsmith-adl Date: Fri, 26 May 2023 13:17:40 -0400 Subject: [PATCH 081/100] Updated context groups and context agnts for xAPI2 --- .gitignore | 2 +- lrs/utils/StatementValidator.py | 158 ++++++++++++++++++++++++++------ 2 files changed, 129 insertions(+), 31 deletions(-) diff --git a/.gitignore b/.gitignore index cf194344..33bb6ad7 100644 --- a/.gitignore +++ b/.gitignore @@ -12,7 +12,7 @@ media/ federated-analytics.js lrs/celery.py .vscode/ - +lrs-env/ docker-compose.dev.yml docker/lrs/settings.ini diff --git a/lrs/utils/StatementValidator.py b/lrs/utils/StatementValidator.py index 3e2ba557..c4ed0948 100644 --- a/lrs/utils/StatementValidator.py +++ b/lrs/utils/StatementValidator.py @@ -56,6 +56,7 @@ class StatementValidator(): def __init__(self, data=None): + """Initialize the validator with the data to be validated.""" # If incoming is a string, ast eval it (exception will be caught with # whatever is calling validator) if data: @@ -73,7 +74,9 @@ def __init__(self, data=None): except Exception as e: self.return_error(str(e)) + def validate(self): + """Validate the data passed into the validator.""" # If list, validate each stmt inside if isinstance(self.data, list): @@ -97,10 +100,14 @@ def validate(self): else: self.return_error(f"There are no statements to validate, payload: {self.data}") + def return_error(self, err_msg): + """Return a ParamError with the given message.""" raise ParamError(err_msg) + 
def validate_email(self, email): + """Validate an email address.""" if isinstance(email, str): if email.startswith("mailto:"): email_re = re.compile("[^@]+@[^@]+\.[^@]+") @@ -113,7 +120,9 @@ def validate_email(self, email): else: self.return_error("mbox value must be a string type") + def validate_language(self, lang, field): + """Validate a language code.""" if not isinstance(lang, str): self.return_error( "language %s is not valid in %s" % (lang, field)) @@ -128,17 +137,22 @@ def validate_language(self, lang, field): self.return_error( "language %s is not valid in %s" % (lang, field)) + def validate_lang_map(self, lang_map, field): + """Validate a language map.""" for lang in lang_map: self.validate_language(lang, field) def validate_dict_values(self, values, field): + """Validate that all values in a dict are not null.""" for v in values: if not v: self.return_error("%s contains a null value" % field) + def validate_email_sha1sum(self, sha1sum): + """Validate an email sha1sum.""" if isinstance(sha1sum, str): sha1sum_re = re.compile('([a-fA-F\d]{40}$)') if not sha1sum_re.match(sha1sum): @@ -147,17 +161,21 @@ def validate_email_sha1sum(self, sha1sum): else: self.return_error("mbox_sha1sum value must be a string type") + def validate_iri(self, iri_value, field): + """Validate an IRI.""" if isinstance(iri_value, str): try: iriparse(iri_value, rule='IRI') - except Exception: + except ValueError: self.return_error( "%s with value %s was not a valid IRI" % (field, iri_value)) else: self.return_error("%s must be a string type" % field) + def validate_uuid(self, uuid, field): + """Validate a UUID.""" if isinstance(uuid, str): val = None try: @@ -168,27 +186,33 @@ def validate_uuid(self, uuid, field): else: self.return_error("%s must be a string type" % field) + def check_if_dict(self, obj, field): + """Check if an object is a dict.""" if not isinstance(obj, dict): self.return_error( "%s is not a properly formatted dictionary" % field) def check_if_list(self, obj, field): 
+ """Check if an object is a list.""" if not isinstance(obj, list): self.return_error("%s is not a properly formatted array" % field) def check_allowed_fields(self, allowed, obj, obj_name): + """Check if an object has fields that are not allowed.""" # Check for fields that aren't in spec failed_list = [x for x in list(obj.keys()) if x not in allowed] if failed_list: self.return_error(f"Invalid field(s) found in {obj_name} - {', '.join(failed_list)}") def check_required_fields(self, required, obj, obj_name): + """Check if an object has fields that are required.""" for field in required: if field not in obj: self.return_error("%s is missing in %s" % (field, obj_name)) def validate_statement(self, stmt): + """Validate a statement.""" # Ensure dict was submitted as stmt and check allowed and required # fields self.check_if_dict(stmt, "Statement") @@ -266,6 +290,7 @@ def validate_statement(self, stmt): self.validate_attachments(stmt['attachments']) def validate_authority_group(self, authority): + """Validate a group representing an authority.""" if len(authority['member']) != 2: self.return_error( "Groups representing authorities must only contain 2 members") @@ -275,6 +300,7 @@ def validate_authority_group(self, authority): "Groups representing authorities must not contain an inverse functional identifier") def validate_attachments(self, attachments): + """Validate attachments.""" # Ensure attachments is a list self.check_if_list(attachments, "Attachments") @@ -324,6 +350,7 @@ def validate_attachments(self, attachments): list(attach['description'].keys()), "attachment description") def validate_extensions(self, extensions, field): + """Validate extensions.""" # Ensure incoming extensions is a dict self.check_if_dict(extensions, "%s extensions" % field) @@ -332,6 +359,7 @@ def validate_extensions(self, extensions, field): self.validate_iri(k, field) def validate_agent(self, agent, placement): + """Validate an agent.""" # Ensure incoming agent is a dict and check allowed 
fields self.check_if_dict(agent, "Agent in %s" % placement) self.check_allowed_fields(agent_allowed_fields, agent, "Agent/Group") @@ -388,6 +416,7 @@ def validate_agent(self, agent, placement): self.validate_members(agent) def validate_members(self, agent): + """Validate members of a group.""" # Ensure member list is array members = agent['member'] self.check_if_list(members, "Members") @@ -403,6 +432,7 @@ def validate_members(self, agent): self.validate_agent(agent, 'member') def validate_ifi(self, ifis, ifi_value): + """Validate an inverse functional identifier.""" # Validate each IFI accordingly if ifis == 'mbox': self.validate_email(ifi_value) @@ -414,6 +444,7 @@ def validate_ifi(self, ifis, ifi_value): self.validate_account(ifi_value) def validate_account(self, account): + """Validate an account.""" # Ensure incoming account is a dict and check allowed and required # fields self.check_if_dict(account, "Account") @@ -428,6 +459,7 @@ def validate_account(self, account): self.return_error("account name must be a string") def validate_verb(self, verb, stmt_object=None): + """Validate a verb.""" # Ensure incoming verb is a dict and check allowed fields self.check_if_dict(verb, "Verb") self.check_allowed_fields(verb_allowed_fields, verb, "Verb") @@ -453,6 +485,7 @@ def validate_verb(self, verb, stmt_object=None): self.validate_dict_values(list(verb['display'].values()), "verb display") def validate_object(self, stmt_object): + """Validate an object.""" # Ensure incoming object is a dict self.check_if_dict(stmt_object, "Object") @@ -471,6 +504,7 @@ def validate_object(self, stmt_object): "The objectType in the statement's object is not valid - %s" % stmt_object['objectType']) def validate_statementref(self, ref): + """Validate a StatementRef.""" # Ensure incoming StatementRef is a dictionary an check allowed and # required fields self.check_if_dict(ref, "StatementRef") @@ -487,6 +521,7 @@ def validate_statementref(self, ref): self.validate_uuid(ref['id'], 
'StatementRef id') def validate_activity(self, activity): + """Validate an activity.""" # Ensure incoming activity is a dict and check allowed fields self.check_if_dict(activity, "Activity") self.check_allowed_fields( @@ -504,6 +539,7 @@ def validate_activity(self, activity): self.validate_activity_definition(activity['definition']) def validate_activity_definition(self, definition): + """Validate an activity definition.""" # Ensure incoming def is a dict and check allowed fields self.check_if_dict(definition, "Activity definition") @@ -568,6 +604,7 @@ def validate_activity_definition(self, definition): definition['extensions'], 'activity definition extensions') def check_other_interaction_component_fields(self, allowed, definition): + """Check if other interaction component fields are included when they shouldn't be.""" interaction_components = set( ["choices", "scale", "source", "target", "steps"]) keys = set(definition.keys()) @@ -582,6 +619,7 @@ def check_other_interaction_component_fields(self, allowed, definition): # not_allowed = any(x in keys for x in interaction_components if x not in allowed) def validate_interaction_types(self, interactionType, definition): + """Validate interaction types.""" if interactionType == "choice" or interactionType == "sequencing": # If choices included, ensure it is an array and validate it if 'choices' in definition: @@ -623,15 +661,16 @@ def validate_interaction_types(self, interactionType, definition): self.validate_interaction_activities(steps, 'steps') def validate_interaction_activities(self, activities, field): + """Validate interaction activities.""" id_list = [] for act in activities: # Ensure each interaction activity is a dict and check allowed # fields - self.check_if_dict(act, "%s interaction component" % field) + self.check_if_dict(act, f"{field} interaction component") self.check_allowed_fields( - int_act_fields, act, "Activity definition %s" % field) + int_act_fields, act, f"Activity definition {field}") 
self.check_required_fields( - int_act_fields, act, "Activity definition %s" % field) + int_act_fields, act, f"Activity definition {field}") # Ensure id value is string if not isinstance(act['id'], str): @@ -642,17 +681,19 @@ def validate_interaction_activities(self, activities, field): if 'description' in act: # Ensure description is a dict (language map) self.check_if_dict( - act['description'], "%s interaction component description" % field) + act['description'], f"{field} interaction component description") self.validate_lang_map(list(act['description'].keys( - )), "%s interaction component description" % field) + )), f"{field} interaction component description") # Check and make sure all ids being listed are unique dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error( - "Interaction activities shared the same id(s) (%s) which is not allowed" % ' '.join(dups)) + f"Interaction activities shared the same id(s)\ + ({' '.join(dups)}) which is not allowed") def validate_substatement(self, substmt): + """Validate substatement.""" # Ensure incoming substmt is a dict and check allowed and required # fields self.check_if_dict(substmt, "SubStatement") @@ -669,11 +710,15 @@ def validate_substatement(self, substmt): # Reject statements that don't comply with ISO 8601 offsets if timestamp.endswith("-00") or timestamp.endswith("-0000") or timestamp.endswith("-00:00"): self.return_error( - "Timestamp error - Substatement Timestamp Illegal offset (-00, -0000, or -00:00) %s" % timestamp) + f"Timestamp error\ + - Substatement Timestamp Illegal offset (-00, -0000, or -00:00)\ + {timestamp}") - except Exception as e: + except Exception as error: self.return_error( - "Timestamp error - There was an error while parsing the date from %s -- Error: %s" % (timestamp, str(e))) + f"Timestamp error \ + - There was an error while parsing the date from {timestamp} \ + -- Error: {str(error)}") # Can't next substmts in other substmts - if not supplied it is an # 
Activity @@ -698,6 +743,7 @@ def validate_substatement(self, substmt): self.validate_context(substmt['context'], substmt['object']) def validate_result(self, result): + """Validate result.""" # Ensure incoming result is dict and check allowed fields self.check_if_dict(result, "Result") self.check_allowed_fields(result_allowed_fields, result, "Result") @@ -731,6 +777,7 @@ def validate_result(self, result): self.validate_score(result['score']) def validate_score(self, score): + """Validate score.""" # Ensure incoming score is a dict and check allowed fields self.check_if_dict(score, "Score") self.check_allowed_fields(score_allowed_fields, score, "Score") @@ -777,6 +824,7 @@ def validate_score(self, score): "Score scaled value in statement result must be between -1 and 1") def validate_context(self, context, stmt_object): + """Validate context.""" # Ensure incoming context is a dict and check allowed fields self.check_if_dict(context, "Context") self.check_allowed_fields(context_allowed_fields, context, "Context") @@ -843,14 +891,15 @@ def validate_context(self, context, stmt_object): self.validate_extensions(context['extensions'], 'context extensions') def validate_context_activities(self, conacts): + """Validate context activities.""" # Ensure incoming conact is dict self.check_if_dict(conacts, "Context activity") context_activity_types = ['parent', 'grouping', 'category', 'other'] for conact in list(conacts.items()): # Check if conact is a valid type if not conact[0] in context_activity_types: - self.return_error("Context activity type is not valid - %s - must be %s" % - (conact[0], ', '.join(context_activity_types))) + self.return_error(f"Context activity type is not valid \ + - {conact[0]} - must be{', '.join(context_activity_types)}") # Ensure conact is a list or dict if isinstance(conact[1], list): for act in conact[1]: @@ -862,27 +911,76 @@ def validate_context_activities(self, conacts): "contextActivities is not formatted correctly") def 
validate_context_agents(self, conags): - + """Validate context agents.""" self.check_if_list(conags, "Context Agents") - - for sub in conags: - if sub["objectType"] != "contextAgent": - raise ValidationError("[objectType] for Context Agent entries must be 'contextAgent'") - if not isinstance(sub["relevantTypes"], list): - raise ValidationError("[relevantTypes] for Context Agent entries must be a list") - - self.validate_agent(sub["agent"], 'Context agent') + for context_agent in conags: + context_agent_objtype = context_agent.get("objectType", None) + if context_agent_objtype is None: + self.return_error("[objectType]\ + Context Agent entries must have an objectType") + # Validate that objectType is 'contextAgent' + if context_agent_objtype != "contextAgent": + self.return_error(f"[objectType]\ + Context Agent entries must be 'contextAgent',\ + got {context_agent_objtype}") + + context_agent_reltypes = context_agent.get("relevantTypes", None) + if context_agent_reltypes is None: + self.return_error("[relevantTypes]\ + Context Agent entries must have relevantTypes") + + # Validate that relevantTypes is a list + if not isinstance(context_agent_reltypes, list) or not context_agent_reltypes: + self.return_error("[relevantTypes]\ + Context Agent entries must be a non-empty list") + + # Validate that all elements in relevantTypes are valid IRIs + for relevant_type in context_agent_reltypes: + self.validate_iri(relevant_type, "relevantTypes") + + # Validate the agent object + context_agent_agent = context_agent.get("agent", None) + if context_agent_agent is None: + self.return_error("[agent]\ + Context Agent entries must have an agent") + + self.validate_agent(context_agent_agent, 'Context agent') def validate_context_groups(self, congrps): - + """Validate context groups.""" self.check_if_list(congrps, "Context Groups") - - for sub in congrps: - if sub["objectType"] != "contextGroup": - raise ValidationError("[objectType] for Context Group entries must be 'contextGroup'") 
- - if not isinstance(sub["relevantTypes"], list): - raise ValidationError("[relevantTypes] for Context Group entries must be a list") - self.validate_agent(sub["group"], 'Context group') \ No newline at end of file + for context_groups in congrps: + # Validate that objectType is 'contextGroup' + context_groups_objtype = context_groups.get("objectType", None) + if context_groups_objtype is None: + self.return_error("[objectType]\ + Context Group entries must have an objectType") + + if context_groups_objtype != "contextGroup": + self.return_error(f"[objectType]\ + Context Group entries must be 'contextGroup',\ + got {context_groups_objtype}") + + # Validate that relevantTypes is a list + context_groups_reltypes = context_groups.get("relevantTypes", None) + if context_groups_reltypes is None: + self.return_error("[relevantTypes]\ + Context Group entries must have relevantTypes") + + if not isinstance(context_groups_reltypes, list) or not context_groups_reltypes: + self.return_error("[relevantTypes]\ + Context Group entries must be a non-empty list") + + # Validate that all elements in relevantTypes are valid IRIs + for relevant_type in context_groups_reltypes: + self.validate_iri(relevant_type, "relevantTypes") + + context_groups_group = context_groups.get("group", None) + if context_groups_group is None: + self.return_error("[group]\ + Context Group entries must have a group") + + # Validate the group object + self.validate_agent(context_groups_group, 'Context group') From 9ae1fad89e5f6f5aa8b522f5b70a772c59bf66b1 Mon Sep 17 00:00:00 2001 From: TreyH Date: Fri, 26 May 2023 14:06:32 -0400 Subject: [PATCH 082/100] resolving type issue with statement gets --- lrs/utils/req_process.py | 21 ++++++++++++--------- lrs/utils/retrieve_statement.py | 10 +++++----- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 4ef7479e..57a5dfe4 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ 
-149,14 +149,18 @@ def process_complex_get(req_dict): # If attachments=True in req_dict then include the attachment payload and # return different mime type if attachments: - stmt_result, mime_type, content_length = build_response(stmt_result) - resp = HttpResponse(stmt_result, content_type=mime_type, status=200) + stmt_result_str, mime_type, content_length = build_response(stmt_result) + resp = HttpResponse(stmt_result_str, content_type=mime_type, status=200) + # Else attachments are false for the complex get so just dump the # stmt_result else: if isinstance(stmt_result, dict): - stmt_result = json.dumps(stmt_result) - resp = HttpResponse(stmt_result, content_type=mime_type, status=200) + stmt_result_str = json.dumps(stmt_result) + else: + stmt_result_str = stmt_result + + resp = HttpResponse(stmt_result_str, content_type=mime_type, status=200) return resp, content_length, stmt_result @@ -213,14 +217,13 @@ def statements_more_get(req_dict): if attachments: stmt_result, mime_type, content_length = build_response(stmt_result) resp = HttpResponse(stmt_result, content_type=mime_type, status=200) + # If not, just dump the stmt_result else: if isinstance(stmt_result, str): - resp = HttpResponse( - stmt_result, content_type=mime_type, status=200) + resp = HttpResponse(stmt_result, content_type=mime_type, status=200) else: - resp = HttpResponse(json.dumps(stmt_result), - content_type=mime_type, status=200) + resp = HttpResponse(json.dumps(stmt_result), content_type=mime_type, status=200) resp['Content-Length'] = str(content_length) @@ -239,7 +242,6 @@ def statements_get(req_dict): stmt_result = {} mime_type = "application/json" - # If statementId is in req_dict then it is a single get - can still include attachments # or have a different format if 'statementId' in req_dict: @@ -338,6 +340,7 @@ def build_response(stmt_result, single=False): string_list.append("--" + boundary + "--\r\n") mime_type = 'multipart/mixed; boundary=' + '"%s"' % boundary attachment_body = 
"".join([str(s) for s in string_list]) + return attachment_body, mime_type, len(attachment_body) diff --git a/lrs/utils/retrieve_statement.py b/lrs/utils/retrieve_statement.py index 86513f29..ef4e4850 100644 --- a/lrs/utils/retrieve_statement.py +++ b/lrs/utils/retrieve_statement.py @@ -16,7 +16,7 @@ from ..exceptions import NotFound -def complex_get(param_dict, limit, language, stmt_format, attachments): +def complex_get(param_dict, limit, language, stmt_format, attachments) -> dict: # keep track if a filter other than time or sequence is used reffilter = False @@ -113,14 +113,14 @@ def complex_get(param_dict, limit, language, stmt_format, attachments): 'object_agent', 'object_activity', 'object_substatement') \ .prefetch_related('context_ca_parent', 'context_ca_grouping', 'context_ca_category', 'context_ca_other') \ .filter(untilQ & sinceQ & authQ & agentQ & verbQ & activityQ & registrationQ).distinct() + # Workaround since flat doesn't work with UUIDFields st_ids = stmtset.values_list('statement_id') stmtset = [st_id[0] for st_id in st_ids] if reffilter: stmtset = stmtset + stmt_ref_search(stmtset, untilQ, sinceQ) - actual_length = Statement.objects.filter( - Q(statement_id__in=stmtset) & voidQ).distinct().count() + actual_length = Statement.objects.filter(Q(statement_id__in=stmtset) & voidQ).distinct().count() else: actual_length = len(stmtset) @@ -150,7 +150,7 @@ def set_limit(req_limit): return req_limit -def create_under_limit_stmt_result(stmt_set, stored, language, stmt_format): +def create_under_limit_stmt_result(stmt_set, stored, language, stmt_format) -> dict: stmt_result = {} if stmt_set: stmt_set = Statement.objects.select_related('actor', 'verb', 'context_team', 'context_instructor', 'authority', @@ -180,7 +180,7 @@ def create_cache_key(): key = hashlib.md5(bcode).hexdigest() return key -def create_over_limit_stmt_result(stmt_list, stored, limit, language, stmt_format, attachments): +def create_over_limit_stmt_result(stmt_list, stored, limit, 
language, stmt_format, attachments) -> dict: # First time someone queries POST/GET result = {} cache_list = [] From 46d94e820ee2539ad54ca3896cdeed3942f9457a Mon Sep 17 00:00:00 2001 From: TreyH Date: Fri, 26 May 2023 14:16:23 -0400 Subject: [PATCH 083/100] fixing timestamp comparison --- lrs/utils/req_process.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index 57a5dfe4..d19b0ba8 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -215,8 +215,8 @@ def statements_more_get(req_dict): # If there are attachments, include them in the payload if attachments: - stmt_result, mime_type, content_length = build_response(stmt_result) - resp = HttpResponse(stmt_result, content_type=mime_type, status=200) + stmt_result_str, mime_type, content_length = build_response(stmt_result) + resp = HttpResponse(stmt_result_str, content_type=mime_type, status=200) # If not, just dump the stmt_result else: @@ -230,7 +230,7 @@ def statements_more_get(req_dict): latest_stored = datetime.min for stmt in stmt_result["statements"]: stored = datetime.fromisoformat(stmt['stored']) - if stored > latest_stored: + if stored > latest_stored.astimezone(stored.tzinfo): latest_stored = stored resp['Last-Modified'] = latest_stored.strftime("%a, %d-%b-%Y %H:%M:%S %Z") @@ -267,7 +267,7 @@ def statements_get(req_dict): latest_stored = datetime.min for stmt in stmt_result["statements"]: stored = datetime.fromisoformat(stmt['stored']) - if stored > latest_stored: + if stored > latest_stored.astimezone(stored.tzinfo): latest_stored = stored resp['Content-Length'] = str(content_length) From 272bfc257ccf4af7b0efb286845bef526807b743 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Tue, 30 May 2023 14:26:06 -0400 Subject: [PATCH 084/100] resolving timezone disparity for last-modified --- lrs/utils/__init__.py | 12 +++++++++++- lrs/utils/req_process.py | 27 +++++++++------------------ tests/__init__.py | 0 
tests/utils.py | 33 +++++++++++++++++++++++++++++++++ 4 files changed, 53 insertions(+), 19 deletions(-) create mode 100644 tests/__init__.py create mode 100644 tests/utils.py diff --git a/lrs/utils/__init__.py b/lrs/utils/__init__.py index 9387cb52..5e46609d 100644 --- a/lrs/utils/__init__.py +++ b/lrs/utils/__init__.py @@ -7,7 +7,7 @@ from urllib.parse import parse_qs, parse_qsl, urlparse, unquote_plus -from datetime import datetime +from datetime import datetime, timezone from isodate.isodates import parse_date from isodate.isodatetime import parse_datetime from isodate.isoerror import ISO8601Error @@ -166,3 +166,13 @@ def truncate_duration(duration): return unicodedata.normalize("NFKD", duration.replace(seconds_str, str(seconds_truncated) + 'S')) else: return duration + +def last_modified_from_statements(statements: list) -> datetime: + + latest_stored = datetime.min.replace(tzinfo=timezone.utc) + for stmt in statements: + stored = datetime.fromisoformat(stmt['stored']) + if stored.astimezone(timezone.utc) > latest_stored.astimezone(timezone.utc): + latest_stored = stored + + return latest_stored diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index d19b0ba8..e5baf317 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -12,7 +12,7 @@ from django.conf import settings from django.utils.timezone import utc -from . import truncate_duration +from . 
import truncate_duration, last_modified_from_statements from .retrieve_statement import complex_get, parse_more_request from ..exceptions import NotFound from ..models import Statement, Agent, Activity @@ -225,14 +225,9 @@ def statements_more_get(req_dict): else: resp = HttpResponse(json.dumps(stmt_result), content_type=mime_type, status=200) - resp['Content-Length'] = str(content_length) + latest_stored = last_modified_from_statements(stmt_result["statements"]) - latest_stored = datetime.min - for stmt in stmt_result["statements"]: - stored = datetime.fromisoformat(stmt['stored']) - if stored > latest_stored.astimezone(stored.tzinfo): - latest_stored = stored - + resp['Content-Length'] = str(content_length) resp['Last-Modified'] = latest_stored.strftime("%a, %d-%b-%Y %H:%M:%S %Z") return resp @@ -249,13 +244,13 @@ def statements_get(req_dict): stmt_dict = st.to_dict(ret_format=req_dict['params']['format']) if req_dict['params']['attachments']: - stmt_result, mime_type, content_length = build_response(stmt_dict, True) - resp = HttpResponse(stmt_result, content_type=mime_type, status=200) + response_body, mime_type, content_length = build_response(stmt_dict, True) + resp = HttpResponse(response_body, content_type=mime_type, status=200) else: - stmt_result = json.dumps(stmt_dict, sort_keys=False) - resp = HttpResponse(stmt_result, content_type=mime_type, status=200) + response_body = json.dumps(stmt_dict, sort_keys=False) + resp = HttpResponse(response_body, content_type=mime_type, status=200) - content_length = len(stmt_result) + content_length = len(response_body) resp['Content-Length'] = str(content_length) resp['Last-Modified'] = datetime.fromisoformat(stmt_dict['stored']).strftime("%a, %d-%b-%Y %H:%M:%S %Z") @@ -264,11 +259,7 @@ def statements_get(req_dict): else: resp, content_length, stmt_result = process_complex_get(req_dict) - latest_stored = datetime.min - for stmt in stmt_result["statements"]: - stored = datetime.fromisoformat(stmt['stored']) - if stored > 
latest_stored.astimezone(stored.tzinfo): - latest_stored = stored + latest_stored = last_modified_from_statements(stmt_result["statements"]) resp['Content-Length'] = str(content_length) resp['Last-Modified'] = latest_stored.strftime("%a, %d-%b-%Y %H:%M:%S %Z") diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 00000000..e6fa1c94 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,33 @@ +import unittest + +from datetime import datetime, timezone + +def last_modified_from_statements(statements: list) -> datetime: + + latest_stored = datetime.min.replace(tzinfo=timezone.utc) + for stmt in statements: + stored = datetime.fromisoformat(stmt['stored']) + if stored.astimezone(timezone.utc) > latest_stored.astimezone(timezone.utc): + latest_stored = stored + + return latest_stored + +class TestUtilityMethods(unittest.TestCase): + + def test_last_modified_helper(self): + + expected_time = datetime.utcnow() + expected_time_str = expected_time.isoformat() + + statements = [ + { "stored": expected_time_str }, + { "stored": expected_time_str } + ] + + last_modified = last_modified_from_statements(statements) + + self.assertTrue(expected_time == last_modified) + + +if __name__=="__main__": + unittest.main() \ No newline at end of file From a7715335ae9cf11f70edef27ad9fab4474e26f0e Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 15:34:59 -0500 Subject: [PATCH 085/100] Adding CICD Pipeline --- .github/workflow/cicd.yml | 51 +++++++++++++++++++ .../{deployment.yml => deployment.yml_old} | 0 2 files changed, 51 insertions(+) create mode 100644 .github/workflow/cicd.yml rename .github/workflow/{deployment.yml => deployment.yml_old} (100%) diff --git a/.github/workflow/cicd.yml b/.github/workflow/cicd.yml new file mode 100644 index 00000000..ce74572f --- /dev/null +++ b/.github/workflow/cicd.yml @@ -0,0 +1,51 @@ +name: Deploy ADL LRS Application (Python) +'on': 
+ push: + branches: + - python3-xapi-2.0 +jobs: + build-and-test: + runs-on: python:3.0 + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Build and test + run: | + pip3 install -r requirements.txt + python3 -m unittest discover ./tests + + deploy-dev: + runs-on: ubuntu-latest + environment: dev + needs: build-and-test + steps: + - name: Configure SSH + run: | + echo "Deployed on dev successfully" + # mkdir -p ~/.ssh + # echo "${{ secrets.ADL_WEB_SSH_KEY }}" > ~/.ssh/id_rsa + # chmod 600 ~/.ssh/id_rsa + # ssh-keyscan lrrs.ci.adlnet.gov >> ~/.ssh/known_hosts + + # - name: Deploy code on Dev + # run: | + # ssh ubuntu@lrrs.ci.adlnet.gov "cd /home/ubuntu/lrs-test-frontend && git pull origin ${GITHUB_REF#refs/heads/} && sudo docker-compose up -d --build" + + # deploy-staging: + # runs-on: ubuntu-latest + # environment: staging + # needs: + # - build-and-test + # - deploy-dev + # steps: + # - name: Configure SSH + # run: | + # mkdir -p ~/.ssh + # echo "${{ secrets.ADL_STAGIGN_SSH_KEY }}" > ~/.ssh/id_rsa + # chmod 600 ~/.ssh/id_rsa + # ssh-keyscan lrstest.staging.adlnet.gov >> ~/.ssh/known_hosts + + # - name: Deploy code on Staging + # run: | + # ssh ubuntu@lrstest.staging.adlnet.gov "cd /home/ubuntu/lrs-test-frontend && git pull origin ${GITHUB_REF#refs/heads/} && sudo docker-compose up -d --build" \ No newline at end of file diff --git a/.github/workflow/deployment.yml b/.github/workflow/deployment.yml_old similarity index 100% rename from .github/workflow/deployment.yml rename to .github/workflow/deployment.yml_old From 09d0bfba634d6f9783b098728ac9a64e7fa5096c Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 15:36:17 -0500 Subject: [PATCH 086/100] Updating directory name --- .github/{workflow => workflows}/cicd.yml | 0 .github/{workflow => workflows}/deployment.yml_old | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename .github/{workflow => workflows}/cicd.yml (100%) rename .github/{workflow => workflows}/deployment.yml_old 
(100%) diff --git a/.github/workflow/cicd.yml b/.github/workflows/cicd.yml similarity index 100% rename from .github/workflow/cicd.yml rename to .github/workflows/cicd.yml diff --git a/.github/workflow/deployment.yml_old b/.github/workflows/deployment.yml_old similarity index 100% rename from .github/workflow/deployment.yml_old rename to .github/workflows/deployment.yml_old From 63cd6a04bdc908d55026a672b720a00421614abe Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 15:44:59 -0500 Subject: [PATCH 087/100] Adding Build/Test Pipeline Steps --- .github/workflows/cicd.yml | 28 +++++++++++++++++++++- .github/workflows/python-pipelien.yml_ | 32 ++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/python-pipelien.yml_ diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index ce74572f..1ef5a845 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -5,11 +5,37 @@ name: Deploy ADL LRS Application (Python) - python3-xapi-2.0 jobs: build-and-test: - runs-on: python:3.0 + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + steps: - name: Checkout code uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + # - name: Install dependencies + # run: | + # python -m pip install --upgrade pip + # pip install ruff pytest + # if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + + # - name: Lint with ruff + # run: | + # # stop the build if there are Python syntax errors or undefined names + # ruff --format=github --select=E9,F63,F7,F82 --target-version=py37 . + # # default set of ruff rules with GitHub Annotations + # ruff --format=github --target-version=py37 . 
+ + # - name: Test with pytest + # run: | + # pytest + - name: Build and test run: | pip3 install -r requirements.txt diff --git a/.github/workflows/python-pipelien.yml_ b/.github/workflows/python-pipelien.yml_ new file mode 100644 index 00000000..7cb4861a --- /dev/null +++ b/.github/workflows/python-pipelien.yml_ @@ -0,0 +1,32 @@ +name: Python package + +on: [push] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install ruff pytest + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Lint with ruff + run: | + # stop the build if there are Python syntax errors or undefined names + ruff --format=github --select=E9,F63,F7,F82 --target-version=py37 . + # default set of ruff rules with GitHub Annotations + ruff --format=github --target-version=py37 . 
+ - name: Test with pytest + run: | + pytest \ No newline at end of file From 558ec34b1a7b202f96a665ffa55f0b3fccfc58df Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 16:43:43 -0500 Subject: [PATCH 088/100] Adding Deploy Steps --- .github/workflows/cicd.yml | 54 ++++++++++++++++++++------------------ script.sh | 2 +- 2 files changed, 29 insertions(+), 27 deletions(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 1ef5a845..c16f2381 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -41,37 +41,39 @@ jobs: pip3 install -r requirements.txt python3 -m unittest discover ./tests - deploy-dev: - runs-on: ubuntu-latest - environment: dev - needs: build-and-test - steps: - - name: Configure SSH - run: | - echo "Deployed on dev successfully" - # mkdir -p ~/.ssh - # echo "${{ secrets.ADL_WEB_SSH_KEY }}" > ~/.ssh/id_rsa - # chmod 600 ~/.ssh/id_rsa - # ssh-keyscan lrrs.ci.adlnet.gov >> ~/.ssh/known_hosts - - # - name: Deploy code on Dev - # run: | - # ssh ubuntu@lrrs.ci.adlnet.gov "cd /home/ubuntu/lrs-test-frontend && git pull origin ${GITHUB_REF#refs/heads/} && sudo docker-compose up -d --build" - - # deploy-staging: + # deploy-dev: # runs-on: ubuntu-latest - # environment: staging - # needs: - # - build-and-test - # - deploy-dev + # environment: dev + # needs: build-and-test # steps: # - name: Configure SSH # run: | # mkdir -p ~/.ssh - # echo "${{ secrets.ADL_STAGIGN_SSH_KEY }}" > ~/.ssh/id_rsa + # echo "${{ secrets.ADLNET_DEV_SSH_KEY }}" > ~/.ssh/id_rsa # chmod 600 ~/.ssh/id_rsa - # ssh-keyscan lrstest.staging.adlnet.gov >> ~/.ssh/known_hosts + # ssh-keyscan "${{ secrets.ADLNET_DEV_HOST }} >> ~/.ssh/known_hosts - # - name: Deploy code on Staging + # - name: Deploy code on Dev # run: | - # ssh ubuntu@lrstest.staging.adlnet.gov "cd /home/ubuntu/lrs-test-frontend && git pull origin ${GITHUB_REF#refs/heads/} && sudo docker-compose up -d --build" \ No newline at end of file + # ssh ${{ 
secrets.ADLNET_DEV_HOST_USER }}@${{ secrets.ADLNET_DEV_HOST }} "cd ${{ secrets.ADLNET_DEV_PROJECT_DIR }} && git pull origin ${GITHUB_REF#refs/heads/} && sudo docker-compose up -d --build" + + deploy-staging: + runs-on: ubuntu-latest + environment: staging + needs: + - build-and-test + #- deploy-dev + steps: + - name: Configure SSH + run: | + mkdir -p ~/.ssh + echo "${{ secrets.ADLNET_STAGING_SSH_KEY }}" > ~/.ssh/id_rsa + chmod 600 ~/.ssh/id_rsa + ssh-keyscan "${{ secrets.ADLNET_STAGING_HOST }} >> ~/.ssh/known_hosts + + - name: Deploy code on Staging + run: | + ssh ${{ secrets.ADLNET_STAGING_HOST_USER }}@${{ secrets.ADLNET_STAGING_HOST }} " + cd ${{ secrets.ADLNET_STAGING_PROJECT_DIR }} && \ + git pull origin ${GITHUB_REF#refs/heads/} && \ + echo 'deplooyed successfully on server'" \ No newline at end of file diff --git a/script.sh b/script.sh index 995b2fd7..1715eb33 100644 --- a/script.sh +++ b/script.sh @@ -1,7 +1,7 @@ # if ADL_LRS directory does exist then, pull the latest code or else clone the repo if [ -d "ADL_LRS" ]; then echo "Directory ADL_LRS exists." - cd ADL_TEST + cd ADL_LRS git pull cd .. 
else From 6e61715615260f05f402eb076b93e9a63cf27652 Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 16:45:39 -0500 Subject: [PATCH 089/100] Adding Deploy Steps --- .github/workflows/cicd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index c16f2381..4422159e 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -69,7 +69,7 @@ jobs: mkdir -p ~/.ssh echo "${{ secrets.ADLNET_STAGING_SSH_KEY }}" > ~/.ssh/id_rsa chmod 600 ~/.ssh/id_rsa - ssh-keyscan "${{ secrets.ADLNET_STAGING_HOST }} >> ~/.ssh/known_hosts + ssh-keyscan "${{ secrets.ADLNET_STAGING_HOST }}" >> ~/.ssh/known_hosts - name: Deploy code on Staging run: | From ed6ddcd61973b5bed9d740a5ed946832a3ef96c9 Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 16:53:22 -0500 Subject: [PATCH 090/100] Adding Deploy Steps --- .github/workflows/cicd.yml | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 4422159e..1271af4d 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -8,7 +8,8 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.7"] + #python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - name: Checkout code @@ -75,5 +76,23 @@ jobs: run: | ssh ${{ secrets.ADLNET_STAGING_HOST_USER }}@${{ secrets.ADLNET_STAGING_HOST }} " cd ${{ secrets.ADLNET_STAGING_PROJECT_DIR }} && \ + + echo 'Pulling Latest Code from Repository...' && \ git pull origin ${GITHUB_REF#refs/heads/} && \ - echo 'deplooyed successfully on server'" \ No newline at end of file + + echo 'Copying Required Config files...' 
&& \ + sudo cp /home/ubuntu/workflow/settings.ini /home/ubuntu/ADL_LRS/settings.ini && \ + sudo cp /home/ubuntu/workflow/docker/settings.ini /home/ubuntu/ADL_LRS/docker/lrs/settings.ini && \ + sudo cp /home/ubuntu/workflow/.env /home/ubuntu/ADL_LRS/.env && \ + + echo 'Changing ownership of docker directory...' && \ + sudo usermod -aG docker $USER && \ + + echo 'Copying installing ssl certificate...' && \ + sudo ./init-ssl.sh localhost && \ + + echo 'Rebuilding Docker Containers..' && \ + sudo docker-compose stop && \ + sudo docker-compose build --no-cache && \ + docker-compose up -d && \ + echo 'deployed successfully on server'" \ No newline at end of file From 842158b3170ba9caedc50bd5c3318c0e2401016c Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 16:56:02 -0500 Subject: [PATCH 091/100] Validating Deploy Pipeline --- .github/workflows/cicd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 1271af4d..8527742d 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -82,7 +82,7 @@ jobs: echo 'Copying Required Config files...' && \ sudo cp /home/ubuntu/workflow/settings.ini /home/ubuntu/ADL_LRS/settings.ini && \ - sudo cp /home/ubuntu/workflow/docker/settings.ini /home/ubuntu/ADL_LRS/docker/lrs/settings.ini && \ + #sudo cp /home/ubuntu/workflow/docker/settings.ini /home/ubuntu/ADL_LRS/docker/lrs/settings.ini && \ sudo cp /home/ubuntu/workflow/.env /home/ubuntu/ADL_LRS/.env && \ echo 'Changing ownership of docker directory...'
&& \ From 75c07ec82e68cd825edc58751eea32b9c1f1ac9a Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 16:57:42 -0500 Subject: [PATCH 092/100] Validating Deploy Pipeline --- .github/workflows/cicd.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 8527742d..c3b4c2bd 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -85,9 +85,6 @@ jobs: #sudo cp /home/ubuntu/workflow/docker/settings.ini /home/ubuntu/ADL_LRS/docker/lrs/settings.ini && \ sudo cp /home/ubuntu/workflow/.env /home/ubuntu/ADL_LRS/.env && \ - echo 'Changing ownership of docker directory...' && \ - sudo usermod -aG docker $USER && \ - echo 'Copying installing ssl certificate...' && \ sudo ./init-ssl.sh localhost && \ From f927ab816f20751656d20d49a457aad950c6024e Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 17:04:30 -0500 Subject: [PATCH 093/100] Validating Deploy Pipeline --- .github/workflows/cicd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index c3b4c2bd..dac5f887 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -91,5 +91,5 @@ jobs: echo 'Rebuilding Docker Containers..' && \ sudo docker-compose stop && \ sudo docker-compose build --no-cache && \ - docker-compose up -d && \ + sudo docker-compose up -d && \ echo 'deployed successfully on server'" \ No newline at end of file From a845c71e41727c12ee1f426a0cbd1ee9b5009e8a Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 17:08:29 -0500 Subject: [PATCH 094/100] Validating the pipeline with a small change on homepage.
--- .github/workflows/deployment.yml_old | 19 --------------- .github/workflows/python-pipelien.yml_ | 32 -------------------------- adl_lrs/templates/home.html | 2 +- 3 files changed, 1 insertion(+), 52 deletions(-) delete mode 100644 .github/workflows/deployment.yml_old delete mode 100644 .github/workflows/python-pipelien.yml_ diff --git a/.github/workflows/deployment.yml_old b/.github/workflows/deployment.yml_old deleted file mode 100644 index b7872976..00000000 --- a/.github/workflows/deployment.yml_old +++ /dev/null @@ -1,19 +0,0 @@ -name: Deploy -on: - push: - branches: - - python3-xapi-2.0 - -jobs: - deploy: - name: 🚀 Deploy - runs-on: ubuntu-latest - steps: - - name: executing remote ssh commands using password - uses: appleboy/ssh-action@v0.1.10 - with: - host: ${{ secrets.HOST }} - username: ${{ secrets.USERNAME }} - key: ${{ secrets.KEY }} - script: | - sudo /home/ubuntu/workflow/script.sh diff --git a/.github/workflows/python-pipelien.yml_ b/.github/workflows/python-pipelien.yml_ deleted file mode 100644 index 7cb4861a..00000000 --- a/.github/workflows/python-pipelien.yml_ +++ /dev/null @@ -1,32 +0,0 @@ -name: Python package - -on: [push] - -jobs: - build: - - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install ruff pytest - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Lint with ruff - run: | - # stop the build if there are Python syntax errors or undefined names - ruff --format=github --select=E9,F63,F7,F82 --target-version=py37 . - # default set of ruff rules with GitHub Annotations - ruff --format=github --target-version=py37 . 
- - name: Test with pytest - run: | - pytest \ No newline at end of file diff --git a/adl_lrs/templates/home.html b/adl_lrs/templates/home.html index 30937d6a..71edb606 100644 --- a/adl_lrs/templates/home.html +++ b/adl_lrs/templates/home.html @@ -20,7 +20,7 @@ {% block content %}
-

ADL Learning Record Store

+

ADL Learning Record Store.

Welcome to the ADL LRS. This is a reference implementation of an LRS as described in the Experience API Specification From 19c8af7fad9dd9a5d838bac3fa86c4ff906fe40c Mon Sep 17 00:00:00 2001 From: Godloveet Date: Thu, 8 Jun 2023 17:12:45 -0500 Subject: [PATCH 095/100] Reverting the change from Homepage Staging. --- adl_lrs/templates/home.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/adl_lrs/templates/home.html b/adl_lrs/templates/home.html index 71edb606..30937d6a 100644 --- a/adl_lrs/templates/home.html +++ b/adl_lrs/templates/home.html @@ -20,7 +20,7 @@ {% block content %}

-

ADL Learning Record Store.

+

ADL Learning Record Store

Welcome to the ADL LRS. This is a reference implementation of an LRS as described in the Experience API Specification From 4cb9fc73e357716c8e1ea3533437ae5285ed0cae Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Fri, 9 Jun 2023 12:22:01 -0400 Subject: [PATCH 096/100] updating test structure --- tests/utils.py => lrs/tests/test_utils.py | 17 ++++------- {tests => lrs/tests_old}/__init__.py | 0 lrs/{tests => tests_old}/test_Activity.py | 0 .../test_ActivityProfile.py | 0 .../test_ActivityState.py | 0 lrs/{tests => tests_old}/test_Agent.py | 0 lrs/{tests => tests_old}/test_AgentManager.py | 0 lrs/{tests => tests_old}/test_AgentProfile.py | 0 .../test_AttachmentAndSigned.py | 0 lrs/{tests => tests_old}/test_Auth.py | 0 lrs/{tests => tests_old}/test_OAuth.py | 0 lrs/{tests => tests_old}/test_Statement.py | 0 .../test_StatementFilter.py | 0 .../test_StatementManager.py | 0 .../test_StatementMore.py | 0 lrs/utils/__init__.py | 27 ----------------- lrs/utils/req_parse.py | 3 +- lrs/utils/req_process.py | 2 +- lrs/utils/time.py | 30 +++++++++++++++++++ test-lrs.sh | 4 +++ 20 files changed, 42 insertions(+), 41 deletions(-) rename tests/utils.py => lrs/tests/test_utils.py (53%) rename {tests => lrs/tests_old}/__init__.py (100%) rename lrs/{tests => tests_old}/test_Activity.py (100%) rename lrs/{tests => tests_old}/test_ActivityProfile.py (100%) rename lrs/{tests => tests_old}/test_ActivityState.py (100%) rename lrs/{tests => tests_old}/test_Agent.py (100%) rename lrs/{tests => tests_old}/test_AgentManager.py (100%) rename lrs/{tests => tests_old}/test_AgentProfile.py (100%) rename lrs/{tests => tests_old}/test_AttachmentAndSigned.py (100%) rename lrs/{tests => tests_old}/test_Auth.py (100%) rename lrs/{tests => tests_old}/test_OAuth.py (100%) rename lrs/{tests => tests_old}/test_Statement.py (100%) rename lrs/{tests => tests_old}/test_StatementFilter.py (100%) rename lrs/{tests => tests_old}/test_StatementManager.py (100%) rename lrs/{tests => 
tests_old}/test_StatementMore.py (100%) create mode 100644 lrs/utils/time.py create mode 100755 test-lrs.sh diff --git a/tests/utils.py b/lrs/tests/test_utils.py similarity index 53% rename from tests/utils.py rename to lrs/tests/test_utils.py index e6fa1c94..2a62037a 100644 --- a/tests/utils.py +++ b/lrs/tests/test_utils.py @@ -1,16 +1,10 @@ import unittest +import sys -from datetime import datetime, timezone +sys.path.append("../..") -def last_modified_from_statements(statements: list) -> datetime: - - latest_stored = datetime.min.replace(tzinfo=timezone.utc) - for stmt in statements: - stored = datetime.fromisoformat(stmt['stored']) - if stored.astimezone(timezone.utc) > latest_stored.astimezone(timezone.utc): - latest_stored = stored - - return latest_stored +from datetime import datetime +from ..utils.time import last_modified_from_statements class TestUtilityMethods(unittest.TestCase): @@ -28,6 +22,5 @@ def test_last_modified_helper(self): self.assertTrue(expected_time == last_modified) - if __name__=="__main__": - unittest.main() \ No newline at end of file + unittest.main() diff --git a/tests/__init__.py b/lrs/tests_old/__init__.py similarity index 100% rename from tests/__init__.py rename to lrs/tests_old/__init__.py diff --git a/lrs/tests/test_Activity.py b/lrs/tests_old/test_Activity.py similarity index 100% rename from lrs/tests/test_Activity.py rename to lrs/tests_old/test_Activity.py diff --git a/lrs/tests/test_ActivityProfile.py b/lrs/tests_old/test_ActivityProfile.py similarity index 100% rename from lrs/tests/test_ActivityProfile.py rename to lrs/tests_old/test_ActivityProfile.py diff --git a/lrs/tests/test_ActivityState.py b/lrs/tests_old/test_ActivityState.py similarity index 100% rename from lrs/tests/test_ActivityState.py rename to lrs/tests_old/test_ActivityState.py diff --git a/lrs/tests/test_Agent.py b/lrs/tests_old/test_Agent.py similarity index 100% rename from lrs/tests/test_Agent.py rename to lrs/tests_old/test_Agent.py diff --git 
a/lrs/tests/test_AgentManager.py b/lrs/tests_old/test_AgentManager.py similarity index 100% rename from lrs/tests/test_AgentManager.py rename to lrs/tests_old/test_AgentManager.py diff --git a/lrs/tests/test_AgentProfile.py b/lrs/tests_old/test_AgentProfile.py similarity index 100% rename from lrs/tests/test_AgentProfile.py rename to lrs/tests_old/test_AgentProfile.py diff --git a/lrs/tests/test_AttachmentAndSigned.py b/lrs/tests_old/test_AttachmentAndSigned.py similarity index 100% rename from lrs/tests/test_AttachmentAndSigned.py rename to lrs/tests_old/test_AttachmentAndSigned.py diff --git a/lrs/tests/test_Auth.py b/lrs/tests_old/test_Auth.py similarity index 100% rename from lrs/tests/test_Auth.py rename to lrs/tests_old/test_Auth.py diff --git a/lrs/tests/test_OAuth.py b/lrs/tests_old/test_OAuth.py similarity index 100% rename from lrs/tests/test_OAuth.py rename to lrs/tests_old/test_OAuth.py diff --git a/lrs/tests/test_Statement.py b/lrs/tests_old/test_Statement.py similarity index 100% rename from lrs/tests/test_Statement.py rename to lrs/tests_old/test_Statement.py diff --git a/lrs/tests/test_StatementFilter.py b/lrs/tests_old/test_StatementFilter.py similarity index 100% rename from lrs/tests/test_StatementFilter.py rename to lrs/tests_old/test_StatementFilter.py diff --git a/lrs/tests/test_StatementManager.py b/lrs/tests_old/test_StatementManager.py similarity index 100% rename from lrs/tests/test_StatementManager.py rename to lrs/tests_old/test_StatementManager.py diff --git a/lrs/tests/test_StatementMore.py b/lrs/tests_old/test_StatementMore.py similarity index 100% rename from lrs/tests/test_StatementMore.py rename to lrs/tests_old/test_StatementMore.py diff --git a/lrs/utils/__init__.py b/lrs/utils/__init__.py index 5e46609d..39c3ae0b 100644 --- a/lrs/utils/__init__.py +++ b/lrs/utils/__init__.py @@ -149,30 +149,3 @@ def get_lang(langdict, lang): pass first = next(iter(langdict.items())) return {first[0]: first[1]} - -def truncate_duration(duration): 
- sec_split = re.findall(r"\d+(?:\.\d+)?S", duration) - if sec_split: - seconds_str = sec_split[0] - seconds = float(seconds_str.replace('S', '')) - - if not seconds.is_integer(): - ### xAPI 2.0: Truncation required for comparison, not rounding etc. - # sec_trunc = round(sec_as_num, 2) - seconds_truncated = math.floor(seconds * 100) / 100 - else: - seconds_truncated = int(seconds) - - return unicodedata.normalize("NFKD", duration.replace(seconds_str, str(seconds_truncated) + 'S')) - else: - return duration - -def last_modified_from_statements(statements: list) -> datetime: - - latest_stored = datetime.min.replace(tzinfo=timezone.utc) - for stmt in statements: - stored = datetime.fromisoformat(stmt['stored']) - if stored.astimezone(timezone.utc) > latest_stored.astimezone(timezone.utc): - latest_stored = stored - - return latest_stored diff --git a/lrs/utils/req_parse.py b/lrs/utils/req_parse.py index 8a145d8e..7853db9a 100644 --- a/lrs/utils/req_parse.py +++ b/lrs/utils/req_parse.py @@ -14,7 +14,8 @@ from django.urls import reverse from django.http import QueryDict -from . import convert_to_datatype, convert_post_body_to_dict, validate_timestamp, truncate_duration +from . import convert_to_datatype, convert_post_body_to_dict, validate_timestamp +from .time import truncate_duration from .etag import get_etag_info from ..exceptions import OauthUnauthorized, OauthBadRequest, ParamError, BadRequest diff --git a/lrs/utils/req_process.py b/lrs/utils/req_process.py index e5baf317..3db2e66f 100644 --- a/lrs/utils/req_process.py +++ b/lrs/utils/req_process.py @@ -12,7 +12,7 @@ from django.conf import settings from django.utils.timezone import utc -from . 
import truncate_duration, last_modified_from_statements +from .time import truncate_duration, last_modified_from_statements from .retrieve_statement import complex_get, parse_more_request from ..exceptions import NotFound from ..models import Statement, Agent, Activity diff --git a/lrs/utils/time.py b/lrs/utils/time.py new file mode 100644 index 00000000..5a5a6755 --- /dev/null +++ b/lrs/utils/time.py @@ -0,0 +1,30 @@ +import re, math, unicodedata + +from datetime import datetime, timezone + +def truncate_duration(duration): + sec_split = re.findall(r"\d+(?:\.\d+)?S", duration) + if sec_split: + seconds_str = sec_split[0] + seconds = float(seconds_str.replace('S', '')) + + if not seconds.is_integer(): + ### xAPI 2.0: Truncation required for comparison, not rounding etc. + # sec_trunc = round(sec_as_num, 2) + seconds_truncated = math.floor(seconds * 100) / 100 + else: + seconds_truncated = int(seconds) + + return unicodedata.normalize("NFKD", duration.replace(seconds_str, str(seconds_truncated) + 'S')) + else: + return duration + +def last_modified_from_statements(statements: list) -> datetime: + + latest_stored = datetime.min.replace(tzinfo=timezone.utc) + for stmt in statements: + stored = datetime.fromisoformat(stmt['stored']) + if stored.astimezone(timezone.utc) > latest_stored.astimezone(timezone.utc): + latest_stored = stored + + return latest_stored \ No newline at end of file diff --git a/test-lrs.sh b/test-lrs.sh new file mode 100755 index 00000000..13adca47 --- /dev/null +++ b/test-lrs.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +cp settings.ini.example adl_lrs/settings.ini +python3 -m unittest discover -s ./lrs/tests -p "test_*.py" -t . 
From 7bb90e27e33b1d30e98ca3ae1ee6effc047722f1 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Fri, 9 Jun 2023 12:22:52 -0400 Subject: [PATCH 097/100] updating gitlab test command --- .github/workflows/cicd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index dac5f887..714fb1d6 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -40,7 +40,7 @@ jobs: - name: Build and test run: | pip3 install -r requirements.txt - python3 -m unittest discover ./tests + ./test-lrs.sh # deploy-dev: # runs-on: ubuntu-latest From 47f377aadf590434908de15819de050dcd793b78 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Fri, 9 Jun 2023 12:29:43 -0400 Subject: [PATCH 098/100] updating celery version --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 5d3f3874..deef4c34 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ Django==3.2.19 amqp==5.0.9 bcoding==1.5 -celery==5.2.3 +celery==5.3 django-cors-headers==3.10.1 django-defender==0.9.6 django-jsonify==0.3.0 From bd02fec0d876639e574d937614d36a448193bbc9 Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Fri, 9 Jun 2023 12:30:48 -0400 Subject: [PATCH 099/100] skipping 5.3 celery --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index deef4c34..b5739986 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ Django==3.2.19 amqp==5.0.9 bcoding==1.5 -celery==5.3 +celery==5.2.7 django-cors-headers==3.10.1 django-defender==0.9.6 django-jsonify==0.3.0 From 7d891105320fb24eedaf6e9e40c87dd18e74a2eb Mon Sep 17 00:00:00 2001 From: Trey Hayden Date: Fri, 9 Jun 2023 12:39:29 -0400 Subject: [PATCH 100/100] adding manual importlib --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index b5739986..6ea1cf5c 100644 --- 
a/requirements.txt +++ b/requirements.txt @@ -6,6 +6,7 @@ django-cors-headers==3.10.1 django-defender==0.9.6 django-jsonify==0.3.0 django-recaptcha==3.0.0 +importlib-metadata==4.12.0 isodate==0.6.1 oauth2==1.9.0.post1 psycopg2-binary==2.9.3