From 4037a31640bce9881be74d832cc4908c744863b9 Mon Sep 17 00:00:00 2001 From: Pushyami Gundala Date: Mon, 6 Nov 2023 15:09:30 -0500 Subject: [PATCH 1/9] proposed version updates --- requirements.txt | 65 ++++++++++++++++++++++++------------------------ 1 file changed, 33 insertions(+), 32 deletions(-) diff --git a/requirements.txt b/requirements.txt index 71c55ad3..ba31f46d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,42 +1,43 @@ -gunicorn==20.1.0 +gunicorn==20.1.0 #21.2.0 -Django==3.2.20 -whitenoise==5.3.0 +Django==3.2.20 #4.2.7 +whitenoise==5.3.0 #6.6.0 -# No update since 2018 -django-cron==0.6.0 -django-watchman==1.2.0 -django-su==0.9.0 -django-mysql==3.12.0 +django-cron==0.6.0 #deprecated + +django-watchman==1.2.0 #1.3.0 +django-su==0.9.0 #1.0.0 +django-mysql==3.12.0 #4.12.0 # The alternative to this is redis, but we don't use redis yet -django-constance[database]==2.8.0 -django-webpack-loader==1.5.0 -django-csp==3.7 -django-import-export==2.6.0 -django-admin-rangefilter==0.8.1 +django-constance[database]==2.8.0 #3.1.0 +django-webpack-loader==1.5.0 #2.0.1 + +django-csp==3.7 #no updates +django-import-export==2.6.0 #3.3.1 +django-admin-rangefilter==0.8.1 #0.11.2 # Django/Python fontawesome library supporting CSS in UI -fontawesomefree==6.1.1 +fontawesomefree==6.1.1 #6.4.2 # graphql -graphene-django==2.15.0 -django-filter==2.4.0 +graphene-django==2.15.0 #3.1.5 +django-filter==2.4.0 #23.3(tag naming convention is changed ) # object-level permissions -rules==3.0 +rules==3.0 #3.3 # These should be okay to update minors -numpy==1.23.3 -pandas==1.4.4 -pangres==4.1.2 - -SQLAlchemy==1.4.22 -psycopg2==2.9.1 -mysqlclient==2.0.3 -google-cloud-bigquery[pandas]==3.3.2 - -debugpy==1.4.1 -jsonschema==3.2.0 -pinax-eventlog==5.1.1 -pycryptodome==3.10.1 -PyLTI1p3==1.12.1 -hjson==3.0.1 +numpy==1.23.3 #1.26.1 +pandas==1.4.4 #2.1.2 +pangres==4.1.2 #4.2.1 + +SQLAlchemy==1.4.22 #2.0.23 +psycopg2==2.9.1 #2.9.9 +mysqlclient==2.0.3 #2.2.0 
+google-cloud-bigquery[pandas]==3.3.2 #3.13.0 + +debugpy==1.4.1 #1.8.0 +jsonschema==3.2.0 #4.19.2 +pinax-eventlog==5.1.1 #no updates +pycryptodome==3.10.1 #3.19.0 +PyLTI1p3==1.12.1 #2.0.0 (last update Nov 2022) +hjson==3.0.1 #3.1.0 From 3a86b4b4cae1a1db09e3ba8335d311f5a52349eb Mon Sep 17 00:00:00 2001 From: Pushyami Gundala Date: Fri, 10 Nov 2023 14:45:01 -0500 Subject: [PATCH 2/9] Initial Test --- Dockerfile | 2 +- config/cron_udp.hjson | 20 +++++----- dashboard/admin.py | 8 ++-- dashboard/common/db_util.py | 2 +- dashboard/cron.py | 52 +++++++++++++------------ dashboard/management/commands/course.py | 6 +-- dashboard/management/commands/term.py | 6 +-- dashboard/models.py | 4 +- dashboard/settings.py | 7 +++- dashboard/urls.py | 2 +- requirements.txt | 52 ++++++++++++------------- 11 files changed, 85 insertions(+), 76 deletions(-) diff --git a/Dockerfile b/Dockerfile index e1f43959..f9ccad55 100644 --- a/Dockerfile +++ b/Dockerfile @@ -39,7 +39,7 @@ WORKDIR /code COPY requirements.txt . 
RUN apt-get update && \ apt-get install -y --no-install-recommends \ - build-essential curl apt-transport-https libpq-dev netcat-traditional jq python3-dev xmlsec1 cron git && \ + build-essential curl apt-transport-https libpq-dev netcat-traditional default-libmysqlclient-dev pkg-config jq python3-dev xmlsec1 cron git && \ apt-get upgrade -y # Install MariaDB from the mariadb repository rather than using Debians diff --git a/config/cron_udp.hjson b/config/cron_udp.hjson index f63cae7c..45fe42dd 100644 --- a/config/cron_udp.hjson +++ b/config/cron_udp.hjson @@ -41,8 +41,8 @@ left join entity.course_grade cg on cse.course_section_id = cg.course_section_id and cse.person_id = cg.person_id where - co.lms_int_id in %(course_ids)s - and cse.role in ('Student', 'TeachingAssistant', 'Teacher') + co.lms_int_id = ANY(%(course_ids)s) + and cse.role = ANY(ARRAY['Student', 'Teacher', 'TeachingAssistant']::text[]) and cse.role_status = 'Enrolled' and cse.enrollment_status = 'Active' order by user_id @@ -56,14 +56,14 @@ la.visibility = 'everyone' and la.status = 'published' and la.course_offering_id = co.id - and co.lms_int_id in %(course_ids)s + and co.lms_int_id = ANY(%(course_ids)s) ), assignment_grp as ( select lg.* from entity.learner_activity_group lg, keymap.course_offering co where lg.status = 'available' and lg.course_offering_id = co.id - and co.lms_int_id in %(course_ids)s + and co.lms_int_id = ANY(%(course_ids)s) ), assign_more as ( select distinct(a.learner_activity_group_id), da.group_points from assignment_details a @@ -125,7 +125,7 @@ la.visibility = 'everyone' and la.status = 'published' and la.course_offering_id = co.id - and co.lms_int_id in %(course_ids)s + and co.lms_int_id = ANY(%(course_ids)s) and la.learner_activity_id = la_km.id and la.learner_activity_group_id = lag_km.id ) @@ -147,7 +147,7 @@ keymap.course_offering co_km where lag.course_offering_id = co_km.id - and co_km.lms_int_id in %(course_ids)s + and co_km.lms_int_id = ANY(%(course_ids)s) group by 
co_km.lms_int_id ''', "term": @@ -182,7 +182,7 @@ LEFT OUTER JOIN entity.academic_term at1 on (co.academic_term_id = at1.academic_term_id), keymap.course_offering co2, keymap.academic_term at2 - WHERE co2.lms_int_id in %(course_ids)s + WHERE co2.lms_int_id = ANY(%(course_ids)s) and co.course_offering_id = co2.id and at1.academic_term_id = at2.id ''', @@ -196,7 +196,7 @@ where f.course_offering_id = co_km.id and f.file_id = f_km.id - and co_km.lms_int_id in %(course_ids)s + and co_km.lms_int_id = ANY(%(course_ids)s) order by id ''', "submission": @@ -212,7 +212,7 @@ left join keymap.course_offering co on cs.le_current_course_offering_id = co.id where - co.lms_int_id in %(course_ids)s + co.lms_int_id = ANY(%(course_ids)s) and cse.role_status ='Enrolled' and cse."role" = 'Student' and cse.enrollment_status = 'Active' @@ -244,7 +244,7 @@ left join keymap.course_offering co on co.id = la.course_offering_id join keymap.person p on p.id = lar.person_id where - co.lms_int_id in %(course_ids)s + co.lms_int_id = ANY(%(course_ids)s) and la.status = 'published' ) select diff --git a/dashboard/admin.py b/dashboard/admin.py index 9a8d4b77..005706b7 100644 --- a/dashboard/admin.py +++ b/dashboard/admin.py @@ -68,6 +68,7 @@ def clean(self): return self.cleaned_data +@admin.register(AcademicTerms) class TermAdmin(admin.ModelAdmin): exclude = ('id',) list_display = ('canvas_id', 'name', 'date_start', 'date_end') @@ -77,6 +78,7 @@ def has_add_permission(self, request): return False +@admin.register(Course) class CourseAdmin(admin.ModelAdmin): inlines = [CourseViewOptionInline, ] form = CourseForm @@ -95,11 +97,13 @@ def clear_course_updated_dates(self, request, queryset): self.message_user(request, "All selected last updated values cleared.") # Need this method to correctly display the line breaks + @admin.display( + description="Course View Option(s)" + ) def _courseviewoption(self, obj): return mark_safe(linebreaksbr(obj.courseviewoption)) # Known mypy issue: 
https://github.com/python/mypy/issues/708 - _courseviewoption.short_description = "Course View Option(s)" # type: ignore[attr-defined] def course_link(self, obj): return format_html('Link', obj.absolute_url) @@ -160,8 +164,6 @@ def has_change_permission(request, obj=None): def has_delete_permission(request, obj=None): return False -admin.site.register(AcademicTerms, TermAdmin) -admin.site.register(Course, CourseAdmin) # Remove the pinax LogAdmin and add ours admin.site.unregister(Log) diff --git a/dashboard/common/db_util.py b/dashboard/common/db_util.py index e19ff87d..36d0d2d1 100644 --- a/dashboard/common/db_util.py +++ b/dashboard/common/db_util.py @@ -37,7 +37,7 @@ def create_sqlalchemy_engine(db_params: DjangoDBParams) -> Engine: if new_db_params['ENGINE'] == (BACKENDS_PATH + 'mysql'): return create_engine(f'mysql+mysqldb://{core_string}?charset=utf8mb4') else: - return create_engine('postgresql://' + core_string) + return create_engine('postgresql+psycopg://' + core_string) def canvas_id_to_incremented_id(canvas_id): diff --git a/dashboard/cron.py b/dashboard/cron.py index 7a49055e..d50e58d3 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -5,7 +5,7 @@ import hjson import pandas as pd -import pytz +from zoneinfo import ZoneInfo import pangres from django.conf import settings @@ -13,7 +13,7 @@ from django.db.models import QuerySet from django_cron import CronJobBase, Schedule from google.cloud import bigquery -from sqlalchemy import types +from sqlalchemy import types, text from sqlalchemy.engine import ResultProxy from dashboard.common import db_util, utils @@ -68,12 +68,12 @@ def util_function(sql_string, mysql_table, param_object=None, table_identifier=N # execute database query def execute_db_query(query: str, params: List = None) -> ResultProxy: - with engine.connect() as connection: + with engine.begin() as connection: connection.detach() if params: - return connection.execute(query, params) + return connection.execute(text(query), params) 
else: - return connection.execute(query) + return connection.execute(text(query)) # remove all records inside the specified table @@ -99,7 +99,7 @@ def soft_update_datetime_field( f'Skipped update of {field_name} for {model_name} instance ({model_inst.id}); existing value was found') else: if warehouse_field_value: - warehouse_field_value = warehouse_field_value.replace(tzinfo=pytz.UTC) + warehouse_field_value = warehouse_field_value.replace(tzinfo=ZoneInfo('UTC')) setattr(model_inst, field_name, warehouse_field_value) logger.info(f'Updated {field_name} for {model_name} instance ({model_inst.id})') return [field_name] @@ -124,7 +124,7 @@ def verify_course_ids(self): logger.debug("in checking course") supported_courses = Course.objects.get_supported_courses() course_ids = [str(x) for x in supported_courses.values_list('id', flat=True)] - courses_data = pd.read_sql(queries['course'], data_warehouse_engine, params={'course_ids': tuple(course_ids)}) + courses_data = pd.read_sql(queries['course'], data_warehouse_engine, params={'course_ids': course_ids}) # error out when course id is invalid, otherwise add DataFrame to list for course_id, data_last_updated in supported_courses: if course_id not in list(courses_data['id']): @@ -151,7 +151,7 @@ def update_user(self): # cron status status = "" - logger.debug("in update with data warehouse user") + logger.info("in update with data warehouse user") # delete all records in the table first status += delete_all_records_in_table("user") @@ -160,7 +160,7 @@ def update_user(self): status += util_function( queries['user'], 'user', - {'course_ids': tuple(self.valid_locked_course_ids), + {'course_ids': self.valid_locked_course_ids, 'canvas_data_id_increment': settings.CANVAS_DATA_ID_INCREMENT }) @@ -193,13 +193,13 @@ def update_canvas_resource(self): # cron status status = "" - logger.debug("in update canvas resource") + logger.info("in update canvas resource") # Select all the files for these courses # convert int array to str array 
df_attach = pd.read_sql(queries['resource'], data_warehouse_engine, - params={'course_ids': tuple(self.valid_locked_course_ids)}) + params={'course_ids': self.valid_locked_course_ids }) logger.debug(df_attach) # Update these back again based on the dataframe # Remove any rows where file_state is not available! @@ -217,6 +217,8 @@ def update_resource_access(self): # cron status status = "" + logger.info("in update resource access") + # return string with concatenated SQL insert result return_string = "" @@ -437,6 +439,8 @@ def update_groups(self): # cron status status = "" + logger.info("update_groups(): ") + # delete all records in assignment_group table status += delete_all_records_in_table("assignment_groups") @@ -447,7 +451,7 @@ def update_groups(self): # loop through multiple course ids status += util_function(queries['assignment_groups'], 'assignment_groups', - {'course_ids': tuple(self.valid_locked_course_ids)}) + {'course_ids': self.valid_locked_course_ids}) return status @@ -463,7 +467,7 @@ def update_assignment(self): # loop through multiple course ids status += util_function(queries['assignment'], 'assignment', - {'course_ids': tuple(self.valid_locked_course_ids), + {'course_ids': self.valid_locked_course_ids, 'time_zone': settings.TIME_ZONE}) return status @@ -483,7 +487,7 @@ def submission(self): status += util_function(queries['submission'], 'submission', { - 'course_ids': tuple(self.valid_locked_course_ids), + 'course_ids': self.valid_locked_course_ids, 'canvas_data_id_increment': settings.CANVAS_DATA_ID_INCREMENT, 'time_zone': settings.TIME_ZONE }) @@ -503,7 +507,7 @@ def weight_consideration(self): # loop through multiple course ids status += util_function(queries['assignment_weight'], 'assignment_weight_consideration', - {'course_ids': tuple(self.valid_locked_course_ids)}, + {'course_ids': self.valid_locked_course_ids }, 'weight') logger.debug(status + "\n\n") @@ -543,7 +547,7 @@ def update_course(self, warehouse_courses_data: pd.DataFrame) -> str: 
Updates course records with data returned from verify_course_ids, only making changes when necessary. """ status: str = '' - logger.debug('update_course()') + logger.info('update_course()') logger.debug(warehouse_courses_data.to_json(orient='records')) courses: QuerySet = Course.objects.filter(id__in=self.valid_locked_course_ids) @@ -588,7 +592,7 @@ def do(self) -> str: status = "" - run_start = datetime.now(pytz.UTC) + run_start = datetime.now(ZoneInfo('UTC')) status += f"Start cron: {str(run_start)} UTC\n" course_verification = self.verify_course_ids() invalid_course_id_list = course_verification.invalid_course_ids @@ -616,16 +620,16 @@ def do(self) -> str: # Update the date unless there is an exception exception_in_run = False logger.info("** course") - status += self.update_course(course_verification.course_data) + # status += self.update_course(course_verification.course_data) logger.info("** user") - status += self.update_user() + # status += self.update_user() logger.info("** assignment") - status += self.update_groups() - status += self.update_assignment() - status += self.submission() - status += self.weight_consideration() + # status += self.update_groups() + # status += self.update_assignment() + # status += self.submission() + # status += self.weight_consideration() logger.info("** resources") if 'show_resources_accessed' not in settings.VIEWS_DISABLED: @@ -639,7 +643,7 @@ def do(self) -> str: if settings.DATABASES.get('DATA_WAREHOUSE', {}).get('IS_UNIZIN'): logger.info("** informational") - status += self.update_unizin_metadata() + # status += self.update_unizin_metadata() all_str_course_ids = set( str(x) for x in Course.objects.get_supported_courses().values_list('id', flat=True) diff --git a/dashboard/management/commands/course.py b/dashboard/management/commands/course.py index b983a88f..97184a9d 100644 --- a/dashboard/management/commands/course.py +++ b/dashboard/management/commands/course.py @@ -2,7 +2,7 @@ from dashboard.models import Course, 
CourseViewOption, AcademicTerms from dashboard.common.db_util import canvas_id_to_incremented_id from datetime import datetime -import pytz +from zoneinfo import ZoneInfo class Command(BaseCommand): def add_arguments(self, parser): @@ -18,10 +18,10 @@ def handle(self, *args, **options): name = options.get('name') date_start = options.get('date_start') if date_start is not None: - date_start = datetime.strptime(date_start, '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.UTC) + date_start = datetime.strptime(date_start, '%Y-%m-%d %H:%M:%S').replace(tzinfo=ZoneInfo('UTC')) date_end = options.get('date_end') if date_end is not None: - date_end = datetime.strptime(date_end, '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.UTC) + date_end = datetime.strptime(date_end, '%Y-%m-%d %H:%M:%S').replace(tzinfo=ZoneInfo('UTC')) prefixed_course_id = canvas_id_to_incremented_id(course_id) if term_id is not None: diff --git a/dashboard/management/commands/term.py b/dashboard/management/commands/term.py index cd31f468..ba9ffd7d 100644 --- a/dashboard/management/commands/term.py +++ b/dashboard/management/commands/term.py @@ -2,7 +2,7 @@ from dashboard.models import AcademicTerms from dashboard.common.db_util import canvas_id_to_incremented_id from datetime import datetime -import pytz +from zoneinfo import ZoneInfo class Command(BaseCommand): def add_arguments(self, parser): @@ -14,8 +14,8 @@ def add_arguments(self, parser): def handle(self, *args, **options): term_id = options.get('term_id') name = options.get('name') - date_start = datetime.strptime(options.get('date_start'), '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.UTC) - date_end = datetime.strptime(options.get('date_end'), '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.UTC) + date_start = datetime.strptime(options.get('date_start'), '%Y-%m-%d %H:%M:%S').replace(tzinfo=ZoneInfo('UTC')) + date_end = datetime.strptime(options.get('date_end'), '%Y-%m-%d %H:%M:%S').replace(tzinfo=ZoneInfo('UTC')) prefixed_term_id = canvas_id_to_incremented_id(term_id) 
diff --git a/dashboard/models.py b/dashboard/models.py index f3061508..39b3fb39 100644 --- a/dashboard/models.py +++ b/dashboard/models.py @@ -11,7 +11,7 @@ from datetime import datetime, timedelta from typing import Optional, Union -import pytz +from zoneinfo import ZoneInfo from django.conf import settings from django.core.exceptions import ObjectDoesNotExist from django.db import models @@ -219,7 +219,7 @@ def determine_date_start(self) -> datetime: date_start = self.term.date_start else: logger.info(f"No date_start value was found for course {self.name} ({self.canvas_id}) or term; setting to current date and time") - date_start = datetime.now(pytz.UTC) + date_start = datetime.now(ZoneInfo('UTC')) return date_start def determine_date_end(self, start: Union[datetime, None] = None) -> datetime: diff --git a/dashboard/settings.py b/dashboard/settings.py index 6d8b3cb9..4eb34406 100644 --- a/dashboard/settings.py +++ b/dashboard/settings.py @@ -169,7 +169,11 @@ def apply_env_overrides(env: Dict[str, Any], environ: os._Environ) -> Dict[str, CRON_QUERY_FILE = os.path.join(BASE_DIR, ENV.get('CRON_QUERY_FILE', 'config/cron_udp.hjson')) -STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' +STORAGES = { + "staticfiles": { + "BACKEND": 'whitenoise.storage.CompressedManifestStaticFilesStorage', + }, +} CONTEXT_PROCESSORS = [ 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', @@ -273,7 +277,6 @@ def apply_env_overrides(env: Dict[str, Any], environ: os._Environ) -> Dict[str, USE_I18N = True -USE_L10N = True USE_TZ = True diff --git a/dashboard/urls.py b/dashboard/urls.py index dcc2c0c0..542a2adf 100644 --- a/dashboard/urls.py +++ b/dashboard/urls.py @@ -19,8 +19,8 @@ from django.contrib.auth.decorators import login_required from django.conf import settings -from django.conf.urls import include from django.conf.urls.static import static +from django.urls import include from django.urls import path, re_path 
from dashboard.graphql.view import DashboardGraphQLView diff --git a/requirements.txt b/requirements.txt index ba31f46d..1ce4835a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,43 +1,43 @@ -gunicorn==20.1.0 #21.2.0 +gunicorn==21.2.0 -Django==3.2.20 #4.2.7 -whitenoise==5.3.0 #6.6.0 +Django==4.2.7 +whitenoise==6.6.0 django-cron==0.6.0 #deprecated -django-watchman==1.2.0 #1.3.0 -django-su==0.9.0 #1.0.0 -django-mysql==3.12.0 #4.12.0 +django-watchman==1.3.0 +django-su==1.0.0 +django-mysql==4.12.0 # The alternative to this is redis, but we don't use redis yet -django-constance[database]==2.8.0 #3.1.0 -django-webpack-loader==1.5.0 #2.0.1 +django-constance[database]==3.1.0 +django-webpack-loader==2.0.1 django-csp==3.7 #no updates -django-import-export==2.6.0 #3.3.1 -django-admin-rangefilter==0.8.1 #0.11.2 +django-import-export==3.3.1 +django-admin-rangefilter==0.11.2 # Django/Python fontawesome library supporting CSS in UI -fontawesomefree==6.1.1 #6.4.2 +fontawesomefree==6.4.2 # graphql -graphene-django==2.15.0 #3.1.5 -django-filter==2.4.0 #23.3(tag naming convention is changed ) +graphene-django==3.1.5 +django-filter==23.3 # object-level permissions -rules==3.0 #3.3 +rules==3.3 # These should be okay to update minors -numpy==1.23.3 #1.26.1 -pandas==1.4.4 #2.1.2 -pangres==4.1.2 #4.2.1 +numpy==1.26.1 +pandas==2.1.2 +pangres==4.2.1 -SQLAlchemy==1.4.22 #2.0.23 -psycopg2==2.9.1 #2.9.9 -mysqlclient==2.0.3 #2.2.0 -google-cloud-bigquery[pandas]==3.3.2 #3.13.0 +SQLAlchemy==2.0.23 +psycopg==3.1.12 +mysqlclient==2.2.0 +google-cloud-bigquery[pandas]==3.13.0 -debugpy==1.4.1 #1.8.0 -jsonschema==3.2.0 #4.19.2 +debugpy==1.8.0 +jsonschema==4.19.2 pinax-eventlog==5.1.1 #no updates -pycryptodome==3.10.1 #3.19.0 -PyLTI1p3==1.12.1 #2.0.0 (last update Nov 2022) -hjson==3.0.1 #3.1.0 +pycryptodome==3.19.0 +PyLTI1p3==2.0.0 #no further update Nov 2022) +hjson==3.1.0 From 305f047a70c9da7ea5f6a99a6ede9e6da83849a8 Mon Sep 17 00:00:00 2001 From: Pushyami Gundala Date: Mon, 13 Nov 2023 
10:49:50 -0500 Subject: [PATCH 3/9] Fixes for resource view --- dashboard/cron.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/dashboard/cron.py b/dashboard/cron.py index d50e58d3..3f973fa6 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -7,7 +7,7 @@ import pandas as pd from zoneinfo import ZoneInfo import pangres - +import psycopg from django.conf import settings from django.db import connections as conns, models from django.db.models import QuerySet @@ -44,6 +44,9 @@ def split_list(a_list: list, size: int = 20): # the util function +def util_submission_function(sql_string, mysql_table, param_object=None, table_identifier=None): + pass + def util_function(sql_string, mysql_table, param_object=None, table_identifier=None): logger.debug(f'sql={sql_string}') @@ -67,7 +70,7 @@ def util_function(sql_string, mysql_table, param_object=None, table_identifier=N # execute database query -def execute_db_query(query: str, params: List = None) -> ResultProxy: +def execute_db_query(query: str, params: Dict = None) -> ResultProxy: with engine.begin() as connection: connection.detach() if params: @@ -77,7 +80,7 @@ def execute_db_query(query: str, params: List = None) -> ResultProxy: # remove all records inside the specified table -def delete_all_records_in_table(table_name: str, where_clause: str = "", where_params: List = None): +def delete_all_records_in_table(table_name: str, where_clause: str = "", where_params: Dict = None): # delete all records in the table first, can have an optional where clause result_proxy = execute_db_query(f"delete from {table_name} {where_clause}", where_params) return(f"\n{result_proxy.rowcount} rows deleted from {table_name}\n") @@ -233,7 +236,7 @@ def update_resource_access(self): logger.info(f"Deleting all records in resource_access after {data_last_updated}") - status += delete_all_records_in_table("resource_access", f"WHERE access_time > %s", [data_last_updated, ]) + status += 
delete_all_records_in_table("resource_access", f"WHERE access_time > :data_last_updated", {'data_last_updated': data_last_updated }) # loop through multiple course ids, 20 at a time # (This is set by the CRON_BQ_IN_LIMIT from settings) @@ -395,7 +398,7 @@ def update_resource_access(self): student_enrollment_type = User.EnrollmentType.STUDENT student_enrollment_df = pd.read_sql( 'select user_id, course_id from user where enrollment_type= %s', - engine, params={student_enrollment_type}) + engine, params=[(str(student_enrollment_type),)]) resource_access_df = pd.merge( resource_access_df, student_enrollment_df, on=['user_id', 'course_id'], @@ -628,14 +631,15 @@ def do(self) -> str: logger.info("** assignment") # status += self.update_groups() # status += self.update_assignment() - # status += self.submission() + status += self.submission() # status += self.weight_consideration() logger.info("** resources") if 'show_resources_accessed' not in settings.VIEWS_DISABLED: try: - status += self.update_resource_access() - status += self.update_canvas_resource() + # status += self.update_resource_access() + # status += self.update_canvas_resource() + pass except Exception as e: logger.error(f"Exception running BigQuery update: {str(e)}") status += str(e) From a32cc3f3d96706774d2729b5d2eb9eafd01125c9 Mon Sep 17 00:00:00 2001 From: Pushyami Gundala Date: Fri, 17 Nov 2023 09:20:31 -0500 Subject: [PATCH 4/9] Fixing the cron and GraphQL issues --- .github/workflows/main.yml | 3 ++- config/cron_udp.hjson | 13 +++++---- dashboard/cron.py | 52 +++++++++++++++++++++++++----------- dashboard/graphql/objects.py | 2 +- dashboard/graphql/query.py | 2 +- dashboard/graphql/view.py | 3 ++- dashboard/urls.py | 4 +-- docs/loading_data.md | 6 +++++ requirements.txt | 1 + 9 files changed, 58 insertions(+), 28 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ee8e0469..dc4048a8 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,6 
+6,7 @@ on: branches: - master - '[0-9][0-9][0-9][0-9].[0-9][0-9].*' # 2021.01.01 + - i1544_backend_dependencies env: IMAGE_TAG: latest @@ -14,7 +15,7 @@ env: jobs: build: # to test a feature, change the repo name to your github id - if: github.repository_owner == 'tl-its-umich-edu' + if: github.repository_owner == 'pushyamig' runs-on: ubuntu-latest steps: diff --git a/config/cron_udp.hjson b/config/cron_udp.hjson index 45fe42dd..0f6304a9 100644 --- a/config/cron_udp.hjson +++ b/config/cron_udp.hjson @@ -212,7 +212,7 @@ left join keymap.course_offering co on cs.le_current_course_offering_id = co.id where - co.lms_int_id = ANY(%(course_ids)s) + co.lms_int_id = ANY(:course_ids) and cse.role_status ='Enrolled' and cse."role" = 'Student' and cse.enrollment_status = 'Active' @@ -228,13 +228,13 @@ lar.published_score as published_score, lar.response_date as submitted_at, lar.graded_date as graded_at, - timezone(%(time_zone)s, lar.posted_at AT TIME ZONE 'UTC') as grade_posted_local_date, + timezone(:time_zone, lar.posted_at AT TIME ZONE 'UTC') as grade_posted_local_date, lar.grading_status as submission_workflow_state, la.title as title, lar.learner_activity_result_id as learner_activity_result_id, lar.person_id as short_user_id, cast(lar2.lms_int_id as BIGINT) as submission_id, - (cast(%(canvas_data_id_increment)s as bigint) + cast(p.lms_ext_id as bigint)) as canvas_user_id + (cast(:canvas_data_id_increment as bigint) + cast(p.lms_ext_id as bigint)) as canvas_user_id from entity.learner_activity_result lar join enrollment on lar.person_id= enrollment.user_id join enrollment e on lar.person_id = e.user_id @@ -244,7 +244,7 @@ left join keymap.course_offering co on co.id = la.course_offering_id join keymap.person p on p.id = lar.person_id where - co.lms_int_id = ANY(%(course_ids)s) + co.lms_int_id = ANY(:course_ids) and la.status = 'published' ) select @@ -267,7 +267,10 @@ grade_posted_local_date from submission - ); + ) + ''', + "submission_with_avg_score": + ''' select 
f.id::bigint, f.assignment_id::bigint assignment_id, diff --git a/dashboard/cron.py b/dashboard/cron.py index 3f973fa6..f4f2cb94 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -15,6 +15,7 @@ from google.cloud import bigquery from sqlalchemy import types, text from sqlalchemy.engine import ResultProxy +from sqlalchemy.orm import Session, sessionmaker from dashboard.common import db_util, utils from dashboard.models import Course, Resource, AcademicTerms, User @@ -487,13 +488,33 @@ def submission(self): # loop through multiple course ids # filter out not released grades (submission_dim.posted_at date is not null) and partial grades (submission_dim.workflow_state != 'graded') - status += util_function(queries['submission'], - 'submission', - { - 'course_ids': self.valid_locked_course_ids, - 'canvas_data_id_increment': settings.CANVAS_DATA_ID_INCREMENT, - 'time_zone': settings.TIME_ZONE - }) + query_params = { + 'course_ids': self.valid_locked_course_ids, + 'time_zone': settings.TIME_ZONE, + 'canvas_data_id_increment': settings.CANVAS_DATA_ID_INCREMENT, + } + Session = sessionmaker(bind=data_warehouse_engine) + try: + # Create a session + with Session() as session: + # Execute the first query to create the temporary table + session.execute(text(queries['submission']).bindparams(**query_params)) + + # Execute the second query using the temporary table + result = session.execute(text(queries['submission_with_avg_score'])) + df = pd.DataFrame(result.fetchall(), columns=result.keys()) + df = df.drop_duplicates(keep='first') + df.to_sql(con=engine, name='submission', if_exists='append', index=False) + + except Exception as e: + logger.exception('Error running sql on table submission', e) + raise + status+=f"{str(df.shape[0])} submission: {query_params}\n" + + # returns the row size of dataframe + return status + + return status @@ -623,23 +644,22 @@ def do(self) -> str: # Update the date unless there is an exception exception_in_run = False logger.info("** 
course") - # status += self.update_course(course_verification.course_data) + status += self.update_course(course_verification.course_data) logger.info("** user") - # status += self.update_user() + status += self.update_user() logger.info("** assignment") - # status += self.update_groups() - # status += self.update_assignment() + status += self.update_groups() + status += self.update_assignment() status += self.submission() - # status += self.weight_consideration() + status += self.weight_consideration() logger.info("** resources") if 'show_resources_accessed' not in settings.VIEWS_DISABLED: try: - # status += self.update_resource_access() - # status += self.update_canvas_resource() - pass + status += self.update_resource_access() + status += self.update_canvas_resource() except Exception as e: logger.error(f"Exception running BigQuery update: {str(e)}") status += str(e) @@ -647,7 +667,7 @@ def do(self) -> str: if settings.DATABASES.get('DATA_WAREHOUSE', {}).get('IS_UNIZIN'): logger.info("** informational") - # status += self.update_unizin_metadata() + status += self.update_unizin_metadata() all_str_course_ids = set( str(x) for x in Course.objects.get_supported_courses().values_list('id', flat=True) diff --git a/dashboard/graphql/objects.py b/dashboard/graphql/objects.py index bbb9be68..b9f3a897 100644 --- a/dashboard/graphql/objects.py +++ b/dashboard/graphql/objects.py @@ -6,7 +6,7 @@ from graphql import GraphQLError from dashboard.rules import is_admin_or_instructor_in_course_id -from dashboard.models import Course, User, Assignment, Submission, \ +from dashboard.models import Course, Assignment, Submission, \ AssignmentGroups, UserDefaultSelection, AcademicTerms import logging diff --git a/dashboard/graphql/query.py b/dashboard/graphql/query.py index 780cc561..68f6ce22 100644 --- a/dashboard/graphql/query.py +++ b/dashboard/graphql/query.py @@ -3,7 +3,7 @@ from dashboard.models import Course from dashboard.graphql.objects import CourseType -from dashboard.rules 
import is_admin_or_enrolled_in_course, is_admin +from dashboard.rules import is_admin_or_enrolled_in_course from graphql import GraphQLError import logging diff --git a/dashboard/graphql/view.py b/dashboard/graphql/view.py index bf27b2d9..0367f6ed 100644 --- a/dashboard/graphql/view.py +++ b/dashboard/graphql/view.py @@ -8,7 +8,7 @@ AssignmentGroupByCourseIdAndIdLoader, AssignmentWeightConsiderationByCourseIdLoader, \ UserDefaultSelectionsByCourseIdAndUserLoader, UserDefaultSelectionByCourseIdAndUserAndViewTypeLoader, \ AcademicTermByIdLoader - +from graphql_core_promise import PromiseExecutionContext from django.db.models import Q from dashboard.models import User from pinax.eventlog.models import log as eventlog @@ -18,6 +18,7 @@ class DashboardGraphQLView(LoginRequiredMixin, GraphQLView): + execution_context_class = PromiseExecutionContext def get_context(self, request): loaders = { 'assignment_weight_consideration_by_course_id_loader': AssignmentWeightConsiderationByCourseIdLoader( diff --git a/dashboard/urls.py b/dashboard/urls.py index 542a2adf..d50b57b3 100644 --- a/dashboard/urls.py +++ b/dashboard/urls.py @@ -27,8 +27,6 @@ from django.views.decorators.cache import cache_page -from dashboard.middleware.disableintrospection import DisableIntrospectionMiddleware - from . import views import watchman.views @@ -44,7 +42,7 @@ path('admin/', admin.site.urls), # Note the absence of a trailing slash; adding one breaks the GraphQL implementation. - path('graphql', DashboardGraphQLView.as_view( middleware=[] if settings.DEBUG else [DisableIntrospectionMiddleware],graphiql=settings.DEBUG)), + path('graphql', DashboardGraphQLView.as_view(graphiql=settings.DEBUG)), # This is the courses catch-all. Most user-initiated requests will match the regular expression; then the React # front-end will manage any additional routing. 
diff --git a/docs/loading_data.md b/docs/loading_data.md index b9336800..a89a935c 100644 --- a/docs/loading_data.md +++ b/docs/loading_data.md @@ -18,6 +18,12 @@ docker exec -it student_dashboard /bin/bash -c \ After about 30 to 60 seconds, the cron job should have completed and you should have data! In the admin interface, there is a table where you can check the status of the cron job runs. +debugging cron use the following command. Once you run this command then go to VSCode and start the 'MyLA Docker Cron'. Add some +Breakpoint to debug at desired location. +```sh +docker exec -it student_dashboard /bin/bash -c "DEBUGPY_WAIT_FOR_ATTACH=True DEBUGPY_ENABLE=TRUE DEBUGPY_REMOTE_PORT=3001 ./manage_debugpy.py runcrons --force" +``` + ### Cron scheduling for deployment > **Note:** Cron scheduling functionality settings may be removed in the future, diff --git a/requirements.txt b/requirements.txt index 1ce4835a..5a84545b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,6 +20,7 @@ fontawesomefree==6.4.2 # graphql graphene-django==3.1.5 +graphql-core-promise==3.2.3.post1 django-filter==23.3 # object-level permissions From 60fd762eb699611fe8469b6479e73ce6a4414486 Mon Sep 17 00:00:00 2001 From: Pushyami Gundala Date: Fri, 17 Nov 2023 11:26:58 -0500 Subject: [PATCH 5/9] minor change --- .github/workflows/main.yml | 3 +-- dashboard/cron.py | 9 +++------ docs/github_actions.md | 2 +- docs/loading_data.md | 2 +- 4 files changed, 6 insertions(+), 10 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index dc4048a8..ee8e0469 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,7 +6,6 @@ on: branches: - master - '[0-9][0-9][0-9][0-9].[0-9][0-9].*' # 2021.01.01 - - i1544_backend_dependencies env: IMAGE_TAG: latest @@ -15,7 +14,7 @@ env: jobs: build: # to test a feature, change the repo name to your github id - if: github.repository_owner == 'pushyamig' + if: github.repository_owner == 'tl-its-umich-edu' runs-on: 
ubuntu-latest steps: diff --git a/dashboard/cron.py b/dashboard/cron.py index f4f2cb94..8f03e655 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -7,7 +7,7 @@ import pandas as pd from zoneinfo import ZoneInfo import pangres -import psycopg + from django.conf import settings from django.db import connections as conns, models from django.db.models import QuerySet @@ -15,9 +15,9 @@ from google.cloud import bigquery from sqlalchemy import types, text from sqlalchemy.engine import ResultProxy -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import sessionmaker -from dashboard.common import db_util, utils +from dashboard.common import db_util from dashboard.models import Course, Resource, AcademicTerms, User @@ -45,9 +45,6 @@ def split_list(a_list: list, size: int = 20): # the util function -def util_submission_function(sql_string, mysql_table, param_object=None, table_identifier=None): - pass - def util_function(sql_string, mysql_table, param_object=None, table_identifier=None): logger.debug(f'sql={sql_string}') diff --git a/docs/github_actions.md b/docs/github_actions.md index 5f20f813..064cba01 100644 --- a/docs/github_actions.md +++ b/docs/github_actions.md @@ -5,6 +5,6 @@ 2. The action is triggered whenever a commit is made to the `master` branch. E.g., when a pull request is merged to `master`. 3. OpenShift projects can periodically pull this image from GHCR. Configure only **_NON-PRODUCTION_** MyLA projects to pull the image… ```sh - oc tag ghcr.io/tl-its-umich-edu/my-learning-analytics:latest my-learning-analytics:latest --scheduled + oc tag ghcr.io/tl-its-umich-edu/my-learning-analytics:latest my-learning-analytics:latest --scheduled --reference-policy=local ``` See the OpenShift documentation "[Managing image streams: Configuring periodic importing of image stream tags](https://docs.openshift.com/container-platform/4.11/openshift_images/image-streams-manage.html#images-imagestream-import_image-streams-managing)" for details.
diff --git a/docs/loading_data.md b/docs/loading_data.md index a89a935c..3def539f 100644 --- a/docs/loading_data.md +++ b/docs/loading_data.md @@ -18,7 +18,7 @@ docker exec -it student_dashboard /bin/bash -c \ After about 30 to 60 seconds, the cron job should have completed and you should have data! In the admin interface, there is a table where you can check the status of the cron job runs. -debugging cron use the following command. Once you run this command then go to VSCode and start the 'MyLA Docker Cron'. Add some +Debugging cron use the following command. Once you run this command then go to VSCode and start the 'MyLA Docker Cron'. Add some Breakpoint to debug at desired location. ```sh docker exec -it student_dashboard /bin/bash -c "DEBUGPY_WAIT_FOR_ATTACH=True DEBUGPY_ENABLE=TRUE DEBUGPY_REMOTE_PORT=3001 ./manage_debugpy.py runcrons --force" From b04db2221d2d4af6cb600780ed2c959e48159a81 Mon Sep 17 00:00:00 2001 From: Pushyami Gundala Date: Fri, 17 Nov 2023 12:38:22 -0500 Subject: [PATCH 6/9] Adding the trusted origin change to env_sample.json --- config/env_sample.hjson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/env_sample.hjson b/config/env_sample.hjson index cf33dbc3..56891edf 100644 --- a/config/env_sample.hjson +++ b/config/env_sample.hjson @@ -53,7 +53,7 @@ # Enable secure cookies, also set your trusted origin (example of instructure.com) # This needs to be true for deployments or when testing LTI with ngrok or loophole. "CSRF_COOKIE_SECURE": false, - "CSRF_TRUSTED_ORIGINS": ["instructure.com"], + "CSRF_TRUSTED_ORIGINS": ["https://*.instructure.com", "https://*.umich.edu"], # If you have a proxy that sets this header then set this to true. Default is false "USE_X_FORWARDED_HOST": false, # SameSite settings for Session and CSRF (defaults in settings.py should work), if you do want non-string None set to null. 
From 817c422ef00d2298822e3ad82ef08a5b9eb67009 Mon Sep 17 00:00:00 2001 From: Pushyami Gundala Date: Fri, 17 Nov 2023 13:26:21 -0500 Subject: [PATCH 7/9] adding the disable introspection middleware --- .github/workflows/main.yml | 3 ++- dashboard/urls.py | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ee8e0469..dc4048a8 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,6 +6,7 @@ on: branches: - master - '[0-9][0-9][0-9][0-9].[0-9][0-9].*' # 2021.01.01 + - i1544_backend_dependencies env: IMAGE_TAG: latest @@ -14,7 +15,7 @@ env: jobs: build: # to test a feature, change the repo name to your github id - if: github.repository_owner == 'tl-its-umich-edu' + if: github.repository_owner == 'pushyamig' runs-on: ubuntu-latest steps: diff --git a/dashboard/urls.py b/dashboard/urls.py index d50b57b3..542a2adf 100644 --- a/dashboard/urls.py +++ b/dashboard/urls.py @@ -27,6 +27,8 @@ from django.views.decorators.cache import cache_page +from dashboard.middleware.disableintrospection import DisableIntrospectionMiddleware + from . import views import watchman.views @@ -42,7 +44,7 @@ path('admin/', admin.site.urls), # Note the absence of a trailing slash; adding one breaks the GraphQL implementation. - path('graphql', DashboardGraphQLView.as_view(graphiql=settings.DEBUG)), + path('graphql', DashboardGraphQLView.as_view( middleware=[] if settings.DEBUG else [DisableIntrospectionMiddleware],graphiql=settings.DEBUG)), # This is the courses catch-all. Most user-initiated requests will match the regular expression; then the React # front-end will manage any additional routing. 
From c74f4e3df1a0adf117ad54846a8203afcc4dbf6a Mon Sep 17 00:00:00 2001 From: Pushyami Gundala Date: Fri, 17 Nov 2023 13:38:51 -0500 Subject: [PATCH 8/9] Fixing the Codacy errors --- .github/workflows/main.yml | 3 +-- dashboard/cron.py | 8 ++------ dashboard/graphql/view.py | 2 +- dashboard/management/commands/course.py | 2 +- dashboard/management/commands/term.py | 2 +- dashboard/models.py | 2 +- 6 files changed, 7 insertions(+), 12 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index dc4048a8..ee8e0469 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,7 +6,6 @@ on: branches: - master - '[0-9][0-9][0-9][0-9].[0-9][0-9].*' # 2021.01.01 - - i1544_backend_dependencies env: IMAGE_TAG: latest @@ -15,7 +14,7 @@ env: jobs: build: # to test a feature, change the repo name to your github id - if: github.repository_owner == 'pushyamig' + if: github.repository_owner == 'tl-its-umich-edu' runs-on: ubuntu-latest steps: diff --git a/dashboard/cron.py b/dashboard/cron.py index 8f03e655..20f2a9bd 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -2,10 +2,10 @@ import logging from collections import namedtuple from typing import Any, Dict, List, Union +from zoneinfo import ZoneInfo import hjson import pandas as pd -from zoneinfo import ZoneInfo import pangres from django.conf import settings @@ -504,16 +504,12 @@ def submission(self): df.to_sql(con=engine, name='submission', if_exists='append', index=False) except Exception as e: - logger.exception('Error running sql on table submission', e) + logger.exception('Error running sql on table submission', str(e)) raise status+=f"{str(df.shape[0])} submission: {query_params}\n" # returns the row size of dataframe return status - - - - return status def weight_consideration(self): # load the assignment weight consider information with in a course. 
Some assignments don't have weight consideration diff --git a/dashboard/graphql/view.py b/dashboard/graphql/view.py index 0367f6ed..6e238c53 100644 --- a/dashboard/graphql/view.py +++ b/dashboard/graphql/view.py @@ -1,5 +1,6 @@ from graphene_django.views import GraphQLView from django.contrib.auth.mixins import LoginRequiredMixin +from graphql_core_promise import PromiseExecutionContext from dashboard.common.db_util import canvas_id_to_incremented_id from dashboard.graphql.loaders import AssignmentsByCourseIdLoader, \ SubmissionsByAssignmentIdLoader, SubmissionByAssignmentIdAndUserIdLoader, \ @@ -8,7 +9,6 @@ AssignmentGroupByCourseIdAndIdLoader, AssignmentWeightConsiderationByCourseIdLoader, \ UserDefaultSelectionsByCourseIdAndUserLoader, UserDefaultSelectionByCourseIdAndUserAndViewTypeLoader, \ AcademicTermByIdLoader -from graphql_core_promise import PromiseExecutionContext from django.db.models import Q from dashboard.models import User from pinax.eventlog.models import log as eventlog diff --git a/dashboard/management/commands/course.py b/dashboard/management/commands/course.py index 97184a9d..881e9974 100644 --- a/dashboard/management/commands/course.py +++ b/dashboard/management/commands/course.py @@ -2,7 +2,7 @@ from dashboard.models import Course, CourseViewOption, AcademicTerms from dashboard.common.db_util import canvas_id_to_incremented_id from datetime import datetime -from zoneinfo import ZoneInfo +from zoneinfo import ZoneInfo class Command(BaseCommand): def add_arguments(self, parser): diff --git a/dashboard/management/commands/term.py b/dashboard/management/commands/term.py index ba9ffd7d..a9e08c64 100644 --- a/dashboard/management/commands/term.py +++ b/dashboard/management/commands/term.py @@ -2,7 +2,7 @@ from dashboard.models import AcademicTerms from dashboard.common.db_util import canvas_id_to_incremented_id from datetime import datetime -from zoneinfo import ZoneInfo +from zoneinfo import ZoneInfo class Command(BaseCommand): def 
add_arguments(self, parser): diff --git a/dashboard/models.py b/dashboard/models.py index 39b3fb39..e669d876 100644 --- a/dashboard/models.py +++ b/dashboard/models.py @@ -11,7 +11,7 @@ from datetime import datetime, timedelta from typing import Optional, Union -from zoneinfo import ZoneInfo +from zoneinfo import ZoneInfo from django.conf import settings from django.core.exceptions import ObjectDoesNotExist from django.db import models From ca01926cd8de9537ed3c7edf31d2288e20e641f6 Mon Sep 17 00:00:00 2001 From: Pushyami Gundala Date: Mon, 18 Dec 2023 11:19:36 -0500 Subject: [PATCH 9/9] Code review changes --- docs/CONTRIBUTING.md | 4 ++++ docs/github_actions.md | 2 ++ docs/loading_data.md | 6 ------ 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 44977680..f0390fe7 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -172,3 +172,7 @@ When prompted, specify the password for the root MySQL user. It should be found in the `MYSQL.ROOT_PASSWORD` property of `env.hjson`. [Next: Contributors](../docs/CONTRIBUTORS.md) + +### Dependency Upgrade + + For Auto-upgrade django to future version, use the helpful library called [django-upgrade](https://github.com/adamchainz/django-upgrade ) diff --git a/docs/github_actions.md b/docs/github_actions.md index 064cba01..e53f9500 100644 --- a/docs/github_actions.md +++ b/docs/github_actions.md @@ -8,3 +8,5 @@ oc tag ghcr.io/tl-its-umich-edu/my-learning-analytics:latest my-learning-analytics:latest --scheduled --reference-policy=local ``` See the OpenShift documentation "[Managing image streams: Configuring periodic importing of image stream tags](https://docs.openshift.com/container-platform/4.11/openshift_images/image-streams-manage.html#images-imagestream-import_image-streams-managing)" for details. 
+ + `reference-policy=local` : Instructs OpenShift Container Platform to always fetch the tagged image from the [integrated registry](https://docs.openshift.com/container-platform/4.11/openshift_images/managing_images/tagging-images.html#images-add-tags-to-imagestreams_tagging-images) diff --git a/docs/loading_data.md b/docs/loading_data.md index 3def539f..b9336800 100644 --- a/docs/loading_data.md +++ b/docs/loading_data.md @@ -18,12 +18,6 @@ docker exec -it student_dashboard /bin/bash -c \ After about 30 to 60 seconds, the cron job should have completed and you should have data! In the admin interface, there is a table where you can check the status of the cron job runs. -Debugging cron use the following command. Once you run this command then go to VSCode and start the 'MyLA Docker Cron'. Add some -Breakpoint to debug at desired location. -```sh -docker exec -it student_dashboard /bin/bash -c "DEBUGPY_WAIT_FOR_ATTACH=True DEBUGPY_ENABLE=TRUE DEBUGPY_REMOTE_PORT=3001 ./manage_debugpy.py runcrons --force" -``` - ### Cron scheduling for deployment > **Note:** Cron scheduling functionality settings may be removed in the future,