def insert_lms_section_deleted(connection: Connection, sis_identifier: str, source_system: str): connection.execute(f""" insert into lms.lmssection (sourcesystemidentifier, sourcesystem, sissectionidentifier, title, sourcecreatedate, sourcelastmodifieddate, createdate, lastmodifieddate, deletedat) values ('{sis_identifier}' ,'{source_system}' ,'{sis_identifier}' ,'test section deleted' ,now() ,now() ,now() ,now() ,now() ) """)
def insert_lms_user_deleted( connection: Connection, sis_identifier: str, email_address: str, source_system: str ): connection.execute( f""" INSERT INTO [lms].[LMSUser] ([SourceSystemIdentifier] ,[SourceSystem] ,[UserRole] ,[SISUserIdentifier] ,[LocalUserIdentifier] ,[Name] ,[EmailAddress] ,[SourceCreateDate] ,[SourceLastModifiedDate] ,[CreateDate] ,[LastModifiedDate] ,[DeletedAt]) VALUES (N'{sis_identifier}' ,N'{source_system}' ,N'{USER_ROLE}' ,N'{sis_identifier}1' ,N'{sis_identifier}2' ,N'{sis_identifier}3' ,N'{email_address}' ,NULL ,NULL ,CAST(N'2021-01-01 00:00:00' AS DateTime) ,CAST(N'2021-01-01 00:00:00' AS DateTime) ,CAST(N'2021-01-02 00:00:00' AS DateTime) ) """ )
def insert_edfi_section_association(connection: Connection, section_identifier: str, student_id: str): connection.execute(f""" insert into edfi.studentsectionassociation ( id, begindate, localcoursecode, schoolid, schoolyear, sectionidentifier, sessionname, studentusi) select (select md5(random()::text || random()::text)::uuid), now() begindate, localcoursecode, schoolid, schoolyear, sectionidentifier, sessionname, (select studentusi from edfi.student where studentuniqueid = '{student_id}' limit 1) as studentusi from edfi.section where sectionidentifier = '{section_identifier}' limit 1 """)
def insert_edfi_student_electronic_mail( connection: Connection, student_usi: int, email_address: str, ): connection.execute( f""" INSERT INTO [edfi].[StudentEducationOrganizationAssociationElectronicMail] ([EducationOrganizationId] ,[ElectronicMailTypeDescriptorId] ,[StudentUSI] ,[ElectronicMailAddress] ,[PrimaryEmailAddressIndicator] ,[DoNotPublishIndicator] ,[CreateDate]) VALUES (1 ,1 ,{student_usi} ,N'{email_address}' ,NULL ,NULL ,CAST(N'2021-01-01 00:00:00' AS DateTime) ) """ )
def insert_edfi_section_association( connection: Connection, section_identifier: str, student_id: str): connection.execute( f""" insert into edfi.StudentSectionAssociation ( BeginDate, LocalCourseCode, SchoolId, SchoolYear, SectionIdentifier, SessionName, StudentUSI) select top 1 GETDATE() BeginDate, localcoursecode, SchoolId, schoolyear, sectionidentifier, sessionname, (select top 1 studentUSI from edfi.student where StudentUniqueId = N'{student_id}') as StudentUSI from edfi.section WHERE SectionIdentifier = N'{section_identifier}' """)
def fetch(query: str, conn: pyodbc.Connection, chunksize: Optional[int] = None):
    """
    Run query and fetch results.

    Args:
        query (str): SQL statement
        conn (pyodbc.Connection): query engine connection object
        chunksize (int): chunk size passed to _fetch_many; fetch everything
            at once when None

    Returns:
        results: all rows (fetchall) when chunksize is None, otherwise
            whatever _fetch_many yields for the cursor

    Raises:
        Exception: wrapping the driver error, with the failed query appended.
    """
    try:
        curr = conn.cursor().execute(query)
    except Exception as e:
        msg = str(e) + '----' + 'The failed query: {query}'.format(query=query)
        # Chain the original driver error so the traceback keeps its cause.
        raise Exception(msg) from e
    # Bug fix: the original had `finally: if conn is None: conn.close()`,
    # which was dead code -- a None connection cannot be closed, and a real
    # connection was never closed.  The caller owns the connection, so no
    # close belongs here.  The cursor is deliberately left open because
    # _fetch_many may consume it lazily -- TODO confirm.
    if chunksize is None:
        results = curr.fetchall()
    else:
        results = _fetch_many(curr, chunksize)
    return results
def insert_lms_user_deleted(connection: Connection, sis_identifier: str, email_address: str, source_system: str): connection.execute(f""" insert into lms.lmsuser (sourcesystemidentifier ,sourcesystem ,userrole ,sisuseridentifier ,localuseridentifier ,name ,emailaddress ,sourcecreatedate ,sourcelastmodifieddate ,createdate ,lastmodifieddate ,deletedat) values ('{sis_identifier}' ,'{source_system}' ,'{USER_ROLE}' ,'{sis_identifier}1' ,'{sis_identifier}2' ,'{sis_identifier}3' ,'{email_address}' ,NULL ,NULL ,'2021-01-01 00:00:00' ,'2021-01-01 00:00:00' ,'2021-01-02 00:00:00' ) """)
def insert_lms_section_deleted( connection: Connection, sis_identifier: str, source_system: str ): connection.execute( f""" INSERT INTO [lms].[LMSSection] (SourceSystemIdentifier, SourceSystem, SISSectionIdentifier, Title, SourceCreateDate, SourceLastModifiedDate, CreateDate, LastModifiedDate, [DeletedAt]) VALUES (N'{sis_identifier}' ,N'{source_system}' ,N'{sis_identifier}' ,N'test section deleted' ,GETDATE() ,GETDATE() ,GETDATE() ,GETDATE() ,GETDATE() ) """ )
def insert_lms_section(connection: Connection, sis_identifier: str, source_system: str): connection.execute( f""" INSERT INTO [lms].[LMSSection] ( SourceSystemIdentifier, SourceSystem, SISSectionIdentifier, Title, SourceCreateDate, SourceLastModifiedDate, CreateDate, LastModifiedDate) VALUES (N'{sis_identifier}' ,N'{source_system}' ,N'{sis_identifier}' ,N'section title' ,CAST(N'2021-01-01 00:00:00' AS DateTime) ,CAST(N'2021-01-01 00:00:00' AS DateTime) ,CAST(N'2021-01-01 00:00:00' AS DateTime) ,CAST(N'2021-01-01 00:00:00' AS DateTime) ) """ )
def load_manifest(json_data: dict, conn: pyodbc.Connection) -> Exception:
    """
    Load manifest records into ICO.inventory.

    Args:
        json_data: manifest in a Python dictionary; expects an "items" list of
            dicts with keys vendor_id, vendor_product_code, quantity_kg
        conn: database connection object

    Returns:
        None if the load succeeds, otherwise the exception that aborted it
        (the original docstring claimed "true on success" -- it always
        returned None there).
    """
    cursor = conn.cursor()
    # Parameterized statement: the original concatenated manifest values
    # straight into the SQL text, which breaks on embedded quotes and is
    # SQL-injectable.  pyodbc uses qmark ("?") placeholders.
    sql = ("INSERT INTO ICO.inventory "
           "(vendor_id, vendor_product_code, quantity_kg, date_arrival) "
           "VALUES (?, ?, ?, ?)")
    # Arrival date is always "today" in ISO format, as before.
    mydate = date.today().strftime("%Y-%m-%d")
    try:
        items = json_data.get("items")
        for item in items:
            params = (
                item.get("vendor_id"),
                item.get("vendor_product_code"),
                item.get("quantity_kg"),
                mydate,
            )
            print("Inserting: " + sql + " " + repr(params))
            cursor.execute(sql, params)
        # Single commit after all rows, matching the original transaction shape.
        conn.commit()
    except Exception as exp:
        return exp
    return None
def insert_lmsx_assignmentsubmissionstatus_descriptor(connection: Connection, id: int):
    """Insert a SubmissionStatusDescriptor row keyed by ``id`` (PostgreSQL dialect).

    NOTE: ``id`` shadows the builtin but is kept to preserve the call interface.
    """
    # f-strings stringify automatically; the original's str(id) was redundant.
    connection.execute(f"""
    insert into lmsx.submissionstatusdescriptor
        (submissionstatusdescriptorid)
    values
        ( {id} )
    """)
def insert_lmsx_assignmentsubmissionstatus_descriptor(connection: Connection, id: int):
    """Insert a SubmissionStatusDescriptor row keyed by ``id`` (SQL Server dialect).

    NOTE: ``id`` shadows the builtin but is kept to preserve the call interface.
    """
    # f-strings stringify automatically; the original's str(id) was redundant.
    connection.execute(
        f"""
    INSERT INTO [lmsx].[SubmissionStatusDescriptor]
        (SubmissionStatusDescriptorId)
    VALUES
        ( {id} )
    """
    )
def insert_lmsx_assignmentcategory_descriptor(connection: Connection, id: int):
    """Insert an AssignmentCategoryDescriptor row keyed by ``id`` (SQL Server dialect).

    NOTE: ``id`` shadows the builtin but is kept to preserve the call interface.
    """
    # f-strings stringify automatically; the original's str(id) was redundant.
    connection.execute(
        f"""
    INSERT INTO [lmsx].[AssignmentCategoryDescriptor]
        (AssignmentCategoryDescriptorId)
    VALUES
        ( {id} )
    """
    )
def insert_lmsx_sourcesystem_descriptor(connection: Connection, id: int):
    """Insert an LMSSourceSystemDescriptor row keyed by ``id`` (SQL Server dialect).

    NOTE: ``id`` shadows the builtin but is kept to preserve the call interface.
    """
    # f-strings stringify automatically; the original's str(id) was redundant.
    connection.execute(
        f"""
    INSERT INTO [lmsx].[LMSSourceSystemDescriptor]
        (LMSSourceSystemDescriptorId)
    VALUES
        ( {id} )
    """
    )
def _run_query(query: str, conn: pyodbc.Connection) -> None:
    """
    Run query without fetching results.

    Args:
        query (str): SQL statement (example: 'select x, y from table_XY')
        conn (pyodbc.Connection): query engine connection object
    """
    cursor = conn.cursor()
    try:
        cursor.execute(query)
    finally:
        # Close the cursor explicitly; the original left it to the GC.
        cursor.close()
def insert_edfi_student_with_usi( connection: Connection, student_usi: int, id: str = "00000000-0000-0000-0000-000000000000", ): connection.execute( f""" SET IDENTITY_INSERT edfi.Student ON; INSERT INTO [edfi].[Student] ([StudentUSI] ,[PersonalTitlePrefix] ,[FirstName] ,[MiddleName] ,[LastSurname] ,[GenerationCodeSuffix] ,[MaidenName] ,[BirthDate] ,[BirthCity] ,[BirthStateAbbreviationDescriptorId] ,[BirthInternationalProvince] ,[BirthCountryDescriptorId] ,[DateEnteredUS] ,[MultipleBirthStatus] ,[BirthSexDescriptorId] ,[CitizenshipStatusDescriptorId] ,[StudentUniqueId] ,[CreateDate] ,[LastModifiedDate] ,[Id]) VALUES ({student_usi} ,NULL ,N'FirstName' ,NULL ,N'LastName' ,NULL ,NULL ,CAST(N'2010-01-01 00:00:00' AS DateTime) ,NULL ,NULL ,NULL ,NULL ,NULL ,NULL ,NULL ,NULL ,N'{student_usi}{student_usi}' ,CAST(N'2021-01-01 00:00:00' AS DateTime) ,CAST(N'2021-01-01 00:00:00' AS DateTime) ,CAST('{id}' AS UNIQUEIDENTIFIER) ); SET IDENTITY_INSERT edfi.Student OFF; """ )
def insert_lms_assignment_submissions(
    connection: Connection,
    lms_assignmen_identifier: int,  # NOTE(review): typo ("assignmen") kept -- renaming would break keyword callers
    source_system_identifier: str,
    lms_assignment_id: int,
    lms_user_identifier: int,
    submission_status: str,
    source_system: str = "Test_LMS",
    isDeleted: bool = False,
):
    """Insert an AssignmentSubmission row (SQL Server dialect) for test setup.

    Uses SET IDENTITY_INSERT so the identity value can be supplied explicitly.
    Grade comes from the module-level GRADE constant; all timestamps are
    GETDATE(), and DeletedAt is GETDATE() only when ``isDeleted`` is True.
    """
    # it is not necessary to have a different title and description since
    # both should be updated when required
    connection.execute(
        f"""
    SET IDENTITY_INSERT lms.AssignmentSubmission ON;
    INSERT INTO [lms].[AssignmentSubmission]
        (
         [AssignmentSubmissionIdentifier]
        ,[SourceSystemIdentifier]
        ,[SourceSystem]
        ,[AssignmentIdentifier]
        ,[LMSUserIdentifier]
        ,[SubmissionStatus]
        ,[SubmissionDateTime]
        ,[EarnedPoints]
        ,[Grade]
        ,[SourceCreateDate]
        ,[SourceLastModifiedDate]
        ,[CreateDate]
        ,[LastModifiedDate]
        ,[DeletedAt]
        )
    VALUES
        (
        {lms_assignmen_identifier},
        N'{source_system_identifier}',
        N'{source_system}',
        {lms_assignment_id},
        {lms_user_identifier},
        N'{submission_status}',
        GETDATE(),
        0,
        N'{GRADE}',
        GETDATE(),
        GETDATE(),
        GETDATE(),
        GETDATE(),
        {'GETDATE()' if isDeleted else 'NULL'}
        );
    SET IDENTITY_INSERT lms.AssignmentSubmission OFF;
    """
    )
def transfer_data(source_db_cursor: pyodbc.Cursor, dest_db_cursor: pyodbc.Cursor, dest_db_conn: pyodbc.Connection):
    '''
    Copies every row of the source ``users`` table into the destination
    database via the module-level SQL_INSERT_DATA statement, committing once
    at the end.
    '''
    print('Extracting users data from source database.')
    source_db_cursor.execute('SELECT * FROM users')
    user_records = source_db_cursor.fetchall()

    print('Transferring users data to destination database.')
    for user in user_records:
        dest_db_cursor.execute(SQL_INSERT_DATA, (user.id, user.name, user.city))

    print(f'{len(user_records)} rows transferred\n')
    # Single commit keeps the transfer atomic on the destination side.
    dest_db_conn.commit()
def insert_edfi_student_with_usi( connection: Connection, student_usi: int, id: str = "00000000-0000-0000-0000-000000000000", ): connection.execute(f""" insert into edfi.student (studentusi ,personaltitleprefix ,firstname ,middlename ,lastsurname ,generationcodesuffix ,maidenname ,birthdate ,birthcity ,birthstateabbreviationdescriptorid ,birthinternationalprovince ,birthcountrydescriptorid ,dateenteredus ,multiplebirthstatus ,birthsexdescriptorid ,citizenshipstatusdescriptorid ,studentuniqueid ,createdate ,lastmodifieddate ,id) overriding system value values ({student_usi} ,NULL ,'FirstName' ,NULL ,'LastName' ,NULL ,NULL ,'2010-01-01 00:00:00' ,NULL ,NULL ,NULL ,NULL ,NULL ,NULL ,NULL ,NULL ,'{student_usi}{student_usi}' ,'2021-01-01 00:00:00' ,'2021-01-01 00:00:00' ,'{id}' ); """)
def insert_lms_assignment_submissions(
    connection: Connection,
    lms_assignment_identifier: int,
    source_system_identifier: str,
    lms_assignment_id: int,
    lms_user_identifier: int,
    submission_status: str,
    source_system: str = "Test_LMS",
    isDeleted: bool = False,
):
    """Insert an assignmentsubmission row (PostgreSQL dialect) for test setup.

    Uses OVERRIDING SYSTEM VALUE so the identity value can be supplied
    explicitly.  grade comes from the module-level GRADE constant; all
    timestamps are now(), and deletedat is now() only when ``isDeleted``
    is True.
    """
    # it is not necessary to have a different title and description since
    # both should be updated when required
    connection.execute(f"""
    insert into lms.assignmentsubmission
        (
         assignmentsubmissionidentifier
        ,sourcesystemidentifier
        ,sourcesystem
        ,assignmentidentifier
        ,lmsuseridentifier
        ,submissionstatus
        ,submissiondatetime
        ,earnedpoints
        ,grade
        ,sourcecreatedate
        ,sourcelastmodifieddate
        ,createdate
        ,lastmodifieddate
        ,deletedat
        )
    overriding system value
    values
        (
        {lms_assignment_identifier},
        '{source_system_identifier}',
        '{source_system}',
        {lms_assignment_id},
        {lms_user_identifier},
        '{submission_status}',
        now(),
        0,
        '{GRADE}',
        now(),
        now(),
        now(),
        now(),
        {'now()' if isDeleted else 'NULL'}
        );
    """)
def get_ims_notifications(conn: pyodbc.Connection) -> List[dict]:
    """Fetch IMS notification rows joined with tag and model-element limit data.

    Returns one dict per row, keyed by the cursor's column names.
    """
    query = """
    SELECT n.CreationDate, n.RequiredEndDate, n.NotificationNumber,
           i.ID as IMSTag_ID, i.tag, mel.*
    FROM TechnicalInfoTag as t,
         TechnicalInfoTag_IMSTag as ti,
         Notification as n,
         IMSTag i,
         ModelElement me
         left join aas.vModelElementLimitPivot mel on me.ID = mel.ModelElement_ID
    where t.ID = ti.TechnicalInfoTag_ID
      and ti.IMSTag_ID = i.ID
      and t.ID = n.TechnicalInfoTag_ID
      and i.ID = me.IMSTag_ID
      and (n.FailureImpact IN ('D','S') or NOTIFICATIONTYPE IN ('M2','M3','M4'))
    order by n.NotificationNumber desc
    """
    cur = conn.cursor()
    cur.execute(query)
    # Column headers come from the cursor metadata so the dict keys always
    # match whatever the view exposes (including the mel.* columns).
    headers = [column[0] for column in cur.description]
    notifications = [dict(zip(headers, record)) for record in cur.fetchall()]
    cur.close()
    return notifications
def get_table_row_counts(self, conn: pyodbc.Connection) -> Dict:
    """Return a table_name -> row_count mapping for the configured source."""
    query = self.get_table_row_counts_qry(self.source)
    with conn.cursor() as cursor:
        fetched = cursor.execute(query).fetchall()
        headers = [col[0] for col in cursor.description]
    records = rows_to_json(fetched, headers)
    return {record['table_name']: record['row_count'] for record in records}
def get_table_columns(self, conn: pyodbc.Connection) -> Dict:
    """Fetch table metadata for the configured source and post-process it."""
    query = self.get_table_meta_qry(self.source)
    with conn.cursor() as cursor:
        fetched = cursor.execute(query).fetchall()
        headers = [col[0] for col in cursor.description]
    records = rows_to_json(fetched, headers)
    return self.process_table_columns(records)
def get_all_locations(conn: pyodbc.Connection) -> List[Location]:
    """
    Queries the SQL database for every X, Y, latitude, longitude, address and
    UPRN under a specific constraint and creates a list of location objects
    containing each one

    Args:
        conn (pyodbc.Connection): The connection to the database to query

    Returns:
        List[Location]: A list of Location objects containing information
        about the locations returned by the SQL query
    """
    # The query text lives in a sibling .sql file (Windows-style relative path).
    with open('.\\get_all_locations_per_round.sql', 'r') as loc_query_f:
        loc_query_all = loc_query_f.read()

    cursor = conn.cursor()
    cursor.execute(loc_query_all)
    return [
        Location('{:.2f}'.format(row.x),
                 '{:.2f}'.format(row.y),
                 '{:.15f}'.format(row.lat),
                 '{:.15f}'.format(row.lng),
                 row.addr, row.street, row.town, row.postcode,
                 str(row.uprn),
                 [row.REF, row.RECY, row.MIX, row.GLASS, row.GW])
        for row in cursor.fetchall()
    ]
def route(connection: pyodbc.Connection):
    """Insert two test rows, chain a SELECT onto the same SqlChain, execute
    the chain, and return the SELECT's result."""
    insert_chain = SqlChain(
        """INSERT INTO test(id, x, ch) VALUES ('a', 1, 'a'), ('b', 2, 'b')""",
        cursor=connection.cursor())
    select_chain = insert_chain.chain("""SELECT * FROM test;""")
    insert_chain.execute()
    return select_chain.get()
def insert_descriptor(connection: Connection, namespace: str, value: str): connection.execute( f""" INSERT INTO [edfi].[Descriptor] ( [Namespace], [CodeValue], [ShortDescription], [Description]) VALUES ( N'{namespace}', N'{value}', N'{value}', N'{value}' ) """ )
def insert_descriptor(connection: Connection, namespace: str, value: str): connection.execute(f""" insert into edfi.descriptor ( namespace, codevalue, shortdescription, description, id) values ( '{namespace}', '{value}', '{value}', '{value}', (select md5(random()::text || random()::text)::uuid) ) """)
def update_dolphin_database(dbname: pyodbc.Connection, updatesql: str) -> int:
    """Execute an update statement against the Dolphin database.

    Honors the module-level DolphinTestModeEnabled flag: in test mode the SQL
    is only logged, never executed.

    Args:
        dbname: open pyodbc connection to the Dolphin database
        updatesql: SQL statement to execute

    Returns:
        0 on success (or in test mode), -1 if the update failed (the
        transaction is rolled back first).
    """
    dolphincursor = dbname.cursor()
    try:
        if DolphinTestModeEnabled:
            logging.info(
                "Dolphin Test mode is enabled. We would update Dolphin with {0}"
                .format(updatesql))
        else:
            dolphincursor.execute(updatesql)
            dbname.commit()
    except Exception as e:
        logging.error(
            "Dolphin database update failed with {0}, sql = {1}".format(
                e, updatesql))
        dbname.rollback()
        return -1
    finally:
        # Bug fix: the original returned -1 from the except branch without
        # closing the cursor, leaking it on every failed update.  finally
        # closes it on both paths.
        dolphincursor.close()
    return 0
def insert_lms_assignment(
    connection: Connection,
    source_system_identifier: str,
    source_system: str,
    section_identifier: int,
    assignment_category: str,
    title_and_description: str = "default title and description",
    past_due_date: bool = False
) -> int:
    """Insert an lms.Assignment row (SQL Server dialect) and return its identity.

    When ``past_due_date`` is True, a DueDateTime column set to one year
    before now is appended to both the column list and the VALUES list.
    Title and AssignmentDescription share the same text (see inline note).

    Returns:
        int: the @@identity value of the inserted assignment.
    """
    # it is not necessary to have a different title and description since
    # both should be updated when required
    connection.execute(
        f"""
    INSERT INTO [lms].[Assignment]
        (
        SourceSystemIdentifier,
        SourceSystem,
        LMSSectionIdentifier,
        Title,
        AssignmentCategory,
        AssignmentDescription,
        CreateDate,
        LastModifiedDate
        { ",DueDateTime" if past_due_date else "" }
        )
    VALUES
        (
        N'{source_system_identifier}',
        N'{source_system}',
        {section_identifier},
        N'{title_and_description}',
        N'{assignment_category}',
        N'{title_and_description}',
        GETDATE(),
        GETDATE()
        { ",DATEADD(year, -1, GETDATE())" if past_due_date else "" }
        )
    """
    )
    # @@identity returns the identity generated by the insert on this session.
    result: Cursor = connection.execute("SELECT @@identity")
    return int(result.fetchone()[0])
def set_up_db(con: pyodbc.Connection, /) -> None:
    """Create the fixture schema, then refresh planner statistics on the
    three core tables."""
    print("Setting up db...")
    script = pathlib.Path(__file__).parent / "fixtures" / "setup_db.sql"
    run_queries_in_file(con=con, fp=script)
    for analyze_stmt in (
        "ANALYZE hr.employee;",
        "ANALYZE sales.customer;",
        "ANALYZE sales.employee_customer;",
    ):
        con.execute(analyze_stmt)