import json
from time import time

# DatabaseHandle, app, read_script and criteria_finder are imported or
# defined elsewhere in this module.


def execute_update_usage_stats_tables(event, context):
    """
    The summary stats are produced daily as static tables. These could be
    rendered as views or materialized views, but since the data does not
    change that frequently, that would add significant processing load for
    little benefit. By regenerating the stats on a schedule we make the
    interface much faster and keep the database processing load lighter.
    """
    status = 0
    commands = []
    try:
        dbh = DatabaseHandle(app)
        scripts = app.utilities.list_files_from_path(
            "chalicelib/api/derived_stats_tables/", "sql"
        )
        for script in scripts:
            app.log.debug(f"Executing SQL from chalice/{script}")
            commands = read_script(script)
            app.log.debug(json.dumps(commands))
            dbh.execute_commands(commands, "csw")
        status = 1
    except Exception:
        app.log.error(
            "Update stats tables error: " + app.utilities.get_typed_exception()
        )
    return {"status": status, "commands": commands}

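# read_script is not defined in this module. A minimal sketch of what such
# a helper might look like, assuming each .sql script holds semicolon-
# separated statements (the splitting convention is an assumption, not
# confirmed by this file):
def _read_script_sketch(script_path):
    with open(script_path, "r") as script_file:
        content = script_file.read()
    # Drop empty fragments left by trailing semicolons or blank lines.
    return [
        statement.strip()
        for statement in content.split(";")
        if statement.strip()
    ]
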
def database_create_items(event, context):
    try:
        dbh = DatabaseHandle(app)
        created = dbh.create_items(event)
        json_data = app.utilities.to_json(created)
    except Exception as err:
        app.log.error(str(err))
        json_data = None
    return json_data

def database_add_new_criteria(event, context):
    try:
        dbh = DatabaseHandle(app)
        for criterion in criteria_finder():
            app.log.debug(f"Updating criterion: {criterion}")
            dbh.create_or_update_criterion({"criterion_name": criterion})
    except Exception as err:
        app.log.error(str(err))
    return None

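# criteria_finder is not defined in this module either. A hypothetical
# sketch of what such a generator could do, assuming each criterion lives
# as a Python module under chalicelib/criteria/ (the path and naming
# scheme are assumptions, not confirmed by this file):
def _criteria_finder_sketch():
    import os

    criteria_dir = os.path.join("chalicelib", "criteria")
    for file_name in sorted(os.listdir(criteria_dir)):
        if file_name.endswith(".py") and file_name != "__init__.py":
            # Yield the module name without its .py extension.
            yield file_name[:-3]
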
def database_list_models(event, context):
    try:
        dbh = DatabaseHandle(app)
        models = dbh.get_models()
        tables = models.keys()
    except Exception as err:
        app.log.error(str(err))
        tables = []
    return list(tables)

def database_run(event, context):
    try:
        dbh = DatabaseHandle(app)
        dbh.set_credentials(event["User"], event["Password"])
        status = dbh.execute_commands(event["Commands"])
    except Exception as err:
        app.log.error(str(err))
        status = False
    return status

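# Illustrative invocation of database_run; the user, password and SQL
# below are placeholder values, not real credentials:
#
#   database_run(
#       {
#           "User": "csw_admin",
#           "Password": "<password from secrets manager>",
#           "Commands": ["SELECT 1;"],
#       },
#       None,
#   )
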
def database_create_item(event, context):
    try:
        dbh = DatabaseHandle(app)
        item = dbh.create_item(event)
        data = item.serialize()
        json_data = app.utilities.to_json(data)
    except Exception as err:
        app.log.error(str(err))
        json_data = None
    return json_data

def database_get_item(event, context):
    app.log.debug("database_get_item function")
    try:
        dbh = DatabaseHandle(app)
        item = dbh.get_item(event)
        data = item.serialize()
        json_data = app.utilities.to_json(data)
    except Exception as err:
        app.log.error(str(err))
        json_data = None
    return json_data

def database_create_tables(event, context):
    try:
        dbh = DatabaseHandle(app)
        table_list = []
        message = ""
        for table_name in event["Tables"]:
            model = dbh.get_model(table_name)
            table_list.append(model)
        created = dbh.create_tables(table_list)
    except Exception as err:
        app.log.error(str(err))
        created = False
        message = str(err)
    if created:
        response = ", ".join(event["Tables"])
    else:
        response = f"Table create failed: {message}"
    return response

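# Illustrative payload for database_create_tables, using table names that
# appear elsewhere in this module; the exact model names registered with
# DatabaseHandle are an assumption:
#
#   database_create_tables(
#       {"Tables": ["account_audit", "audit_resource"]}, None
#   )
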
def database_get_all_items(event, context):
    app.log.debug("database_get_all_items function")
    try:
        data = []
        n = 0
        dbh = DatabaseHandle(app)
        db = dbh.get_handle()
        db.connect()
        model = dbh.get_model(event["Model"])
        for item in model.select():
            serialised_record = item.serialize()
            data.append(serialised_record)
            n += 1
            app.log.debug(str(serialised_record))
        app.log.debug(f"database_get_all_items fetched {n} records")
        db.close()
        json_data = app.utilities.to_json(data)
    except Exception as err:
        app.log.error(str(err))
        json_data = None
    return json_data

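# Illustrative call; "AccountAudit" is an assumed model name, since the
# names registered with DatabaseHandle are not visible in this module:
#
#   database_get_all_items({"Model": "AccountAudit"}, None)
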
def delete_expired_audits():
    """
    Delete audits older than the retention period in small batches.
    For each expired audit the child rows (resource compliance, audit
    resources, audit criteria and the latest-audit pointer) are deleted
    before the parent account_audit record, so foreign key constraints
    stay satisfied. The loop stops before the lambda would time out and
    reports how many audits were deleted.
    """
    start_time = time()
    elapsed_time = 0
    status = 0
    deleted_audits = 0
    remove_count = 100
    # keep 6 months
    time_limit_days = 183
    # run for 4 minutes (the lambda timeout is set to 5)
    execution_limit = 240
    try:
        dbh = DatabaseHandle(app)
        db = dbh.get_handle()
        app.log.debug("Listing oldest records")
        select_expired_audit_ids = f"""
            SELECT id
            FROM public.account_audit AS all_audits
            WHERE DATE_PART('day', CURRENT_TIMESTAMP - all_audits.date_updated) > {time_limit_days}
            ORDER BY id
            LIMIT {remove_count} OFFSET 0;
        """
        audit_cursor = db.execute_sql(select_expired_audit_ids)
        for audit_row in audit_cursor.fetchall():
            # Reset the batch for each audit so earlier deletes are not
            # re-executed on every iteration.
            delete_statements = []
            account_audit_id = audit_row[0]
            app.log.debug(f"Deleting audit: {account_audit_id}")
            delete_compliance = f"""
                DELETE FROM public.resource_compliance
                WHERE audit_resource_id IN (
                    SELECT id
                    FROM public.audit_resource
                    WHERE account_audit_id = {account_audit_id}
                );
            """
            delete_statements.append(delete_compliance)
            delete_resource = f"""
                DELETE FROM public.audit_resource
                WHERE account_audit_id = {account_audit_id};
            """
            delete_statements.append(delete_resource)
            delete_criterion = f"""
                DELETE FROM public.audit_criterion
                WHERE account_audit_id = {account_audit_id};
            """
            delete_statements.append(delete_criterion)
            delete_latest_audit = f"""
                DELETE FROM public.account_latest_audit
                WHERE account_audit_id = {account_audit_id};
            """
            delete_statements.append(delete_latest_audit)
            delete_audit = f"""
                DELETE FROM public.account_audit
                WHERE id = {account_audit_id};
            """
            delete_statements.append(delete_audit)
            app.log.debug(f"Running {len(delete_statements)} deletes")
            dbh.execute_commands(delete_statements, "csw")
            deleted_audits += 1
            app.log.debug(f"Deleted {deleted_audits} audits so far")
            elapsed_time = time() - start_time
            if elapsed_time > execution_limit:
                break
        status = 1
    except Exception as err:
        status = 0
        app.log.error(str(err))
    return {"status": status, "deleted_audits": deleted_audits}

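# Hypothetical wiring sketch: this cleanup is presumably triggered on a
# schedule. In a Chalice app that would look roughly like the following;
# the rate and the wrapper name are assumptions, not confirmed here:
#
#   from chalice import Rate
#
#   @app.schedule(Rate(1, unit=Rate.DAYS))
#   def scheduled_delete_expired_audits(event):
#       return delete_expired_audits()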