def __init__(self):
    """Generic initialization code."""
    # session folder (create if missing)
    self.session_folder = '../sessions'
    create_folder(self.session_folder)

    # log folder (create if missing)
    self.log_folder = f'{self.session_folder}/logs'
    create_folder(self.log_folder)

    # work folder (create and clear)
    self.work_folder = f'{self.session_folder}/{script_name()}/work'
    clear_folder(self.work_folder)

    # configuration engines (populated later, once config/option files are read)
    self.config = None
    self.option = None

    # database connections (populated later)
    self.database = None
    self.target_db_conn = None

    # parameter driven values
    self.dataset_id = ''
    self.table_name = ''

    # since we start logging before we read config/options we log to known path vs dataset specific path
    log_setup(log_file_name=f'{self.log_folder}/{script_name()}.log')
    log_session_info()
def run(self):
    """
    Options
    --onetime[=1] run once, then exit; use if called by an external scheduler.
    --nowait[=1] run immediately without waiting for scheduler to determine execution.
    """
    # exit if __init__ didn't find a valid project file
    if not self.project_name:
        return

    # display application banner
    app_name = script_name()
    print(f'UDP {app_name.title()} {self.project_name}')
    copyright_year = f'{now():%Y}'
    copyright_message = f'Copyright (c) 2018-{copyright_year} Alterra Mountain Company, Inc.'
    print(copyright_message)

    # make sure root sessions folder exists
    create_folder(self.session_folder)

    # since we start logging before we read config/options we log to known path vs dataset specific path
    log_setup(
        log_file_name=f'{self.session_folder}/{self.project_name}.log')
    log_session_info()

    # common setup
    self.setup()

    # application specific startup logic
    self.start()

    # scheduling behavior based on --onetime, --nowait option
    if self.option('onetime') == '1':
        # one-time run; use when this script is being called by an external scheduler
        logger.info('Option(onetime=1): executing once')
        self.main()
    else:
        if self.option('nowait') == '1':
            # no-wait option; execute immediately without waiting for scheduler to initiate
            logger.info(
                'Option(nowait=1): executing immediately, then following regular schedule'
            )
            self.main()

        # standard wait for scheduled time slot and run logic
        while True:
            self.progress_message('waiting for next job ...')
            if self.schedule.wait():
                self.main()
                if self.option('scheduled_onetime') == '1':
                    logger.info(
                        'Option(scheduled_onetime=1): ran once at first scheduled timeslot'
                    )
                    break
            else:
                break

    self.cleanup()
def main():
    """Entry point: parse args, load config, and initialize logging.

    Any failure during setup is logged (with traceback) rather than
    propagated, so a scheduler invoking this script sees a clean exit.
    """
    # NOTE(review): package_start_time appears unused here; presumably
    # consumed by code past this function — confirm before removing.
    package_start_time = datetime.now()
    try:
        arg_val = get_args()
        conf.set_up('GENERAL', arg_val.env)
        log_setup('testlog')
    except Exception:
        # BUG FIX: logging.error() with no arguments raises
        # TypeError (missing required 'msg'); logging.exception()
        # logs the message AND the active traceback.
        logging.exception('setup failed')
def test():
    """Smoke-test harness: bring up logging, load the standard config set, run setup."""
    # activate logging
    log_setup()
    log_session_info()

    # load standard config, layering each file on top of the last
    config = ConfigSectionKey("../conf", "../local")
    config.load("bootstrap.ini", "bootstrap")
    config.load("init.ini")
    config.load("connect.ini")

    setup(config)
def run(self, *args, **kwargs):
    """
    Options
    --onetime[=1] run once, then exit; use if called by an external scheduler.
    --nowait[=1] run immediately without waiting for scheduler to determine execution.
    """
    # DOC FIX: docstring previously said --nonwait; the code checks option 'nowait'.

    # make sure root sessions folder exists
    create_folder('../sessions')

    # TODO: We start logging before we read config and options so we don't know datapool or anything else.
    # TODO: We should log to a default app log and then SWITCH LOG file over after we process options and
    # TODO: and config files ??? (2018-09-25)
    log_setup(log_file_name=f'../sessions/{script_name()}.log')
    log_session_info()

    self.setup(*args, **kwargs)
    self.start()

    # scheduling behavior based on --onetime, --nowait option
    if self.option('onetime') == '1':
        # one-time run; use when this script is being called by an external scheduler
        logger.info('Option(onetime=1): executing once')
        self.main()
    else:
        if self.option('nowait') == '1':
            # no-wait option; execute immediately without waiting for scheduler to initiate
            logger.info('Option(nowait=1): executing immediately, then following regular schedule')
            self.main()

        # standard wait for scheduled time slot and run logic
        while True:
            if self.schedule.wait():
                self.main()
            else:
                break

    self.cleanup()
enter record in the database move file to /queries folder send message to SQS """ import logging import json import boto3 import os import traceback from datetime import datetime from botocore.exceptions import ClientError from common import S3_BUCKET, DB_NAME, SQS_URL, log_setup log_setup() S3 = boto3.client('s3') SQS = boto3.client('sqs') DYNAMO = boto3.client('dynamodb') def lambda_handler(event, context): logging.info("event= {}".format(json.dumps(event))) # default value, for error logging metadata = {'guid': str(datetime.utcnow())} try: record = event['Records'][0] s3_info = record['s3'] bucket = s3_info['bucket']['name'] key = s3_info['object']['key']
import boto3 import json import string import secrets import logging from common import log_setup, response, to_json, to_dynamo, to_dynamo_update from botocore.exceptions import ClientError LOG = log_setup() class BaseModel: singleton = None id_size = 8 # length of generated ID token def __init__(self): self.dynamo = boto3.client('dynamodb') @classmethod def get_singleton(cls): """ :return: an instance of the model class This is a singleton - will re-use the existing instance if one exists. """ if not cls.singleton: cls.singleton = cls() return cls.singleton def validate_for_create(self, item): """ If item is valid, return it with is_error=False
"dataVersion": "", "metadataVersion": "1" } json_queue_message = json.dumps(queue_message) # response = queue.get() # notification = ObjectstoreNotification(response) queue.put(json_queue_message) response = queue.get() notification = ObjectstoreNotification(response) queue.delete(notification) """ while True: time.sleep(1) response = queue.get() if response: notification = ObjectstoreNotification(response) queue.delete(notification) logger.info(f'Test mode: notification message = {notification}') else: break """ if __name__ == '__main__': log_setup(log_level=logging.INFO) log_session_info() main()
def main():
    """Configure default logging, announce startup, and run the test harness."""
    # default and custom basicConfig()
    log_setup()
    log_session_info()

    logger.info('Started')
    test()
key_vault.list() except KeyVaultException as e: logger.debug(f'Expected exception: {e}') with KeyVault(key_vault_name, key_vault_password) as key_vault: # key_vault.clear() key_vault.set('AMP_database_password', '$amp-password$') key_vault.set('RTP_DATABASE_PASSWORD', '$rtp-password$') key_vault.set('bye_database_password', 'bad-password$') key_vault.list() key_vault.delete('bye_database_password') key_vault.delete('bad_secret_name') key_vault.get('amp-database-PASSWORD') key_vault.get('$rtp-database-password$') key_vault.list() # test JIT secret expansion secret = Secret(key_vault_name, key_vault_password) secret('$amp_password$') secret(f'{secret_prefix}:amp-DATABASE-password$$$') secret(f'{secret_prefix}:@rtp-DATABASE_password') secret(f'{secret_prefix}:@USER-NAME:') secret(f'{secret_prefix}:bad_secret_name') # test code if __name__ == '__main__': log_setup(log_level=logging.DEBUG) log_session_info() test()
# common
from common import log_setup
from common import log_session_info

# udp classes
from config import ConfigSectionKey

# module level logger
logger = logging.getLogger(__name__)


# temp test harness ...


# test code
def main():
    """Load the capture project config and dump each table:* section."""
    config = ConfigSectionKey("../conf", "../local")
    config.load("project_capture_amc_rtp_sales.ini")
    for table_name in config.keys("table:*"):
        table = config(table_name)
        table.dump(dump_blank_values=False)


# test code
if __name__ == "__main__":
    log_setup(log_level=logging.WARNING)
    log_session_info()
    main()
def main(): #Call like this: python parse.py <filepath/filename> <lineno_to _start_from> common.log_setup() [infile,start_line]=cmd_args_parse()