def __init__(self, name):
    """Initialise the application: record paths and name, start logging,
    then initialise the underlying wxApp and load the window icon."""
    # NOTE(review): self.basepath and self.logfile are read but never set in
    # this method — presumably class attributes of the enclosing class; confirm.
    self.path = self.basepath
    self.name = name
    # Logger is created before wxApp.__init__ runs.
    self.logger = init_logger(self.logfile)
    # redirect=False: wx does not capture stdout/stderr into its own window.
    wxApp.__init__(self, redirect=False, filename=self.logfile)
    self.AppName = name
    self.AppDisplayName = name
    # Icon is loaded via the app's own resource lookup helper.
    self.icon = wx.Icon(self.get_resource('Icon.ico'))
def main(start_date="", end_date="", start_days_before=0, end_days_before=0):
    """Fetch and update ad and shop reports for every day in a date range.

    @param start_date: Range start as "YYYY/MM/DD"; today (JST) if empty.
    @param end_date: Range end as "YYYY/MM/DD"; today (JST) if empty.
    @param start_days_before: Days to shift the start date back.
    @param end_days_before: Days to shift the end date back.
    """
    config = ConfigLoader("config.yaml").load()
    init_logger(config["LOGGER"])
    today = datetime.datetime.now(JST).date()
    # Parse each bound independently (the original crashed when start_date
    # was given but end_date was empty), and normalise strptime's datetime
    # to a date so both branches yield the same type downstream.
    if start_date:
        start_date = datetime.datetime.strptime(start_date, "%Y/%m/%d").date()
    else:
        start_date = today
    if end_date:
        end_date = datetime.datetime.strptime(end_date, "%Y/%m/%d").date()
    else:
        end_date = today
    start_date -= datetime.timedelta(days=start_days_before)
    end_date -= datetime.timedelta(days=end_days_before)
    # Walk the range one day at a time, inclusive of end_date.
    while start_date <= end_date:
        logging.info("Fetch and update report ({})".format(start_date))
        fetch_and_update_ad_report(config, start_date)
        fetch_and_update_shop_report(config, start_date)
        start_date += datetime.timedelta(days=1)
def run_triggered_from_pubsub(event, context):
    """Triggered from a message on a Cloud Pub/Sub topic.

    Args:
        event (dict): Event payload.
        context (google.cloud.functions.Context): Metadata for the event.
    """
    message = base64.b64decode(event['data']).decode('utf-8')
    config = ConfigLoader(
        "config_gcf.yaml", GoogleSecretManager("hackacademy-272812")).load()
    init_logger(config["LOGGER"])
    # Default to today (JST); shift back one day when the message asks
    # for yesterday's report.
    now = datetime.datetime.now(JST)
    if message == YESTERDAY:
        now -= datetime.timedelta(days=1)
    report_date = now.date()
    logging.info("Fetch and update report ({})".format(report_date))
    fetch_and_update_ad_report(config, report_date)
    fetch_and_update_shop_report(config, report_date)
def infer_file(plan, mapping, file_type):
    """Adds AVUs to a plan object pointing at a file based on file metadata.

    @param plan: Plan object
    @param mapping: Dictionary corresponding to a list of 'mapping' entries
        in the YAML configuration file
    @param file_type: File type as defined by the 'infer' entry in the YAML
        configuration file ('variant' or 'sequence')
    @return: Modified Plan object
    @raise ValueError: If file_type is not a supported infer type."""
    log = logger.init_logger(logger.DEFAULT_LOGGER, 'Planner')
    if file_type == 'variant':
        header = inferrers.get_variant_header(plan.path)
    elif file_type == 'sequence':
        header = inferrers.get_sequence_header(plan.path)
    else:
        # Previously an unknown type fell through and raised NameError on
        # 'header' below; fail fast with a clear message instead.
        raise ValueError("Unsupported infer file type: {}".format(file_type))
    for key, map_string in mapping.items():
        target = map_string.split('.')
        target_value = header
        try:
            if '*' in target:
                target_value = _resolve_wildcard(header, target)
            else:
                # Iteratively descend the header dictionary; '?' selects
                # whichever key comes first at that level.
                for subtarget in target:
                    if subtarget == '?':
                        target_value = target_value[
                            list(target_value.keys())[0]]
                    else:
                        target_value = target_value[subtarget]
        except KeyError:
            # TODO: Abort execution? Continue after omitting bad target?
            log.warning("Metadata target {} not found in {}.".format(
                map_string, plan.path))
            continue
        # The header getters appear to return False as a "not found"
        # sentinel; use an identity check so legitimate falsy values
        # (0, '') are not discarded as well (the original used ==).
        if target_value is False:
            log.warning("Metadata target {} not found in {}.".format(
                map_string, plan.path))
            continue
        if isinstance(target_value, dict):
            try:
                target_value = _stringify_dict(target_value)
            except KeyError:
                continue
        plan.metadata.append(AVU(key, target_value))
    return plan
def verify_config(yaml_file):
    """Parse configuration file 'yaml_file'.

    Returns True if the config appears valid, or a string describing the
    issue otherwise.

    @param yaml_file: Path to the configuration file.
    @return: True if config appears valid, problem string otherwise."""
    log = logger.init_logger(logger.DEFAULT_LOGGER, 'Planner')
    log.info("Verifying configuration file...")
    with open(yaml_file) as file:
        config = safe_load(file)
    # safe_load() returns None for an empty document, so a truthiness check
    # is required here; len(None) would raise TypeError.
    if not config:
        return "Invalid configuration (empty file)"
    for entry in config.keys():
        # If it's wrapped in forward slashes (ie, '/.cram/'), the entry
        # should be a valid regular expression.
        if entry[0] == "/" and entry[-1] == "/":
            regex = entry[1:-1]
            try:
                re.compile(regex)
            except (re.error, RecursionError):
                return "Invalid pattern (regex): {}".format(entry)
        for avu in config[entry]:
            keys = avu.keys()
            if 'infer' not in keys:
                # Fixed AVU: must carry at least an attribute and a value.
                if 'attribute' not in keys:
                    return "Invalid AVU (no attribute): {}".format(entry)
                if 'value' not in keys:
                    return "Invalid AVU (no value): {}".format(entry)
                # unit field is optional, so don't check for it
            else:
                # Dynamic AVU: must carry a mapping and a known infer type.
                if 'mapping' not in keys:
                    return "Invalid dynamic AVU (no mapping): {}".format(
                        entry)
                if avu['infer'] not in VALID_INFERS:
                    return ("Invalid dynamic AVU (nonexistent infer): {}".
                            format(entry))
                for mapping in avu['mapping'].values():
                    split_map = mapping.split('.')
                    if split_map[0] == '*':
                        return ("Invalid dynamic AVU (wildcard can't " +
                                "come first)")
                    if avu['infer'] in ['sequence', 'variant']:
                        # TODO: better way to check that an index
                        # Check the length BEFORE indexing [1]: a
                        # one-segment mapping previously raised IndexError.
                        if len(split_map) == 2 and split_map[1] == '*':
                            return (
                                "Invalid dynamic AVU (variant and " +
                                "sequence files can't have a wildcard " +
                                "as the second index without a " +
                                "non-wildcard third index.): {}".format(
                                    entry))
                    if split_map.count('*') > 1:
                        return (
                            "Invalid dynamic AVU (can't have multiple " +
                            "wildcards in a single mapping): {}".format(
                                entry))
    return True
def generate_plans(catalogue, yaml_file, progress_file, resume,
                   include_collections=False):
    """Generates AVU dictionaries for iRODS objects based on the definitions
    in a config file.

    @param catalogue: Lists of iRODS paths in a dictionary
        {'objects': <list>, 'collections': <list>}
    @param yaml_file: Path to the configuration file
    @param progress_file: Path to a file of already-processed paths,
        one per line
    @param resume: If True, paths found in progress_file are skipped
    @param include_collections: If False, only data objects will be returned
    @return: Plan objects, as a generator"""
    log = logger.init_logger(logger.DEFAULT_LOGGER, "Planner")
    valid = verify_config(yaml_file)
    if valid is not True:
        log.error("Configuration file error:\n\t{}".format(valid))
        exit(1)
    with open(yaml_file) as file:
        config = safe_load(file)
    # Work on copies so the caller's catalogue lists are never mutated
    # (the original aliased and then rewrote them in the resume branch).
    _catalogue = {'objects': list(catalogue['objects'])}
    if include_collections:
        _catalogue['collections'] = list(catalogue['collections'])
    if resume:
        print("Resuming from progress file...")
        with open(progress_file, 'rt') as f:
            already_done = set(line.strip() for line in f)
        for object_type in _catalogue:
            _catalogue[object_type] = list(
                set(_catalogue[object_type]) - already_done)
    for object_type in _catalogue.keys():
        for path in _catalogue[object_type]:
            plan_object = Plan(path, object_type == 'collections', [])
            print("Planning AVUs for {}...".format(path))
            # Patterns are applied in reverse config order. Prior to Python
            # 3.7, dictionaries did not have an enforced persistent order,
            # so this might not work properly in older versions.
            for pattern in reversed(list(config.keys())):
                if pattern[0] == "/" and pattern[-1] == "/":
                    if not re.search(pattern[1:-1], path):
                        # regex pattern didn't match
                        continue
                else:
                    if not fnmatch.fnmatch(path, pattern):
                        # glob pattern didn't match
                        continue
                for entry in config[pattern]:
                    if 'attribute' in entry.keys():
                        # Fixed AVUs ('unit' is optional, defaults to None)
                        plan_object.metadata.append(AVU(
                            entry['attribute'], entry['value'],
                            entry.get('unit')))
                    elif 'infer' in entry.keys():
                        # Dynamic AVUs
                        if entry['infer'] in ('variant', 'sequence'):
                            plan_object = infer_file(
                                plan_object, entry['mapping'],
                                entry['infer'])
            yield plan_object
# Module initialisation: imports, forced module reloads, and service startup.
# NOTE(review): builtin reload() implies this is Python 2 code.
from core import scrapertools
from core import filetools
from core.item import *
from platformcode import platformtools
from core import httptools
from core.mediainfo import MediaInfo
from core import mediainfo
from core import bbdd

# Reloads
# Force re-initialisation of already-imported modules so a restart picks up
# fresh state. NOTE(review): logger, settings, platformsettings, jsontools,
# servertools, moduletools and sysinfo are not imported in this span —
# presumably imported earlier in the file; confirm.
reload(logger)
reload(settings)
reload(platformsettings)
reload(platformtools)
reload(jsontools)
reload(servertools)
reload(moduletools)
reload(httptools)
reload(scrapertools)
reload(filetools)
reload(bbdd)

# Inits
logger.init_logger()
settings.check_directories()
httptools.load_cookies()
# Second element of get_profile() is kept as the active profile.
sysinfo.profile = platformtools.get_profile()[1]
# Necessary to initialise strptime (original comment in Spanish); presumably
# a warm-up call to avoid a first-use strptime failure — confirm.
dump = datetime.datetime.strptime('20110101', '%Y%m%d')
bbdd.create()