def job():
    """Run one scrape pass, never letting an exception escape to the scheduler.

    APScheduler would mark the job as errored if an exception propagated;
    instead we log it (with traceback) and carry on to the next scheduled run.
    """
    logger = get_logger()
    logger.info('Starting scrape job')
    try:
        scrape(logger)
    except Exception as e:
        # Deliberate broad catch at this top-level boundary: a failed run must
        # not kill the scheduler. logger.exception records the full traceback,
        # which the previous logger.error(...) call silently dropped.
        logger.exception("Exception caught inside job %s", e)
    logger.info('Ending scrape job')
def __init__(self, client: Client, data: dict = None, page_id: str = None):
    """Wrap a Notion page, either from already-fetched data or by page id.

    Exactly one of *data* / *page_id* must be truthy; otherwise a ValueError
    is raised (and logged) because there is nothing to build the page from.
    """
    self.client = client
    self.logger = custom_logging.get_logger(__name__)

    # Guard clause: bail out early when neither source was supplied.
    if not (data or page_id):
        msg = 'Either data or page_id should be provided.'
        self.logger.error(msg)
        raise ValueError(msg)

    # Prefer the pre-fetched payload; fall back to an API retrieval.
    self.data = data if data else self.client.pages.retrieve(page_id=page_id)
def __init__(self, **kwargs):
    """Initialize bookkeeping: query counter, logger, and the list of
    keyword names that configure the attack's structure (as opposed to
    per-call options)."""
    self.logger = get_logger(__name__)
    self.queries = 0
    # Keyword arguments that shape the attack itself rather than a single run.
    self.structural_kwargs = [
        'stepsize_search',
        'clip_min',
        'clip_max',
        'constraint',
        'num_iterations',
        'initial_num_evals',
        'max_num_evals',
        'batch_size',
        'verbose',
        'gamma',
    ]
def run_repeaters():
    """Fetch every active task repeater from the Notion database and execute it."""
    logger = custom_logging.get_logger(__name__)
    client = Client(auth=NOTION_API_TOKEN, logger=logger)

    # Only repeaters whose "Active" checkbox is ticked.
    active_only = {
        "property": "Active",
        "checkbox": {
            "equals": True
        }
    }
    response = client.databases.query(TASK_REPEATER_DATABASE_ID, filter=active_only)

    # Build all repeater objects first, then run them one by one.
    repeaters = [TaskRepeater(client, data=entry) for entry in response['results']]
    for repeater in repeaters:
        repeater.execute()
def legacy_unsupported_fields_update(template_url, task_url):
    """Copy fields the official API can't set (files, icon, cover) from a
    template page to a task page via the legacy token_v2 client.

    Best-effort: any failure is logged with its traceback and swallowed.
    """
    logger = custom_logging.get_logger(__name__)
    try:
        logger.info('Authentication using `TOKEN_V2`')
        client = NotionClient(token_v2=LEGACY_NOTION_TOKEN_V2)

        logger.info('Getting a template page')
        template = client.get_block(template_url)
        logger.debug(template)

        logger.info('Getting a task page')
        task = client.get_block(task_url)
        logger.debug(task)

        # Mirror each legacy-only field onto the task when the template has it.
        for field in ('files', 'icon', 'cover'):
            value = getattr(template, field)
            if value:
                setattr(task, field, value)
    except Exception as e:
        logger.exception(e)
def main():
    """Schedule the daily scrape job and block forever to keep it running.

    Blocks the main thread with a sleep loop (required when the scheduler
    runs in background/daemonic mode) and shuts the scheduler down cleanly
    on Ctrl-C / SystemExit.
    """
    logger = get_logger()
    # Single source of truth for the schedule time; previously 3 and 30 were
    # duplicated between the log message and the cron trigger.
    hour, minute = 3, 30
    logger.info('Scrape scheduled with time: ' + str(hour) + ':' + str(minute))
    s.add_job(job, 'cron', hour=hour, minute=minute,
              timezone=pytz.timezone('US/Central'))
    # I am keeping this here because I use it anytime we need to test run the scheduler
    # schedules a job for now
    # s.add_job(job, 'date')
    s.start()
    try:
        # This is here to simulate application activity (which keeps the main thread alive).
        while True:
            sleep(2)
    except (KeyboardInterrupt, SystemExit):
        # Not strictly necessary if daemonic mode is enabled but should be done if possible
        s.shutdown()
import os import json import StringIO import re import xml.etree.ElementTree as ET import configurator_errors as err import custom_logging NS_SOAPENC = 'http://schemas.xmlsoap.org/soap/encoding/' NS_XSI = 'http://www.w3.org/2001/XMLSchema-instance' log = custom_logging.get_logger(__name__) easyconfigmap = {} with open('easyconfigmap.json', 'r') as f: parsed = json.load(f) for k, v in parsed.iteritems(): path = os.path.join('easyconfig', v) log.info('Loading easyconfig for {}: {}'.format(k, path)) with open(path, 'r') as ec: easyconfig = json.load(ec) easyconfigmap[k] = easyconfig def get_easyconfig(path): """Raise ConfiguratorUserError if not found""" if path in easyconfigmap: return easyconfigmap[path] else: raise err.ConfiguratorUserError(
def test_init_config(self):
    """The logging config must yield a usable logger for the 'als.core' name."""
    core_logger = custom_logging.get_logger('als.core')
    self.assertIsNotNone(core_logger)
def main():
    """Entry point: run all repeaters, logging (with traceback) any unhandled failure."""
    try:
        run_repeaters()
    except Exception as exc:
        # Top-level boundary: record the failure instead of crashing with a bare traceback.
        custom_logging.get_logger(__name__).exception(exc)
import os
import random
import string
from datetime import datetime

import numpy as np
import _pickle as pickle
from scipy import misc

from custom_logging import get_logger

util_logger = get_logger(__name__)

# One CSPRNG instance for the module. Previously random.SystemRandom() was
# constructed anew for every single character generated.
_SYSTEM_RANDOM = random.SystemRandom()
_UPPER_ALPHANUM = string.ascii_uppercase + string.digits


def get_timer_suffix(date_object):
    """Return *date_object* formatted as 'MMDDYY-HHMMSS' for use in filenames."""
    return date_object.strftime('%m%d%y-%H%M%S')


def get_random_upper_string(n):
    """Return a cryptographically random string of *n* uppercase letters/digits.

    https://stackoverflow.com/questions/2257441/random-string-generation-with-upper-case-letters-and-digits-in-python
    """
    return ''.join(_SYSTEM_RANDOM.choice(_UPPER_ALPHANUM) for _ in range(n))


def save_my_img(my_dir, my_name, img):
    """Save *img* as a PNG in *my_dir* with a random 8-char suffix; return its path.

    NOTE(review): scipy.misc.imsave was deprecated in SciPy 1.0 and removed in
    1.2 — this call breaks on modern SciPy; consider imageio.imwrite instead.
    """
    t_path = os.path.join(
        my_dir, '{}_{}.png'.format(my_name, get_random_upper_string(8)))
    misc.imsave(t_path, img)
    return t_path
def __init__(self, tag, model, data, save_dir):
    """Capture the run's identifying tag, model, dataset, and output directory,
    plus a module-scoped logger."""
    self.logger = get_logger(__name__)
    self.save_dir = save_dir
    self.data = data
    self.model = model
    self.tag = tag