def __init__(self):
    self.logger = get_logger(__name__)
    self.conn = DatabaseHandler.get_connection()
    self.module_store = ModulesStore()
    self.package_store = PackageStore()
    self.update_store = UpdateStore()
    self.content_set_to_db_id = self._prepare_content_set_map()
def __init__(self):
    self.logger = get_logger(__name__)
    self.downloader = FileDownloader()
    self.cvemap_store = CvemapStore()
    self.updated = False
    self.lastmodified = None
    self.tmp_directory = tempfile.mkdtemp(prefix="cvemap-")
def __init__(self, base_model_name, input_shape):
    """It initializes the base model parameters.

    Arguments:
        base_model_name {string} -- A string containing the name of a base model.
        input_shape {(int, int)} -- A tuple indicating the dimensions of model input.
    """
    #Input parameters
    self._base_model_name = base_model_name
    self._input_shape = input_shape

    #Validation
    if BaseModel.base_models.get(base_model_name) is None:
        raise ValueError(
            'Base model: {} is invalid. Supported models are: {}'.format(
                base_model_name, BaseModel.base_models.keys()))

    #Logging
    self._logger = logging.get_logger(__name__)

    #Log input parameters
    self._logger.info(
        "Input parameters:: base_model_name: %s input_shape: %s",
        self._base_model_name, self._input_shape)
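#A brief usage sketch of the constructor above. The name 'resnet' and the input
#shape are assumed values, not from the source; validation only passes if the
#name is a key in BaseModel.base_models.
model_builder = BaseModel('resnet', (224, 224, 3))

#An unsupported name raises ValueError listing BaseModel.base_models.keys().
try:
    BaseModel('not_a_model', (224, 224, 3))
except ValueError as error:
    print(error)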
def start(host: str, port: int, registry: Registry) -> None:
    """ Start the API server """
    logger = get_logger()
    app = Application(logger=logger, middlewares=[error_handler])
    app.on_response_prepare.append(_set_server_header)
    app.on_shutdown.append(_shutdown)
    app["registry"] = registry

    # Routing
    app.router.add_route("*", "/", handler.registry)
    app.router.add_route("*", "/people", handler.people)
    app.router.add_route("*", "/people/{id}", handler.person)
    app.router.add_route("*", "/people/{id}/photo", handler.photo)
    app.router.add_route("*", "/groups", handler.groups)
    app.router.add_route("*", "/groups/{id}", handler.group)

    log(f"Starting API server on http://{host}:{port}", Level.Info)

    run_app(app,
            host=host,
            port=port,
            access_log=logger,
            access_log_format="%a \"%r\" %s %b",
            print=None)
def __init__(self, label_df, image_col, label_col, output_df_cols):
    """It initializes the required and optional parameters.

    Arguments:
        label_df {A Pandas DataFrame} -- It contains the input names and labels.
        image_col {string} -- The image column name in the dataframe.
        label_col {string} -- The label column name in the dataframe.
        output_df_cols {(string, string, string)} -- The column headers of the output dataframe, as a tuple.
    """
    #Required parameters
    self._label_df = label_df
    self._image_col = image_col
    self._label_col = label_col
    self._output_df_cols = output_df_cols

    #Derived parameters
    self._labelled_images = None

    #Logging
    self._logger = logging.get_logger(__name__)

    self._logger.info(
        'label_df: %d image_col: %s label_col: %s output_df_cols: %s',
        len(self._label_df), self._image_col, self._label_col, self._output_df_cols)
def __init__(self, image_data_generator, dataframe, batch_size, subset, randomize=True):
    """It initializes the required and optional parameters.

    Arguments:
        image_data_generator {An ImageDataGenerator object} -- A generator object that allows loading a data slice.
        dataframe {A pandas.DataFrame object} -- A data frame object containing the input data.
        batch_size {int} -- An integer value that indicates the size of a batch.
        subset {An ImageDataSubset object} -- An ImageDataSubset value to indicate the dataset subset.
        randomize {boolean} -- It indicates whether to randomize the dataframe.
    """
    #Required parameters
    self._image_data_generator = image_data_generator
    self._dataframe = dataframe
    self._batch_size = batch_size
    self._subset = subset
    self._randomize = randomize

    #Internal parameters
    self._dataset_size = len(dataframe)

    #Randomization
    self._shuffled_indices = list(range(self._dataset_size))

    #Logging
    self._logger = logging.get_logger(__name__)

    #Pre-randomization
    if self._randomize:
        random_shuffle(self._shuffled_indices)
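#A minimal sketch of what the pre-randomization above sets up. The dataset size,
#batch size, and the batch lookup at the end are assumptions for illustration,
#not code from the source.
from random import shuffle as random_shuffle

dataset_size = 10   #assumed toy size
batch_size = 4

#Same pre-randomization as in the constructor above.
shuffled_indices = list(range(dataset_size))
random_shuffle(shuffled_indices)

#Hypothetical batch lookup: batch n reads the n-th slice of the shuffled indices,
#so rows are visited in random order without copying the dataframe.
batch_id = 0
batch_indices = shuffled_indices[batch_id * batch_size:(batch_id + 1) * batch_size]
print(batch_indices)   #e.g. [7, 2, 9, 0]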
def __init__(self, input_params, training_params, image_generation_params, transformation_params, checkpoint_callback, summary=True):
    """It initializes the training parameters.

    Arguments:
        input_params {operation.input.InputParameters} -- The input parameters for the training.
        training_params {operation.input.TrainingParameters} -- The parameters to start the training.
        image_generation_params {operation.input.ImageGenerationParameters} -- The parameters required for image data generation.
        transformation_params -- The parameters used to create the ImageDataTransformation object.
        checkpoint_callback {model.callback.BatchTrainStateCheckpoint} -- The state checkpoint callback.
    """
    #Required parameters
    self._input_params = input_params
    self._training_params = training_params
    self._image_generation_params = image_generation_params
    self._transformation_params = transformation_params
    self._checkpoint_callback = checkpoint_callback

    #Optional parameters
    self._summary = summary

    #Derived parameters
    self._transformer = ImageDataTransformation(
        parameters=self._transformation_params)

    #Logging
    self._logger = logging.get_logger(__name__)
def __init__(self, num_unfrozen_layers, configure_base=False, base_level=1):
    """It initializes the input parameters.

    Arguments:
        num_unfrozen_layers {int} -- The number of bottom layers to unfreeze for training.
        configure_base {boolean} -- It indicates if the model or the base models are configuration candidates.
        base_level {int} -- It indicates the level of the base models.
    """
    #Input parameters
    self._num_unfrozen_layers = num_unfrozen_layers
    self._configure_base = configure_base
    self._base_level = base_level

    #Logging
    self._logger = logging.get_logger(__name__)

    #Validation
    if self._base_level < 0 or self._base_level > self.max_base_level:
        raise ValueError(
            "The valid values for base level are [0, {}]".format(
                self.max_base_level))

    #Log input parameters
    self._logger.info(
        "Input parameters:: num_unfrozen_layers: %d configure_base: %s base_level: %d",
        self._num_unfrozen_layers, self._configure_base, self._base_level)
def __init__(self):
    self.logger = get_logger(__name__)
    self.downloader = FileDownloader()
    self.unpacker = FileUnpacker()
    self.repo_store = RepositoryStore()
    self.repositories = set()
    self.certs_tmp_directory = None
    self.certs_files = {}
def test_status_app_runner(self):
    """Test status app runner"""
    logger = logging.get_logger(__name__)
    app = status_app.create_status_app(logger)
    loop = asyncio.get_event_loop()
    runner, site = status_app.create_status_runner(app, "20000", logger, loop)

    assert runner is not None
    assert site is not None
def __init__(self):
    self.logger = get_logger(__name__)
    self.downloader = FileDownloader()
    self.unpacker = FileUnpacker()
    self.cverepo_store = CveRepoStore()
    self.repos = set()
    self.db_lastmodified = {}
    self.year_since = int(os.getenv('YEAR_SINCE', DEFAULT_YEAR_SINCE))
def __init__(self, dataframe, input_params, image_generation_params, transformer=None, randomize=True):
    """It initializes the dataframe object.

    Arguments:
        dataframe {Pandas DataFrame} -- A pandas dataframe object with columnar data with image names and labels.
        input_params {An InputDataParameter object} -- An input parameter object.
        image_generation_params {An ImageGenerationParameters object} -- A training data parameter object.
        transformer {An ImageDataTransformation object} -- It is used to transform the image objects.
        randomize {boolean} -- It indicates randomization of the input dataframe.
    """
    #Required parameters
    self._dataframe = dataframe
    self._input_params = input_params
    self._image_generation_params = image_generation_params

    #Optional parameters
    self._transformer = transformer
    self._randomize = randomize

    #Caching
    self._image_cache = LRUCache(
        self._image_generation_params.image_cache_size)

    #Logging
    self._logger = logging.get_logger(__name__)

    #Metrics
    self._load_slice_metric = 'get_image_objects'

    #Create metrics
    Metric.create(self._load_slice_metric)

    #Compute the training and validation boundary using the validation split.
    boundary = int(
        ceil(
            len(self._dataframe) * (1. - self._image_generation_params.validation_split)))

    self._logger.info(
        "Validation split: {} Identified boundary: {}".format(
            self._image_generation_params.validation_split, boundary))

    #Split the dataframe into training and validation.
    self._main_df = self._dataframe.loc[:(boundary - 1), :]
    self._validation_df = self._dataframe.loc[boundary:, :].reset_index(
        drop=True)

    n_dataframe = len(self._dataframe)
    n_main_df = len(self._main_df)
    n_validation_df = len(self._validation_df)

    self._logger.info(
        "Dataframe size: {} main set size: {} validation size: {}".format(
            n_dataframe, n_main_df, n_validation_df))
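#A minimal sketch of the split arithmetic used above, with assumed sizes
#(1,000 rows, a 0.2 validation split); it only illustrates the boundary
#computation and the .loc slicing, not the rest of the class.
from math import ceil

import pandas as pd

#Assumed toy dataframe; the real data comes from the labelled input dataframe.
dataframe = pd.DataFrame({'image': ['img_{}.jpg'.format(i) for i in range(1000)],
                          'label': [i % 5 for i in range(1000)]})
validation_split = 0.2

#Same boundary computation as the constructor above: ceil(1000 * 0.8) = 800.
boundary = int(ceil(len(dataframe) * (1. - validation_split)))

main_df = dataframe.loc[:(boundary - 1), :]                         #rows 0..799
validation_df = dataframe.loc[boundary:, :].reset_index(drop=True)  #rows 800..999, reindexed

assert len(main_df) == 800 and len(validation_df) == 200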
def __init__(self, params):
    #Required parameters
    self._params = params

    #Derived parameters
    self._client = Dropbox(self._params.auth_token)

    #Logging
    self._logger = logging.get_logger(__name__)
def execute(func, iterator, length, parallel, *args):
    logger = logging.get_logger(__name__)

    if parallel:
        logger.info('Running parallel execution of function: %s', func)
        return _execute_parallel(func, iterator, length, *args)
    else:
        logger.info('Running serial execution of function: %s', func)
        return _execute_serial(func, iterator, length, *args)
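#A usage sketch for the dispatcher above. The worker function and input list are
#hypothetical; only the execute(func, iterator, length, parallel, *args)
#signature comes from the code.
def double(value):
    #Hypothetical worker function.
    return value * 2

values = [1, 2, 3, 4]

#Serial run: logs the function and delegates to _execute_serial.
results = execute(double, iter(values), len(values), False)

#Parallel run: delegates to _execute_parallel instead.
results = execute(double, iter(values), len(values), True)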
def __init__(self, dropbox):
    """It initializes and validates the input parameters.

    Arguments:
        dropbox {client.dropbox.DropboxConnection} -- The dropbox client.
    """
    #Keyword parameters
    self._dropbox = dropbox

    #Logging
    self._logger = logging.get_logger(__name__)
def test_progress_logger(caplog):
    """Test ProgressLogger."""
    logger = logging.get_logger(__name__)
    progress_logger = logging.ProgressLogger(logger, 3, log_interval=0)
    progress_logger.update()
    progress_logger.update()
    progress_logger.update()
    progress_logger.reset(4)
    progress_logger.update()

    assert caplog.records[0].message == ' 33.33 % completed [1/3]'
    assert caplog.records[1].message == ' 66.67 % completed [2/3]'
    assert caplog.records[2].message == '100.00 % completed [3/3]'
    assert caplog.records[3].message == ' 25.00 % completed [1/4]'
def __init__(self, instances):
    """It initializes the augmentation parameters.

    Arguments:
        instances {[A ImageAugmentation.Instance object]} -- The list of augmentation instances.
    """
    #Required parameters
    self._instances = instances

    #Validation
    if self._instances is None:
        raise ValueError(
            'instances must be a list of ImageAugmentation.Instance')

    #Logging
    self._logger = logging.get_logger(__name__)

    self._logger.info('Got %d augmentation instances', len(self._instances))
def __init__(self, model, input_params, image_generation_params):
    """It sets up the input parameters.

    Arguments:
        model {A keras model object} -- The keras model object to use for making predictions.
        input_params {An InputParameters object} -- The input parameters.
        image_generation_params {An ImageGenerationParameters object} -- The parameters required for the image data generation.
    """
    #Input parameters
    self._model = model
    self._input_params = input_params
    self._image_generation_params = image_generation_params

    #Validation
    if self._model is None:
        raise ValueError('The model object must be valid')

    #Logging
    self._logger = logging.get_logger(__file__)
def __init__(self, batch_input_files=[], checkpoint_batch_interval=1, epoch_begin_input_files=[], epoch_end_input_files=[], dropbox=None):
    """It initializes the parameters.

    Keyword Arguments:
        batch_input_files [iofiles.input_files.object] -- The list of input file objects to checkpoint on batch end.
        checkpoint_batch_interval {int} -- The number of batches after which to checkpoint the files.
        epoch_begin_input_files [iofiles.input_files.object] -- The list of input file objects to checkpoint on epoch begin.
        epoch_end_input_files [iofiles.input_files.object] -- The list of input file objects to checkpoint on epoch end.
        dropbox {client.dropbox.DropboxConnection} -- The dropbox client (default: {None})
    """
    super(BatchTrainStateCheckpoint, self).__init__()

    #Required parameters
    self._batch_input_files = batch_input_files
    self._checkpoint_batch_interval = checkpoint_batch_interval
    self._epoch_begin_input_files = epoch_begin_input_files
    self._epoch_end_input_files = epoch_end_input_files

    #Additional parameters
    self._dropbox = dropbox

    #Other parameters
    self._model = None
    self._input_data = None
    self._batch_id = 0
    self._epoch_id = 0
    self._result = None
    self._epoch_response = None

    #Logging
    self._logger = logging.get_logger(__name__)

    #Save TF session
    self._tf_session = K.get_session()
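#A hedged wiring sketch for the callback above. The file objects, dropbox client,
#model, and training data are all assumed placeholders; only the constructor's
#keyword arguments come from the source.
checkpoint = BatchTrainStateCheckpoint(
    batch_input_files=[model_state_file],
    checkpoint_batch_interval=10,
    epoch_end_input_files=[history_file],
    dropbox=dropbox_client)

#Standard Keras wiring: the callback is passed to fit() so its batch and epoch
#hooks fire during training.
model.fit(train_x, train_y, epochs=5, callbacks=[checkpoint])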
import socketserver
import optparse
import json

from conf import settings
from common.logging import get_logger
from conf.settings import status_code

log = get_logger("server")


class Response():

    def __init__(self, code, data=None):
        self.resp = {"code": code, "msg": status_code[code], "data": data}

    def json_str(self):
        return json.dumps(self.resp).encode("utf-8")

    def send(self, request):
        request.sendall(self.json_str())


class FTPHandler(socketserver.BaseRequestHandler):

    def handle(self):
        while True:
            data = self.request.recv(1024).strip()
            content = json.loads(data.decode("utf-8"))
            '''
            {
                "action":"auth",
""" Module for /status API endpoint """ import connexion from peewee import IntegrityError from psycopg2 import IntegrityError as psycopg2IntegrityError from common.logging import get_logger from common.peewee_model import CveMetadata, DB, SystemPlatform, SystemVulnerabilities, \ Status, RHAccount, CveAccountData from .base import GetRequest, PatchRequest LOGGER = get_logger(__name__) class GetStatus(GetRequest): """GET to /v1/status""" _endpoint_name = r'/v1/status' @classmethod def handle_get(cls, **kwargs): # pylint: disable=unused-argument """Return the data from the Status table as JSON""" query = (Status.select().order_by(Status.id.asc()).dicts()) status_list = [] for status in query: status_list.append(status) LOGGER.debug(status_list) return {'data': status_list, 'meta': {'total_items': len(status_list)}}, 200
def __init__(self, methodName='runTest'):
    super(TestImageDataTransformation, self).__init__(methodName)

    #Logging
    logging.initialize(ut_constants.LOGGING_CLASS)
    self._logger = logging.get_logger(__name__)
import json
import re
import time

from common.logging import get_logger
from common.utils import retry
from exceptions import ScrappingError
from scrapper.scripts import xhr_intercept_response
from scrapper.driver import forced_click
from selenium.common.exceptions import (
    TimeoutException,
    WebDriverException
)

logger = get_logger(name='scrapper')


def log(text):
    logger.debug(text)


def encode_date(dt):
    return dt.strftime('%d/%m/%Y')


@retry(exceptions=(TimeoutException, WebDriverException), logger=logger)
def login(browser, username, password):
    log('Loading BANKIA main page')
    browser.get('https://www.bankia.es')
    try:
import re
import schedule
import _thread
import time

from functools import partial

from common.logging import get_logger
import bank

logger = get_logger(name='scheduler')


def schedule_loop():
    logger.info('Starting scheduler')
    for count, job in enumerate(schedule.jobs, start=1):
        logger.info('Job #{} of {}: {}'.format(
            count,
            len(schedule.jobs),
            re.sub(r'(.*?)do.*', r'\1execute "{}"'.format(list(job.tags)[0]), job.__repr__())))
    while True:
        try:
            schedule.run_pending()
        except Exception as exc:
            print(exc)
        time.sleep(2)


def run_once(task, task_name):
    def wrapper():
        time.sleep(2)
def __init__(self):
    self.logger = get_logger(__name__)
    self.repo = []
    self.conn = DatabaseHandler.get_connection()
    self.cve_store = CveStore()
if __name__ == "__main__": #Parse commandline arguments args = parse_args() #Required params input_params = InputParameters(args) image_generation_params = ImageGenerationParameters(args) num_prediction_steps = args.num_prediction_steps dropbox_parameters = args.dropbox_parameters log_to_console = args.log_to_console #Initialize logging logging.initialize(__file__, log_to_console = log_to_console) logger = logging.get_logger(__name__) #Log input parameters logger.info('Running with parameters input_params: %s', input_params) logger.info('Additional parameters: image_generation_params: %s log_to_console: %s', image_generation_params, log_to_console) #Predictable randomness seed = 3 np_seed(seed) tf_seed(seed) #Dropbox dropbox = None if dropbox_parameters: dropbox_params = DropboxConnection.Parameters(dropbox_parameters[0], dropbox_parameters[1])
import csv

from itertools import chain

import output.base
from common.drive_utils import permissions_to_string
from common.logging import get_logger

logger = get_logger(__name__)


class CsvOutput(csv.DictWriter, output.base.AbstractOutput):
    """Generic CSV Output Class, mainly a wrapper around the standard csv.DictWriter"""

    def __init__(self, f, fieldnames, log_level, *args, **kwds):
        """
        CsvOutput initializer

        :param f: file pointer used to write the CSV file
        :param fieldnames: the names of the columns in the CSV
        :param log_level: the log level for this output
        :param args: positional args for the wrapped csv.DictWriter
        :param kwds: named args for the wrapped csv.DictWriter
        """
        self._f = f
        self._log_level = log_level
        sorted_fieldnames = ['id', 'name']
        self._ignore_fields = {'permissions', 'internal_folder'}
        self._file_cache = set()
        sorted_fieldnames.extend(
            key for key in sorted(
                chain(fieldnames, ('owners', 'can_edit', 'can_comment',
def __init__(self):
    self.logger = get_logger(__name__)
    self.conn = DatabaseHandler.get_connection()
def __init__(self):
    self.queue = Queue()
    self.logger = get_logger(__name__)
    self.num_threads = int(os.getenv('THREADS', DEFAULT_THREADS))
import os

from datetime import datetime
from functools import partial

from dateutil.relativedelta import relativedelta

from .io import decode_bank, decode_card, decode_account, decode_local_account, decode_notifications, decode_scheduler_configuration
from datatypes import Configuration, Category
from common.logging import get_logger
from common.notifications import get_notifier
from common.utils import parse_bool, traceback_summary, get_nested_item
import scrapper
import database
import rules
import exceptions

logger = get_logger(name='bank')


def env():
    return {
        'database_folder': os.getenv('BANKING_DATABASE_FOLDER', './database'),
        'main_config_file': os.getenv('BANKING_CONFIG_FILE', './banking.yaml'),
        'metadata_file': os.getenv('BANKING_METADATA_FILE', './metadata.yaml'),
        'categories_file': os.getenv('BANKING_CATEGORIES_FILE', './categories.yaml'),
        'headless_browser': parse_bool(os.getenv('BANKING_HEADLESS_BROWSER', True)),
        'close_browser':