def log_out():
    config(logfile="foo.log")

    logger = logging.getLogger("dials")
    logger.info("Hello")
    logger.warning("Watch out!")  # Colour
    logger.info("Å σ")
Example #2
def init_logging(level=None):
    """Initialize loggers, handlers and formatters."""
    logging.basicConfig()
    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'simple': {
                'datefmt': '%Y-%m-%d %H:%M:%S %Z',
                'format': config().get('logger', 'format',
                                       logging.BASIC_FORMAT)
            }
        },
        'root': {
            'handlers': ['console'],
            'propagate': 1,
            'level': 'DEBUG'  # lowest level overridden by handlers
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'formatter': 'simple',
                'level': level or str(config().get('logger', 'level', 'INFO'))
            }
        }
    })
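A note on the level split above: the root logger is pinned at DEBUG so every record reaches the handlers, and each handler then applies its own threshold. A minimal self-contained sketch of the same pattern, without the config() dependency:

import logging
import logging.config

logging.config.dictConfig({
    'version': 1,
    'formatters': {'simple': {'format': logging.BASIC_FORMAT}},
    'handlers': {'console': {'class': 'logging.StreamHandler',
                             'formatter': 'simple',
                             'level': 'INFO'}},
    'root': {'handlers': ['console'], 'level': 'DEBUG'},
})

logging.getLogger(__name__).debug('dropped by the INFO handler')
logging.getLogger(__name__).info('emitted')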
Example #3
def configure_logging(logging_level=None):
    if logging_level is None:
        logging_level = config("logging").get("LOGGING_LEVEL").upper()

    base_log_dir = os.path.expanduser(config("logging").get("BASE_LOG_DIR"))
    Path(base_log_dir).mkdir(parents=True, exist_ok=True)
    logging_config = get_logging_config_dict(logging_level, base_log_dir)
    logging.config.dictConfig(logging_config)
Example #4
def init_config(path):
    """Optionally override global configuration with local *.ini files."""

    local_file = os.path.join(get_working_dir(), ogre.config.FILENAME)

    if os.path.exists(local_file):
        config().override(local_file)

    if path is not None:
        config().override(path)
Example #5
def configure_logging(logging_level=None):
    if logging_level is None:
        logging_level = config("logging").get("LEVEL").upper()
        if "LOGGING_LEVEL" in config("logging"):
            # Support legacy config name e.g. BENTOML__LOGGING__LOGGING_LEVEL=debug
            logging_level = config("logging").get("LOGGING_LEVEL").upper()

    if get_debug_mode():
        logging_level = logging.getLevelName(logging.DEBUG)

    base_log_dir = os.path.expanduser(config("logging").get("BASE_LOG_DIR"))
    Path(base_log_dir).mkdir(parents=True, exist_ok=True)
    logging_config = get_logging_config_dict(logging_level, base_log_dir)
    logging.config.dictConfig(logging_config)
Example #6
def processAccountList(scope):
    accountInfoCSV, URL = config()
    username = os.environ["QUALYS_API_USERNAME"]
    #password = base64.b64decode(os.environ["QUALYS_API_PASSWORD"])
    password = os.environ["QUALYS_API_PASSWORD"]
    usrPass = str(username) + ':' + str(password)
    b64Val = base64.b64encode(usrPass.encode("utf-8"))
    b64Val = b64Val.decode('ASCII')
    headers = {
        'Accept': 'application/json',
        'content-type': 'application/json',
        'X-Requested-With': 'python requests',
        'Authorization': "Basic %s" % b64Val
    }

    with open(accountInfoCSV, mode='r') as csv_file:
        accountInfo = csv.DictReader(csv_file)
        #print("{0}\n".format(json.dumps(row)))
        if scope == "allAccounts":
            for row in accountInfo:
                controlFailures = cloudviewReport(row['cloud'],
                                                  row['accountId'], URL,
                                                  headers)
        else:
            for row in accountInfo:
                if row['accountId'] == scope:
                    controlFailures = cloudviewReport(row['cloud'],
                                                      row['accountId'], URL,
                                                      headers)
                    break
                elif row['BU'] == scope:
                    controlFailures = cloudviewReport(row['cloud'],
                                                      row['accountId'], URL,
                                                      headers)
Example #7
def adjust_optimizer(optimizer, epoch, config):
    """Reconfigures the optimizer according to epoch and config dict"""
    def modify_optimizer(optimizer, setting):
        if 'optimizer' in setting:
            optimizer = __optimizers[setting['optimizer']](
                optimizer.param_groups)
            logging.debug('OPTIMIZER - setting method = %s' %
                          setting['optimizer'])
        for param_group in optimizer.param_groups:
            for key in param_group.keys():
                if key in setting:
                    new_val = setting[key]
                    logging.debug('OPTIMIZER - setting %s = %s' %
                                  (key, new_val))
                    param_group[key] = new_val
        return optimizer

    if callable(config):
        optimizer = modify_optimizer(optimizer, config(epoch))
    else:
        for e in range(epoch + 1):  # run over all epochs - sticky setting
            if e in config:
                optimizer = modify_optimizer(optimizer, config[e])

    return optimizer
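For reference, a hypothetical schedule in the dict form accepted above. Settings are sticky: every epoch key up to and including the current epoch is replayed in order.

# Hypothetical epoch-keyed schedule (names illustrative):
schedule = {
    0: {'optimizer': 'SGD', 'lr': 0.1, 'momentum': 0.9},
    30: {'lr': 0.01},
    60: {'lr': 0.001},
}
# adjust_optimizer(optimizer, 45, schedule) replays epochs 0 and 30,
# leaving lr = 0.01 in every param group.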
Example #8
def path_in_cache(file_path):
    textattack_cache_dir = config("CACHE_DIR")
    try:
        os.makedirs(textattack_cache_dir)
    except FileExistsError:  # cache path exists
        pass
    return os.path.join(textattack_cache_dir, file_path)
Example #9
def post_to_slack(scope):
    accountInfoCSV, URL = config()
    username = os.environ["QUALYS_API_USERNAME"]
    password = base64.b64decode(os.environ["QUALYS_API_PASSWORD"]).decode("utf-8")
    usrPass = str(username) + ':' + str(password)
    b64Val = base64.b64encode(usrPass.encode("utf-8")).decode("ascii")
    headers = {
        'Accept': 'application/json',
        'content-type': 'application/json',
        'Authorization': "Basic %s" % b64Val
    }

    with open(accountInfoCSV, mode='r') as csv_file:
        accountInfo = csv.DictReader(csv_file)
        #print "{0}\n".format(json.dumps(row))
        if scope == "AllAccounts":
            for row in accountInfo:
                cloudviewReport(row['cloud'], row['accountId'], row['webHook'],
                                URL, headers)
        else:
            for row in accountInfo:
                if row['accountId'] == scope:
                    cloudviewReport(row['cloud'], row['accountId'],
                                    row['webHook'], URL, headers)
                    break
                elif row['BU'] == scope:
                    cloudviewReport(row['cloud'], row['accountId'],
                                    row['webHook'], URL, headers)
Example #10
def init():
    def config():
        userDir = getPath('')
        defaults = dict(mhUserDir=userDir.replace('\\', '/'))

        try:
            filename = os.path.join(userDir, "logging.ini")
            if os.path.isfile(filename):
                logging.config.fileConfig(filename, defaults)
                return
        except Exception:
            pass

        try:
            logging.config.fileConfig(os.path.join('data', 'logging.ini'),
                                      defaults)
            return
        except Exception:
            pass

        try:
            logging.basicConfig(level=logging.DEBUG)
            return
        except Exception:
            pass

    logging.setLoggerClass(Logger)

    logging.getLogger('mh.callAsync').setLevel(logging.WARNING)
    logging.getLogger('mh.callEvent').setLevel(logging.WARNING)

    config()

    # Compatibility test for Python 2.6 logging module
    if hasattr(logging, "captureWarnings") and callable(
            logging.captureWarnings):
        logging.captureWarnings(True)

    try:
        logging.getLogger('OpenGL.formathandler').addFilter(NoiseFilter())
        logging.getLogger('OpenGL.extensions').addFilter(
            DowngradeFilter(logging.DEBUG))
    except Exception:
        import traceback
        traceback.print_exc()
Example #11
def _initLogging():
    """
    初始化日志组件
    """
    loggerLevel = _getloggerConfig('level', 'DEBUG')
    loggingDict = {
        'version': 1,
        'disable_existing_loggers': True,
        'formatters': {
            'simpleFormatter': {
                'format': _getloggerConfig('format', _DEFAULT_LOGGING_FORMAT),
                'datefmt': _getloggerConfig('datefmt',
                                            _DEFAULT_LOGGING_DATEFMT)
            },
        },
        'filters': {
            'simpleFilter': {
                '()': _SimpleLoggerFilter,
                'foo': 'bar',
            }
        },
        'handlers': {
            'null': {
                'level': 'DEBUG',
                'class': 'logging.NullHandler'
            },
            'console': {
                'level': 'DEBUG',
                'class': 'logging.StreamHandler',
                'stream': sys.stderr,
                'formatter': 'simpleFormatter'
            },
            'filehandler': {
                'level': _getloggerConfig('level', _DEFAULT_LOGGING_LEVEL),
                'class': _DEFAULT_LOGGING_FILEHANDLER,
                'filename': _getloggerConfig('filename'),
                'mode': 'a',
                'encoding': 'utf-8',
                'formatter': 'simpleFormatter'
            }
        },
        'loggers': {
            '': {
                'handlers': ['null'],
                'level': loggerLevel,
            },
            config().getAppName(): {
                'handlers': ['filehandler'],
                'level': loggerLevel,
                'propagate': False,
                'filters': ['simpleFilter']
            }
        }
    }

    logging.config.dictConfig(loggingDict)
Example #12
async def _update_date(trigger_id) -> None:
    """
    update the database table  with the execution date
    :param trigger_id: id to update
    :return: nothing
    """
    now = arrow.utcnow().to(
        config('TIME_ZONE')).format('YYYY-MM-DD HH:mm:ssZZ')
    trigger = await Trigger.objects.get(id=trigger_id)
    await trigger.update(date_triggered=now)
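Note that arrow formats with its own tokens (YYYY, ZZ), not strftime codes. A standalone sketch, with 'Europe/Paris' standing in for config('TIME_ZONE'):

import arrow

now = arrow.utcnow().to('Europe/Paris').format('YYYY-MM-DD HH:mm:ssZZ')
print(now)  # e.g. 2021-05-01 14:30:00+02:00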
Example #13
def _post_install_if_needed():
    """ Runs _post_install if hasn't been run since install. """
    # Check for post-install file.
    post_install_file_path = os.path.join(config("CACHE_DIR"), "post_install_check")
    if os.path.exists(post_install_file_path):
        return
    # Run post-install.
    _post_install()
    # Create file that indicates post-install completed.
    open(post_install_file_path, "w").close()
Example #14
def decrpt_has(key, file_hash):
    server_config = config(section='ipfs')
    api = ipfsApi.Client(server_config['endpoint'], server_config['port'])
    api.get(file_hash)
    outfile_name = 'plaintext.gz'
    decrypt_file(key.encode('utf-8'), file_hash, outfile_name)
    with gzip.open(outfile_name, "r") as file:
        data = file.read()

    return Response(data, mimetype='application/json')
Example #15
def init():
    def config():
        userDir = getPath("")
        defaults = dict(mhUserDir=userDir.replace("\\", "/"))

        try:
            filename = os.path.join(userDir, "logging.ini")
            if os.path.isfile(filename):
                logging.config.fileConfig(filename, defaults)
                return
        except Exception:
            pass

        try:
            logging.config.fileConfig(os.path.join("data", "logging.ini"), defaults)
            return
        except Exception:
            pass

        try:
            logging.basicConfig(level=logging.DEBUG)
            return
        except Exception:
            pass

    logging.setLoggerClass(Logger)

    logging.getLogger("mh.callAsync").setLevel(logging.WARNING)
    logging.getLogger("mh.callEvent").setLevel(logging.WARNING)

    config()

    # Compatibility test for Python 2.6 logging module
    if hasattr(logging, "captureWarnings") and callable(logging.captureWarnings):
        logging.captureWarnings(True)

    try:
        logging.getLogger("OpenGL.formathandler").addFilter(NoiseFilter())
        logging.getLogger("OpenGL.extensions").addFilter(DowngradeFilter(logging.DEBUG))
    except Exception:
        import traceback

        traceback.print_exc()
Example #16
def listener():
    '''
    Process handler that starts the listener
    '''
    global loggerQueue

    loggerConfig = {
        'version': 1,
        'disable_existing_loggers': True,
        'formatters': {
            'detailed': {
                'class': 'logging.Formatter',
                'format': '%(asctime)s | %(levelname)s | %(processName)s | %(threadName)s | %(name)s: %(message)s'
            },
            'simple': {
                'class': 'logging.Formatter',
                'format': '%(asctime)s | %(levelname)s | %(name)s: %(message)s'
            }
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'level': 'INFO',
                'formatter': 'simple'
            },
            'file': {
                'class': 'logging.FileHandler',
                'level': config().loglevel,
                'filename': config().logfile,
                'mode': 'w',
                'formatter': 'detailed'
            }
        },
        'root': {
            'level': 'DEBUG',
            'handlers': ['console', 'file']
        }
    }
    logging.config.dictConfig(loggerConfig)
    listener = logging.handlers.QueueListener(loggerQueue, Handler())
    listener.start()
    config().stopEvent.wait()
    logger.info('Closing Logger')
    listener.stop()
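The listener above is one half of the standard queue-based pattern for multiprocess logging: workers attach a QueueHandler, and a single listener drains the queue into the real handlers. A minimal self-contained sketch of both halves:

import logging
import logging.handlers
import multiprocessing

queue = multiprocessing.Queue(-1)

# Worker side: records are serialized onto the queue.
worker_logger = logging.getLogger('worker')
worker_logger.addHandler(logging.handlers.QueueHandler(queue))
worker_logger.propagate = False  # keep records off the root logger

# Listener side: a single consumer forwards records to a concrete handler.
listener = logging.handlers.QueueListener(queue, logging.StreamHandler())
listener.start()
worker_logger.error('routed through the queue')
listener.stop()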
Example #17
def configure_logging(logging_level=None):
    base_log_dir = os.path.expanduser(config("logging").get("BASE_LOG_DIR"))
    Path(base_log_dir).mkdir(parents=True, exist_ok=True)
    if os.path.exists(config("logging").get("logging_config")):
        logging_config_path = config("logging").get("logging_config")
        with open(logging_config_path, "rb") as f:
            logging_config = YAML().load(f.read())
        logging.config.dictConfig(logging_config)
        logging.getLogger(__name__).debug(
            "Loaded logging configuration from %s." % logging_config_path)
    else:
        if logging_level is None:
            logging_level = config("logging").get("LEVEL").upper()
            if "LOGGING_LEVEL" in config("logging"):
                # Support legacy config name e.g. BENTOML__LOGGING__LOGGING_LEVEL=debug
                logging_level = config("logging").get("LOGGING_LEVEL").upper()

        if get_debug_mode():
            logging_level = logging.getLevelName(logging.DEBUG)

        logging_config = get_logging_config_dict(logging_level, base_log_dir)
        logging.config.dictConfig(logging_config)
        logging.getLogger(__name__).debug(
            "Loaded logging configuration from default configuration " +
            "and environment variables.")
def init():
    def config():
        userDir = getPath('')
        defaults = dict(mhUserDir = userDir.replace('\\','/'))

        try:
            filename = os.path.join(userDir, "logging.ini")
            if os.path.isfile(filename):
                logging.config.fileConfig(filename, defaults)
                return
        except Exception:
            pass

        try:
            logging.config.fileConfig(getSysDataPath('logging.ini'), defaults)
            return
        except Exception:
            pass

        try:
            logging.basicConfig(level = logging.DEBUG)
            return
        except Exception:
            pass

    logging.setLoggerClass(Logger)

    config()

    # Compatibility test for Python 2.6 logging module
    if hasattr(logging, "captureWarnings") and callable(logging.captureWarnings):
        logging.captureWarnings(True)

    try:
        logging.getLogger('OpenGL.formathandler').addFilter(NoiseFilter())
        logging.getLogger('OpenGL.extensions').addFilter(DowngradeFilter(logging.DEBUG))
    except Exception:
        import traceback
        traceback.print_exc()
Example #19
def init():
    def config():
        userDir = getPath('')
        defaults = dict(mhUserDir=userDir.replace('\\', '/'))

        try:
            filename = os.path.join(userDir, "logging.ini")
            if os.path.isfile(filename):
                logging.config.fileConfig(filename, defaults)
                return
        except Exception:
            pass

        try:
            logging.config.fileConfig(getSysDataPath('logging.ini'), defaults)
            return
        except Exception:
            pass

        try:
            logging.basicConfig(level=logging.DEBUG)
            return
        except Exception:
            pass

    logging.setLoggerClass(Logger)

    config()

    logging.captureWarnings(True)

    try:
        logging.getLogger('OpenGL.formathandler').addFilter(NoiseFilter())
        logging.getLogger('OpenGL.extensions').addFilter(
            DowngradeFilter(logging.DEBUG))
    except Exception:
        import traceback
        traceback.print_exc()
Example #20
def get_conn(config):
    logger.info('S3_ACCESS_KEY: %s', config('s3_access_key'))
    logger.info('S3_SECRET_ACCESS_KEY: %s', '*****' if config('s3_secret_access_key') else '')
    logger.info('S3_REGION: %s', config('s3_region'))
    logger.info('S3_BUCKET: %s', config('s3_bucket'))

    session_kwargs = {}
    if config('s3_access_key') and config('s3_secret_access_key'):
        logger.info('S3_ACCESS_KEY and S3_SECRET_ACCESS_KEY set--using those.')
        session_kwargs['aws_access_key_id'] = config('s3_access_key')
        session_kwargs['aws_secret_access_key'] = config('s3_secret_access_key')

    session = boto3.session.Session(**session_kwargs)

    return session.client(
        service_name='s3',
        region_name=config('s3_region'),
        config=Config(s3={'addressing_style': 'path'})
    )
Example #21
def _setup_logging():
    # First do some basic log config...

    # This is kind of a hack, but we need to keep track of the handler we
    # install so that we can, for example, uninstall it later.  This code
    # originally lived in pox.core, so we explicitly reference it here.

    pox.core._default_log_handler = logging.StreamHandler()
    formatter = logging.Formatter(logging.BASIC_FORMAT)
    pox.core._default_log_handler.setFormatter(formatter)
    logging.getLogger().addHandler(pox.core._default_log_handler)
    logging.getLogger().setLevel(logging.INFO)

    # Now set up from config file if specified...

    # TODO:
    #  I think we could move most of the special log stuff into
    #  the log module.  You'd just have to make a point to put the log
    #  module first on the commandline if you wanted later component
    #  initializations to honor it.  Or it could be special-cased?

    if _options.log_config is not None:
        if not os.path.exists(_options.log_config):
            print("Could not find logging config file:", _options.log_config)
            sys.exit(2)
        logging.config.fileConfig(_options.log_config,
                                  disable_existing_loggers=True)
Example #22
def local_datetime(utcdatetime, format=None, timezone=None):
    """
    Return local datetime based on the timezone
    It will automatically format the date. 
    To not format the date, set format=False
    
    :param utcdatetime: Arrow or string
    :param format: string of format or False
    :param timezone: string, e.g. US/Eastern
    :return:
    """
    if utcdatetime is None:
        return None

    timezone = timezone or config("DATETIME_TIMEZONE", "US/Eastern")
    dt = utcdatetime.to(timezone) \
        if isinstance(utcdatetime, arrow.Arrow) \
        else arrow.get(utcdatetime, timezone)
    if format is False:
        return dt

    _ = config("DATETIME_FORMAT")
    format = _.get("default") or "MM/DD/YYYY" if not format else _.get(format)
    return dt.format(format)
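A usage sketch, assuming config('DATETIME_FORMAT') returns a dict of named format strings:

local_datetime(arrow.utcnow())                  # default format, e.g. '01/01/2021'
local_datetime(arrow.utcnow(), format=False)    # raw Arrow object, unformatted
local_datetime(arrow.utcnow(), timezone='UTC')  # explicit timezone override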
Example #23
def create_app():
    environment = config('APPLICATION_ENV', default='Development')
    app = Flask(__name__)
    config_name = f'training.config.{environment}'
    app.config.from_object(config_name)

    db.init_app(app)
    migrate.init_app(app, db, directory=os.path.join(BASE_DIR, 'migrations'))

    logging.config.dictConfig(app.config['LOGGING'])
    logger.info(
        'loading application with configuration {}'.format(config_name))

    register_blueprints(app)

    return app
Example #24
def getTemplate(template, name):
    """
    Return a template from a file.

    @param template: Name of the template file in $GIP_LOCATION/templates.
    @param name: Entry in the template file; for now, this is the first
        entry of the DN.
    @return: Template string
    @raise e: ValueError if it is unable to find the template in the file.
    """

    cp = config()
    template_dirs = cp_getList(cp, 'gip', 'local_template_dirs', [])
    template_dirs.append(
        gipDir(
            os.path.expandvars('$GIP_LOCATION/templates'),
            '/usr/share/gip/templates'))
    tried = []
    fp = ''

    for template_dir in template_dirs:
        try:
            template_dir = os.path.expandvars(template_dir)
            fp = open("%s/%s" % (template_dir, template))
            break
        except IOError:
            tried.append("%s/%s" % (template_dir, template))

    if not fp:
        raise ValueError("Couldn't find template.  Searched %s" % tried)

    start_str = "dn: %s" % name
    mybuffer = ''
    recording = False
    for line in fp:
        if line.startswith(start_str):
            recording = True
        if recording:
            mybuffer += line
            if line == '\n':
                break

    fp.close()
    if not recording:
        raise ValueError("Unable to find %s in template %s" % (name, template))
    return mybuffer[:-1]
Example #25
def post_to_slack(scope):
    accountInfoCSV, URL = config()
    username = os.environ["QUALYS_API_USERNAME"]
    password = Password()
    #passBytes=bytes(password, "utf-8")
    #passBytes=base64.b64decode(passBytes)
    usrPass = str(username) + ':' + str(password)
    usrPassBytes = bytes(usrPass, "utf-8")
    b64Val = base64.b64encode(usrPassBytes).decode("utf-8")
    headers = {
        'Accept': 'application/json',
        'content-type': 'application/json',
        'X-Requested-With': 'python requests',
        'Authorization': "Basic %s" % b64Val
    }

    with open(accountInfoCSV, mode='r') as csv_file:
        accountInfo = csv.DictReader(csv_file)
        #print ("{0}\n".format(json.dumps(row)))
        if scope == "allAccounts":
            for row in accountInfo:
                controlFailures = cloudviewReport(row['cloud'],
                                                  row['accountId'],
                                                  row['webHook'], URL, headers)
                if args.slack:
                    postSlackReport(controlFailures, row['accountId'],
                                    row['webHook'])
        else:
            for row in accountInfo:
                if row['accountId'] == scope:
                    controlFailures = cloudviewReport(row['cloud'],
                                                      row['accountId'],
                                                      row['webHook'], URL,
                                                      headers)
                    if args.slack:
                        postSlackReport(controlFailures, row['accountId'],
                                        row['webHook'])
                    break
                elif row['BU'] == scope:
                    controlFailures = cloudviewReport(row['cloud'],
                                                      row['accountId'],
                                                      row['webHook'], URL,
                                                      headers)
                    if args.slack:
                        postSlackReport(controlFailures, row['accountId'],
                                        row['webHook'])
Example #26
def getTemplate(template, name):
    """
    Return a template from a file.

    @param template: Name of the template file in $GIP_LOCATION/templates.
    @param name: Entry in the template file; for now, this is the first
        entry of the DN.
    @return: Template string
    @raise e: ValueError if it is unable to find the template in the file.
    """

    cp = config()
    template_dirs = cp_getList(cp, 'gip', 'local_template_dirs', [])
    template_dirs.append(
        gipDir(os.path.expandvars('$GIP_LOCATION/templates'),
               '/usr/share/gip/templates'))
    tried = []
    fp = ''

    for template_dir in template_dirs:
        try:
            template_dir = os.path.expandvars(template_dir)
            fp = open("%s/%s" % (template_dir, template))
            break
        except IOError:
            tried.append("%s/%s" % (template_dir, template))

    if not fp:
        raise ValueError("Couldn't find template.  Searched %s" % tried)

    start_str = "dn: %s" % name
    mybuffer = ''
    recording = False
    for line in fp:
        if line.startswith(start_str):
            recording = True
        if recording:
            mybuffer += line
            if line == '\n':
                break

    fp.close()
    if not recording:
        raise ValueError("Unable to find %s in template %s" % (name, template))
    return mybuffer[:-1]
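The parser above implies an LDIF-style template file: stanzas that begin with a dn: line and end at a blank line. A hypothetical fragment (entry and attribute names illustrative):

# Hypothetical template file layout:
#
#   dn: ExampleEntry=site-a
#   ExampleAttribute: value
#
#   dn: ExampleEntry=site-b
#   ExampleAttribute: other
#
# getTemplate(template_file, 'ExampleEntry=site-a') returns the first stanza.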
Example #27
def main(args):
    """Application entry point."""
    try:
        init_config(args.config)
        init_logging(args.debug)

        logger.debug(str(config()))

        if os.path.exists(args.output) and not args.force_overwrite:
            logger.error(
                'File already exists. Use the -f flag to force overwrite.')
        else:
            model = Model(get_file_paths())
            report = Report(model)

            if report.save(args.output):
                webbrowser.open(args.output)

    except KeyboardInterrupt:
        logger.info('Aborted with ^C')
Example #28
def create_app():
    global app

    # app config
    app.config.from_object(config())

    # before/after request
    app.before_request(_before_request)
    app.teardown_request(_teardown_request)

    # logging config
    logging_config = os.environ.get("LOGGING_CONFIG",
                                    app.config.get("LOGGING_CONFIG"))
    if logging_config:
        logging.config.fileConfig(logging_config)

    # blueprints
    import api.test
    app.register_blueprint(api.test.bp_test)

    return app
Example #29
def create_app():
    logging.config.fileConfig(config("LOG_CONFIG",
                                     default="{}/user_api/conf/logging.default.conf".format(getcwd())))
    app = Flask(__name__)
    app.config["DEBUG"] = config("DEBUG", cast=bool)
    app.config["PORT"] = config("PORT", cast=int)
    app.config["SQLALCHEMY_DATABASE_URI"] = config("SQLALCHEMY_DATABASE_URI")
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = config("SQLALCHEMY_TRACK_MODIFICATIONS",
                                                          cast=bool,
                                                          default=False)
    app.config["SECRET_KEY"] = config("SECRET_KEY")
    db.init_app(app)

    register_routes(app)
    register_handlers(app)

    return app
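The second factory reads its settings through python-decouple's config(), which pulls values from the environment or a .env file with optional cast and default. A minimal sketch:

from decouple import config

DEBUG = config('DEBUG', cast=bool, default=False)  # 'true'/'1'/'yes' -> True
PORT = config('PORT', cast=int, default=8000)
SECRET_KEY = config('SECRET_KEY')                  # raises if unset and no default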
Example #30
def download_if_needed(folder_name):
    """ Folder name will be saved as `.cache/textattack/[folder name]`. If it
        doesn't exist on disk, the zip file will be downloaded and extracted. 
    
    Args:
        folder_name (str): path to folder or file in cache
    
    Returns:
        str: path to the downloaded folder or file on disk
    """
    cache_dest_path = path_in_cache(folder_name)
    os.makedirs(os.path.dirname(cache_dest_path), exist_ok=True)
    # Use a lock to prevent concurrent downloads.
    cache_dest_lock_path = cache_dest_path + '.lock'
    cache_file_lock = filelock.FileLock(cache_dest_lock_path)
    cache_file_lock.acquire()
    # Check if already downloaded.
    if os.path.exists(cache_dest_path):
        cache_file_lock.release()
        return cache_dest_path
    # If the file isn't found yet, download the zip file to the cache.
    downloaded_file = tempfile.NamedTemporaryFile(dir=config('CACHE_DIR'),
                                                  suffix='.zip',
                                                  delete=False)
    http_get(folder_name, downloaded_file)
    # Move or unzip the file.
    downloaded_file.close()
    if zipfile.is_zipfile(downloaded_file.name):
        unzip_file(downloaded_file.name, cache_dest_path)
    else:
        get_logger().info(
            f'Copying {downloaded_file.name} to {cache_dest_path}.')
        shutil.copyfile(downloaded_file.name, cache_dest_path)
    cache_file_lock.release()
    # Remove the temporary file.
    os.remove(downloaded_file.name)
    get_logger().info(f'Successfully saved {folder_name} to cache.')
    return cache_dest_path
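A design note on the locking above: release() is called on both the early-return and the normal path, but an exception in between would leave the lock held. filelock's context-manager form avoids this; a sketch:

import os
import filelock

with filelock.FileLock(cache_dest_lock_path):  # released even on exceptions
    if not os.path.exists(cache_dest_path):
        ...  # download and unzip as above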
Example #31
def with_connection(func):
    # Hypothetical outer decorator restored for context; only the inner
    # wrapper was captured in this excerpt.
    def with_connection_(*args, **kwargs):
        # 1. Read connection parameters.
        params = config()
        # 2. Build the SQLAlchemy connection string.
        con_string = ('mysql+pymysql://' + params["user"] + ':'
                      + params["password"] + '@' + params["host"] + ':'
                      + params["port"] + '/' + params["database"])
        engine = create_engine(con_string)
        conn = engine.connect()

        logger.info('Connecting to the mysql database...')
        # 3. Run the wrapped function; commit on success, re-raise on error.
        try:
            rv = func(engine, *args, **kwargs)
        except Exception:
            logger.error("Database connection error")
            raise
        else:
            conn.execute("commit;")
            logger.info("Commit done!")
        finally:
            conn.close()
            logger.info("DB connection closed.")
        return rv
    return with_connection_
Example #32
def create_port(type, cfg):
	return apply(getattr(sys.modules['gsm.port'], type), cfg)

def start_daemon(dev):
	port = apply(create_port, dev)
	gsm = GSM(port)
	sms = GSM0705(gsm)
	daemon = DAEMON(gsm, [])
	sms_handle = SmsHandle(daemon, sms, CmdProc(CMD))
	daemon.add_event_handle(sms.GSM0705_CMTI_HANDLE(sms_handle.execute))
	daemon.add_command(sms.delete_all, PRIV_M)
	daemon.run()

def config():
	reload(sys)
	sys.setdefaultencoding('utf-8')

	logging.config.fileConfig('log.cfg')
	
if __name__ == '__main__':
	config()
	
	dev_cfg = sys.argv[1:]
	if len(dev_cfg) > 0:
		logging.info('Starting...')
		start_daemon((dev_cfg[0], dev_cfg[1:]))
	else:
		sys.stderr.write('The device type is not specified.')
		sys.exit(-1)
Example #33
import schedule
import time
import tweepy
import datetime
import logging
# Assumed missing import for ModbusTcpClient used in get_data() below,
# e.g. with pymodbus 2.x: from pymodbus.client.sync import ModbusTcpClient

#getcontext().prec = 2

#TWITTER DEV SETTINGS
CONSUMER_KEY = ""
CONSUMER_SECRET = ""
ACCESS_KEY = ""
ACCESS_SECRET = ""

# Set up logging. The original called logging.config(), but logging.config
# is a module, not a callable; basicConfig is the likely intent.
logging.basicConfig(level=logging.INFO)


def get_data():
    for i in range(0, 50):
        url = "192.168.1.1{num:02d}".format(num=i)
        client = ModbusTcpClient(url)
        if client.connect():
            logging.info("Controller found in {}".format(url))
            #print(url)
            rr = client.read_holding_registers(0, 60, unit=1)
            client.close()
            return rr.registers

    return
Example #34
import logging.config
from os import environ
from pathlib import Path

import chartkick
from decouple import Csv, config
from django.utils.log import DEFAULT_LOGGING

# Disable Django's logging setup
LOGGING_CONFIG = None

LOGLEVEL = environ.get("LOGLEVEL", "info").upper()

# Pushover Config
PUSHOVER_TOKEN = config("PUSHOVER_TOKEN")
PUSHOVER_USER_KEY = config("PUSHOVER_USER_KEY")

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve(strict=True).parents[1]

DEBUG = config("DEBUG", default=False, cast=bool)

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config("SECRET_KEY")

# Application definition
Example #35
def start():
    print(colors.green("Starting frigg worker"))
    local_run("mkdir -p %s" % config('TMP_DIR'))
    fetcher()
Example #36
        """
        Retrieve a deeply nested value from the configuration.
        
        Like ``accessor['database.uri']`` to access
        ``accessor._data['database']['uri']``.
        """
        try:
            return reduce(lambda d, k: d[k], key.split('.'), self._data)
        except:
            return None

    @classmethod
    def instance(cls):
        if not cls._instance:
            cls._instance = ConfigAccessor()
            cls._instance.load('main.yml')
            cls._instance.load('{}.yml'.format(environment))
        return cls._instance

def config(key, default=None):
    accessor = ConfigAccessor.instance()
    value = accessor[key]
    if value is not None:
        return value
    return default

import logging.config
logging.config.dictConfig(config('logger'))
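A usage sketch of the resulting helper, assuming main.yml defines a database section:

config('database.uri')                 # nested lookup via a dotted key
config('database.missing', default=3)  # falls back to the default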

Example #37
def get_config_from_filename_and_set_up_logging(filename):
    conf = config([filename])
    set_up_logging(conf)
    return conf
Example #38
def set_logger(cliargs):
    if cliargs.loglevel is not None:
        level = {'console': int(cliargs.loglevel[0]),
                 'file': int(cliargs.loglevel[1:])}
    else:
        level = {'console': -1,
                 'file': -1}

    if level['console'] not in (0, 1, 2, 3):
        level['console'] = config('Log').get_int('log_level_stdout')

        if level['console'] not in (0, 1, 2, 3):
            level['console'] = 1

    if level['file'] not in (0, 1, 2, 3):
        level['file'] = config('Log').get_int('log_level_file')

        if level['file'] not in (0, 1, 2, 3):
            level['file'] = 0

    if cliargs.logfile is not None:
        logfile = os.path.expanduser(cliargs.logfile)
    else:
        logfile = os.path.expanduser(config('Log')['log_file'])

    if level['file'] > 0:
        # Try to make the directory separately from the configuration, because
        # they could be set to different paths
        try:
            os.makedirs(os.path.dirname(logfile),
                        mode=_USER_FOLDER_PERMISSIONS)
        except OSError as e:
            # ENOENT can happen if logfile is a string without "/", i.e. it
            # represents a file in the current folder
            if e.errno not in (errno.EEXIST, errno.ENOENT):
                raise

    console_level = ('CRITICAL', 'ERROR', 'INFO', 'DEBUG')[level['console']]
    file_level = ('CRITICAL', 'WARNING', 'INFO', 'DEBUG')[level['file']]
    console_formatter = ('simple_default', 'simple_default', 'simple_default',
                         'verbose_default')[level['console']]
    console_info_formatter = ('simple_info', 'simple_info', 'simple_info',
                              'verbose_info')[level['console']]
    file_low_formatter = ('simple', 'simple', 'simple', 'verbose')[level['file']]
    file_formatter = ('simple', 'verbose', 'verbose', 'verbose')[level['file']]
    file_max_bytes = (1, 10000, 30000, 100000)[level['file']]
    file_delay = (True, True, False, False)[level['file']]
    handlers = [('null', 'console', 'console', 'console')[level['console']],
                ('null', 'console_info', 'console_info', 'console_info')
                 [level['console']],
                ('null', 'file_low', 'file_low', 'file_low')[level['file']],
                ('null', 'file', 'file', 'file')[level['file']]]

    logconfig = {
        'version': 1,
        'formatters': {
            'simple': {
                'format': '%(asctime)s %(relativeCreated)d | %(levelname)s: '
                                                                 '%(message)s',
                'datefmt': '%Y-%m-%d %H:%M'
            },
            'simple_info': {
                'format': ':: %(message)s'
            },
            'simple_default': {
                'format': '%(levelname)s: %(message)s'
            },
            'verbose': {
                'format': '%(asctime)s %(relativeCreated)d | %(levelname)s: '
                        '%(message)s [%(pathname)s %(lineno)d %(threadName)s]',
                'datefmt': '%Y-%m-%d %H:%M'
            },
            'verbose_info': {
                'format': '%(relativeCreated)d | :: %(message)s [%(module)s '
                                                '%(lineno)d %(threadName)s]'
            },
            'verbose_default': {
                'format': '%(relativeCreated)d | %(levelname)s: %(message)s '
                                    '[%(module)s %(lineno)d %(threadName)s]'
            }
        },
        'filters': {
            'console': {
                '()': 'outspline.coreaux.logger.LevelFilter',
                'levels': ('INFO', ),
                'inverse': True,
            },
            'console_info': {
                '()': 'outspline.coreaux.logger.LevelFilter',
                'levels': ('INFO', ),
                'inverse': False,
            },
            'file_low': {
                '()': 'outspline.coreaux.logger.LevelFilter',
                'levels': ('INFO', 'DEBUG'),
                'inverse': False,
            },
            'file': {
                '()': 'outspline.coreaux.logger.LevelFilter',
                'levels': ('INFO', 'DEBUG'),
                'inverse': True,
            }
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'level': console_level,
                'formatter': console_formatter,
                'filters': ['console', ]
            },
            'console_info': {
                'class': 'logging.StreamHandler',
                'level': console_level,
                'formatter': console_info_formatter,
                'filters': ['console_info', ]
            },
            'file_low': {
                'class': 'logging.handlers.RotatingFileHandler',
                'level': file_level,
                'formatter': file_low_formatter,
                'filename': logfile,
                'maxBytes': file_max_bytes,
                'backupCount': 1,
                'delay': file_delay,
                'filters': ['file_low', ]
            },
            'file': {
                'class': 'logging.handlers.RotatingFileHandler',
                'level': file_level,
                'formatter': file_formatter,
                'filename': logfile,
                'maxBytes': file_max_bytes,
                'backupCount': 1,
                'delay': file_delay,
                'filters': ['file', ]
            },
            'null': {
                'class': 'logging.NullHandler',
                'formatter': 'simple',
            }
        },
        'loggers': {
            'outspline': {
                'level': 'DEBUG',
                'handlers': handlers,
                'propagate': False
            }
        },
        'root': {
            'level': 'DEBUG'
        }
    }

    logging.config.dictConfig(logconfig)

    global log
    log = logging.getLogger('outspline')

    log.info('Start logging (level {}{}, file {})'.format(
                        str(level['console']), str(level['file']), logfile))
    log.info('{} version {} ({})'.format('Outspline',
                                    outspline.components.main.version,
                                    outspline.components.main.release_date))
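The filters reference outspline.coreaux.logger.LevelFilter, which is not shown. A plausible minimal implementation consistent with how it is configured (a levels tuple plus an inverse flag); the real one may differ:

import logging

class LevelFilter(logging.Filter):
    """Pass records whose level name is in `levels`; with inverse=True,
    pass only those that are not."""

    def __init__(self, levels, inverse=False):
        super().__init__()
        self.levels = levels
        self.inverse = inverse

    def filter(self, record):
        return (record.levelname in self.levels) != self.inverse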
Example #39
import logging.config

from .base import *


DEBUG = config('DEBUG', cast=bool, default=False)

ALLOWED_HOSTS += [
    '.ahmia.fi',   # Allow domain and subdomains
    '.ahmia.fi.',  # Also allow FQDN and subdomains
    '.msydqstlz2kzerdg.onion',
    '.msydqstlz2kzerdg.onion.',
]

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': config('DB_NAME', default='postgres'),     # Database name
        'USER': config('DB_USER', default='postgres'),     # User with permissions on that DB
        'PASSWORD': config('DB_PASS', default=''),         # Password for the user specified above
        'HOST': config('DB_HOST', default="localhost"),    # Set to empty string for localhost
        'PORT': config('DB_PORT', default=5432, cast=int)  # pbbouncer port
    }
}

DEPLOYMENT_DIR = config('DEPLOYMENT_DIR', default='/usr/local/lib/ahmia-site/ahmia/')

# additionally to default LOGGING settings from base.py
LOGGING.update({
    'handlers': {
        'django_file': {
Example #40
import json
import logging
import logging.config

from decouple import config


def setup_logging(path: str = "logging.json") -> None:
    with open(path, "rt") as f:
        _config = json.load(f)
    logging.config.dictConfig(_config)


setup_logging()
DB_LINK = config("DB_LINK", cast=str)
BOT_API_TOKEN = config("BOT_API_TOKEN", cast=str)
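For reference, a minimal logging.json that setup_logging above would accept (dictConfig schema, version 1):

import json

minimal = {
    'version': 1,
    'handlers': {'console': {'class': 'logging.StreamHandler'}},
    'root': {'level': 'INFO', 'handlers': ['console']},
}
print(json.dumps(minimal, indent=2))  # contents to place in logging.json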
Example #41
def transaction_post():
    try:
        web3_config = config(section='web3')
        vault_config = config(section='vault')
        vault_conn = hvac.Client(url=vault_config['url'],
                                 token=vault_config['token'])
        w3 = Web3(Web3.HTTPProvider(web3_config['chain_ip']))

        with open(web3_config['contract_abi'], 'rb') as json_file:
            contract_json = json.load(json_file)

        contract_abi = contract_json['abi']
        token_contract = w3.eth.contract(web3_config['contract_addr'],
                                         abi=contract_abi)
        operator_address = token_contract.functions.getOperatorAccount().call()
        logging.info('operator=' + operator_address)
        body = request.get_json()
        logging.info('payload = %s', json.dumps(body))

        if body is None:
            data = dict({'status': 'nodata'})
            return Response(json.dumps(data), mimetype='application/json')

        dataset_id = uuid.UUID(
            body['dataset_id']).bytes  #convert UUID to bytes
        file_hash = body['data_hash']
        compression = body['data_compression']
        ipfs_hash = body['data_loc_hash']
        size = body['num_of_records']
        # Ethereum VM does not take floating points, use cents
        price = int(body['trade'] * 100)
        pricing_unit = body['pricing_unit']
        token_uri = body['access_url']
        buyer_account = body['buyer_wallet_addr']
        seller_account = body['seller_wallet_addr']
        seller_email = body['seller_email']

        # The operator mints a ERC-721 token for the seller
        # Estimate gas need
        gas_price = w3.eth.gasPrice
        estimated_gas = token_contract.functions.mint(
            dataset_id, file_hash, compression, ipfs_hash, size, price,
            pricing_unit, token_uri, seller_account).estimateGas({
                'nonce':
                w3.eth.getTransactionCount(operator_address),
                'from':
                operator_address
            })

        logging.info('estimated gas for minting a token = %d' % estimated_gas)
        txn = token_contract.functions.mint(
            dataset_id, file_hash, compression, ipfs_hash, size, price,
            pricing_unit, token_uri, seller_account).buildTransaction({
                'nonce':
                w3.eth.getTransactionCount(operator_address),
                'from':
                operator_address,
                'gas':
                estimated_gas,
                'gasPrice':
                gas_price
            })
        vault_key_query = vault_conn.secrets.kv.v1.read_secret(
            path='cryptooperator', mount_point='/secret')
        private_key = vault_key_query['data']['pk']
        signed = w3.eth.account.signTransaction(txn, private_key)
        txn_hash = w3.eth.sendRawTransaction(signed.rawTransaction)
        logging.info("token mint: %s" % str(txn_hash.hex()))
        tx_receipt = w3.eth.getTransactionReceipt(txn_hash)
        while tx_receipt is None:
            tx_receipt = w3.eth.getTransactionReceipt(txn_hash)
            logging.info('waiting for transaction to be mined')
            time.sleep(1)

        mint_event = token_contract.events.MintToken().processReceipt(
            tx_receipt)
        token_id = mint_event[0]['args']['_tokenId']
        logging.info('token id = %d' % token_id)

        #Supply transaction and gas fees to seller
        current_balance = w3.eth.getBalance(seller_account)
        logging.info('current seller account %s balance %d' %
                     (seller_account, current_balance))

        if current_balance < Web3.toWei(0.001, 'ether'):
            diff = Web3.toWei(0.001, 'ether') - current_balance
            logging.info('wei to send: %d' % diff)
            gas = w3.eth.estimateGas({
                'to': seller_account,
                'from': operator_address,
                'value': diff
            })
            signed_txn = w3.eth.account.signTransaction(
                dict(nonce=w3.eth.getTransactionCount(operator_address),
                     to=seller_account,
                     gas=gas,
                     gasPrice=gas_price,
                     value=diff), private_key)

            txn_hash = w3.eth.sendRawTransaction(signed_txn.rawTransaction)
            logging.info('transfer tx hash-> %s' % str(txn_hash.hex()))
            tx_receipt = None  # reset; the stale receipt above would skip this wait
            while tx_receipt is None:
                tx_receipt = w3.eth.getTransactionReceipt(txn_hash)
                logging.info('waiting for transaction to be mined')
                time.sleep(0.1)

        # Transfer the data token from seller to buyer
        gas = token_contract.functions.purchaseWithFiat(
            token_id, 0, buyer_account).estimateGas({
                'nonce':
                w3.eth.getTransactionCount(seller_account),
                'from':
                seller_account
            })

        logging.info('estimate gas = %d' % gas)
        txn = token_contract.functions.purchaseWithFiat(token_id,0, buyer_account)\
            .buildTransaction({'nonce': w3.eth.getTransactionCount(seller_account),
                               'from': seller_account,
                               'gas': gas,
                               'gasPrice': w3.eth.gasPrice})
        logging.info('seller account = %s' % seller_account)
        kv_path = str(seller_email.encode('utf-8').hex()) + '-1'
        vault_key_query = vault_conn.secrets.kv.v1.read_secret(
            path=kv_path, mount_point='/secret')
        private_key = vault_key_query['data']['pk']
        signed = w3.eth.account.signTransaction(txn, private_key)
        txn_hash = w3.eth.sendRawTransaction(signed.rawTransaction)
        tx_receipt = None  # reset before waiting on the new transaction
        while tx_receipt is None:
            tx_receipt = w3.eth.getTransactionReceipt(txn_hash)
            logging.info('waiting for transaction to be mined')
            time.sleep(0.1)
        tx_hash_str = str(txn_hash.hex())
        logging.info('txn hash = %s' % tx_hash_str)
        data = dict({
            'status': 'ok',
            'token_id': token_id,
            'txn_hash': tx_hash_str
        })
        return Response(json.dumps(data), mimetype='application/json')

    except Exception as e:
        logging.error("Ouch Exception occurred", exc_info=True)
        data = dict({
            'status': 'failed',
            'error': 'settlement error check /tmp/orderlog'
        })
        return Response(json.dumps(data), mimetype='application/json')
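A design note on the receipt-polling loops above: web3.py ships a built-in wait that matches the camelCase v4/v5 API used in this snippet and can replace them:

tx_receipt = w3.eth.waitForTransactionReceipt(txn_hash, timeout=120)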