Example #1
    def create_logger(self, log_level=logging.INFO):
        daiquiri.setup(
            outputs=(
                daiquiri.output.File(
                    directory=self.settings.get('LOG_PATH'),
                    program_name=self.project_name,
                ),
                daiquiri.output.STDOUT,
            )
        )

        daiquiri.getLogger(program_name=self.project_name).logger.level = log_level
        self.logger = daiquiri.getLogger(
            program_name=self.project_name, log_level=log_level
        )
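
A minimal standalone sketch of the same pattern, assuming the log directory exists; "myproject" and "/tmp/logs" are illustrative placeholders standing in for self.project_name and self.settings.get('LOG_PATH'):

import logging

import daiquiri
import daiquiri.output

# Hypothetical values; the File output logs under the given directory while
# STDOUT mirrors records to the console.
daiquiri.setup(
    level=logging.INFO,
    outputs=(
        daiquiri.output.File(directory="/tmp/logs", program_name="myproject"),
        daiquiri.output.STDOUT,
    ),
)
logger = daiquiri.getLogger(program_name="myproject")
logger.info("logger configured")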
Example #2
def setup_logging(verbose=0):
    global logging
    progname = basename(sys.argv[0])
    daiquiri.setup(
        level=(logging.ERROR - 10*verbose),
        program_name=progname)
    logging = daiquiri.getLogger(progname)
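
The level arithmetic above works because the standard logging levels sit 10 apart and logging.ERROR is 40, so each -v step lowers the threshold by exactly one level. A quick sketch of the mapping:

import logging

# logging.ERROR == 40; each verbosity step subtracts 10.
for verbose in range(4):
    print(verbose, logging.getLevelName(logging.ERROR - 10 * verbose))
# 0 ERROR, 1 WARNING, 2 INFO, 3 DEBUG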
Example #3
import webinterface.queue as queue
from webinterface.common import templates
from webinterface.common import get_user_information

###################################################################################
## Helper classes
###################################################################################

daiquiri.setup(
    level=logging.INFO,
    outputs=(daiquiri.output.Stream(
        formatter=daiquiri.formatter.ColorFormatter(
            fmt="%(color)s%(levelname)-8.8s "
            "%(name)s: %(message)s%(color_stop)s")), ),
)
logger = daiquiri.getLogger("webgui")


class ExtendedUser(SimpleUser):
    def __init__(self, username: str, is_admin: bool = False) -> None:
        self.username = username
        self.admin_status = is_admin

    @property
    def is_admin(self) -> bool:
        return self.admin_status


class SessionAuthBackend(AuthenticationBackend):
    async def authenticate(self, request):
Example #4
import click

import daiquiri

import fixtures

import pbr.version

import pkg_resources

import psutil

from pifpaf import util

LOG = daiquiri.getLogger("pifpaf")


def _format_multiple_exceptions(e, debug=False):
    valid_excs = []
    # NOTE(sileht): Why do I not use this ? :
    #   excs = list(e.args)
    # Because it raises SystemExit(2) on python3 !!?!?
    excs = []
    for i in range(len(e.args)):
        excs.append(e.args[i])
    while excs:
        (etype, value, tb) = excs.pop(0)
        if (etype == fixtures.MultipleExceptions):
            excs.extend(value.args)
        elif (etype == fixtures.SetupError):
Example #5
datasetID = ''  # assumed default; defined alongside modelID in the original module
modelID = ''
sampleZ = None

daiquiri.setup(
    level=logging.INFO,
    outputs=(
        daiquiri.output.Stream(sys.stdout),
        daiquiri.output.File(
            './logs/logs.log',
            level=logging.INFO,
            formatter=daiquiri.formatter.ColorFormatter(
                fmt="\n###### %(asctime)s [%(levelname)s] %(name)s -> %(message)s ######\n"
            )),
    ))
logger = daiquiri.getLogger(__name__, propagate=False)

logger.info('Starting the pipeline on dataset {} and model {}'.format(
    datasetID, modelID))
if sampleZ:
    logger.info('This run is a trial with a sample of n={}'.format(sampleZ))


class ETL(luigi.Task):

    dataset_id = luigi.Parameter(default=datasetID)
    path_dataset = luigi.Parameter(default=r'')

    def requires(self):
        return []
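
A hypothetical local run of the task above; luigi.build with local_scheduler=True executes without a central scheduler daemon ("demo" is a placeholder dataset id):

import luigi

luigi.build([ETL(dataset_id="demo")], local_scheduler=True)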
Example #6
    async def _extract_pulls_from_stream(
            self, installation: context.Installation) -> PullsToConsume:
        messages: typing.List[typing.Tuple[
            T_MessageID, T_MessagePayload]] = await self.redis_stream.xrange(
                installation.stream_name, count=config.STREAM_MAX_BATCH)
        LOG.debug(
            "read stream",
            stream_name=installation.stream_name,
            messages_count=len(messages),
        )
        statsd.histogram("engine.streams.size", len(messages))
        statsd.gauge("engine.streams.max_size", config.STREAM_MAX_BATCH)

        # TODO(sileht): Put this cache in Repository context
        opened_pulls_by_repo: typing.Dict[
            github_types.GitHubRepositoryName,
            typing.List[github_types.GitHubPullRequest], ] = {}

        # Groups stream by pull request
        pulls: PullsToConsume = PullsToConsume(collections.OrderedDict())
        for message_id, message in messages:
            data = msgpack.unpackb(message[b"event"], raw=False)
            repo_name = github_types.GitHubRepositoryName(data["repo"])
            repo_id = github_types.GitHubRepositoryIdType(data["repo_id"])
            source = typing.cast(context.T_PayloadEventSource, data["source"])
            if data["pull_number"] is not None:
                key = (
                    repo_name,
                    repo_id,
                    github_types.GitHubPullRequestNumber(data["pull_number"]),
                )
                group = pulls.setdefault(key, ([], []))
                group[0].append(message_id)
                group[1].append(source)
            else:
                logger = daiquiri.getLogger(
                    __name__,
                    gh_repo=repo_name,
                    gh_owner=installation.owner_login,
                    source=source,
                )
                if repo_name not in opened_pulls_by_repo:
                    try:
                        opened_pulls_by_repo[repo_name] = [
                            p async for p in installation.client.items(
                                f"/repos/{installation.owner_login}/{repo_name}/pulls"
                            )
                        ]
                    except Exception as e:
                        if exceptions.should_be_ignored(e):
                            opened_pulls_by_repo[repo_name] = []
                        else:
                            raise

                converted_messages = await self._convert_event_to_messages(
                    installation,
                    repo_id,
                    repo_name,
                    source,
                    opened_pulls_by_repo[repo_name],
                )

                logger.debug("event unpacked into %s messages",
                             len(converted_messages))
                messages.extend(converted_messages)
                deleted = await self.redis_stream.xdel(
                    installation.stream_name, message_id)
                if deleted != 1:
                    # FIXME(sileht): During shutdown, Heroku may have already started
                    # another worker that has already taken the lead on this stream_name.
                    # This can create duplicate events in the streams, but that should
                    # not be a big deal, as the engine will not be run by the worker
                    # that is shutting down.
                    contents = await self.redis_stream.xrange(
                        installation.stream_name,
                        start=message_id,
                        end=message_id)
                    if contents:
                        logger.error(
                            "message `%s` have not been deleted has expected, "
                            "(result: %s), content of current message id: %s",
                            message_id,
                            deleted,
                            contents,
                        )
        return pulls
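
The getLogger call above uses daiquiri's keyword-argument adapter: extra keywords passed when creating the logger are bound as context and attached to every record it emits. A minimal sketch with placeholder repository values:

import logging

import daiquiri

daiquiri.setup(level=logging.DEBUG)
# gh_repo and gh_owner are illustrative placeholders bound to every record.
logger = daiquiri.getLogger(__name__, gh_repo="example-repo", gh_owner="example-owner")
logger.debug("event unpacked into %s messages", 3)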
Example #7
"""geo3dfeatures package
"""

import logging

import daiquiri

__version__ = "0.4.0"

daiquiri.setup(
    level=logging.INFO,
    outputs=(
        daiquiri.output.Stream(
            formatter=daiquiri.formatter.ColorFormatter(
                fmt=(
                    "%(color)s[%(asctime)s] %(module)s.%(funcName)s "
                    "(%(levelname)s) -%(color_stop)s %(message)s"
                ),
                datefmt="%H:%M:%S",
            )
        ),
    ),
)
logger = daiquiri.getLogger("root")
Example #8
import logging
import common.monitor as monitor
import daiquiri

logger = daiquiri.getLogger("rule_evaluation")

safe_eval_cmds = {"float": float, "int": int, "str": str}


def replace_tags(rule, tags):
    """Replaces all tags with format @tagname@ in the given rule string with
       the corresponding values from the currently processed series (stored
       in the second argument)."""
    # Run the substitute operation manually instead of using
    # the standard string function to enforce that the values
    # read from the tags are treated as strings by default
    tags_found = []
    i = 0
    while i < len(rule):
        opening = rule.find("@", i)
        if opening < 0:
            break
        closing = rule.find("@", opening + 1)
        if closing < 0:
            break
        tagstring = rule[opening + 1:closing]
        if tagstring in tags:
            tags_found.append(tagstring)
        i = closing + 1

    for tag in tags_found:
Example #9
from cliff import command
from cliff import commandmanager
from cliff import lister

import daiquiri

import fixtures

import pbr.version

import pkg_resources

import six


LOG = daiquiri.getLogger("pifpaf")


def _format_multiple_exceptions(e, debug=False):
    valid_excs = []
    excs = list(e.args)
    while excs:
        (etype, value, tb) = excs.pop(0)
        if (etype == fixtures.MultipleExceptions):
            excs.extend(value.args)
        elif (etype == fixtures.SetupError):
            continue
        else:
            valid_excs.append((etype, value, tb))

    if len(valid_excs) == 1:
Example #10
    def get_logger(self, name='crontab_log'):
        logger = daiquiri.getLogger(name)
        return logger
Example #11
# App-specific includes
import common.monitor as monitor
from common.constants import mercure_defs

###################################################################################
## Configuration and initialization
###################################################################################

daiquiri.setup(
    level=logging.INFO,
    outputs=(daiquiri.output.Stream(
        formatter=daiquiri.formatter.ColorFormatter(
            fmt="%(color)s%(levelname)-8.8s "
            "%(name)s: %(message)s%(color_stop)s")), ),
)
logger = daiquiri.getLogger("bookkeeper")

bookkeeper_config = Config("configuration/bookkeeper.env")
BOOKKEEPER_PORT = bookkeeper_config("PORT", cast=int, default=8080)
BOOKKEEPER_HOST = bookkeeper_config("HOST", default="0.0.0.0")
DATABASE_URL = bookkeeper_config("DATABASE_URL",
                                 default="postgresql://mercure@localhost")

database = databases.Database(DATABASE_URL)
app = Starlette(debug=True)

###################################################################################
## Definition of database tables
###################################################################################

metadata = sqlalchemy.MetaData()
Example #12
from pathlib import Path
import uuid
import json
import shutil
import daiquiri
import socket
from datetime import datetime

# App-specific includes
import common.config as config
import common.rule_evaluation as rule_evaluation
import common.monitor as monitor
import common.helper as helper
from common.constants import mercure_defs, mercure_names, mercure_sections, mercure_rule, mercure_config, mercure_options, mercure_actions

logger = daiquiri.getLogger("generate_taskfile")


def compose_task(uid, uid_type, triggered_rules, tags_list, target):
    task_json = {}
    task_json.update(add_info(uid, uid_type, triggered_rules, tags_list))
    task_json.update(add_dispatching(triggered_rules, tags_list, target))
    task_json.update(add_processing(triggered_rules, tags_list))
    return task_json


def add_processing(applied_rule, tags_list):
    process_section = {}
    process_section[mercure_sections.PROCESS] = {}

    if (config.mercure[mercure_config.RULES][applied_rule].get(
Example #13
    Model for PASTA State of Health events

:Author:
    servilla

:Created:
    3/18/18
"""
import daiquiri
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, desc, asc
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from soh.config import Config

logger = daiquiri.getLogger('soh_db.py: ' + __name__)

Base = declarative_base()


class SohEvent(Base):
    __tablename__ = 'soh_event'

    event_id = Column(Integer(), primary_key=True, autoincrement=True)
    timestamp = Column(DateTime(), nullable=False)


class SohAssert(Base):
    __tablename__ = 'soh_assert'

    assert_id = Column(Integer(), primary_key=True, autoincrement=True)
Example #14
:Author:
    servilla

:Created:
    3/7/18
"""
import base64

import daiquiri
from flask_login import UserMixin
import requests

from webapp import login
from webapp.config import Config

logger = daiquiri.getLogger('user.py: ' + __name__)


class User(UserMixin):
    def __init__(self, auth_token=None):
        self._auth_token = auth_token

    @staticmethod
    def authenticate(user_dn=None, password=None):
        auth_token = None
        r = requests.get(Config.AUTH_URL, auth=(user_dn, password))
        if r.status_code == requests.codes.ok:
            auth_token = r.cookies['auth-token']
        return auth_token

    def get_id(self):
Example #15
import shutil
import time
from datetime import datetime
from pathlib import Path
from shlex import split
from subprocess import CalledProcessError, run

import daiquiri

from common.monitor import s_events, send_series_event, send_event, h_events, severity
from dispatch.retry import increase_retry
from dispatch.status import is_ready_for_sending
from common.constants import mercure_names


logger = daiquiri.getLogger("send")

DCMSEND_ERROR_CODES = {
    1: "EXITCODE_COMMANDLINE_SYNTAX_ERROR",
    21: "EXITCODE_NO_INPUT_FILES",
    22: "EXITCODE_INVALID_INPUT_FILE",
    23: "EXITCODE_NO_VALID_INPUT_FILES",
    43: "EXITCODE_CANNOT_WRITE_REPORT_FILE",
    60: "EXITCODE_CANNOT_INITIALIZE_NETWORK",
    61: "EXITCODE_CANNOT_NEGOTIATE_ASSOCIATION",
    62: "EXITCODE_CANNOT_SEND_REQUEST",
    65: "EXITCODE_CANNOT_ADD_PRESENTATION_CONTEXT",
}


def _create_command(target_info, folder):
Example #16
# coding: utf-8

"""Flask Web Application for Jitenshea (Bicycle-sharing data)
"""

import daiquiri
import logging

from flask import Flask, render_template, abort


daiquiri.setup(level=logging.INFO)
logger = daiquiri.getLogger("jitenshea-webapp")


app = Flask(__name__)
app.config['ERROR_404_HELP'] = False
app.config['SWAGGER_UI_DOC_EXPANSION'] = 'list'

CITIES = ['bordeaux', 'lyon']


def check_city(city):
    if city not in CITIES:
        abort(404, "City {} not found".format(city))

@app.route('/')
def index():
    return render_template("index.html")

@app.route('/doc/')
Example #17
    def _loop(self, max_iter=None):
        if not self._SILENCE_LOGGER:  # pragma: no cover - don't want to clutter tests
            logger = daiquiri.getLogger(self._name)
            logger.info('Starting {}'.format(self._name))
        # fleming and dateutil have arguments that just differ by ending in an "s"
        fleming_kwargs = self._every_kwargs
        relative_delta_kwargs = {}

        # build the relative delta kwargs
        for k, v in self._every_kwargs.items():
            relative_delta_kwargs[k + 's'] = v

        # if a starting time was given use the floored second of that time as the previous time
        if self._starting is not None:
            previous_time = fleming.floor(self._starting, second=1)

        # otherwise use the interval floored value of now as the previous time
        else:
            previous_time = fleming.floor(datetime.datetime.now(),
                                          **fleming_kwargs)

        # keep track of iterations
        n_iter = 0
        # this is the infinite loop that runs the cron.  It will only be stopped when the
        # process is killed by its monitor.
        while True:
            n_iter += 1
            if max_iter is not None and n_iter > max_iter:
                break
            # everything is run in a try block so errors can be explicitly handled
            try:
                # push forward the previous/next times
                next_time = previous_time + relativedelta(
                    **relative_delta_kwargs)
                previous_time = next_time

                # get the current time
                now = datetime.datetime.now()

                # if our job ran longer than an interval, we will need to catch up
                if next_time < now:
                    continue

                # sleep until the computed time to run the function
                sleep_seconds = (next_time - now).total_seconds()
                time.sleep(sleep_seconds)

                # See what time it is on wakeup
                timestamp = datetime.datetime.now()

                # If passed until date, break out of here
                if self._until is not None and timestamp > self._until:
                    break

                # If not inhibited, run the function
                if self._is_uninhibited(timestamp):
                    self._log('Running {}'.format(self._name))
                    self._func(*self._func_args, **self._func_kwargs)

            except KeyboardInterrupt:  # pragma: no cover
                pass

            except:  # noqa
                # only raise the error if not in robust mode.
                if self._robust:
                    s = 'Error in tab\n' + traceback.format_exc()
                    logger = daiquiri.getLogger(self._name)
                    logger.error(s)
                else:
                    raise
        self._log('Finishing {}'.format(self._name))
Example #18
from werkzeug.utils import secure_filename

from deeposlandia import utils
from deeposlandia.inference import predict

MODELS = ('feature_detection', 'semantic_segmentation')
DATASETS = ('mapillary', 'shapes')
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg'])
PROJECT_FOLDER = '/tmp/deeposlandia'
UPLOAD_FOLDER = os.path.join(PROJECT_FOLDER, 'uploads/')
PREDICT_FOLDER = os.path.join(PROJECT_FOLDER, 'predicted/')
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
os.makedirs(PREDICT_FOLDER, exist_ok=True)

daiquiri.setup(level=logging.INFO)
logger = daiquiri.getLogger("deeposlandia-webapp")

app = Flask(__name__)
app.config['ERROR_404_HELP'] = False
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER


def check_model(model):
    """Check if `model` is valid, *i.e.* equal to `feature_detection` or
    `semantic_segmentation`

    Parameters
    ----------
    model : str
        String to verify
    """
Example #19
    def _log(self, msg):
        if self._verbose and not self._SILENCE_LOGGER:  # pragma: no cover
            logger = daiquiri.getLogger(self._name)
            logger.info(msg)
Example #20
import daiquiri

from datetime import datetime

from collections import OrderedDict
from pathlib import Path

from typing import List, Union

from IPython import get_ipython
from IPython.core.display import display, Javascript

from ipykernel.comm import Comm

logger = daiquiri.getLogger()

Jupyter = get_ipython()
"""Current InteractiveShell instance."""

_is_notebook = Jupyter and Jupyter.has_trait('kernel')


class CommError(Exception):
    """Base class for Comm related exceptions."""
    def __init__(self, *args, **kwargs):
        msg = ". ".join([*args[:1], "HINT: Try reloading <F5> the window."])

        args = msg, *args[1:]
        super().__init__(*args, **kwargs)
Example #21
def main(
    ctx, verbose, api_key, service_account,
    config_file, level, export, outputs
):
    """
    """

    Log(verbose).initialize()
    logger = daiquiri.getLogger(ctx.command.name, subsystem="MAIN")
    logger.info(
        "================ {0} =================".format(
            ctx.command.name
        )
    )
    # --config-file
    fn_config = os.path.expanduser(config_file)
    logger.debug(
        "Configuration file =====> {0}".format(fn_config)
    )
    if os.path.exists(fn_config):
        ctx.default_map = CONTEXT_SETTINGS['default_map']
        logger.debug(
            "Context with default map =====> {0}".format(
                json.dumps(ctx.default_map)
            )
        )
    else:
        logger.critical(
            "Configuration file {0} is not available!".format(
                fn_config
            )
        )
        raise click.Abort()

    try:
        pass
    except KeyError as e:
        logger.debug("Error =====> {0}".format(
            e.message
        ))
        raise

    scopes = []
    for wapor_data_key in ctx.default_map.keys():
        if wapor_data_key.startswith("google."):
            scopes.append(ctx.default_map[wapor_data_key]["scope"])
        if wapor_data_key == "gee_workspace_base":
            ee_workspace_base = ctx.default_map[wapor_data_key]
        if wapor_data_key == "gee_workspace_project":
            ee_workspace_wapor = ctx.default_map[wapor_data_key]
    logger.debug("Scopes =====> {0}".format(scopes))
    # --credential-file
    credentials = os.path.expanduser(service_account)
    logger.debug(
        "Credential file =====> {0}".format(credentials)
    )
    if os.path.exists(credentials):
        logger.info(
            "Authenticate with Service Account {0}".format(credentials)
        )
        auth = Initialize(
            CredentialConfigFile(credentials).get_sa_credentials(scopes)
        )
    elif api_key:
        logger.info(
            "Authenticate with Api Key {0}".format(api_key)
        )
        auth = Initialize(api_key)
    else:
        logger.info(
            "Neither Api Key nor Service Account has been provided! "
            "Please check the default Service Account file {0}".format(
                credentials
            )
        )
        raise click.Abort()

    ctx.obj = {
        'auth': auth,
        'EE_WORKSPACE_BASE': ee_workspace_base,
        'EE_WORKSPACE_WAPOR': os.path.join(
            ee_workspace_base,
            ee_workspace_wapor
        ),
        'level': level,
        'verbose': verbose,
        'export': export,
        'outputs': outputs
    }
Example #22
                                                  model_has_complex_texttypes)

from webapp.buttons import *
from webapp.home.exceptions import *
from webapp.pages import *

from webapp.home.views import process_up_button, process_down_button, get_help, get_helps
from metapype.eml import names
from metapype.model.node import Node

from webapp.home.intellectual_rights import (INTELLECTUAL_RIGHTS_CC0,
                                             INTELLECTUAL_RIGHTS_CC_BY)

from webapp.home.views import set_current_page, get_keywords

logger = daiquiri.getLogger('views: ' + __name__)
res_bp = Blueprint('res', __name__, template_folder='templates')


def log_error(msg):
    if current_user and hasattr(current_user, 'get_username'):
        logger.error(msg, USER=current_user.get_username())
    else:
        logger.error(msg)


def log_info(msg):
    if current_user and hasattr(current_user, 'get_username'):
        logger.info(msg, USER=current_user.get_username())
    else:
        logger.info(msg)
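
The USER keyword passed to logger.error and logger.info above is a per-call extra: daiquiri loggers accept arbitrary keyword arguments on each log call and attach them to that record. A minimal sketch with a placeholder username:

import logging

import daiquiri

daiquiri.setup(level=logging.INFO)
logger = daiquiri.getLogger("views-demo")
# The keyword argument becomes a structured extra on this single record.
logger.info("saving document", USER="jdoe")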
Example #23
def common(ctx, year, temporal_resolution, input_component, area_code, nodatavalue):
    """
        YEAR 2009|2010|...|2018|2019\n
        TEMPORAL_RESOLUTION A\n
        INPUT_COMPONENT E|T|I|AETI|NPP\n
        AREA_CODE: NA|BKA|AWA|KOG|ODN|ZAN\n
        NODATAVALUE: 255|-9999\n

        example general annual: wapor -l L1 common -- 2016 A E (255)\n
        example area code general annual: wapor -l L3 common -- 2016 A E BKA (255)\n
        example AETI annual: wapor -l L1 common -- 2016 A AETI (-9999)\n
        example area code AETI annual: wapor -l L3 common -- 2016 A AETI BKA (-9999)\n
    """

    Log(ctx.obj["verbose"]).initialize()
    logger = daiquiri.getLogger(ctx.command.name, subsystem="COMMON")
    logger.info(
        "================ {0} {1} calculation =================".format(
            ctx.command.name,
            temporal_resolution
        )
    )

    from algorithms.common import Common

    kwargs = {
        "year": year,
        "temporal_resolution": temporal_resolution,
        "component": input_component,
        "area_code": area_code,
        "nodatavalue": nodatavalue
    }
    context = ctx.obj.copy()
    context.update(kwargs)

    # Use the CommonName class to express the wapor naming convention on GEE
    src_image_coll = CommonName(**context).src_collection()
    # L1_E_D, L1_T_D, L1_I_D
    # L3_E_D, L3_T_D, L3_I_D
    logger.debug(
        "src_image_coll variable =====> {0}".format(src_image_coll)
    )
    dst_image_coll = CommonName(**context).dst_collection()
    # L1_E_A, L1_T_A, L1_I_A
    # L3_E_A, L3_T_A, L3_I_A
    logger.debug(
        "dst_image_coll variable =====> {0}".format(dst_image_coll)
    )
    dst_asset_coll = CommonName(**context).dst_assetcollection_id()
    # projects/fao_wapor/L1_E_A
    # projects/fao_wapor/L3_E_A
    logger.debug(
        "dst_asset_coll variable =====> {0}".format(dst_asset_coll)
    )
    dst_asset_image = CommonName(**context).dst_image()
    # L1_E_16
    # L3_E_16_BKA
    logger.debug(
        "dst_asset_image variable =====> {0}".format(dst_asset_image)
    )
    dst_asset_id = CommonName(**context).dst_asset_id()
    # projects/fao_wapor/L1_E_A/L1_E_16
    # projects/fao_wapor/L3_E_A/L3_E_16_BKA
    logger.debug(
        "dst_asset_id variable =====> {0}".format(dst_asset_id)
    )

    kwargs.update(
        {
            "src_coll": os.path.join(
                os.path.join(
                    context["EE_WORKSPACE_WAPOR"],
                    context["level"]
                ),
                src_image_coll
            ),
            "dst_coll": dst_image_coll,
            "dst_asset_coll": dst_asset_coll,
            "dst_asset": dst_asset_id,
            "to_asset": context["export"],
            "intermediate_outputs": context["outputs"]
        }
    )
    logger.debug(
        "Input kwargs dictionary for Common process is =====> \n{0}".format(
            json.dumps(kwargs)
        )
    )

    # create the instance of the common script class
    proc = Common(**kwargs)
    # run the process and return the task id
    result = proc.process_annual()

    if result["errors"]:
        raise click.ClickException(
            "Commad execution has produced:\n{0}".format(
                json.dumps(result)
            )
        )
    else:
        click.echo(
            json.dumps(result)
        )
Example #24
    async def _consume_pulls(
        self,
        installation: context.Installation,
        pulls: PullsToConsume,
    ) -> None:
        LOG.debug("stream contains %d pulls",
                  len(pulls),
                  stream_name=installation.stream_name)
        for (repo, repo_id, pull_number), (message_ids,
                                           sources) in pulls.items():

            statsd.histogram("engine.streams.batch-size", len(sources))
            for source in sources:
                if "timestamp" in source:
                    statsd.histogram(
                        "engine.streams.events.latency",
                        (datetime.datetime.utcnow() -
                         datetime.datetime.fromisoformat(
                             source["timestamp"])).total_seconds(),
                    )

            logger = daiquiri.getLogger(
                __name__,
                gh_repo=repo,
                gh_owner=installation.owner_login,
                gh_pull=pull_number,
            )

            attempts_key = f"pull~{installation.owner_login}~{repo}~{pull_number}"
            try:
                async with self._translate_exception_to_retries(
                        installation.stream_name, attempts_key):
                    await run_engine(installation, repo_id, repo, pull_number,
                                     sources)
                await self.redis_stream.hdel("attempts", attempts_key)
                await self.redis_stream.execute_command(
                    "XDEL", installation.stream_name, *message_ids)
            except IgnoredException:
                await self.redis_stream.execute_command(
                    "XDEL", installation.stream_name, *message_ids)
                logger.debug("failed to process pull request, ignoring",
                             exc_info=True)
            except MaxPullRetry as e:
                await self.redis_stream.execute_command(
                    "XDEL", installation.stream_name, *message_ids)
                logger.error(
                    "failed to process pull request, abandoning",
                    attempts=e.attempts,
                    exc_info=True,
                )
            except PullRetry as e:
                logger.info(
                    "failed to process pull request, retrying",
                    attempts=e.attempts,
                    exc_info=True,
                )
            except StreamRetry:
                raise
            except StreamUnused:
                raise
            except vcr_errors_CannotOverwriteExistingCassetteException:
                raise
            except Exception:
                # Ignore it, it will be retried later
                logger.error("failed to process pull request", exc_info=True)
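
Each handler above passes exc_info=True so the active traceback is written along with the message; this works with daiquiri loggers exactly as with stdlib logging. A minimal sketch:

import logging

import daiquiri

daiquiri.setup(level=logging.INFO)
logger = daiquiri.getLogger("worker-demo")
try:
    raise RuntimeError("boom")
except Exception:
    # exc_info=True appends the current traceback to the record.
    logger.error("failed to process pull request", exc_info=True)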
Example #25
def aeti(ctx, year, temporal_resolution, input_component, area_code, dekad):
    """
        YEAR 2009|2010|...|2018|2019\n
        TEMPORAL_RESOLUTION D (DEKADAL)\n
        INPUT_COMPONENT AETI\n
        AREA_CODE: NA|BKA|AWA|KOG|ODN|ZAN\n
        DEKAD: 01|02|...|36\n

        example whole dekads: wapor -l L1 aeti -- 2016 D AETI\n
        example single dekad: wapor -l L1 aeti -- 2016 D AETI NA 01\n
        example area code whole dekads: wapor -l L3 aeti -- 2016 D AETI BKA\n
        example area code single dekad: wapor -l L3 aeti -- 2016 D AETI BKA 01\n
    """

    Log(ctx.obj["verbose"]).initialize()
    logger = daiquiri.getLogger(ctx.command.name, subsystem="AETI")
    logger.info(
        "================ {0} {1} calculation =================".format(
            ctx.command.name,
            temporal_resolution
        )
    )

    from algorithms.aeti import AETI

    kwargs = {
        "year": year,
        "temporal_resolution": temporal_resolution,
        "component": input_component, #  AETI
        "area_code": area_code,
        "dekad": dekad
    }
    context = ctx.obj.copy()
    context.update(kwargs)

    # Use the AETIName class to express the wapor naming convention on GEE
    src_image_coll = AETIName(**context).src_collection()
    # L1_E_D, L1_T_D, L1_I_D
    logger.debug(
        "src_image_coll variable =====> {0}".format(src_image_coll)
    )
    dst_image_coll = AETIName(**context).dst_collection()
    # L1_AETI_D
    logger.debug(
        "dst_image_coll variable =====> {0}".format(dst_image_coll)
    )
    dst_asset_coll = AETIName(**context).dst_assetcollection_id()
    # projects/fao_wapor/L1_AETI_D
    logger.debug(
        "dst_asset_coll variable =====> {0}".format(dst_asset_coll)
    )
    dst_asset_images = AETIName(**context).dst_images()
    # [L1_AETI_1601,...,L1_AETI_1636]
    # [L3_AETI_1601_BKA,...,L3_AETI_1636_BKA]
    logger.debug(
        "dst_asset_images variable =====> {0}".format(dst_asset_images)
    )
    dst_asset_ids = AETIName(**context).dst_asset_ids()
    # [projects/fao_wapor/L1_AETI_D/L1_AETI_1601,...,
    # ,...,projects/fao_wapor/L1_AETI_D/L1_AETI_1636]
    # [projects/fao_wapor/L3_AETI_D/L3_AETI_1601_BKA,...,
    # ,...,projects/fao_wapor/L3_AETI_D/L3_AETI_1636_BKA]
    logger.debug(
        "dst_asset_ids variable =====> {0}".format(dst_asset_ids)
    )
    if "AETI" in dst_image_coll:
        if context["temporal_resolution"] in [
            tr.dekadal.value,
            tr.short_dekadal.value
        ]:
            # projects/fao_wapor/L1_E_D
            e = os.path.join(
                os.path.join(
                    context["EE_WORKSPACE_WAPOR"],
                    context["level"]
                ),
                src_image_coll[0]
            )
            # projects/fao_wapor/L1_T_D
            t = os.path.join(
                os.path.join(
                    context["EE_WORKSPACE_WAPOR"],
                    context["level"]
                ),
                src_image_coll[1]
            )
            # projects/fao_wapor/L1_I_D
            i = os.path.join(
                os.path.join(
                    context["EE_WORKSPACE_WAPOR"],
                    context["level"]
                ),
                src_image_coll[2]
            )

            colls = {"collI": i, "collE": e, "collT": t}
            kwargs.update(colls)

            kwargs.update(
                {
                    "dst_coll": dst_image_coll,
                    "dst_asset_coll": dst_asset_coll,
                    "dst_assets": dst_asset_ids,
                    "to_asset": context["export"],
                    "intermediate_outputs": context["outputs"]
                }
            )
            logger.debug(
                "Input kwargs dictionary for AETI process is =====> \n{0}".format(
                    json.dumps(kwargs)
                )
            )

            # Create Marmee object instance with specific inputs for AETI and filter
            aeti = AETI(**kwargs)

            # Run the process for dekadal
            try:
                # run the process and return the task id
                result = aeti.process_dekadal()

                if result["errors"]:
                    raise click.ClickException(
                        "Commad execution has produced:\n{0}".format(
                            json.dumps(result)
                        )
                    )
                else:
                    click.echo(
                        json.dumps(result)
                    )
            except Exception as e:
                raise

        elif context["temporal_resolution"] in [
            tr.annual.value,
            tr.short_annual.value
        ]:
            # Run the process for annual
            try:
                aeti = AETI(**kwargs)  # construct here; the dekadal branch above builds its own instance
                aeti.process_annual()
            except Exception as e:
                raise
        else:
            raise ValueError("Not allowed for wapor annual or dekadal.")
    else:
        raise ValueError("Wrong value for algorithm not being AETI")
Example #26
#!/usr/bin/env python3

import requests
import click
import logging
import daiquiri

daiquiri.setup(level=logging.INFO)

_LOGGER = daiquiri.getLogger(__name__)

DOCKERHUB_ORGANIZATION = 'radanalyticsio'
DOCKERHUB_API_URL = 'https://hub.docker.com/'
THOTH_ANALYZER_NAME = 'fridex/thoth-package-extract'


def list_dockerhub_images(dockerhub_user: str, dockerhub_password: str,
                          organization: str) -> list:
    """List images on docker hub in the given organization."""
    response = requests.post(DOCKERHUB_API_URL + '/v2/users/login/',
                             json={
                                 'username': dockerhub_user,
                                 'password': dockerhub_password
                             })
    response.raise_for_status()
    token = response.json()['token']

    # TODO: pagination
    response = requests.get(DOCKERHUB_API_URL +
                            f'/v2/repositories/{organization}',
                            headers={'Authorization': f'JWT {token}'},
Example #27
def AGBP(ctx, year, temporal_resolution, input_component, nodatavalue):
    """
        YEAR 2009|2010|...|2018|2019\n
        TEMPORAL_RESOLUTION A (ANNUAL)\n
        INPUT_COMPONENT NPP\n
        NODATAVALUE -9999\n

        example annual: wapor -l L1 agbp -- 2016 A NPP (-9999)\n
    """

    Log(ctx.obj["verbose"]).initialize()
    logger = daiquiri.getLogger(ctx.command.name, subsystem="AGBP")
    logger.info(
        "================ {0} {1} calculation =================".format(
            ctx.command.name,
            temporal_resolution
        )
    )

    from algorithms.agbp import AGBP

    kwargs = {
        "year": year,
        "temporal_resolution": temporal_resolution,
        "component": input_component,
        "nodatavalue": nodatavalue
    }
    context = ctx.obj.copy()
    context.update(kwargs)

    # Use the AGBPName class to express the wapor naming convention on GEE
    src_image_coll = AGBPName(**context).src_collection()
    # L1_NPP_D
    logger.debug(
        "AGBP src_image_coll variable =====> {0}".format(src_image_coll)
    )
    dst_image_coll = AGBPName(**context).dst_collection()
    # L1_AGBP_A
    logger.debug(
        "AGBP dst_image_coll variable =====> {0}".format(dst_image_coll)
    )
    dst_asset_coll = AGBPName(**context).dst_assetcollection_id()
    # projects/fao_wapor/L1_AGBP_A
    logger.debug(
        "AGBP dst_asset_coll variable =====> {0}".format(dst_asset_coll)
    )
    dst_asset_image = AGBPName(**context).dst_image()
    # L1_AGBP_16
    logger.debug(
        "AGBP dst_asset_image variable =====> {0}".format(dst_asset_image)
    )
    dst_asset_id = AGBPName(**context).dst_asset_id()
    # projects/fao_wapor/L1_AGBP_A/L1_AGBP_16
    logger.debug(
        "AGBP dst_asset_id variable =====> {0}".format(dst_asset_id)
    )

    kwargs.update(
        {
            "src_coll": os.path.join(
                os.path.join(
                    context["EE_WORKSPACE_WAPOR"],
                    context["level"]
                ),
                src_image_coll
            ),
            "dst_coll": dst_image_coll,
            "dst_asset_coll": dst_asset_coll,
            "dst_asset": dst_asset_id,
            "to_asset": context["export"],
            "intermediate_outputs": context["outputs"]
        }
    )
    logger.debug(
        "Input kwargs dictionary for AGBP process is =====> \n{0}".format(
            json.dumps(kwargs)
        )
    )

    # create the instance of the agbp script class
    proc = AGBP(**kwargs)
    # run the process and return the task id
    result = proc.process_annual()

    if result["errors"]:
        raise click.ClickException(
            "Commad execution has produced:\n{0}".format(
                json.dumps(result)
            )
        )
    else:
        click.echo(
            json.dumps(result)
        )
Example #28
from utils import cloud_constants as cc
import boto3
import botocore
import os
from pathlib import Path
import daiquiri
import logging

daiquiri.setup(level=logging.INFO)
_logger = daiquiri.getLogger(__name__)

_aws_key_id = cc.AWS_S3_ACCESS_KEY_ID or os.environ.get('AWS_S3_ACCESS_KEY_ID')
_aws_secret_key = cc.AWS_S3_SECRET_ACCESS_KEY or os.environ.get(
    'AWS_S3_SECRET_ACCESS_KEY')
_aws_region = cc.AWS_S3_REGION or os.environ.get('AWS_S3_REGION', 'us-east-1')

session = boto3.session.Session(aws_access_key_id=_aws_key_id,
                                aws_secret_access_key=_aws_secret_key,
                                region_name=_aws_region)

S3_OBJ = session.resource(
    's3',
    config=botocore.client.Config(signature_version='s3v4'),
    use_ssl=True)


def s3_download_folder(
    s3_bucket_obj,
    bucket_dir_prefix='',
    download_path='./',
):
Example #29
def GBWP(ctx, year, temporal_resolution, season, input_component, area_code, nodatavalue):
    """
        YEAR 2009|2010|...|2018|2019\n
        TEMPORAL_RESOLUTION A (ANNUAL) S (SEASONAL)\n
        SEASON 1|2\n
        INPUT_COMPONENT AGBP\n
        AREA_CODE: NA|BKA|AWA|KOG|ODN|ZAN\n
        NODATAVALUE -9999\n

        example L1 annual: wapor -l L1 gbwp -- 2016 A -1 AGBP NA (-9999)\n
        example L2 seasonal: wapor -l L2 gbwp -- 2016 S 1 AGBP NA (-9999)\n
        example L3 seasonal: wapor -l L3 gbwp -- 2016 S 1 AGBP AWA (-9999)\n
    """

    Log(ctx.obj["verbose"]).initialize()
    logger = daiquiri.getLogger(ctx.command.name, subsystem="GBWP")
    logger.info(
        "================ {0} {1} calculation =================".format(
            ctx.command.name,
            temporal_resolution
        )
    )
    from algorithms.gbwp import GBWP

    kwargs = {
        "year": year,
        "temporal_resolution": temporal_resolution,
        "season": season,
        "component": input_component,
        "area_code": area_code,
        "nodatavalue": nodatavalue
    }
    context = ctx.obj.copy()
    context.update(kwargs)

    # Use the GBWPName class to express the wapor naming convention on GEE
    src_image_coll = GBWPName(**context).src_collection()
    # L1_AGBP_A | L2_AGBP_S | L3_AGBP_S
    logger.debug(
        "GBWP src_image_coll variable =====> {0}".format(src_image_coll)
    )
    dst_image_coll = GBWPName(**context).dst_collection()
    # L1_GBWP_A | L2_GBWP_S | L3_GBWP_S
    logger.debug(
        "GBWP dst_image_coll variable =====> {0}".format(dst_image_coll)
    )
    dst_asset_coll = GBWPName(**context).dst_assetcollection_id()
    # projects/fao-wapor/L1/L1_GBWP_A | projects/fao-wapor/L2/L2_GBWP_S
    # | projects/fao-wapor/L3/L3_GBWP_S
    logger.debug(
        "GBWP dst_asset_coll variable =====> {0}".format(dst_asset_coll)
    )
    dst_asset_image = GBWPName(**context).dst_image()
    # L1_GBWP_16 | L2_GBWP_16s1 | L3_GBWP_16s1_AWA
    logger.debug(
        "GBWP dst_asset_image variable =====> {0}".format(dst_asset_image)
    )
    dst_asset_id = GBWPName(**context).dst_asset_id()
    # projects/fao-wapor/L1/L1_GBWP_A/L1_GBWP_16 |
    # projects/fao-wapor/L2/L2_GBWP_S/L2_GBWP_16s1 |
    # projects/fao-wapor/L3/L3_GBWP_S/L3_GBWP_16s1_AWA
    logger.debug(
        "GBWP dst_asset_id variable =====> {0}".format(dst_asset_id)
    )

    kwargs.update(
        {
            "src_coll": os.path.join(
                os.path.join(
                    context["EE_WORKSPACE_WAPOR"],
                    context["level"]
                ),
                src_image_coll
            ),
            "dst_coll": dst_image_coll,
            "dst_asset_coll": dst_asset_coll,
            "dst_asset": dst_asset_id,
            "to_asset": context["export"],
            "intermediate_outputs": context["outputs"],
            "level": context["level"]
        }
    )
    logger.debug(
        "Input kwargs dictionary for GBWP process is =====> \n{0}".format(
            json.dumps(kwargs)
        )
    )

    # create the instance of the gbwp script class
    proc = GBWP(**kwargs)
    # run the process and return the task id
    if proc.config["season"] == '-1':
        result = proc.process_annual()
    else:
        result = proc.process_seasonal()

    if result["errors"]:
        raise click.ClickException(
            "Commad execution has produced:\n{0}".format(
                json.dumps(result)
            )
        )
    else:
        click.echo(
            json.dumps(result)
        )
Example #30
import asyncio
import dataclasses
import json

import daiquiri
import redis

from mergify_engine import check_api
from mergify_engine import context
from mergify_engine import exceptions
from mergify_engine import sub_utils
from mergify_engine import utils
from mergify_engine.actions.merge import helpers
from mergify_engine.clients import github

LOG = daiquiri.getLogger(__name__)


@dataclasses.dataclass
class Queue:
    redis: redis.Redis
    installation_id: int
    owner: str
    repo: str
    ref: str

    @property
    def log(self):
        return daiquiri.getLogger(__name__,
                                  gh_owner=self.owner,
                                  gh_repo=self.repo,
Example #31
:Synopsis:

:Author:
    servilla

:Created:
    3/30/18
"""
from http import HTTPStatus

import aiohttp
import daiquiri

from soh.config import Config

logger = daiquiri.getLogger('audit.py: ' + __name__)


async def is_down(host=None):
    url = 'http://' + host + ':8080/audit/docs/api'
    assert_is_down = True
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                r = resp.status
        assert_is_down = r != HTTPStatus.OK
        if assert_is_down:
            msg = f"{__file__}: Status code is {r}"
            logger.warning(msg)
    except Exception as e:
        logger.error(e)
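
A hypothetical synchronous driver for the coroutine above; "pasta.example.org" is an illustrative placeholder, not a host from the original code:

import asyncio

asyncio.run(is_down(host="pasta.example.org"))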
Example #32
"""

import logging
import daiquiri

import numpy as np
import pandas as pd
from dateutil import parser
from workalendar.europe import France
from sklearn.cluster import KMeans
import xgboost as xgb

from jitenshea import config

daiquiri.setup(logging.INFO)
logger = daiquiri.getLogger("stats")

# French Calendar
cal = France()

SEED = 2018
np.random.seed(SEED)

CLUSTER_ACT_PATH_CSV = 'jitenshea/data/cluster_activite.csv'


###################################
###         CLUSTER ACTIVITY
###################################

def preprocess_data_for_clustering(df):
Example #33
import glob
import itertools
import logging
import netrc
import operator
import os
import shutil
import subprocess
import sys
import tempfile
from urllib import parse

import daiquiri
import github

LOG = daiquiri.getLogger("git-pull-request")


def _run_shell_command(cmd, output=None, raise_on_error=True):
    if output is True:
        output = subprocess.PIPE

    LOG.debug("running %s", cmd)
    sub = subprocess.Popen(cmd, stdout=output, stderr=output)
    out = sub.communicate()
    if raise_on_error and sub.returncode:
        raise RuntimeError("%s returned %d" % (cmd, sub.returncode))

    if out[0] is not None:
        return out[0].strip().decode()
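
A hypothetical call to the helper above; with output=True the command's stdout is captured, stripped, and decoded (assumes a git repository in the current directory):

branch = _run_shell_command(
    ["git", "rev-parse", "--abbrev-ref", "HEAD"], output=True
)
LOG.info("current branch is %s", branch)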
Example #34
# -*- coding: utf-8 -*-
import sys
import ssl
import socket
import logging
from http.server import HTTPServer
from socketserver import ThreadingMixIn

import daiquiri
daiquiri.setup(level=logging.DEBUG)
logger = daiquiri.getLogger()

from pyproxy import settings

class ThreadingHTTPServer(ThreadingMixIn, HTTPServer):
    address_family = socket.AF_INET6
    daemon_threads = True

    def handle_error(self, request, client_address):
        cls, e = sys.exc_info()[:2]
        if cls in (socket.error, ssl.SSLError):
            pass
        else:
            return HTTPServer.handle_error(self, request, client_address)

class ProxyFactory:

    @staticmethod
    def create(handler):
        # handler.protocol_version = settings.PROTOCOL_VERSION
        http_server = ThreadingHTTPServer((settings.HOST, settings.PORT), handler)
Example #35
from pathlib import Path
import uuid
import json
import shutil
import daiquiri

# App-specific includes
import common.config as config
import common.rule_evaluation as rule_evaluation
import common.monitor as monitor
import common.helper as helper
import common.notification as notification
from common.constants import mercure_defs, mercure_names, mercure_actions, mercure_rule, mercure_config, mercure_options, mercure_folders, mercure_events
from routing.generate_taskfile import generate_taskfile_route, generate_taskfile_process, create_study_task, create_series_task_processing

logger = daiquiri.getLogger("route_series")


def route_series(series_UID):
    """Processes the series with the given series UID from the incoming folder."""
    lock_file = Path(config.mercure[mercure_folders.INCOMING] + '/' +
                     str(series_UID) + mercure_names.LOCK)

    if lock_file.exists():
        # Series is locked, so another instance might be working on it
        return

    # Create lock file in the incoming folder and prevent other instances from working on this series
    try:
        lock = helper.FileLock(lock_file)
    except:
Example #36
from gnocchi import utils


OPTS = [
    cfg.StrOpt('file_basepath',
               default='/var/lib/gnocchi',
               help='Path used to store gnocchi data files.'),
    cfg.IntOpt('file_subdir_len',
               default=2, min=0, max=32,
               help='if > 0, this creates a subdirectory for every N bytes '
                    'of the metric uuid')
]

ATTRGETTER_METHOD = operator.attrgetter("method")

LOG = daiquiri.getLogger(__name__)

# Python 2 compatibility
try:
    FileNotFoundError
except NameError:
    FileNotFoundError = None


class FileStorage(storage.StorageDriver):
    WRITE_FULL = True
    CFG_PREFIX = 'gnocchi-storage-config'
    CFG_SUBDIR_LEN = 'subdir_len'

    def __init__(self, conf):
        super(FileStorage, self).__init__(conf)
Example #37
"""
import pathlib
import shutil
import tempfile

from typing import Optional, List

import repobee_plug as plug
import daiquiri
import git

from repobee_sanitizer import _sanitize, _fileutils, _syntax, _format

PLUGIN_NAME = "sanitizer"

LOGGER = daiquiri.getLogger(__file__)


class EmptyCommitError(plug.PlugError):
    pass


def check_repo_state(repo_root) -> Optional[str]:
    try:
        repo = git.Repo(repo_root)
    except git.InvalidGitRepositoryError as exc:
        raise plug.PlugError(f"Not a git repository: '{repo_root}'") from exc

    message = ""
    help_message = "\n\nUse --force to ignore this warning and sanitize anyway"