Example #1
def test_log_info(capsys):
    logger = logging.get_logger(__name__, config)

    logger.info("test_log_info")

    captured = capsys.readouterr()

    record_object = json.loads(captured.out)

    assert record_object["message"] == "test_log_info"
    assert record_object["logger"] == "test_logging"
    assert record_object["level"] == "info"
    assert record_object["timestamp"] != ""
Example #2
    def __init__(self):
        configParser = ConfigParser()
        self.log = logging.get_logger(__name__, config=configParser)
        self.config = configParser.app_cfg
        try:
            self.rabbitClient = rabbit.RabbitClient()
        except pika.exceptions.AMQPConnectionError as error:
            self.log.error("Connection to RabbitMQ failed.")
            raise error
Example #3
    def __init__(self):
        config_parser = ConfigParser()
        self.config = config_parser.app_cfg
        self.log = logging.get_logger(__name__, config=config_parser)
        self.threads = []
        try:
            self.rabbit_client = RabbitClient()
        except AMQPConnectionError as error:
            self.log.error("Connection to RabbitMQ failed.")
            raise error
Example #4
def test_logger_name(caplog, capsys):
    logger = logging.get_logger("test_naam", config)

    logger.info("test")

    out, err = capsys.readouterr()

    record_object = json.loads(out)

    assert record_object["message"] == "test"
    assert record_object["logger"] == "test_naam"
    assert record_object["level"] == "info"
    assert record_object["timestamp"] != ""
Example #5
def test_log_critical(caplog, capsys):
    logger = logging.get_logger(__name__, config)

    logger.critical("test_log_critical")

    out, err = capsys.readouterr()

    record_object = json.loads(out)

    assert record_object["message"] == "test_log_critical"
    assert record_object["logger"] == "test_logging"
    assert record_object["level"] == "critical"
    assert record_object["timestamp"] != ""
Example #6
    def __init__(self):
        configParser = ConfigParser()
        self.log = logging.get_logger(__name__, config=configParser)
        self.config = configParser.app_cfg
        self.ftp_client = FTPClient(configParser)
        self.mh_client = MediahavenClient(configParser)
        self.event_parser = EventParser()

        try:
            self.rabbit_client = RabbitClient()
        except AMQPConnectionError as error:
            self.log.error("Connection to RabbitMQ failed.")
            raise error
Example #7
    def __init__(self):
        configParser = ConfigParser()
        self.log = logging.get_logger(__name__, config=configParser)
        self.rabbitConfig = configParser.app_cfg["rabbitmq"]

        self.credentials = pika.PlainCredentials(self.rabbitConfig["username"],
                                                 self.rabbitConfig["password"])

        self.connection = pika.BlockingConnection(
            pika.ConnectionParameters(
                host=self.rabbitConfig["host"],
                port=self.rabbitConfig["port"],
                credentials=self.credentials,
            ))

        self.channel = self.connection.channel()
        self.prefetch_count = int(self.rabbitConfig["prefetch_count"])
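A hedged sketch of how this channel and prefetch count could drive a consume loop; the listen method name and the "queue" config key are assumptions, while the pika calls themselves are standard:

    def listen(self, on_message):
        # Apply the configured prefetch, then block on the configured queue.
        self.channel.basic_qos(prefetch_count=self.prefetch_count)
        self.channel.basic_consume(
            queue=self.rabbitConfig["queue"],  # assumed config key
            on_message_callback=on_message,
        )
        self.channel.start_consuming()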
Example #8
    def __init__(self):
        configParser = ConfigParser()
        self.config = configParser.app_cfg
        self.log = logging.get_logger(__name__, config=configParser)
        self.mh_client = MediahavenClient(self.config)
        try:
            self.rabbit_client = RabbitClient()
        except AMQPConnectionError as error:
            self.log.error("Connection to RabbitMQ failed.")
            raise error
        self.pid_service = PIDService(self.config["pid-service"]["URL"])
        self.essence_linked_rk = self.config["rabbitmq"][
            "essence_linked_routing_key"]
        self.essence_unlinked_rk = self.config["rabbitmq"][
            "essence_unlinked_routing_key"]
        self.object_deleted_rk = self.config["rabbitmq"][
            "object_deleted_routing_key"]
        self.get_metadata_rk = self.config["rabbitmq"][
            "get_metadata_routing_key"]
Example #9
@author: tina
"""
from sys import exit
import json
import uuid
import configparser
from json import JSONDecodeError
import pika
from retry import retry
from viaa.observability import logging
from viaa.configuration import ConfigParser
from s3_io.create_url_to_filesystem_task import process
config = ConfigParser()
config_ = configparser.ConfigParser()
logger = logging.get_logger('s3io', config)
swarmurl = config.app_cfg['castor']['swarmurl']


@retry(pika.exceptions.AMQPConnectionError,
       delay=5,
       tries=-1,
       backoff=2,
       jitter=(1, 3))
def __main__():
    """
    Description:

         - Consumes from the RabbitMQ queue and creates a job;
         msg ack on async task create
Example #10
    def __init__(self, configParser: ConfigParser = None):
        self.log = logging.get_logger(__name__, config=configParser)
        self.cfg: dict = configParser.app_cfg
        self.token_info = None
        self.url = f'{self.cfg["mediahaven"]["host"]}/media/'
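A hedged sketch of a request helper built on this state; the token shape, the endpoint, and a module-level import of requests are all assumptions:

    def get_media(self, media_id: str) -> dict:
        # token_info is assumed to hold an OAuth access token.
        headers = {"Authorization": f"Bearer {self.token_info['access_token']}"}
        response = requests.get(self.url + media_id, headers=headers)
        response.raise_for_status()
        return response.json()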
Example #11
Created on Wed Jan  8 16:25:28 2020

@author: tina
"""
from viaa.observability import logging
from viaa.configuration import ConfigParser
from celery import Celery
from kombu import Exchange, Queue
from s3_io.s3io_tools import SwarmS3Client, SwarmIo
import s3_io.celeryconfig as celeryconfig
from s3_io.remote_curl import RemoteCurl

app = Celery('s3io')
app.config_from_object(celeryconfig)
config = ConfigParser()
logger = logging.get_logger('s3io')
app.conf.task_queues = (Queue('s3io',
                              Exchange('py-worker-s3io'),
                              routing_key='s3io'), )
app.conf.task_default_queue = 's3io'
app.conf.task_default_exchange_type = 'direct'
app.conf.task_default_routing_key = 's3io'

s3access_key = config.app_cfg['S3_TO_FTP']['s3access_key']
s3secret_key = config.app_cfg['S3_TO_FTP']['s3secret_key']
swarmurl = config.app_cfg['castor']['swarmurl']


@app.task(max_retries=5, bind=True)
def swarm_to_ftp(self, **body):
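The task body is cut off above. Dispatching such a task typically goes through a Celery signature or apply_async, as the docstring in Example #14 suggests; a sketch with an illustrative payload:

msg = {"service_name": "s3_to_filesystem"}  # illustrative body
swarm_to_ftp.apply_async(kwargs=msg, queue='s3io')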
Example #12
@author: tina
"""
import sys
import threading
import atexit
from s3_io.event_consumer import __main__ as Consume
from s3_io.s3io_api import __main__ as Api
from s3_io.s3io_tasks import app
from viaa.observability import logging
from viaa.configuration import ConfigParser
from celery.signals import setup_logging, task_postrun, task_prerun
from celery.result import AsyncResult


config = ConfigParser()
logger = logging.get_logger('s3io.scalable_worker', config)


@task_postrun.connect
def log_task_complete(sender, task_id, task, args, **kwargs):
    """Runs on task complete """
    # add_rabbithandler()
    result = AsyncResult(task_id).result
    if result is None:
        result = 'NO_RESULT_FOUND'


@task_prerun.connect
def log_task_started(sender, task_id, task, args, **kwargs):
    """Runs on task start."""
    try:
Example #13
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Apr  4 11:00:16 2020

@author: tina
"""

import paramiko
from viaa.observability import logging
from viaa.configuration import ConfigParser

config = ConfigParser()
logger = logging.get_logger('s3_auditlog_parser', config)


def remote_get(url, dest_path):
    """Description:

         - NOT USED at the moment

         - Download url to dest_path, using paramiko and curl

       Arguments:

            - dest_path: string

            - url : string

    """
    remote_client = paramiko.SSHClient()
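remote_get is truncated above; a plausible continuation, given the "paramiko and curl" description in its docstring (the host and username config keys are assumptions):

    remote_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    remote_client.connect(
        config.app_cfg['remote']['host'],           # assumed config key
        username=config.app_cfg['remote']['user'],  # assumed config key
    )
    # Run curl on the remote host to fetch url into dest_path.
    _stdin, stdout, _stderr = remote_client.exec_command(
        f"curl -s -o '{dest_path}' '{url}'"
    )
    logger.info(f"curl exit status: {stdout.channel.recv_exit_status()}")
    remote_client.close()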
Example #14
    - Uses celery signature:

        s3_io.s3io_tasks.swarm_to_remote.s(body=msg)

0k2699098k-left.mp4
@author: tina
"""

import uuid
from viaa.observability import logging
from viaa.configuration import ConfigParser
from s3_io.s3io_tasks import swarm_to_remote

config = ConfigParser()
logger = logging.get_logger('s3io.task_creator')
extra = {'app_name': 's3io'}

rnd = str(uuid.uuid4().hex)
debug_msg = {
    "service_type": "celery",
    "service_name": "s3_to_filesystem",
    "service_version": "0.1",
    "x-request-id": rnd,
    "source": {
        "domain": {
            "name": "s3-qas.viaa.be"
        },
        "bucket": {
            "name": "mam-highresvideo"
        },
Example #15
    def __init__(self, configParser: ConfigParser = None):
        self.log = logging.get_logger(__name__, config=configParser)
        self.cfg: dict = configParser.app_cfg
        self.host = self.__set_host()
        self.conn = self.__connect()
Example #16
import functools
import time

from viaa.observability import logging

log = logging.get_logger(__name__)
log.setLevel("DEBUG")


class RetryException(Exception):
    """ Exception raised when an action needs to be retried
    in combination with _retry decorator"""

    pass


DELAY = 1
BACKOFF = 2
NUMBER_OF_TRIES = 5


def retry(exceptions):
    def decorator_retry(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            delay = DELAY
            tries = NUMBER_OF_TRIES
            while tries:
                tries -= 1
                try:
                    return func(self, *args, **kwargs)
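The loop is cut off mid-try; given the DELAY, BACKOFF, and NUMBER_OF_TRIES constants above, the retry branch plausibly continues along these lines (the log message is an assumption):

                except exceptions as error:
                    if not tries:
                        # Attempts exhausted: propagate the last error.
                        raise
                    log.debug(f"Retrying in {delay}s after: {error}")
                    time.sleep(delay)
                    delay *= BACKOFF

        return wrapper

    return decorator_retry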
Example #17
@author: tina
"""

import sys
import threading
import atexit
from s3_io.event_consumer import __main__ as Consume
from s3_io.s3io_api import __main__ as Api
from s3_io.s3io_tasks import app
from viaa.observability import logging
from viaa.configuration import ConfigParser
from celery.signals import setup_logging, task_postrun, task_prerun
from celery.result import AsyncResult

config = ConfigParser()
logger = logging.get_logger('s3io.worker', config)


@task_postrun.connect
def log_task_complete(sender, task_id, task, args, **kwargs):
    """Runs on task complete """
    # add_rabbithandler()
    result = AsyncResult(task_id).result
    if result is None:
        result = 'NO_RESULT_FOUND'


@task_prerun.connect
def log_task_started(sender, task_id, task, args, **kwargs):
    """Runs on task start."""
    try:
Example #18
r=RemoteCurl(url="http://10.50.152.194:80/tests3vents/0k2699098k-left.mp4", dest_path='/mnt/temptina/tmp/test 123456.x
```

@author: tina
"""
import os
import time
import threading
from functools import update_wrapper
import paramiko
import requests
from viaa.observability import logging
from requests.exceptions import HTTPError
from viaa.configuration import ConfigParser
config = ConfigParser()
logger = logging.get_logger('s3io.remote_curl')


def decorator(func_n):
    "Make function d a decorator: d wraps a function fn."

    def _d(f_n):
        return update_wrapper(func_n(f_n), f_n)

    update_wrapper(_d, func_n)
    return _d


@decorator
def timeit(func_name):
    """time a function, used as decorator"""
Example #19
from fastapi import APIRouter, BackgroundTasks, Depends
from viaa.configuration import ConfigParser
from viaa.observability import logging

from app.core.event_handler import handle_event
from app.core.events_parser import parse_premis_events
from app.models.premis_events import PremisEvents
from app.models.xml_body import XmlBody

router = APIRouter()

config = ConfigParser()
log = logging.get_logger(__name__, config=config)


@router.post("/", status_code=202)
async def handle_events(
    background_tasks: BackgroundTasks,
    premis_events: PremisEvents = Depends(
        XmlBody(PremisEvents, parse_premis_events)),
):
    """
    Returns OK if the XML parsing didn't crash.
    """
    events = premis_events.events

    archived_events = [
        event for event in events if event.is_valid and event.has_valid_outcome
    ]

    log.info(
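The handler is truncated mid-call; a plausible continuation that finishes the log line and schedules the imported handle_event for each archived event (the message text and return value are assumptions):

        f"Handling {len(archived_events)} archived event(s)"
    )
    for event in archived_events:
        background_tasks.add_task(handle_event, event)
    return "OK"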
Example #20
@author: tina
"""

import sys
import threading
import atexit
from s3_io.event_consumer import __main__ as Consume
from s3_io.s3io_api import __main__ as Api
from s3_io.s3io_tasks import app
from viaa.observability import logging
from viaa.configuration import ConfigParser
from celery.signals import setup_logging, task_postrun, task_prerun
from celery.result import AsyncResult

config = ConfigParser()
logger = logging.get_logger('s3io.input', config)


@task_postrun.connect
def log_task_complete(sender, task_id, task, args, **kwargs):
    """Runs on task complete """
    # add_rabbithandler()
    result = AsyncResult(task_id).result
    if result is None:
        result = 'NO_RESULT_FOUND'


@task_prerun.connect
def log_task_started(sender, task_id, task, args, **kwargs):
    """Runs on task start."""
    try: