Example #1
from nimbus import config
from nimbus.worker.crypto import WorkerSecurityManager
from nimbus.worker.worker import Worker

# Assumed to match the storage worker module in Example #14, which defines
# IDENTITY from the same config section.
IDENTITY = config.get('storage', 'identity')


def run():
    response_url = 'tcp://{}:{}'.format(
        config.get('storage-requests', 'worker_response_hostname'),
        config.get('storage-requests', 'worker_response_port'))
    control_url = 'tcp://{}:{}'.format(
        config.get('storage-requests', 'worker_control_hostname'),
        config.get('storage-requests', 'worker_control_port'))

    worker = Worker(connect_response=response_url,
                    connect_control=control_url,
                    identity=IDENTITY,
                    security_manager=WorkerSecurityManager(
                        connection_secret_key=config.get(
                            'security', 'connection_secret_key'),
                        connection_broker_public_key=config.get(
                            'security', 'connection_broker_public_key'),
                        message_secret_key=config.get('security',
                                                      'message_secret_key'),
                        message_public_keys=config.get('security',
                                                       'message_public_keys'),
                    ))

    worker.run()
Example #2
import msgpack
import zmq
from requests import codes

from nimbus import config
from nimbus.helpers import extract_content_from_message, decode
from nimbus.log import get_logger
from nimbus.statemanager import ConnectionStateManager
from nimbus.worker.context import ctx_request
from nimbus.worker.errors import RequestError
from nimbus.worker.request import Request

logger = get_logger(__name__)

SECONDS_BEFORE_CONTACT_CHECK = int(
    config.get('control', 'seconds_before_contact_check'))
SECONDS_BEFORE_DISCONNECT = int(
    config.get('control', 'seconds_before_disconnect'))


class BrokerStateManager:
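    """
    Thin wrapper around ConnectionStateManager that tracks contact with the
    single broker connection this worker talks to.
    """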
    def __init__(self, seconds_before_contact_check,
                 seconds_before_disconnect):
        self._manager = ConnectionStateManager(seconds_before_contact_check,
                                               seconds_before_disconnect)
        self._broker = b'broker'

    def contact_from_broker(self):
        self._manager.contact_from(self._broker)

    def ping_broker(self):
Example #3
from app import views
from nimbus import config
from nimbus.worker.worker import Worker

# app.views is imported for its side effects; the assignment below keeps the
# otherwise-unused import from being stripped by automatic import cleanup.
views._prevent_pycharm_from_removing_import = True

response_url = 'tcp://{}:{}'.format(config.get('proxy-requests', 'worker_response_hostname'),
                                    config.get('proxy-requests', 'worker_response_port'))
control_url = 'tcp://{}:{}'.format(config.get('proxy-requests', 'worker_control_hostname'),
                                   config.get('proxy-requests', 'worker_control_port'))

worker = Worker(connect_response=response_url,
                connect_control=control_url)
Example #4
from pprint import pprint

from nimbus import config
from nimbus.client import Client

CONNECT_URL = 'tcp://{}:{}'.format(
    config.get('proxy-requests', 'client_hostname'),
    config.get('proxy-requests', 'client_port'))

client = Client(connect=CONNECT_URL)
response = client.list('file').response
pprint(response)

source = response[0]['source']

response = client.list('file', parameters={'source': source}).response
pprint(response)

response = client.list('file', parameters={
    'source': 'non-existing-id'
}).response
pprint(response)
Example #5
from nimbus import config
from nimbus.publisher.publisher import Publisher

zmq_worker_url = 'tcp://{}:{}'.format(
    config.get('publisher', 'worker_hostname'),
    config.get('publisher', 'worker_port'))
zmq_client_url = 'tcp://{}:{}'.format(
    config.get('publisher', 'client_hostname'),
    config.get('publisher', 'client_port'))

redis_host = config.get('redis', 'host')
redis_port = config.get('redis', 'port')
redis_db = config.get('redis', 'db')

publisher = Publisher(worker_bind=zmq_worker_url,
                      client_bind=zmq_client_url,
                      redis_host=redis_host,
                      redis_port=redis_port,
                      redis_db=redis_db)
publisher.run()
Example #6
from nimbus import config
from nimbus.broker import Broker, InvalidEndpoint, BrokerSecurityManager
from nimbus.helpers.message import decode


def validate_endpoints(worker_id, endpoints):
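    """Ensure every endpoint is namespaced under its worker id ('<worker_id>/...')."""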
    for endpoint in endpoints:
        if endpoint[:len(worker_id) + 1] != decode(worker_id) + '/':
            raise InvalidEndpoint
    return endpoints


broker = Broker(
    worker_response_bind='tcp://{}:{}'.format(
        '*', config.get('storage-requests', 'worker_response_port')),
    worker_control_bind='tcp://{}:{}'.format(
        '*', config.get('storage-requests', 'worker_control_port')),
    client_bind='tcp://{}:{}'.format(
        '*', config.get('storage-requests', 'client_port')),
    validate_endpoints=validate_endpoints,
    security_manager=BrokerSecurityManager(
        connection_secret_key=config.get('security', 'connection_secret_key'),
        connection_public_keys=config.get('security',
                                          'connection_public_keys'),
        message_secret_key=config.get('security', 'message_secret_key'),
        message_public_keys=config.get('security', 'message_public_keys')))

broker.run()
Example #7
from nimbus import config
from nimbus.broker import Broker

zmq_worker_response_url = 'tcp://{}:{}'.format(
    config.get('requests', 'worker_response_hostname'),
    config.get('requests', 'worker_response_port'))
zmq_worker_control_url = 'tcp://{}:{}'.format(
    config.get('requests', 'worker_control_hostname'),
    config.get('requests', 'worker_control_port'))
zmq_client_url = 'tcp://{}:{}'.format(
    config.get('requests', 'client_hostname'),
    config.get('requests', 'client_port'))

redis_host = config.get('redis', 'host')
redis_port = config.get('redis', 'port')
redis_db = config.get('redis', 'db')

broker = Broker(worker_response_bind=zmq_worker_response_url,
                worker_control_bind=zmq_worker_control_url,
                client_bind=zmq_client_url,
                redis_host=redis_host,
                redis_port=redis_port,
                redis_db=redis_db)
broker.run()
Example #8
import uuid
from collections import deque, abc, namedtuple

import msgpack
import zmq
from redis import StrictRedis

from nimbus import config
from nimbus.helpers import decode, extract_source_from_message, extract_content_from_message
from nimbus.log import get_logger
from nimbus.statemanager import ConnectionStateManager

logger = get_logger(__name__)

SECONDS_BEFORE_CONTACT_CHECK = int(
    config.get('control', 'seconds_before_contact_check'))
SECONDS_BEFORE_UNREGISTER = int(
    config.get('control', 'seconds_before_unregister'))


class EmptyQueue(LookupError):
    pass


class WorkerIsAlreadyRegistered(AttributeError):
    pass


class ClientRequest:
    """
    Representation of a client request.
import os
import time

import requests

from app.models.hub import Hub
from nimbus import config
from nimbus.client import Client

filename = 'learningreact1.pdf'
with open(os.path.join('tmp', filename), 'rb') as f:
    file_content = f.read()

CONNECT_URL = 'tcp://{}:{}'.format(config.get('proxy-requests', 'client_hostname'),
                                   config.get('proxy-requests', 'client_port'))


def get_client():
    return Client(connect=CONNECT_URL, timeout=120)


hub = Hub.objects.first()

start = time.perf_counter()

print('Source: {}'.format(hub.cumulus_id))

print('Read file')
response = get_client().get(
    'file',
    parameters={'source': hub.cumulus_id,
Example #10
import os
import uuid

from Crypto.Hash import SHA3_256

from app.models.error import RemoteStorageError, HashError
from nimbus import config
from nimbus.client import Client
from nimbus.errors import ConnectionTimeoutError

LOCAL_CACHE = 'cache'
STORAGE_TIMEOUT = int(config.get('control', 'seconds_before_storage_timeout'))
CONNECT_URL = 'tcp://{}:{}'.format(
    config.get('storage-requests', 'client_hostname'),
    config.get('storage-requests', 'client_port'))

os.makedirs(LOCAL_CACHE, exist_ok=True)


def get_client():
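    """Return a Client for the storage-requests endpoint with the configured timeout."""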
    return Client(connect=CONNECT_URL, timeout=STORAGE_TIMEOUT)


class CachedObject:
    def __init__(self, expected_hash=None, file_path=None):
        self._expected_hash = expected_hash  # to detect if contents are the same as previously uploaded
        self._initial_hash = None  # to detect if new contents are the same as initial contents
        self._is_changed = False  # to detect if there is new content

        if file_path is not None:
            self._file_path = file_path
Example #11
import msgpack
import zmq

from nimbus import config
from nimbus.helpers import get_data_from_zmq

ZMQ_TIMEOUT_SEC = 10


class PublishContext:
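    """
    REQ socket wrapper that sends msgpack-encoded {'topic', 'data'} messages
    to the publisher and waits for its reply.
    """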
    def __init__(self,
                 connect,
                 timeout=None):
        self._context = zmq.Context.instance()
        self._socket = self._context.socket(zmq.REQ)
        self._socket.connect(connect)
        if not timeout:
            self._timeout = ZMQ_TIMEOUT_SEC * 1000
        else:
            self._timeout = timeout * 1000

    def publish(self, topic, data):
        self._socket.send(msgpack.packb({'topic': topic,
                                         'data': data}))
        zmq_response = get_data_from_zmq(self._socket, self._timeout)
        return zmq_response


zmq_publisher = 'tcp://{}:{}'.format(config.get('publisher', 'worker_hostname'),
                                     config.get('publisher', 'worker_port'))
ctx_publisher = PublishContext(connect=zmq_publisher)
Example #12
from nimbus import config
from nimbus.broker import Broker

zmq_worker_response_url = 'tcp://{}:{}'.format(
    '*', config.get('proxy-requests', 'worker_response_port'))
zmq_worker_control_url = 'tcp://{}:{}'.format(
    '*', config.get('proxy-requests', 'worker_control_port'))
zmq_client_url = 'tcp://{}:{}'.format(
    '*', config.get('proxy-requests', 'client_port'))

broker = Broker(worker_response_bind=zmq_worker_response_url,
                worker_control_bind=zmq_worker_control_url,
                client_bind=zmq_client_url)

broker.run()
Example #13

import logging
import sys
from logging.handlers import SMTPHandler

from nimbus import config

# Assumed: the root logger that the handlers below are attached to.
root_logger = logging.getLogger()

log_format = '%(asctime)s - %(levelname)-8s - %(name)s - %(message)s'
mail_log_format = '%(asctime)s\n%(levelname)-8s\n%(name)s\n%(message)s'

log_level_mapper = {
    'error': logging.ERROR,
    'warning': logging.WARNING,
    'info': logging.INFO,
    'debug': logging.DEBUG,
}

# Logging to stdout
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setFormatter(logging.Formatter(log_format))
stream_handler.setLevel(log_level_mapper[config.get('logging', 'stdout_level')])
root_logger.addHandler(stream_handler)

# Logging to file
file_handler = logging.FileHandler(config.get('logging', 'file_path'))
file_handler.setFormatter(logging.Formatter(log_format))
file_handler.setLevel(log_level_mapper[config.get('logging', 'file_level')])
root_logger.addHandler(file_handler)

# Logging to mail
if config.has_option('logging', 'mail_level'):
    mail_handler = SMTPHandler(mailhost='localhost',
                               fromaddr=config.get('logging', 'mail_from'),
                               toaddrs=config.get('logging', 'mail_to'),
                               subject='Error in {}'.format(config.get('general', 'name')))
    mail_handler.setFormatter(logging.Formatter(mail_log_format))
Example #14
import os
import shutil

import requests
from Crypto.Hash import SHA3_256
from nimbus import config
from nimbus.worker.context import ctx_request
from nimbus.worker.crypto import WorkerSecurityManager
from nimbus.worker.worker import Worker

STORAGE_DIR = 'cache/storage'
MINIMUM_FREE_MB = 128
MINIMUM_FREE_RATIO = 0.01
IDENTITY = config.get('storage', 'identity')

os.makedirs(STORAGE_DIR, exist_ok=True)


def read_file_with_chunks(file_path, chunk_size):
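    """Yield the contents of file_path in chunks of chunk_size bytes."""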
    with open(file_path, 'rb') as f:
        while True:
            chunk = f.read(chunk_size)
            if chunk:
                yield chunk
            else:
                break


def get_hash(file_path):
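    """Compute the SHA3-256 hash of the file at file_path."""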
    chunk_size = 1024 * 1024
    hasher = SHA3_256.new()
Example #15
from app.helpers import one
from app.models.file import File
from nimbus import config
from nimbus.log import get_logger

logger = get_logger(__name__)

VERIFY_FRACTION = float(config.get('verify', 'fraction'))


def v_all(func):
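    """Run the check method named `func` on every File and log the files that fail it."""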
    files_to_reconstruct = list()

    for file in File.objects:
        if not getattr(file, func)():
            files_to_reconstruct.append(file.uuid)
            logger.debug('{} check failed: {}: {}/{}/{}'.format(
                func, file.uuid, file.source.cumulus_id, file.collection,
                file.filename))

    logger.info('Files to reconstruct: {}'.format(len(files_to_reconstruct)))

    # for file_uuid in files_to_reconstruct:
    #     file = one(File.objects(uuid=file_uuid))
    #     file.reconstruct()


def v_random(func):
    pipeline = [{
        '$sample': {
            'size': int(len(File.objects) * VERIFY_FRACTION)