Beispiel #1
0
 def __init__(self, session):
     """
     Base class constructor shared by all API components.

     :param session: HTTP session object shared across API components.
     """
     # component-local logger for this API
     self._logger = log.get_logger('blackfynn.api')
     # api session
     self.session = session
Beispiel #2
0
def test_log_level_based_on_env():
    """BLACKFYNN_LOG_LEVEL env var must control new loggers' level."""
    import blackfynn.log as log

    os.environ['BLACKFYNN_LOG_LEVEL'] = 'WARN'
    logger = log.get_logger('warn_log')
    assert logging.WARN == logger.getEffectiveLevel()
def test_log_level_based_on_env():
    from blackfynn import log

    # Setting the env var before logger creation should raise the level.
    os.environ["BLACKFYNN_LOG_LEVEL"] = "WARN"
    assert log.get_logger("warn_log").getEffectiveLevel() == logging.WARN
Beispiel #4
0
    def __init__(self, func, uri, *args, **kwargs):
        """
        Capture a deferred HTTP call: store the callable and its arguments
        so the request can be issued later.
        """
        self._logger = log.get_logger('blackfynn.base.BlackfynnRequest')

        self._func = func
        self._uri = uri
        self._args = args
        self._kwargs = kwargs
        # populated once the request has actually been executed
        self._response = None
Beispiel #5
0
    def __init__(self, ts, name, max_segment_size, aws_region):
        """
        Stream wrapper around a time series that publishes to AWS Kinesis.

        :param ts: the time-series package this stream reads from
        :param name: Kinesis stream name
        :param max_segment_size: maximum segment size (units not visible
            here -- TODO confirm)
        :param aws_region: AWS region hosting the Kinesis stream
        """
        self.name = name
        self.max_segment_size = max_segment_size

        self.conn = boto3.client('kinesis', region_name=aws_region)

        # reference time-series
        self.ts = ts
        # cache channels
        self._channels = ts.channels

        # flipped to True when the stream is registered (presumably by a
        # method elsewhere in this class -- confirm)
        self.registered = False

        self._logger = log.get_logger('blackfynn.streaming.TimeSeriesStream')
Beispiel #6
0
    def __init__(self,
                 profile=None,
                 api_token=None,
                 api_secret=None,
                 host=None,
                 streaming_host=None,
                 concepts_host=None,
                 env_override=True,
                 **overrides):
        """
        Connect to the Blackfynn platform.

        Args:
            profile: name of the settings profile to use (None = default).
            api_token: API token; overrides the profile value when given.
            api_secret: API secret; overrides the profile value when given.
            host: API host override.
            streaming_host: streaming API host override.
            concepts_host: concepts API host override.
            env_override: whether environment variables may override settings.
            **overrides: additional raw settings overrides.

        Raises:
            Exception: if no API token/secret can be resolved, or if
                authentication fails.
        """
        self._logger = log.get_logger("blackfynn.client.Blackfynn")

        # Explicit keyword arguments win over anything already in `overrides`.
        # Use `is not None` (identity) rather than `!= None` -- equality with
        # None is both unidiomatic and can misbehave with custom __eq__.
        overrides.update({
            k: v
            for k, v in {
                'api_token': api_token,
                'api_secret': api_secret,
                'api_host': host,
                'api_streaming_host': streaming_host,
                'api_concepts_host': concepts_host,
            }.items() if v is not None
        })
        self.settings = Settings(profile, overrides, env_override)

        if self.settings.api_token is None:
            raise Exception(
                'Error: No API token found. Cannot connect to Blackfynn.')
        if self.settings.api_secret is None:
            raise Exception(
                'Error: No API secret found. Cannot connect to Blackfynn.')

        # direct interface to REST API.
        self._api = ClientSession(self.settings)

        # authenticate; failures propagate to the caller unchanged (the old
        # `except Exception as e: raise e` wrapper added nothing).
        self._api.authenticate()

        self._api.register(CoreAPI, OrganizationsAPI, DatasetsAPI, FilesAPI,
                           DataAPI, PackagesAPI, TimeSeriesAPI, TabularAPI,
                           SecurityAPI, ComputeAPI, SearchAPI, IOAPI,
                           LedgerAPI, UserAPI, ModelsAPI, RecordsAPI,
                           ModelRelationshipsAPI,
                           ModelRelationshipInstancesAPI)

        # set the working organization context
        self._api._context = self._api.organizations.get(
            self._api._organization)
Beispiel #7
0
    def __init__(self, settings):
        """
        Create an HTTP client session from resolved settings.

        :param settings: resolved configuration carrying api_host,
            api_token, api_secret and jwt.
        """
        self._logger = log.get_logger('blackfynn.base.ClientSession')

        # connection/credential info pulled from settings
        self._host = settings.api_host
        self._api_token = settings.api_token
        self._api_secret = settings.api_secret
        self._jwt = settings.jwt

        # session state, filled in during authentication
        self._session = None
        self._token = None
        self._secret = None
        self._context = None
        self._organization = None

        self.profile = None
        self.settings = settings
Beispiel #8
0
import errno
import json
import os
import socket
import subprocess
import sys
from collections import OrderedDict
from time import sleep

import semver

from blackfynn.log import get_logger
from blackfynn.models import Collection, DataPackage, Dataset

logger = get_logger("blackfynn.agent")

# websocket-client is an optional dependency: importing it lazily lets the
# rest of the module work without it.
try:
    from websocket import create_connection
except ModuleNotFoundError:
    # logger.warn() is a deprecated alias; logger.warning() is the
    # supported spelling.
    logger.warning(
        "websocket-client is not installed - uploading with the Agent will not work"
    )

# oldest agent binary this client knows how to talk to
MINIMUM_AGENT_VERSION = semver.parse_version_info("0.2.2")
DEFAULT_LISTEN_PORT = 11235


class AgentError(Exception):
    """Base exception for Blackfynn Agent related failures."""

    pass
import boto3
import botocore
import requests
#from requests.exceptions import HTTPError, ConnectionError
from joblib import Parallel, delayed
# silence agent import warning: default to CRITICAL unless the caller has
# already configured a level
os.environ.setdefault('BLACKFYNN_LOG_LEVEL', 'CRITICAL')

from blackfynn import log as _bflog
# blackfynn.log sets logging.basicConfig which pollutes logs from
# other programs that are sane and do not use the root logger
# so we have to undo the damage done by basic config here
# we add the sparcur local handlers back in later
from sparcur.utils import log, silence_loggers
# Route blackfynn's loggers through sparcur's handler: strip their own
# handlers, then attach sparcur's first handler so log output is unified.
for __bflog in (_bflog.get_logger(), _bflog.get_logger("blackfynn.agent")):
    silence_loggers(__bflog)
    __bflog.addHandler(log.handlers[0])

from blackfynn import Blackfynn, Collection, DataPackage, Organization, File
from blackfynn import Dataset, BaseNode
from blackfynn import base as bfb
from blackfynn.api import agent
from blackfynn.api import transfers
from blackfynn.api.data import PackagesAPI
from pyontutils.utils import Async, deferred
from pyontutils.iterio import IterIO
from sparcur import monkey
from sparcur import exceptions as exc
from sparcur.utils import BlackfynnId, ApiWrapper, make_bf_cache_as_classes
Beispiel #10
0
import os
import sys
import threading
import time
import uuid
from concurrent.futures import ThreadPoolExecutor, as_completed

from blackfynn import log
from blackfynn.api.agent import agent_upload, validate_agent_installation
from blackfynn.api.base import APIBase
from blackfynn.models import Collection, DataPackage, Dataset, TimeSeries

# GLOBAL
# Module-level upload bookkeeping shared by this module (key/value schema
# not visible here -- TODO confirm).
UPLOADS = {}

logger = log.get_logger("blackfynn.api.transfers")


def check_files(files):
    """Raise if any of the given file paths does not exist on disk."""
    missing = next((f for f in files if not os.path.exists(f)), None)
    if missing is not None:
        raise Exception("File {} not found.".format(missing))


class IOAPI(APIBase):
    """
    Input/Output interface.
    """

    # component name; presumably the attribute key under which this API is
    # registered on the client session -- TODO confirm against APIBase
    name = "io"
Beispiel #11
0
  bf move [options] <item> [<destination>]

global options:
  -h --help                 Show help
  --dataset=<dataset>       Use specified dataset (instead of your current working dataset)
  --profile=<name>          Use specified profile (instead of default)
'''

from docopt import docopt
from blackfynn import Dataset
from blackfynn.models import Collection
import blackfynn.log as log

from cli_utils import get_item

logger = log.get_logger('blackfynn.cli.bf_move')


def main(bf):
    args = docopt(__doc__)

    item = get_item(args['<item>'], bf)

    if args['<destination>']:
        destination = get_item(args['<destination>'], bf)
    else:
        # item will be moved to to the top of its containing dataset
        destination = None

    if destination is None or isinstance(destination, Collection):
        try:
Beispiel #12
0
import sqlite3
import time
from datetime import datetime
from glob import glob
from itertools import groupby

from blackfynn import log
from blackfynn.extensions import numpy as np
from blackfynn.extensions import pandas as pd
from blackfynn.extensions import require_extension
from blackfynn.models import DataPackage, TimeSeriesChannel
from blackfynn.utils import usecs_since_epoch, usecs_to_datetime

from .cache_segment_pb2 import CacheSegment

logger = log.get_logger("blackfynn.cache")


def filter_id(some_id):
    """Return *some_id* with ``:`` and ``-`` each mapped to ``_``."""
    return some_id.translate(str.maketrans(":-", "__"))


def remove_old_pages(cache, mbdiff):
    # taste the rainbow!
    n = int(1.5 * ((mbdiff * 1024 * 1024) / 100) / cache.page_size) + 5

    # 2. Delete some pages from cache
    with cache.index_con as con:
        logger.debug("Cache - removing {} pages...".format(n))
        # find the oldest/least accessed pages
        q = """
Beispiel #13
0
def test_default_log_level():
    import blackfynn.log as log

    # a logger created without any configuration defaults to INFO
    fresh = log.get_logger('base_log')
    assert logging.INFO == fresh.getEffectiveLevel()
def test_default_log_level():
    from blackfynn import log

    # With no BLACKFYNN_LOG_LEVEL override the effective level is INFO.
    assert log.get_logger("base_log").getEffectiveLevel() == logging.INFO
Beispiel #15
0
import uuid
from concurrent.futures import ThreadPoolExecutor, as_completed

import boto3
from boto3.s3.transfer import S3Transfer
from botocore.client import Config

import blackfynn.log as log
from blackfynn.api.agent import validate_agent_installation, agent_upload
from blackfynn.api.base import APIBase
from blackfynn.models import Collection, DataPackage, Dataset, TimeSeries

# GLOBAL
# Module-level upload bookkeeping shared by this module (key/value schema
# not visible here -- TODO confirm).
UPLOADS = {}

logger = log.get_logger('blackfynn.api.transfers')


def check_files(files):
    """Verify every path in *files* exists; raise on the first missing one."""
    for path in files:
        if os.path.exists(path):
            continue
        raise Exception("File {} not found.".format(path))


class ProgressPercentage(object):
    # NOTE(review): looks like a per-file upload progress tracker (e.g. a
    # boto3 transfer callback) -- confirm against the rest of the class.
    def __init__(self, filename, upload_session_id):
        # path of the file whose upload is being tracked
        self._filename = filename
        # total size in bytes (float so later ratio math stays float)
        self._size = float(os.path.getsize(filename))
        # bytes observed so far
        self._seen_so_far = 0
        # presumably guards _seen_so_far across callback threads -- confirm
        self._lock = threading.Lock()
        # self.progress is defined elsewhere in this class (not visible here);
        # upload_session_id is unused in the visible code
        self._done = (self.progress == 1)
Beispiel #16
0
import json
import os
import socket
import subprocess
import sys
from collections import OrderedDict
from time import sleep

import semver
from future.utils import raise_from
from websocket import create_connection

from blackfynn.log import get_logger
from blackfynn.models import Collection, Dataset, DataPackage

logger = get_logger('blackfynn.agent')

MINIMUM_AGENT_VERSION = semver.parse_version_info("0.2.2")
DEFAULT_LISTEN_PORT = 11235


class AgentError(Exception):
    """Base exception for Blackfynn Agent related failures."""

    pass


def agent_cmd():
    if sys.platform == 'darwin':
        return '/usr/local/opt/blackfynn/bin/blackfynn_agent'

    elif sys.platform.startswith('linux'):
        return '/opt/blackfynn/bin/blackfynn_agent'
Beispiel #17
0
import sqlite3
import platform
import numpy as np
import pandas as pd
from glob import glob
import multiprocessing as mp
from itertools import groupby
from datetime import datetime

# blackfynn-specific
from blackfynn.utils import usecs_to_datetime, usecs_since_epoch
from blackfynn.models import DataPackage, TimeSeriesChannel
import blackfynn.log as log
from .cache_segment_pb2 import CacheSegment

logger = log.get_logger('blackfynn.cache')


def filter_id(some_id):
    """Return *some_id* with every ':' and '-' replaced by '_'."""
    sanitized = some_id
    for ch in (':', '-'):
        sanitized = sanitized.replace(ch, '_')
    return sanitized


def remove_old_pages(cache, mbdiff):
    # taste the rainbow!
    n = int(1.5 * ((mbdiff * 1024 * 1024) / 100) / cache.page_size) + 5

    # 2. Delete some pages from cache
    with cache.index_con as con:
        logger.debug("Cache - removing {} pages...".format(n))
        # find the oldest/least accessed pages
        q = """