def __init__(self, session):
    """
    Base class to be used by all API components.

    Stores the shared API session and a logger scoped to the
    ``pennsieve.api`` namespace.
    """
    self._logger = log.get_logger("pennsieve.api")
    # api session shared by all API components
    self.session = session
def test_log_level_based_on_env():
    from pennsieve import log

    # Setting the env var should change the logging level of newly
    # created loggers.  Save and restore any pre-existing value so this
    # test does not leak state into other tests (the default-level test
    # expects PENNSIEVE_LOG_LEVEL to be unset).
    previous = os.environ.get("PENNSIEVE_LOG_LEVEL")
    os.environ["PENNSIEVE_LOG_LEVEL"] = "WARN"
    try:
        warn_log = log.get_logger("warn_log")
        assert warn_log.getEffectiveLevel() == logging.WARN
    finally:
        if previous is None:
            del os.environ["PENNSIEVE_LOG_LEVEL"]
        else:
            os.environ["PENNSIEVE_LOG_LEVEL"] = previous
def __init__(self, func, uri, *args, **kwargs):
    """
    Capture a deferred API request: the callable, its target URI and
    the arguments to invoke it with are stored for later execution.
    """
    self._logger = log.get_logger("pennsieve.base.PennsieveRequest")
    # no response until the request is actually executed
    self._response = None
    self._func = func
    self._uri = uri
    self._args = args
    self._kwargs = kwargs
def __init__(self, settings):
    """
    Initialize a client session from a settings object.

    Connection parameters are copied out of ``settings``; all
    session/authentication state starts out empty until a login occurs.
    """
    self.settings = settings
    # connection parameters pulled from settings
    self._host = settings.api_host
    self._api_token = settings.api_token
    self._api_secret = settings.api_secret
    self._headers = settings.headers
    self._model_service_host = settings.model_service_host
    self._logger = log.get_logger("pennsieve.base.ClientSession")
    # session / auth state, populated lazily after authentication
    self._session = None
    self._token = None
    self._secret = None
    self._context = None
    self._organization = None
    self.profile = None
import os
import sys
import threading
import time
import uuid
from concurrent.futures import ThreadPoolExecutor, as_completed

from pennsieve import log
from pennsieve.api.agent import agent_upload, validate_agent_installation
from pennsieve.api.base import APIBase
from pennsieve.models import Collection, DataPackage, Dataset, TimeSeries

# GLOBAL
UPLOADS = {}

logger = log.get_logger("pennsieve.api.transfers")


def check_files(files):
    """Validate that every path in ``files`` exists on disk.

    Raises:
        FileNotFoundError: for the first path that does not exist.
            FileNotFoundError subclasses Exception, so existing callers
            that caught the previous generic Exception still work.
    """
    for f in files:
        if not os.path.exists(f):
            # Raise the specific built-in instead of a bare Exception so
            # callers can handle missing files distinctly.
            raise FileNotFoundError("File {} not found.".format(f))


class IOAPI(APIBase):
    """
    Input/Output interface.
    """

    name = "io"
import os

# Must run before importing pennsieve: the agent import emits a warning
# unless a log level is already configured in the environment.
if 'PENNSIEVE_LOG_LEVEL' not in os.environ:
    # silence agent import warning
    os.environ['PENNSIEVE_LOG_LEVEL'] = 'CRITICAL'

from pennsieve import log as _pnlog
# blackfynn.log sets logging.basicConfig which pollutes logs from
# other programs that are sane and do not use the root logger
# so we have to undo the damage done by basic config here
# we add the sparcur local handlers back in later
from sparcur.utils import log, silence_loggers
# Detach the basicConfig-installed handlers from the pennsieve loggers
# and route their records through sparcur's first configured handler.
for __pnlog in (_pnlog.get_logger(), _pnlog.get_logger("pennsieve.agent")):
    silence_loggers(__pnlog)
    __pnlog.addHandler(log.handlers[0])

from pennsieve import Pennsieve, DataPackage, BaseNode
from pennsieve import Organization, Dataset, Collection, File
from pennsieve import base as pnb
from pennsieve.api import agent
from pennsieve.api.data import PackagesAPI
from sparcur import monkey
from sparcur.utils import ApiWrapper, PennsieveId, make_bf_cache_as_classes


def id_to_type(id):
    # Map a Pennsieve id string to its model class based on the id prefix.
    # NOTE(review): function is truncated in this view; only the first
    # prefix check is visible.
    #if isinstance(id, BlackfynnId):  # FIXME this is a bad place to do this (sigh)
        #return {'package': DataPackage,
                #'collection': Collection,
                #'dataset': Dataset,
                #'organization': Organization,}[id.type]
    if id.startswith('N:package:'):
import sqlite3
import time
from datetime import datetime
from glob import glob
from itertools import groupby

from pennsieve import log
from pennsieve.extensions import numpy as np
from pennsieve.extensions import pandas as pdr
from pennsieve.extensions import require_extension
from pennsieve.models import DataPackage, TimeSeriesChannel
from pennsieve.utils import usecs_since_epoch, usecs_to_datetime

from .cache_segment_pb2 import CacheSegment

logger = log.get_logger("pennsieve.cache")


def filter_id(some_id):
    # Sanitize an id for use as an identifier by replacing ":" and "-"
    # with "_".
    return some_id.replace(":", "_").replace("-", "_")


def remove_old_pages(cache, mbdiff):
    # Evict roughly `mbdiff` megabytes worth of pages from the cache index.
    # taste the rainbow!
    # NOTE(review): heuristic page count — scales with mbdiff and is padded
    # by 50% plus a small constant; the /100 factor's meaning is not clear
    # from this view — confirm against cache page layout.
    n = int(1.5 * ((mbdiff * 1024 * 1024) / 100) / cache.page_size) + 5
    # 2. Delete some pages from cache
    with cache.index_con as con:
        logger.debug("Cache - removing {} pages...".format(n))
        # find the oldest/least accessed pages
        q = """
def test_default_log_level():
    from pennsieve import log

    # With no PENNSIEVE_LOG_LEVEL override, new loggers come back at the
    # library's default level, INFO.
    fresh_logger = log.get_logger("base_log")
    assert fresh_logger.getEffectiveLevel() == logging.INFO