from threading import Lock

# Module-level lock shared by all post_fork calls; worker_lock is not
# defined in the snippet itself, so a threading.Lock is assumed here.
worker_lock = Lock()


def post_fork(server, worker):
    """Gunicorn hook: runs in each worker process immediately after fork."""
    server.log.info("Worker spawned (pid: %s)", worker.pid)
    # Imports are deferred until after the fork so each worker builds its
    # own Cassandra session and SQLAlchemy engine instead of inheriting
    # sockets from the master process.
    from util.cass import _init
    from preload_database.database import create_engine_from_url, create_scoped_session
    from ooi_data.postgres.model import MetadataBase
    with worker_lock:
        worker.log.debug('Connecting worker to cassandra')
        _init()
        worker.log.debug('Connected worker to cassandra')
        engine = create_engine_from_url(None)
        Session = create_scoped_session(engine)
        # Give every mapped subclass of MetadataBase a Flask-SQLAlchemy-style
        # `.query` attribute bound to this worker's scoped session.
        MetadataBase.query = Session.query_property()
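Once post_fork has bound the scoped session, mapped model classes can be
queried through the query property it installs. A minimal sketch, assuming
the Parameter model from ooi_data; the lookup function and its argument are
illustrative, not from the source:

from ooi_data.postgres.model import Parameter

def lookup_parameter(parameter_id):
    # MetadataBase.query = Session.query_property() makes `.query`
    # available on every mapped subclass of MetadataBase.
    return Parameter.query.get(parameter_id)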
Example #3
import logging
import os

import pandas as pd

from ooi_data.postgres.model import Parameter, MetadataBase
from preload_database.database import create_engine_from_url, create_scoped_session
from util.asset_management import AssetEvents
from util.common import StreamKey, TimeRange, StreamEngineException, InvalidParameterException, read_size_config
from util.csvresponse import CsvGenerator
from util.jsonresponse import JsonResponse
from util.netcdf_generator import NetcdfGenerator
from util.netcdf_utils import rename_glider_lat_lon
from util.stream_dataset import StreamDataset
from util.stream_request import StreamRequest, SIZE_ESTIMATES
from util.calc import execute_stream_request, validate

TEST_DIR = os.path.dirname(__file__)
DATA_DIR = os.path.join(TEST_DIR, 'data')

engine = create_engine_from_url(None)
session = create_scoped_session(engine)
MetadataBase.query = session.query_property()

logging.basicConfig()
log = logging.getLogger()
log.setLevel(logging.DEBUG)
metadata = pd.read_csv(os.path.join(DATA_DIR, 'stream_metadata.csv'))


def get_available_time_range(sk):
    # Select the metadata rows matching every field of the stream key.
    rows = metadata[(metadata.subsite == sk.subsite) &
                    (metadata.node == sk.node) &
                    (metadata.sensor == sk.sensor) &
                    (metadata.method == sk.method) &
                    (metadata.stream == sk.stream.name)]
    for index, row in rows.iterrows():
        # The loop body is truncated in the source; a TimeRange built from
        # assumed `first`/`last` CSV columns is a plausible completion.
        return TimeRange(row['first'], row['last'])
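A usage sketch for the helper above; StreamKey's positional signature
(subsite, node, sensor, method, stream) and the TimeRange attribute names
are assumptions, and the reference designator is illustrative:

sk = StreamKey('RS03AXBS', 'LJ03A', '05-ADCPSE301', 'streamed', 'adcp_velocity_beam')
time_range = get_available_time_range(sk)
print(time_range.start, time_range.stop)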
Example #4
import logging
import os
import shutil
import tempfile
import unittest

import numpy as np
import xarray as xr
from ooi_data.postgres.model import MetadataBase

from preload_database.database import create_engine_from_url, create_scoped_session
from util.aggregation import aggregate_netcdf_group

logging.basicConfig()
log = logging.getLogger()
log.setLevel(logging.DEBUG)

engine = create_engine_from_url(None)
session = create_scoped_session(engine)
MetadataBase.query = session.query_property()

TEST_DIR = os.path.dirname(__file__)
DATA_DIR = os.path.join(TEST_DIR, 'data')


class AggregationTest(unittest.TestCase):
    def setUp(self):
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def test_aggregate_netcdf_group_simple(self):
        f1 = os.path.join(self.tempdir, 'f1.nc')
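        # The remainder of this test is truncated in the source. A
        # hypothetical continuation might write a small dataset to f1
        # before aggregating; aggregate_netcdf_group's signature is not
        # shown here, so it is not called in this sketch.
        ds = xr.Dataset({'x': (['obs'], np.arange(5))})
        ds.to_netcdf(f1)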
Example #5
import logging
import unittest

import numpy as np
import xarray as xr
from ooi_data.postgres.model import MetadataBase

from preload_database.database import create_engine_from_url, create_scoped_session
from util.netcdf_utils import max_shape, max_dtype, prep_classic

logging.basicConfig()
log = logging.getLogger()
log.setLevel(logging.DEBUG)

sqla_engine = create_engine_from_url(None)
session = create_scoped_session(sqla_engine)
MetadataBase.query = session.query_property()


class NetcdfUtilsTest(unittest.TestCase):
    def test_max_size(self):
        self.assertEqual(max_shape((), (1,)), (1,))
        self.assertEqual(max_shape((1, ), (1,)), (1,))
        self.assertEqual(max_shape((1,), (2,)), (2,))
        self.assertEqual(max_shape((2,), (1,)), (2,))
        self.assertEqual(max_shape((1, 2), (2,)), (2, 2))
        self.assertEqual(max_shape((1, 2), (2, 3)), (2, 3))

    def test_max_dtype(self):
        b1 = np.dtype(np.bool_)  # np.bool_ rather than the removed np.bool alias
        i8 = np.dtype(np.int8)
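The assertions in test_max_size pin down max_shape's semantics: shapes are
aligned on their leading dimensions, padded with 1s, and combined
elementwise by max. A reference sketch consistent with those assertions
(an assumption, not the actual util.netcdf_utils implementation):

from itertools import zip_longest

def max_shape_ref(shape1, shape2):
    # Pair dimensions from the left, padding the shorter shape with 1s,
    # and keep the larger extent in each position.
    return tuple(max(a, b) for a, b in zip_longest(shape1, shape2, fillvalue=1))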
Example #6
import global_test_setup

import copy
import engine
import mock
import unittest
import util.metadata_service
from preload_database.database import create_engine_from_url, create_scoped_session
from ooi_data.postgres.model import MetadataBase
from util.common import TimeRange, StreamKey, MissingStreamMetadataException
from util.metadata_service import CASS_LOCATION_NAME, SAN_LOCATION_NAME

sqla_engine = create_engine_from_url(None)
session = create_scoped_session(sqla_engine)
MetadataBase.query = session.query_property()


class MockMetadataServiceAPI(object):
    '''Mock version of the util.metadata_service.metadata_service_api.api.MetadataServiceAPI class.'''
    def __init__(self):
        self.test_clean_up()

    def test_clean_up(self):
        self.stream_metadata_records = []
        self.partition_metadata_records = []

    def test_add_stream_metadata_record(self, subsite, node, sensor, method,
                                        stream, first, last, count):
        new_record = util.metadata_service.MetadataServiceAPI.build_stream_metadata_record(
            subsite, node, sensor, method, stream, first, last, count)
        self.stream_metadata_records.append(new_record)
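A usage sketch built only from this example's own helpers; the reference
designator, timestamps, and particle count are illustrative values:

mock_api = MockMetadataServiceAPI()
mock_api.test_add_stream_metadata_record(
    'RS03AXBS', 'LJ03A', '05-ADCPSE301', 'streamed',
    'adcp_velocity_beam', 3594355200.0, 3594441600.0, 86400)
assert len(mock_api.stream_metadata_records) == 1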