Ejemplo n.º 1
0
def extract_datapoints(data_type, folder_path):
    """Extract datapoints from files under *folder_path* and post them to CDP.

    Args:
        data_type (str):   'live' polls the folder forever for new files;
                           'historical' processes every file once and exits.
        folder_path:       Folder scanned by ``find_new_files``.
    """
    client = CogniteClient(api_key=API_KEY)
    # Map externalID -> name for every time series already in CDP, so
    # post_datapoints can route values to existing series.
    existing_timeseries = {
        ts["metadata"]["externalID"]: ts["name"]
        for ts in client.time_series.get_time_series(include_metadata=True,
                                                     autopaging=True).to_json()
    }

    try:
        if data_type == 'live':
            last_timestamp = LAST_PROCESSED_TIMESTAMP
            while True:
                paths = find_new_files(last_timestamp, folder_path)
                if paths:
                    last_timestamp = post_datapoints(client, paths,
                                                     existing_timeseries)
                # Sleep on every iteration: previously the sleep happened
                # only after processing files, so an idle folder caused a
                # busy loop that rescanned the directory continuously.
                time.sleep(5)
        elif data_type == 'historical':
            # Timestamp 0 selects all files in the folder regardless of age.
            paths = find_new_files(0, folder_path)
            if paths:
                post_datapoints(client, paths, existing_timeseries)
            logger.info("Extraction complete")
    except KeyboardInterrupt:
        logger.warning("Extractor stopped")
Ejemplo n.º 2
0
def main(args):
    """Parse TDMS files found at ``args.path`` and push them to CDP."""
    logging.basicConfig(level=logging.INFO)

    # Resolve args.path into the list of .tdms files to process.
    if os.path.isdir(args.path):
        files = glob.glob(os.path.join(args.path, "*.tdms"))
    elif os.path.exists(args.path) and os.path.splitext(args.path)[1] == ".tdms":
        files = [args.path]
    else:
        logger.fatal(
            "--path must point to either folder or tdms file: {}".format(
                args.path))
        sys.exit(2)

    api_key = args.apikey if args.apikey else os.environ.get("COGNITE_API_KEY")
    client = CogniteClient(api_key=api_key)

    for path in files:
        with open(path, "rb") as fp:
            try:
                tdms = TdmsFile(fp)
            except Exception as exc:
                logger.error("Fatal: failed to parse TDMS file {}: {}".format(
                    path, exc))
                continue
            # Only reached when parsing succeeded (except path continues).
            process_tdms_file(client, tdms, path, args.only_static)
    def __init__(self,
                 data_spec: DataSpec,
                 api_key: str = None,
                 cookies: Dict = None,
                 num_of_workers: int = 10):
        """
        Args:
            data_spec (data_transfer_service.DataSpec):   Data Spec.
            api_key (str):          Api key.
            cookies (dict):         Cookies.
            num_of_workers (int):   Number of workers to fetch data with.
        """
        self.cognite_client = CogniteClient(
            api_key=api_key, cookies=cookies, num_of_workers=num_of_workers)

        # Accept either a ready DataSpec (copied defensively) or its
        # JSON/dict representation; anything else is a caller error.
        if isinstance(data_spec, DataSpec):
            spec = deepcopy(data_spec)
        elif isinstance(data_spec, dict):
            spec = DataSpec.from_JSON(data_spec)
        else:
            raise ValueError(
                "DataTransferService accepts a DataSpec instance or a json object representation of it."
            )
        self.data_spec = spec
        self.ts_data_specs = spec.time_series_data_specs
        self.files_data_spec = spec.files_data_spec
        self.cookies = cookies
    def test_parameter_config(self):
        """Client should retain exactly the config values passed to it."""
        config = {
            "base_url": "blabla",
            "num_of_retries": 1,
            "num_of_workers": 1,
            "timeout": 10,
        }
        client = CogniteClient(project="something", **config)
        self.assert_config_is_correct(client, config["base_url"],
                                      config["num_of_retries"],
                                      config["num_of_workers"],
                                      config["timeout"])
Ejemplo n.º 5
0
def time_series_in_cdp():
    """Ensure the two sdk test time series exist in CDP; yield their ids.

    Creation is best-effort: an APIError (e.g. the series already exists)
    is logged and swallowed, and the ids are re-resolved afterwards.
    """
    global TEST_TS_1_ID, TEST_TS_2_ID
    client = CogniteClient(num_of_retries=3)

    def _lookup_id(name):
        # Resolve a time series id by exact-name prefix search.
        return client.time_series.get_time_series(
            prefix=name).to_json()[0]["id"]

    def _post_test_ts(name, which):
        # Create one test series and fill it with datapoints spaced
        # int(3.6e6) ms apart; swallow APIError so an existing series
        # does not abort the fixture. (Deduplicates two copy-pasted blocks.)
        try:
            client.time_series.post_time_series([TimeSeries(name=name)])
            log.warning("Posted sdk test time series {}".format(which))
            client.datapoints.post_datapoints(
                name=name,
                datapoints=[
                    Datapoint(timestamp=i, value=i)
                    for i in range(TEST_TS_START, TEST_TS_END, int(3.6e6))
                ],
            )
            log.warning(
                "Posted datapoints to sdk test time series {}".format(which))
            return _lookup_id(name)
        except APIError as e:
            log.warning(
                "Posting test time series {} failed with code {}".format(
                    which, e.code))

    TEST_TS_1_ID = _post_test_ts(TEST_TS_1_NAME, 1)
    TEST_TS_2_ID = _post_test_ts(TEST_TS_2_NAME, 2)

    # Re-resolve unconditionally: the series may pre-exist even when the
    # post above failed, in which case the ids are still retrievable.
    TEST_TS_1_ID = _lookup_id(TEST_TS_1_NAME)
    TEST_TS_2_ID = _lookup_id(TEST_TS_2_NAME)
    yield TEST_TS_1_ID, TEST_TS_2_ID
Ejemplo n.º 6
0
def main(args):
    """Process trend zip files found at ``args.path`` and post datapoints."""
    logging.basicConfig(level=logging.INFO)

    # Resolve args.path into the list of .zip files to process.
    if os.path.isdir(args.path):
        files = glob.glob(os.path.join(args.path, "*.zip"))
    elif os.path.exists(args.path) and os.path.splitext(args.path)[1] == ".zip":
        files = [args.path]
    else:
        logger.fatal(
            "--path must point to either folder or trend zip file: {}".format(
                args.path))
        sys.exit(2)

    api_key = args.apikey if args.apikey else os.environ.get("COGNITE_API_KEY")
    client = CogniteClient(api_key=api_key)

    for path in files:
        timeseries, datapoints = process_inputs(path,
                                                save_files=args.save_files)
        # Only post when parsing yielded both series metadata and values.
        if timeseries and datapoints:
            process_datapoints(client, timeseries, datapoints, path)
from random import randint

import pytest

from cognite import CogniteClient
from cognite.client.stable.time_series import TimeSeries
from cognite.client.experimental.time_series import TimeSeriesClient, TimeSeriesResponse
from tests.conftest import TEST_TS_1_NAME

# Module-level endpoints shared by the tests below: the stable API is used
# for setup, the experimental API is the system under test.
stable_time_series = CogniteClient().time_series
time_series = CogniteClient().experimental.time_series


@pytest.fixture
def new_ts_id():
    """Create a throwaway time series and yield its id."""
    name = "test_ts_{}".format(randint(1, 2 ** 53 - 1))
    stable_time_series.post_time_series([TimeSeries(name)])
    listing = stable_time_series.get_time_series(prefix=name).to_json()
    yield listing[0]["id"]


class TestTimeseries:
    def test_delete_time_series_by_id(self, new_ts_id):
        """Deleting an existing series by id returns None on success."""
        assert time_series.delete_time_series_by_id([new_ts_id]) is None

    @pytest.fixture(scope="class")
    def get_time_series_by_id_response_obj(self, time_series_in_cdp):
        """Yield the lookup response for the first test time series."""
        first_id = time_series_in_cdp[0]
        yield time_series.get_time_series_by_id(id=first_id)

    @pytest.fixture(scope="class")
    def get_multiple_time_series_by_id_response_obj(self, time_series_in_cdp):
Ejemplo n.º 8
0
from cognite.client.stable.datapoints import (
    Datapoint,
    DatapointsQuery,
    DatapointsResponse,
    LatestDatapointResponse,
    TimeseriesWithDatapoints,
)
from cognite.client.stable.time_series import TimeSeries
from tests.conftest import (
    TEST_TS_1_NAME,
    TEST_TS_2_NAME,
    TEST_TS_REASONABLE_INTERVAL,
    TEST_TS_REASONABLE_INTERVAL_DATETIME,
)

# Stable-API client shared by every test in this module.
client = CogniteClient()

# Set by the autouse `ts_name` fixture below; None until a test class runs.
TS_NAME = None


@pytest.fixture(autouse=True, scope="class")
def ts_name():
    """Pick a fresh random time-series name for each test class."""
    global TS_NAME
    suffix = randint(1, 2 ** 53 - 1)
    TS_NAME = "test_ts_{}".format(suffix)


@pytest.fixture(scope="class")
def datapoints_fixture():
    """Create the class-scoped test time series before datapoint tests run."""
    new_series = TimeSeries(TS_NAME)
    client.time_series.post_time_series([new_series])
    yield
# Temporary mock setup while waiting for a better way to do integration tests
from unittest.mock import patch

import pytest

from cognite import CogniteClient
from cognite.client.stable.tagmatching import TagMatchingResponse
from tests.conftest import MockReturnValue

# Tag-matching endpoint under test; HTTP calls are mocked via @patch below.
tag_matching = CogniteClient().tag_matching.tag_matching


@pytest.fixture(scope="module")
@patch("requests.sessions.Session.post")
def tagmatching_result(mock_post):
    response = {
        "data": {
            "items": [{
                "matches": [
                    {
                        "platform": "a_platform",
                        "score": 0,
                        "tagId": "a_match"
                    },
                    {
                        "platform": "a_platform",
                        "score": 0,
                        "tagId": "a_match1"
                    },
                    {
                        "platform": "a_platform",
Ejemplo n.º 10
0
from typing import List

import pandas as pd
import pytest

from cognite import APIError, CogniteClient
from cognite.client.experimental.sequences import Column, Row, RowValue, Sequence, SequenceDataResponse
from tests.conftest import generate_random_string

# Experimental sequences endpoint exercised by this test module.
sequences = CogniteClient().experimental.sequences

# This variable will hold the ID of the sequence that is created in one of the test fixtures of this class.
CREATED_SEQUENCE_ID = None
# This variable holds the external id used for the sequence that'll be created (and deleted) in these tests
SEQUENCE_EXTERNAL_ID = "external_id" + generate_random_string(10)


class TestSequences:
    @pytest.fixture(scope="class")
    def sequence_that_isnt_created(self):
        """Returns a Sequence that hasn't been created yet. (It does not have an ID)"""
        global SEQUENCE_EXTERNAL_ID

        return Sequence(
            id=None,
            name="test_sequence",
            external_id=SEQUENCE_EXTERNAL_ID,
            asset_id=None,
            columns=[
                Column(id=None,
                       name="test_column",
Ejemplo n.º 11
0
import pandas as pd
import pytest

from cognite import CogniteClient
from cognite.client.stable.assets import Asset, AssetListResponse, AssetResponse
from tests.conftest import generate_random_string

# Stable assets endpoint exercised by the fixtures and tests below.
assets = CogniteClient().assets

# Randomized suffix so repeated test runs don't collide on asset names.
ASSET_NAME = "test_asset" + generate_random_string(10)


@pytest.fixture(scope="module")
def get_asset_subtree_response():
    """Fetch a single-entry subtree for a fixed, known asset id."""
    response = assets.get_asset_subtree(asset_id=6354653755843357, limit=1)
    return response


@pytest.fixture(scope="module")
def get_assets_response():
    """Fetch a one-item page of assets for the response-object tests."""
    response = assets.get_assets(limit=1)
    return response


def test_get_assets_response_object(get_assets_response):
    """First page: wrapped in AssetListResponse, has next but no previous cursor."""
    response = get_assets_response
    assert isinstance(response, AssetListResponse)
    assert response.next_cursor() is not None
    assert response.previous_cursor() is None


def test_get_assets_with_metadata_args():
    """Filtering on metadata no asset has should produce an empty result."""
    response = assets.get_assets(limit=1, metadata={"something": "something"})
    assert not response.to_json()
Ejemplo n.º 12
0
import os

import pandas as pd
import pytest

from cognite import CogniteClient
from cognite.client.stable.files import FileInfoResponse, FileListResponse

# Stable files endpoint exercised by the upload tests below.
files = CogniteClient().files


def test_upload_file_metadata():
    """Metadata-only upload should hand back both an upload URL and a file id."""
    response = files.upload_file("test_file",
                                 source="sdk-tests",
                                 overwrite=True)
    for key in ("uploadURL", "fileId"):
        assert response.get(key) is not None


def test_upload_file(tmpdir):
    """Uploading real content warns, returns a file id but no upload URL."""
    tmpdir.join("test_file.txt").write("This is a test file.")
    file_path = os.path.join(str(tmpdir), "test_file.txt")
    with pytest.warns(UserWarning):
        response = files.upload_file("test_file",
                                     file_path,
                                     source="sdk-tests",
                                     overwrite=True)
    assert response.get("uploadURL") is None
    assert response.get("fileId") is not None

Ejemplo n.º 13
0
from random import randint

import pandas as pd
import pytest

from cognite import CogniteClient
from cognite.client.stable.time_series import TimeSeries, TimeSeriesResponse

# Stable time-series endpoint shared by every test in this module.
timeseries = CogniteClient().time_series


@pytest.fixture(autouse=True, scope="class")
def ts_name():
    """Generate a fresh random time-series name for each test class."""
    global TS_NAME
    unique_suffix = randint(1, 2**53 - 1)
    TS_NAME = "test_ts_{}".format(unique_suffix)


class TestTimeseries:
    def test_post_timeseries(self):
        """Posting a new time series returns None on success."""
        new_series = TimeSeries(TS_NAME)
        assert timeseries.post_time_series([new_series]) is None

    def test_update_timeseries(self):
        """Updating the series unit returns None on success."""
        updated = TimeSeries(TS_NAME, unit="celsius")
        assert timeseries.update_time_series([updated]) is None

    @pytest.fixture(scope="class", params=[True, False])
    def get_timeseries_response_obj(self, request):
        yield timeseries.get_time_series(prefix=TS_NAME,
Ejemplo n.º 14
0
# -*- coding: utf-8 -*-

import os
import traceback
import argparse
import yaml
import datetime
import sys
import logging
import csv
from cognite import CogniteClient
from post_data import *

import pandas as pd

# Authenticated module-level client; the key comes from the COGNITE_KEY
# environment variable. NOTE(review): removed a redundant unauthenticated
# CogniteClient() that was constructed and immediately overwritten here.
api_key = os.environ['COGNITE_KEY']  #config["cognite"]["api_key"]
client = CogniteClient(api_key=api_key, timeout=300)


def post_data():
    url = "https://tcloud2.twave.io/cunb4h/rest/waves/Michaelkrohnsgate/Pos_4/Velocity/"
    username = "******"
    password = "******"
    ts_name = "spectra_pos_1_accelerate"
    value, epoctime = get_raw_string_data(username, password, url)
    d = {"timestamp": [epoctime], ts_name: [value]}
    df = pd.DataFrame(d)
    try:
        client.datapoints.post_datapoints_frame(df)
        print(" OK")
Ejemplo n.º 15
0
import pandas as pd
import pytest

import cognite.client.stable.events
from cognite import APIError, CogniteClient

# Stable events endpoint exercised by the fixtures and tests below.
events = CogniteClient().events


@pytest.fixture(scope="module")
def get_post_event_obj():
    """Post a single event, yield the response, delete the event on teardown."""
    event = cognite.client.stable.events.Event(
        start_time=1521500400000, end_time=1521586800000, description="hahaha")
    res = events.post_events([event])
    yield res
    # Teardown: remove every event created by the post above.
    ids = [ev["id"] for ev in res.to_json()]
    events.delete_events(ids)


def test_post_events(get_post_event_obj):
    """The post response exposes list, DataFrame and response-object views."""
    response = get_post_event_obj
    assert isinstance(response, cognite.client.stable.events.EventListResponse)
    assert isinstance(response.to_pandas(), pd.DataFrame)
    assert isinstance(response.to_json(), list)


def test_post_events_length(get_post_event_obj):
    """Exactly one event was posted, so exactly one must come back."""
    posted = get_post_event_obj.to_json()
    assert len(posted) == 1


def test_get_event(get_post_event_obj):
    id = get_post_event_obj.to_json()[0]["id"]
    res = events.get_event(event_id=id)
def client():
    """Fixture: yield a default-configured CogniteClient."""
    yield CogniteClient()
Ejemplo n.º 17
0
from random import randint

import numpy as np
import pandas as pd
import pytest

from cognite import APIError, CogniteClient
from cognite.client.stable.raw import RawResponse, RawRow

# Stable raw-storage endpoint exercised by this test module.
raw = CogniteClient().raw

# Populated by the autouse class-scoped fixtures below before tests run.
DB_NAME = None
TABLE_NAME = None
ROW_KEY = None
ROW_COLUMNS = None


@pytest.fixture(autouse=True, scope="class")
def db_name():
    """Generate a unique raw database name for each test class."""
    global DB_NAME
    unique_suffix = randint(1, 2 ** 53 - 1)
    DB_NAME = "test_db_{}".format(unique_suffix)


@pytest.fixture(autouse=True, scope="class")
def table_name():
    """Generate a unique raw table name for each test class."""
    global TABLE_NAME
    unique_suffix = randint(1, 2 ** 53 - 1)
    TABLE_NAME = "test_table_{}".format(unique_suffix)


@pytest.fixture(autouse=True, scope="class")
def row_key():
 def __init__(cls):
     # NOTE(review): receiver is named `cls` but this is an instance
     # initializer; presumably copied from a setup helper — confirm intent.
     # Credentials come from the environment (KeyError if unset).
     cls.apiKey = os.environ["TEST_API_KEY_WRITE"]
     cls.project = os.environ["PROJECT"]
     # Client authenticated against the write-enabled test project.
     cls.client = CogniteClient(cls.apiKey, cls.project)
     # Randomized model name so concurrent test runs don't collide.
     cls.random_model = "model_{}".format(random.randint(0, sys.maxsize))
Ejemplo n.º 19
0
from random import randint

import pytest

from cognite import CogniteClient

# Experimental analytics models endpoint used by the fixtures and tests below.
models = CogniteClient().experimental.analytics.models


@pytest.fixture
def created_model():
    """Create a throwaway model, yield it, and delete it on teardown."""
    name = "test-model-{}".format(randint(0, 1e5))
    model = models.create_model(name=name)
    yield model
    # Teardown: remove the model created above.
    models.delete_model(model["id"])


@pytest.fixture
def created_source_package():
    """Create a throwaway source package, yield it, delete it on teardown."""
    name = "test-sp-{}".format(randint(0, 1e5))
    package = models.create_source_package(
        name=name,
        package_name="whatever",
        available_operations=["TRAIN", "PREDICT"],
        runtime_version="0.1",
    )
    yield package
    # Teardown: remove the source package created above.
    models.delete_source_package(source_package_id=package["id"])


class TestModels:
    def test_get_model(self, created_model):