Example #1
import os
from datetime import datetime

import pandas as pd

from cognite.config import configure_session
from cognite.data_transfer_service import DataSpec, DataTransferService, TimeSeries, TimeSeriesDataSpec
from virtual_metering.data_fetcher import EXCLUDE_TAGS


def main():
    configure_session(api_key=os.getenv("COGNITE_API_KEY"),
                      project="akerbp",
                      debug=True)
    tags_d03 = []
    tags_d02 = []

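    # Gather tags for wells D02 and D03 from the CSV files under ../tags,
    # keeping shared placements (T3 WGM, Template, Riser) plus the well-specific ones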
    for root, subdirs, files in os.walk("../tags"):
        for file in files:
            if file in ("well_tags.csv", "routing.csv", "output.csv",
                        "riser_tags.csv", "template_tags.csv"):
                with open(os.path.join(root, file)) as f:
                    df = pd.read_csv(f)

                    placements = ["T3 WGM", "Template", "Riser"]
                    placements_d03 = ["WellD03"] + placements
                    placements_d02 = ["WellD02"] + placements

                    df = df[~df["tag"].isin(EXCLUDE_TAGS)]

                    tags_d03.append(df[df["placement"].isin(placements_d03)])
                    tags_d02.append(df[df["placement"].isin(placements_d02)])

    tags_d02_concat = pd.concat(tags_d02, ignore_index=True)
    tags_d03_concat = pd.concat(tags_d03, ignore_index=True)

    tags_d02_concat = tags_d02_concat.drop_duplicates(subset="tag")
    tags_d03_concat = tags_d03_concat.drop_duplicates(subset="tag")

    d02_input_time_series = []
    d03_input_time_series = []

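    # Valve tags (ESV/18HV) are step signals, so use step aggregation with forward-fill;
    # continuous sensors use average aggregation with linear interpolation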
    for tag in tags_d02_concat["tag"]:
        aggregate = "step" if ("ESV" in tag or "18HV" in tag) else "avg"
        missing_data_strategy = "ffill" if (
            "ESV" in tag or "18HV" in tag) else "linearInterpolation"
        ts = TimeSeries(name=tag,
                        missing_data_strategy=missing_data_strategy,
                        aggregates=[aggregate])
        d02_input_time_series.append(ts)

    for tag in tags_d03_concat["tag"]:
        aggregate = "step" if ("ESV" in tag or "18HV" in tag) else "avg"
        missing_data_strategy = "ffill" if (
            "ESV" in tag or "18HV" in tag) else "linearInterpolation"
        ts = TimeSeries(name=tag,
                        missing_data_strategy=missing_data_strategy,
                        aggregates=[aggregate])
        d03_input_time_series.append(ts)

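    # One labelled data spec per well: 10-second averages starting 1 March 2017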
    d02_tsds = TimeSeriesDataSpec(
        time_series=d02_input_time_series,
        aggregates=["avg"],
        granularity="10s",
        start=int(datetime(2017, 3, 1).timestamp() * 1e3),
        label="d2",
    )
    d03_tsds = TimeSeriesDataSpec(
        time_series=d03_input_time_series,
        aggregates=["avg"],
        granularity="10s",
        start=int(datetime(2017, 3, 1).timestamp() * 1e3),
        label="d3",
    )

    data_spec = DataSpec(time_series_data_specs=[d02_tsds, d03_tsds])

    dts = DataTransferService(data_spec, num_of_processes=10)

    print(data_spec.to_JSON())

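    # Download one dataframe per labelled spec and write each to ../data/<label>.csv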
    df_dict = dts.get_dataframes()

    for label, df in df_dict.items():
        df.to_csv(f"../data/{label}.csv")
        print(df.shape)
Example #2
import argparse

from cognite.config import configure_session
from cognite.v05.assets import get_asset_subtree  # import path assumed for get_asset_subtree
from cognite.v05.timeseries import post_time_series, get_timeseries
from cognite.v05.dto import TimeSeries
from bysykkel import Station, find_or_create_asset, find_or_create_root_assets
import oslobysykkelsdk as oslo
import bergenbysykkelsdk as bergen
import trondheimbysykkelsdk as trondheim

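# Command-line arguments: API credentials plus flags to delete or (re)create time series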
parser = argparse.ArgumentParser()
parser.add_argument('--apikey', type=str, required=True)
parser.add_argument('--project', type=str, required=True)
parser.add_argument('--delete_timeseries', action='store_true')
parser.add_argument('--create_timeseries', action='store_true')
args = parser.parse_args()

# Set API key and project for current session
configure_session(api_key=args.apikey, project=args.project)


def delete_timeseries(cities):
    # TODO: implement
    print('delete_timeseries is not yet implemented')


def create_timeseries(cities):
    timeseries = []

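    # For every asset in each city's subtree, define a '<name>_bikes' time series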
    for city, data in cities.items():
        city_id = data['asset_id']
        assets = get_asset_subtree(asset_id=city_id).to_json()
        for asset in assets:
            timeseries_bikes = TimeSeries(name=asset['name'] + '_bikes',
Example #3
import os

import tools.lstm_network as lstm
import tools.knn as knn
import tools.constants as constants
import tools.plotting as plotting
import seaborn as sns

from sklearn import preprocessing
from sklearn.metrics import mean_squared_error, r2_score
from cognite.config import configure_session
from cognite.v05.timeseries import get_datapoints_frame
from statsmodels.tsa.stattools import adfuller
from pandas.plotting import scatter_matrix
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
from pandas.plotting import autocorrelation_plot

configure_session(os.environ.get('PUBLIC_DATA_KEY'), 'publicdata')

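# Sensor tags from the publicdata project, grouped into process inputs, outputs and control signals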
input_tags = {
    'VAL_23-FT-92512:X.Value|average': 'Gas inflow from separators',
    'VAL_23-PT-92532:X.Value|average': 'Suction pressure',
    'VAL_23-TT-92533:X.Value|average': 'Suction temperature'
}

output_tags = {
    'VAL_23-FT-92537-01:X.Value|average': 'Discharge mass flow',
    'VAL_23-FT-92537-04:X.Value|average': 'Discharge volume flow',
    'VAL_23-PT-92539:X.Value|average': 'Discharge pressure',
    'VAL_23-TT-92539:X.Value|average': 'Discharge temperature'
}

control_tags = {
Example #4
                        TimeseriesWithDatapoints(
                            locks_asset_name,
                            [Datapoint(timestamp, num_locks)]))
            timestamp = int(time.time() * 1000)
            log('  Posting %d data points for %s at %d' %
                (len(datapoints), city, timestamp))
            post_multi_tag_datapoints(datapoints)
            log('  Data points posted to CDP.')
        except Exception as e:
            log('  Error fetching availability for %s: %s' % (city, str(e)))


log('Initializing bysykkel sampler ...')

# Set API key and project for current session
configure_session(api_key=os.getenv('COGNITE_API_KEY'),
                  project=os.getenv('COGNITE_PROJECT'))

# Find root assets
cities = {
    'Oslo': {
        'stations': oslo.get_stations(),
        'get_availability': oslo.get_availability
    },
    'Bergen': {
        'stations': bergen.get_stations(),
        'get_availability': bergen.get_availability
    },
    'Trondheim': {
        'stations': trondheim.get_stations(),
        'get_availability': trondheim.get_availability
    }
Example #5
import os
import time
from datetime import datetime, timedelta

import numpy as np
import pandas as pd

from cognite._utils import APIError
from cognite.config import configure_session, set_number_of_retries
from cognite.data_transfer_service import DataSpec, DataTransferService, TimeSeries, TimeSeriesDataSpec
from cognite.v05.dto import Datapoint
from cognite.v05.dto import TimeSeries as TimeSeriesDTO
from cognite.v05.timeseries import post_datapoints, post_time_series
from cognite.v06 import models
from virtual_metering.data_fetcher import EXCLUDE_TAGS

configure_session(api_key=os.getenv("COGNITE_API_KEY"), project="akerbp", debug=True)
set_number_of_retries(3)

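# Create the calculated output time series; errors from the post (e.g. a duplicate name) are caught below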
try:
    ts = TimeSeriesDTO(name="SKAP_18FI381-VFlLGas/Y/10sSAMP_calc_D02_2", asset_id=8129784932439587)
    res = post_time_series([ts])
    print(res)
except APIError as e:
    print(e)


def main():
    output_columns = [
        "SKAP_18FI381-VFlLGas/Y/10sSAMP|average",
Example #6
def unset_config_variables():
    configure_session('', '')
    yield (TEST_API_KEY, TEST_PROJECT)
    configure_session(TEST_API_KEY, TEST_PROJECT)
Example #7
def configure_test_session():
    configure_session(TEST_API_KEY, TEST_PROJECT)
    yield
    configure_session('', '')  # teardown
Example #8
import os

import matplotlib.pyplot as plt
from cognite.client.stable.time_series import get_datapoints_frame
from cognite.config import configure_session

# Set API key and project for current session. The project is Open Industrial Data.
configure_session(api_key=os.getenv("COGNITE_API_KEY"), project="publicdata")

# Retrieve one year of daily aggregates for a time series
ts = "VAL_23-PT-92512:X.Value"
dataframe = get_datapoints_frame([ts],
                                 start="52w-ago",
                                 aggregates=["avg", "min", "max"],
                                 granularity="1d",
                                 processes=1)

# Plot the dataframe
dataframe.plot(x="timestamp")
plt.show()