예제 #1
0
def start_dataset_creating():
    """Create a dataset record, run the collection pipeline, and redirect.

    A new Dataset row is committed immediately with status ``start`` so the
    run is visible while it executes; the final status (``end`` or ``fail``,
    plus the error text) and the end timestamp are committed afterwards.
    """
    started_at = datetime.datetime.now()
    name = 'dataset_' + str(time.time()).replace('.', '')
    target_dir = os.path.join(current_app.config['DATASET_DIR'], name)
    os.makedirs(target_dir, exist_ok=True)

    dataset = Dataset(
        name=name,
        path=target_dir,
        dt_start=started_at,
        status=DatasetStatus.start,
        type=DatasetType.top_one,
    )
    db.session.add(dataset)
    db.session.commit()

    collector = DatasetCollector(dataset_model=dataset)
    try:
        # TODO: add dataset parameters
        collector.create_doctor_item_base_matrix()
        collector.create_datasets_for_catboost(min_appts=10)
    except Exception as exc:
        # Record the failure on the row instead of surfacing a 500.
        traceback.print_exc()
        dataset.status = DatasetStatus.fail
        dataset.error = str(exc)
    else:
        dataset.status = DatasetStatus.end
    finally:
        # Always stamp the end time and persist the outcome.
        dataset.dt_end = datetime.datetime.now()
        db.session.add(dataset)
        db.session.commit()

    return redirect(url_for('dataset.main'))
예제 #2
0
 def __init__(self, exchange, period_start: datetime, period_end=None, interval=60, *args, **kwargs):
     """Capture the backtest window and register a matching dataset.

     :param exchange: exchange adapter exposing ``name``/``currency``/``asset``.
     :param period_start: start of the candle period.
     :param period_end: end of the period, or None for open-ended.
     :param interval: candle size in seconds (default 60).
     """
     self.exchange = exchange
     self.interval = interval
     self.period_start = period_start
     self.period_end = period_end
     self.start = datetime.now()
     # FIX: candleSize was hard-coded to 60, silently ignoring the
     # `interval` argument; use the stored interval instead (identical
     # behaviour for the default interval=60, and consistent with the
     # other call sites that pass `interval` as candleSize).
     self.dataset = Dataset().create(
         data={
             'exchange': '/api/exchanges/' + self.exchange.name.lower(),
             'periodStart': self.period_start,
             'periodEnd': self.period_end,
             'candleSize': self.interval,
             'currency': '/api/currencies/' + self.exchange.currency.lower(),
             'asset': '/api/currencies/' + self.exchange.asset.lower(),
         })
예제 #3
0
def parse_datasets(res):
    """Extract datasets from an HTML listing page.

    :param res: HTTP response whose ``text`` attribute holds the page HTML.
    :return: list of Dataset objects, id parsed from each download link.
    """
    soup = BeautifulSoup(res.text, 'html.parser')
    datasets = []
    for view in soup.findAll('div', {'class': 'views-row'}):
        div = view.find('div', {'class': 'views-field-body'})
        # BUG FIX: Tag.find() returns None (never -1) when nothing matches,
        # so the old `div != -1` test was always true and rows without a
        # body div crashed with AttributeError.  Skip such rows instead.
        if div is None:
            continue
        el = div.find('a')
        if el is None:
            continue  # body present but no download link — nothing to record
        datasetid = el['href'].replace("/download/content/", "")
        datasets.append(Dataset(datasetid, el.text))

    return datasets
예제 #4
0
def generate_yaml_from_netCDF(nc_path, product_name, product_description,
                              no_data):
    """Generate product and dataset YAML fixtures from a netCDF file.

    Reads the netCDF at ``nc_path``, turns every variable that carries a
    ``units`` attribute into a Measurement, then dumps a Product and a
    Dataset description into ``./tests/*.yaml`` next to this module.
    """
    # Extract data from the netCDF file.
    source = xarray.load_dataset(nc_path)

    dims = list(source.sizes.mapping.mapping)
    nc_filename = Path(nc_path).name
    measurements = []
    for var_name in source:
        variable = source[var_name].variable
        if "units" not in variable.attrs:
            continue  # only variables with units become measurements
        measurements.append(
            Measurement(
                var_name,
                variable.dtype.name,
                variable.attrs["units"],
                no_data,
                nc_filename,
            ))

    # Classes generation (distinct name avoids shadowing the xarray dataset).
    dataset_doc = Dataset(
        product_name,
        source.longitude.data,
        source.latitude.data,
        measurements,
    )

    product = Product(
        product_name,
        product_description,
        measurements=measurements,
        storage_driver="NetCDF CF",
        storage_dimension_order=dims,
    )

    # Suppress python-object tags in the emitted YAML.
    yaml.emitter.Emitter.process_tag = lambda self, *args, **kw: None
    module_dir = os.path.dirname(__file__)

    # Product generation.
    with open(os.path.join(module_dir, "./tests/product_generated.yaml"), "w") as f:
        yaml.dump(product, f, sort_keys=False)

    # Dataset generation (strip the '%' quoting markers from the dump).
    rendered = yaml.dump(dataset_doc, sort_keys=False)
    rendered = rendered.replace("'%", "").replace("%'", "")
    with open(os.path.join(module_dir, "./tests/dataset_generated.yaml"), "w") as f:
        f.write(rendered)
예제 #5
0
    def dataset(self, windows, remove_seconds=0):
        """Convert raw windows into a labelled Dataset.

        :param windows: sequence of 2-D arrays; the first ``len(self.ch_names)``
            columns are channel samples, the last column holds per-sample labels.
        :param remove_seconds: if > 0, drop label-0 windows lying within this
            many seconds of a label transition.
        :return: ``Dataset(X, y, person_id, id)``; ``Dataset.empty()`` when
            ``windows`` is empty.
        """
        if len(windows) == 0:
            return Dataset.empty()

        n_samples = len(windows)
        n_channels = len(self.ch_names)
        window_length = np.shape(windows)[1]

        # X: (samples, channels, window) — transpose puts channels first;
        # y: label of each window = max label seen within it.
        X = np.empty([n_samples, n_channels, window_length])
        y = np.empty([n_samples], dtype=np.int8)
        for i, window in enumerate(windows):
            X[i] = window[:, 0:n_channels].T
            y[i] = int(max(window[:, -1]))

        if remove_seconds > 0:
            # Indices where the label sequence changes value.
            # (The original also collected `action_labels` here, but never
            # used them — dead work removed.)
            change_points = [i for i in range(1, len(y)) if y[i] != y[i - 1]]

            # assumes a 250 Hz sampling rate — TODO confirm
            remove_distance = (250 * remove_seconds) / window_length

            # Keep every active window; keep rest (label 0) windows only when
            # they are farther than remove_distance from every change point.
            # all() short-circuits, unlike the original full scan.
            keep_indices = [
                i for i, label in enumerate(y)
                if label != 0
                or all(abs(i - point) > remove_distance for point in change_points)
            ]
            X = X[keep_indices]
            y = y[keep_indices]

        return Dataset(X, y, self.person_id, self.id)
예제 #6
0
    def __init__(self, exchange: Exchange, timeout=60, *args, **kwargs):
        """Initialise trading state and register a dataset for this session.

        :param exchange: exchange adapter this session trades on.
        :param timeout: forwarded to the base class (default 60).
        """
        super().__init__(exchange, timeout, *args, **kwargs)

        # Price/order bookkeeping, all starting from neutral values.
        self.buy_price = 0
        self.sell_price = 0
        self.stop_loss = 0
        self.market_delta = 0
        self.last_price = 0
        self.advised = False
        self.waiting_order = False
        self.fulfilled_orders = []

        # Create a dataset for the session.
        session_data = {
            'exchange': self.exchange.name.lower(),
            'periodStart': datetime.now(),
            'candleSize': 60,
            'currency': self.exchange.currency,
            'asset': self.exchange.asset,
        }
        self.dataset = Dataset().create(data=session_data)
예제 #7
0
    def __init__(self,
                 exchange: Exchange,
                 period_start: datetime,
                 period_end=None,
                 interval=60):
        """Run a backtest over [period_start, period_end].

        Replays stored prices through the exchange's strategy when a matching
        dataset exists; otherwise pulls candles from the exchange's external
        API.  Prints the elapsed time, then terminates the whole process.

        :param exchange: exchange adapter (name, currency, asset, strategy).
        :param period_start: start of the backtest window.
        :param period_end: end of the window, or None.
        :param interval: candle size in seconds (default 60).
        """
        self.launchedAt = datetime.now()
        # Try to find dataset
        dataset = Dataset().get({
            "exchange": exchange.name.lower(),
            "currency": exchange.currency.lower(),
            "asset": exchange.asset.lower(),
            "periodStart": period_start,
            "periodEnd": period_end,
            "candleSize": interval
        })
        if dataset and len(dataset) > 0:
            print(dataset)
            print(dataset[0])
            print("Dataset found: " + dataset[0]['uuid'])
            price = Price()
            # NOTE(review): the inner loop variable `price` shadows the
            # Price() instance above; the query generator is created before
            # the shadowing occurs, so iteration still works — confirm this
            # shadowing is intended.
            for prices in price.query('get', {"dataset": dataset[0]['uuid']}):
                for price in prices:
                    print(price)
                    newPrice = Price()
                    newPrice.populate(price)
                    exchange.strategy.set_price(newPrice)
                    exchange.strategy.run()
        else:
            print("Dataset not found, external API call to " + exchange.name)
            for price in exchange.historical_symbol_ticker_candle(
                    period_start, period_end, interval):
                exchange.strategy.set_price(price)
                exchange.strategy.run()

        execution_time = datetime.now() - self.launchedAt
        print('Execution time: ' + str(execution_time.total_seconds()) +
              ' seconds')
        # NOTE(review): exiting from a constructor kills the interpreter;
        # callers never receive the instance — verify this is deliberate.
        sys.exit(0)
예제 #8
0
파일: test.py 프로젝트: MisakiCoca/DLAVC
    config = yaml.safe_load(f)

# load logger (file-based logging config path comes from the YAML config)
lc = config['environment']['log_config']
logging.config.fileConfig(lc)
logs = logging.getLogger()

# load device config
cuda = config['environment']['cuda']
# NOTE(review): device index is hard-coded to GPU 0 regardless of the
# `cuda` flag or available devices — confirm intended.
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

# load dataloader (batch size fixed at 1; `iz` placeholder is unused here)
it = config['test']['image_root']
bs = 1
iz = None
data = Dataset(it, iz, cuda)
loader = DataLoader(data, bs, cuda)

# load color transform network
net_col = col.Generator(2)
net_col = nn.DataParallel(net_col)
net_col = net_col.cuda() if cuda else net_col

# load temporal constraint network
net_tem = tem.Generator(64)
net_tem = nn.DataParallel(net_tem)
net_tem = net_tem.cuda() if cuda else net_tem

# load pretrained models
# NOTE(review): checkpoint loading is commented out, so both networks run
# with random weights — confirm whether weights are loaded elsewhere.
# col_gen.load_state_dict(torch.load(test['load_pretrain_model'][0], map_location='cpu'))
# tem_gen.load_state_dict(torch.load(test['load_pretrain_model'][1], map_location='cpu'))
예제 #9
0
파일: dataset.py 프로젝트: jack2200/models
from models.dataset import Dataset

# Creating a dataset of dimension 2 in input and 3 in output
dset = Dataset(2, 3)

# Adding datapoints
dset.add_xy([0.0, 1.0], [1.0, 2.0, 0.0])
dset.add_xy([1.0, 0.0], [0.0, 0.0, 2.0])
dset.add_xy([2.0, -1.0], [-1.0, -2.0, 4.0])

# Nearest neighbors queries on input, requesting 2 neighbors
dset.nn_x([0.2, 0.5], 2)
# Nearest neighbors queries on output, requesting 1 neighbor
dist, index = dset.nn_y([1.0, 1.0, 1.0], 1)

# Retrieving the nearest output of [1.0, 1.0, 1.0]
# FIX: Python 2 `print` statements are syntax errors under Python 3;
# the call form below works on both interpreters.
print(dset.get_y(index[0]))
# Retrieving the nearest datapoint
print(dset.get_xy(index[0]))
예제 #10
0
elif mode == 'live':
    # Live mode: stream ticker updates over the exchange websocket.
    exchange.start_symbol_ticker_socket(exchange.get_symbol())

elif mode == 'backtest':
    period_start = config('PERIOD_START')
    period_end = config('PERIOD_END')

    print("Backtest period from {} to {} with {} seconds candlesticks.".format(
        period_start, period_end, interval))

    # Try to find dataset
    # NOTE(review): the filter mixes snake_case ("period_start"/"period_end")
    # with camelCase ("candleSize") — verify against the API schema.
    dataset = Dataset().query(
        'get', {
            "exchange": '/api/exchanges/' + exchange.name.lower(),
            "currency": '/api/currencies/' + currency.lower(),
            "asset": '/api/currencies/' + asset.lower(),
            "period_start": period_start,
            "period_end": period_end,
            "candleSize": interval
        })

    if dataset and len(dataset) > 0:
        print(dataset[0])
        price = Price()
        # Replay every stored price through the strategy.
        # NOTE(review): the loop variable shadows the Price() instance; the
        # query generator is created before the shadowing, so it still works.
        for price in price.query('get', {"dataset": dataset[0]['uuid']}):
            newPrice = Price()
            newPrice.populate(price)
            exchange.strategy.set_price(newPrice)
            exchange.strategy.run()
    else:
        print("Dataset not found, external API call to " + exchange.name)