def create_price_rule(
    self,
    discount_level: str,
    discount_name: str,
    start_time: pd.Timestamp,
    end_time: pd.Timestamp,
    customer_selection: str = "all",
    target_selection: str = "all",
) -> Optional[Dict]:
    """Create a Shopify percentage price rule and return it.

    Parameters
    ----------
    discount_level : str
        Percentage amount; sent to Shopify negated (e.g. "10" -> "-10").
    discount_name : str
        Title of the price rule.
    start_time, end_time : pd.Timestamp
        Validity window. A literal "Z" is appended to the ISO string —
        NOTE(review): assumes naive UTC timestamps; confirm callers.
    customer_selection, target_selection : str
        Shopify selection modes; "all" by default. Alternative values such
        as 'prerequisite' / 'entitled' (with e.g. prerequisite_customer_ids,
        entitled_collection_ids, entitled_product_ids) are supported by the
        API but not wired up here.

    Returns
    -------
    Optional[Dict]
        The created "price_rule" object, or None if the API call failed.
    """
    rule = {
        "value_type": "percentage",
        "value": f"-{discount_level}",
        "customer_selection": customer_selection,
        "target_type": "line_item",
        "target_selection": target_selection,
        # NOTE(review): the API expects a boolean here; the string "true"
        # is what the original code sent — confirm before changing.
        "once_per_customer": "true",
        "allocation_method": "across",
        "starts_at": start_time.isoformat() + "Z",
        "ends_at": end_time.isoformat() + "Z",
        "title": discount_name,
    }
    response = self.authenticated_shopify_call(
        call_path="price_rules.json",
        method="POST",
        payload={"price_rule": rule},
    )
    if not response:
        return None
    return response["price_rule"]
def get_ts_traces(self, site_list, start=0, end=0, varfrom=100, varto=140,
                  interval='day', multiplier=1, datasource='A',
                  data_type='mean', qual_codes=(30, 20, 10, 11, 21, 18),
                  report_time=None):
    """Request time-series traces via the Hydstra ``get_ts_traces`` call.

    Parameters
    ----------
    site_list
        Sites to query; passed through ``select_sites`` and joined into a
        comma-delimited string.
    start, end
        Period bounds; 0 means unbounded. Converted to the server's
        ``YYYYMMDDHHMMSS`` format (with a fallback path for dates < 1900).
    varfrom, varto, interval, multiplier, datasource, data_type, report_time
        Passed straight through to the server request.
    qual_codes
        Quality codes to keep in the result.
        (Fixed: was a mutable list default — now an immutable tuple.)

    Returns
    -------
    DataFrame indexed by ('site', 'time') with columns ['data', 'qual_code'].
    """
    # Convert the site list to a comma-delimited string of sites.
    sites = select_sites(site_list).astype(str)
    site_list_str = ','.join([str(site) for site in sites])

    c1900 = Timestamp('1900-01-01')

    def _fmt_time(t):
        # Server wants YYYYMMDDHHMMSS; strftime is avoided for dates < 1900
        # (historical strftime limitation), so fall back to stripping the
        # separators out of the ISO string.
        t1 = Timestamp(t)
        if t1 > c1900:
            return t1.strftime('%Y%m%d%H%M%S')
        return t1.isoformat(' ').replace('-', '').replace(' ', '').replace(':', '')

    if start != 0:
        start = _fmt_time(start)
    if end != 0:
        end = _fmt_time(end)

    ts_traces_request = {'function': 'get_ts_traces',
                         'version': 2,
                         'params': {'site_list': site_list_str,
                                    'start_time': start,
                                    'end_time': end,
                                    'varfrom': varfrom,
                                    'varto': varto,
                                    'interval': interval,
                                    'datasource': datasource,
                                    'data_type': data_type,
                                    'multiplier': multiplier,
                                    'report_time': report_time}}

    ts_traces_request = self.query_by_dict(ts_traces_request)
    traces = ts_traces_request['return']['traces']

    # Convert the JSON traces to a single DataFrame.
    frames = []
    for trace in traces:
        df1 = DataFrame(trace['trace'])
        if df1.empty:
            continue
        df1.rename(columns={'v': 'data', 't': 'time', 'q': 'qual_code'},
                   inplace=True)
        df1['data'] = to_numeric(df1['data'], errors='coerce')
        df1['time'] = to_datetime(df1['time'], format='%Y%m%d%H%M%S')
        df1['qual_code'] = to_numeric(df1['qual_code'], errors='coerce',
                                      downcast='integer')
        df1['site'] = str(trace['site'])
        frames.append(df1[df1.qual_code.isin(qual_codes)])

    # Fixed: previously an empty result crashed on set_index with a KeyError;
    # return a correctly-shaped empty frame instead.
    if frames:
        out1 = concat(frames)
    else:
        out1 = DataFrame(columns=['site', 'time', 'data', 'qual_code'])

    return out1.set_index(['site', 'time'])[['data', 'qual_code']]
def read_database_table(conn, table: str, t0: pd.Timestamp, t1: pd.Timestamp):
    """Return all rows of *table* whose "time" column lies in [t0, t1].

    - conn: database connection object (see database.make_connection)
    - table: database table name
    - t0, t1: inclusive time range of the returned dataframe

    NOTE(review): the table name and timestamps are interpolated directly
    into the SQL text; safe only while *table* comes from trusted
    configuration — confirm callers before exposing this to user input.
    """
    query = (
        f"SELECT * FROM {table} "
        f"WHERE \"time\" BETWEEN '{t0.isoformat()}' AND '{t1.isoformat()}'"
    )
    return pd.read_sql(query, conn)
def _get_bars(
    sid_map: List[Tuple[int, Text]],
    start_session: pd.Timestamp,
    end_session: pd.Timestamp,
    cache: dataframe_cache,
    bin_size: Text,
) -> Iterator[Tuple[int, pd.DataFrame]]:
    """Yield ``(sid, frame)`` of bucketed trade bars for every symbol.

    Pages through the BitMEX ``/trade/bucketed`` endpoint (500 rows per
    request) until the cached frame reaches ``end_session``, storing
    progress in ``cache`` under the key ``"<symbol>-<bin_size>"``.
    """
    for sid, symbol in sid_map:
        cache_key = symbol + '-' + bin_size
        if cache_key not in cache:
            cache[cache_key] = pd.DataFrame()

        while cache[cache_key].empty or cache[cache_key].index[-1] < end_session:
            cached = cache[cache_key]
            # Resume from the last cached bar; its timestamp is requested
            # again and the stale row dropped below to avoid duplicates.
            since = start_session if cached.empty else cached.index[-1]
            raw = _bitmex_rest(
                '/trade/bucketed',
                {
                    'binSize': bin_size,
                    'count': 500,
                    'symbol': symbol,
                    'startTime': since.isoformat(),
                    'endTime': end_session.isoformat(),
                },
            )
            if not raw:
                # No more data available before end_session.
                break

            page = pd.DataFrame.from_dict(raw)
            page.drop('symbol', axis=1, inplace=True)
            page['timestamp'] = page['timestamp'].map(
                lambda x: pd.to_datetime(x, utc=True))
            page.set_index('timestamp', inplace=True)

            if not cached.empty:
                cached = cached.drop(index=cached.index[-1])
            cache[cache_key] = pd.concat([cached, page])

        yield sid, cache[cache_key]
def _get_minute_bar(symbol: str, day_start: pd.Timestamp):
    """Fetch one full day of 1-minute bars for *symbol* from BitMEX.

    Pages the ``/trade/bucketed`` endpoint three times (500 bars per call,
    1440 minutes per day) and returns a DataFrame indexed by timestamp.

    Asserts (hard-fails) if any page is empty or the day is not complete —
    NOTE(review): days with trading gaps would trip the 24*60 assertion;
    confirm this is intended for the symbols used.
    """
    # Inclusive day window: [day_start, day_start + 23:59:59].
    day_end = day_start + timedelta(days=1, seconds=-1)
    res = []
    # 3 pages x 500 rows covers the 1440 minute bars of one day;
    # 'start' is the paging offset into the day's result set.
    for _ in range(3):
        _res = _bitmex_rest(
            '/trade/bucketed',
            {
                'binSize': '1m',
                'count': 500,
                'symbol': symbol,
                'startTime': day_start.isoformat(),
                'endTime': day_end.isoformat(),
                'start': len(res)
            })
        assert len(_res) != 0
        res += _res
    # Exactly one bar per minute of the day is expected.
    assert len(res) == 24 * 60
    res = pd.DataFrame.from_dict(res)
    res.drop('symbol', axis=1, inplace=True)
    # I think this is a bug of pandas
    # res['timestamp'] = pd.to_datetime(res['timestamp'], utc=True)
    # Workaround: convert timestamps row by row instead of vectorized
    # (the commented line above reportedly misbehaved).
    for i in range(res.shape[0]):
        res.loc[i, 'timestamp'] = pd.to_datetime(res.loc[i, 'timestamp'], utc=True)
    res.set_index('timestamp', inplace=True)
    # Sanity check: the bucketed endpoint returns 11 data columns
    # after 'symbol' is dropped.
    assert res.shape[1] == 11
    return res
def h5q_intervals_indexes_gen(db_path, table: str, t_prev_interval_start: pd.Timestamp,
                              t_intervals_start: Iterable[pd.Timestamp],
                              i_range=None) -> Iterator[pd.Index]:
    """
    Yields start and end coordinates (0 based indexes) of hdf5 store table
    index which values are next nearest to intervals start input.

    :param db_path: path to the hdf5 store
    :param table: str (see h5q_interval2coord)
    :param t_prev_interval_start: first index value; each yielded interval
        spans [previous start, current start)
    :param t_intervals_start: successive interval start timestamps
    :param i_range: Sequence, 1st and last element will limit the range of
        returned result
    :return: Iterator[pd.Index] of lower and upper int limits (adjacent
        intervals)
    """
    for t_interval_start in t_intervals_start:
        # load_interval: map the [prev, current) time span to table coordinates.
        start_end = h5q_interval2coord(
            db_path, table,
            [t_prev_interval_start.isoformat(), t_interval_start.isoformat()])
        if len(start_end):
            if i_range is not None:  # skip intervals that not in index range
                start_end = minInterval([start_end], [i_range], start_end[-1])[0]
                if not len(start_end):
                    # NOTE(review): start_end is empty here, so indexing
                    # start_end[0] looks like it would fail — confirm what
                    # minInterval returns in this branch.
                    if 0 < i_range[-1] < start_end[0]:
                        # Past the upper limit: no further intervals can match.
                        raise Ex_nothing_done
                    continue
            yield start_end
        else:  # no data
            # Progress marker for intervals with no rows.
            print('-', end='')
        # Next interval starts where this one ended.
        t_prev_interval_start = t_interval_start
def generate_plot_task(
        plot_task_template: typing.Dict,
        start_time: pd.Timestamp,
        forecast_time: pd.Timedelta,
        data_source: typing.Dict,
) -> typing.Optional[typing.Dict]:
    """
    Generate plot task for GISET.

    Parameters
    ----------
    plot_task_template
        Template dict with a "maplayer" list; not mutated (deep-copied).
    start_time
        Forecast base time.
    forecast_time
        Forecast lead time.
    data_source
        Source description; must carry username, user_id, routing_key and
        test_ID used by the distributor step.

    Returns
    -------
    A two-step task dict (plotter + distributor), or None when the data
    file cannot be located.
    """
    import copy

    data_path = get_data_path(
        data_source,
        start_time=start_time,
        forecast_time=forecast_time,
        data_type="grib2/orig")
    if data_path is None:
        # Fixed: logger args were passed without %s placeholders, so the
        # times were silently dropped from (or broke) the log record.
        logger.warning(
            "WARNING: data is not found: start_time=%s, forecast_time=%s",
            start_time, forecast_time)
        return None

    # Fixed: dict.copy() is shallow — mutating the nested maplayer dicts
    # below would have corrupted the shared template across calls.
    plot_task = copy.deepcopy(plot_task_template)
    for layer in plot_task["maplayer"]:
        layer["file_path"] = str(data_path)

    task = {
        'steps': [
            {
                'step_type': 'plotter',
                'type': 'ploto_gidat.plotter.meteor_draw_plotter',
                'plot_task': plot_task,
            },
            {
                'step_type': 'distributor',
                'type': 'ploto_gidat.distributor.giset_distributor',
                "username": data_source["username"],
                "user_id": data_source["user_id"],
                "routing_key": data_source["routing_key"],
                "test_ID": data_source["test_ID"],
                "meteor_type": plot_task["maplayer"][0]["meteor_type"],
                "start_time": start_time.isoformat(),
                "forecast_time": forecast_time.isoformat(),
            },
        ],
    }
    return task
def time_stamp_to_str(ts: pd.Timestamp) -> str:
    """Convert to ISO string and strip any timezone designator.

    Used to create numpy datetime64 arrays. We cannot create directly
    from pd.Timestamp because Numpy doesn't like parsing timezones anymore.

    Handles a trailing 'Z' as well as '+HH:MM' / '-HH:MM' offsets.
    (Fixed: the original only stripped '+' offsets, so negative-UTC-offset
    timestamps kept their timezone suffix.)
    """
    ts_str: str = ts.isoformat()
    if ts_str.endswith('Z'):
        return ts_str[:-1]
    # A numeric offset always occupies the last six characters: (+|-)HH:MM.
    if len(ts_str) >= 6 and ts_str[-6] in '+-' and ts_str[-3] == ':':
        return ts_str[:-6]
    return ts_str
def format_ts(ts: pd.Timestamp) -> str:
    """Render *ts* as an ISO string with a literal 'Z' suffix.

    NOTE(review): assumes the ISO form ends in a six-character offset
    such as '+00:00' (i.e. a tz-aware timestamp) — confirm callers.
    """
    iso = ts.isoformat()
    return f"{iso[:-6]}Z"  # type: ignore
def to_tree(cls, node: pd.Timestamp, ctx):
    """Serialize timestamp to tree: a dict with its ISO-8601 string."""
    return {"value": node.isoformat()}
def format_ts(ts: pd.Timestamp) -> str:
    """Render *ts* in ISO form with a literal 'Z' appended.

    NOTE(review): assumes a tz-naive timestamp representing UTC —
    a tz-aware input would yield '...+00:00Z'; confirm callers.
    """
    return f"{ts.isoformat()}Z"
def to_yaml_tree(self, obj: pd.Timestamp, tag: str, ctx) -> str:
    """Convert *obj* to its ISO-8601 format string."""
    iso_string = obj.isoformat()
    return iso_string
def _to_isoformat(self, dt: pd.Timestamp): """Required as cryptostore doesnt allow +00:00 for UTC requires Z explicitly. """ return dt.isoformat().replace("+00:00", 'Z')
# print(d) # new_bar = mt5.copy_rates_from_pos('EURUSD', mt5.TIMEFRAME_H1, 0, 1)[0] # # lfg.process_new_bar(new_bar) # # for i, row in enumerate(lfg.data_q): # print(i, len(row), row) lfg_df = pd.DataFrame(lfg.data_q, columns=list(lfg.feature_indices.keys())) lfg_df.to_csv('lfg_df.csv') print(lfg_df) from_dt = Timestamp(rates[0][0], unit='s', tzinfo=timezone.utc) to_dt = Timestamp(rates[-1][0], unit='s', tzinfo=timezone.utc) print(from_dt.isoformat()) print(to_dt.isoformat()) indicators_info = { 'ichimoku': custom_settings['ichimoku'], 'rsi': { 'periods': 14 } } tick_data_filepath = research.download_mt5_data('EURUSD', 'H1', from_dt.isoformat(), to_dt.isoformat()) data_with_indicators = research.add_indicators_to_raw( filepath=tick_data_filepath, indicators_info=indicators_info,