def select_timerange(df, time1, time2, attr='request_time'):
    '''Return the rows of ``df`` whose ``attr`` column falls in [time1, time2].

    Both bounds are parsed with ``utils.str2date`` and are inclusive.

    >>> select_timerange(DataFrame, '2019-1-1', '2019-2-1')
    '''
    from utils import str2date
    # Parse both endpoints, then let DataFrame.query reference them
    # as local variables via the @-prefix.
    time1, time2 = str2date(time1), str2date(time2)
    return df.query('({0}>=@time1) and ({0}<=@time2)'.format(attr))
def get_old_values(self, currency_unit=None, per_currency=True,
                   start_date="2021-04-01", end_date=None,
                   func_name="get_return_rate"):
    """Collect historical values by walking backwards over a date range.

    Iterates the dates in [start_date, end_date] from newest to oldest,
    calling ``getattr(self, func_name)(date=...)`` for dates covered by
    this object; once a date older than ``self.last_update_time`` is hit,
    the remainder is delegated to ``self.last_checkpoint`` (if any) and
    iteration stops.

    :param currency_unit: forwarded to the checkpoint recursion only.
    :param per_currency: also collect (date, value) pairs per security.
    :param start_date/end_date: date strings parsed with str2date.
    :param func_name: name of the per-date accessor method to call.
    :returns: (rates, dates, per_currency_dict) when per_currency is
        True, otherwise (rates, dates).  Empty lists when
        ``self.last_update_time`` is None.
    """
    rates = []
    dates = []
    if per_currency:
        per_currency_dict = defaultdict(list)
    if self.last_update_time is not None:
        end_date = str2date(end_date)
        start_date = str2date(start_date)
        for date in pd.date_range(start=start_date, end=end_date)[::-1]:
            if (date < pd.to_datetime(self.last_update_time).normalize()):
                if self.last_checkpoint is not None:
                    tmp_res = self.last_checkpoint.get_old_values(
                        currency_unit=currency_unit, end_date=date,
                        start_date=start_date, per_currency=per_currency,
                        func_name=func_name)
                    if per_currency:
                        rates_tmp, dates_tmp, per_currency_dict_tmp = tmp_res
                    else:
                        rates_tmp, dates_tmp = tmp_res
                    rates += rates_tmp
                    dates += dates_tmp
                    # BUG FIX: the merge below used to run unconditionally,
                    # raising NameError on per_currency_dict_tmp when
                    # per_currency=False; only merge when it exists.
                    if per_currency:
                        for key, values in per_currency_dict_tmp.items():
                            per_currency_dict[key].extend(values)
                # Everything older than last_update_time belongs to the
                # checkpoint, so stop scanning here.
                break
            else:
                rates.append(getattr(self, func_name)(date=date))
                dates.append(date)
                if per_currency:
                    for name, currency in self.securities.items():
                        per_currency_dict[name].append(
                            (date, getattr(currency, func_name)(date=date)))
    if per_currency:
        return rates, dates, per_currency_dict
    else:
        return rates, dates
def get_user_info(user_id):
    """Return the douban user_id for *user_id*, caching users locally.

    Lookup order: by ``user_id`` column, then by ``user_uid``; each hit
    increments ``access_times``.  On a double miss the profile is fetched
    from the douban API, stored as a new DoubanUser row, and its id
    returned.
    """
    def _touch(**lookup):
        # Bump access_times and return user_id, or None when not in DB.
        try:
            user = DoubanUser.objects.get(**lookup)
        except DoubanUser.DoesNotExist:
            return None
        user.access_times = user.access_times + 1
        user.save()
        return user.user_id

    # The same lookup/bump logic used to be copy-pasted twice; run it
    # once per candidate key instead.
    for lookup in ({'user_id': user_id}, {'user_uid': user_id}):
        found = _touch(**lookup)
        if found is not None:
            return found

    # Both lookups missed: fetch the profile from douban.com.
    res = requests.get(USER_INFO_API.format(user_id))
    user_info = json.loads(res.content)
    new_user = DoubanUser(user_id=user_info['id'],
                          user_uid=user_info['uid'],
                          name=user_info['name'],
                          created_at_douban=utils.str2date(
                              user_info['created']),
                          loc_name=user_info['loc_name'],
                          avatar=user_info['avatar'].split('com/')[1],
                          signature=user_info['signature'],
                          desc=user_info['desc'],
                          created_at=datetime.now(),
                          access_times=1)
    new_user.save()
    return new_user.user_id
def decorated_function(*args, **kwargs):
    """Sanitize a 'date' kwarg before delegating to the wrapped view.

    The value is parsed with ``utils.str2date``; past dates, dates not
    in the selectable 180-day window, and unparseable values are all
    replaced with None before calling ``f``.
    """
    if 'date' in kwargs:
        try:
            d = utils.str2date(kwargs['date'])
            today = datetime.date.today()
            # `not d in ...` -> idiomatic `d not in ...`
            if d < today or d not in utils.get_select_date_list(180):
                d = None
            kwargs['date'] = d
        except Exception:
            # Was a bare `except:`; still best-effort (bad dates become
            # None) but no longer swallows SystemExit/KeyboardInterrupt.
            kwargs['date'] = None
    return f(*args, **kwargs)
def add_date(update, context):
    """Telegram conversation step: read and validate the call date.

    Ignores messages from anyone but the configured user.  A valid date
    is stored in the global ``saving`` list; an invalid one re-prompts
    with a Cancel button and stays in ADD_DATE.  Afterwards either moves
    on to ADD_TIME (if the time argument is still missing) or saves the
    call and returns CALL_DETAILS.
    """
    if update.message.from_user.id != global_user_id:
        return ADD_DATE
    print("ADD CALL DATE")
    print("Requesting user input")
    date_text = update.message.text
    print("Date Text: " + date_text)
    # Parse once; the original called utils.str2date twice for the
    # same text (once to validate, once to store).
    date = utils.str2date(date_text)
    if date != -1:
        # INPUT IS CORRECT
        print("Date is valid: ", date)
        global saving
        saving[1] = date
        print("Date added to global list")
    else:
        # INPUT IS INCORRECT
        global add_call_message
        keyboard = [[InlineKeyboardButton("Cancel", callback_data="cancel")]]
        reply_markup = InlineKeyboardMarkup(keyboard)
        add_call_message.delete()
        add_call_message = update.message.reply_text(text=wrong_date_text,
                                                     parse_mode=ParseMode.HTML,
                                                     reply_markup=reply_markup)
        return ADD_DATE
    if 2 in global_missing_arguments:
        format_input_argument(update, 1, "Time", global_missing_arguments,
                              global_missing_arguments.index(2))
        print("Going to next step")
        return ADD_TIME
    else:
        print("CONVERSATION END - send call details")
        # SAVE INFO IN DATABASE
        save_call_info(update=update, context=context, title=saving[0],
                       date=str(saving[1]), time=str(saving[2]),
                       duration=saving[3])
        return CALL_DETAILS
def parse(cls, content: str) -> dict:
    """Parse the xml content.  Returns a dict of the form:
    {
        'request_id': request id (e.g. "123ABCD")
        'request_time': request time (e.g. "28-09-2015 20:23:49")
        'response_time': response time (e.g. "28-09-2015 20:23:56")
        'proposals': list of flight proposals
    }
    """
    root = ET.fromstring(content)
    request_time = root.attrib.get('RequestTime')
    response_time = root.attrib.get('ResponseTime')
    request_node = root.find('RequestId')
    date_format = DateFormat.DEFAULT.value
    data = {
        # BUG FIX: an ET.Element with no children is falsy, so the old
        # `if request_node` dropped an existing <RequestId>.  The same
        # pitfall applied to `return_node` below; test against None.
        'request_id': request_node.text if request_node is not None else '',
        'request_time': str2date(request_time, date_format=date_format),
        'response_time': str2date(response_time, date_format=date_format),
        'proposals': [],
    }
    for proposal_node in root.findall('*/Flights'):
        onward_node = proposal_node.find('OnwardPricedItinerary')
        return_node = proposal_node.find('ReturnPricedItinerary')
        pricing_node = proposal_node.find('Pricing')
        data['proposals'].append({
            'onward': cls._parse_flights(onward_node),
            'returned': (cls._parse_flights(return_node)
                         if return_node is not None else []),
            'pricing': cls._parse_pricing(pricing_node),
        })
    return data
def add_event(date, time, duration, title, description, group, color):
    """Create a Google Calendar event and return [event_id, htmlLink].

    date/time are raw strings parsed via utils.str2date / utils.str2time,
    duration is a number of seconds, group is stored as the event
    location, color as the Calendar colorId.
    """
    print("CALENDAR: Time type: ", type(time))
    print("CALENDAR: Date type: ", type(date))
    # NOTE(review): assumes str(utils.str2date(...)) yields "%Y/%m/%d"
    # and str(utils.str2time(...)) yields "%H:%M:%S", otherwise the
    # strptime below raises ValueError -- confirm in utils.
    start_time_string = str(utils.str2date(date)) + \
        " " + str(utils.str2time(time))
    start_time = datetime.strptime(start_time_string, "%Y/%m/%d %H:%M:%S")
    print("CALENDAR: Start time string: ", start_time_string)
    print(type(start_time))
    print("CALENDAR: Start time: " + str(start_time))
    # Get end time calculating with the duration
    print("CALENDAR: Duration string: ", duration)
    duration = timedelta(seconds=int(duration))
    print("CALENDAR: Duration: " + str(duration))
    print("CALENDAR: type: ", type(duration))
    end_time = start_time + duration
    print("CALENDAR: End time: " + str(end_time))
    print(type(end_time))
    # NOTE(review): the Calendar API documents timeZone as an IANA name
    # (e.g. "Etc/UTC"); a raw "+00:00" offset may be rejected -- verify.
    GMT_OFF = '+00:00'
    event = {
        'summary': title,
        'location': group,
        'start': {
            'dateTime': start_time.strftime("%Y-%m-%dT%H:%M:%S"),
            'timeZone': GMT_OFF,
        },
        'end': {
            'dateTime': end_time.strftime("%Y-%m-%dT%H:%M:%S"),
            'timeZone': GMT_OFF,
        },
        'description': description,
        'colorId': str(color),
    }
    # Target calendar comes from the environment; falls back to the
    # authenticated user's primary calendar.
    calendar_id = os.environ.get(key='CALENDAR_ID', default='primary')
    saved_event = service.events().insert(calendarId=calendar_id,
                                          body=event,
                                          sendNotifications=True).execute()
    url = saved_event.get('htmlLink')
    event_id = saved_event['id']
    return [event_id, url]
def process_csv(filepath, output):
    """Convert a ;-delimited declarations CSV into the AVANTAGES format.

    Reads *filepath* (``;``-separated, ``"``-quoted), cleans every cell
    with ``clean_text`` and writes one normalized row per input row to
    *output*, preceded by an AVANTAGES_HEADERS header line.
    """
    if sys.version_info[0] < 3:
        infile = open(filepath, 'rb')
    else:
        infile = open(filepath, 'r', newline='', encoding='utf8')
    with infile as csvfile:
        reader = csv.DictReader(csvfile, delimiter=';', quotechar='"')
        with open(output, "w") as outputfile:
            # BUG FIX: rows used to be emitted with a bare ",".join(...),
            # which produces malformed CSV whenever a field contains a
            # comma, quote or newline.  csv.writer quotes as needed;
            # lineterminator="\n" keeps the original line endings.
            writer = csv.writer(outputfile, lineterminator="\n")
            writer.writerow(AVANTAGES_HEADERS)
            for row in reader:
                cleaned_row = dict((k, clean_text(v)) for k, v in row.items())
                writer.writerow((build_origin(cleaned_row),
                                 cleaned_row["denomination_sociale"],
                                 build_name(cleaned_row),
                                 build_address(cleaned_row),
                                 build_qualification(cleaned_row),
                                 build_rpps(cleaned_row),
                                 cleaned_row["avant_montant_ttc"],
                                 str2date(cleaned_row["avant_date_signature"]),
                                 build_nature(cleaned_row),
                                 build_eta(cleaned_row),
                                 cleaned_row["benef_codepostal"]))
def process_csv(filepath, output):
    """Convert a ;-delimited declarations CSV into the AVANTAGES format.

    Each input row is cleaned with ``clean_text`` and rewritten through
    the build_* helpers; the output starts with an AVANTAGES_HEADERS
    header line.
    """
    if sys.version_info[0] < 3:
        infile = open(filepath, 'rb')
    else:
        infile = open(filepath, 'r', newline='', encoding='utf8')
    with infile as csvfile:
        reader = csv.DictReader(csvfile, delimiter=';', quotechar='"')
        with open(output, "w") as outputfile:
            # BUG FIX: the naive ",".join(...) used before breaks the
            # output whenever a field itself contains a comma or quote.
            # csv.writer escapes properly; lineterminator="\n" preserves
            # the original "\n" row endings.
            writer = csv.writer(outputfile, lineterminator="\n")
            writer.writerow(AVANTAGES_HEADERS)
            for row in reader:
                cleaned_row = dict((k, clean_text(v)) for k, v in row.items())
                fields = (build_origin(cleaned_row),
                          cleaned_row["denomination_sociale"],
                          build_name(cleaned_row),
                          build_address(cleaned_row),
                          build_qualification(cleaned_row),
                          build_rpps(cleaned_row),
                          cleaned_row["avant_montant_ttc"],
                          str2date(cleaned_row["avant_date_signature"]),
                          build_nature(cleaned_row),
                          build_eta(cleaned_row),
                          cleaned_row["benef_codepostal"])
                writer.writerow(fields)
def read_time_series_01():
    """ This is a test data analysis that will be improved later on

    Reads every CSSE COVID-19 time-series CSV, re-arranges the
    per-location/per-date values into one long-format DataFrame and
    stores dates/date_indices/data on the ``ws`` workspace object.
    """
    # Folder where the time series are stored
    folder = '../data/csse_covid_19_data/csse_covid_19_time_series/'
    # List of time series
    files = [
        os.path.join(folder, item) for item in os.listdir(folder)
        if '.csv' in item
    ]
    # Dictionary for the data
    data = {}
    # New data with re-arranged information
    for dr in files:
        dr_name = dr.split('/')[-1]
        # Name of the variable (confirmed, deaths or recovered)
        varname = dr_name.split('.')[0].split('-')[-1].lower()
        with open(dr, 'r') as f:
            # NOTE(review): read_csv is given the path, not the handle;
            # the `with open(...)` is redundant here.
            dataframe = pd.read_csv(dr)
        # dataframe.set_index('Country/Region', inplace=True)
        data[varname] = dataframe
    # Find the date key list.  NOTE(review): this and everything below
    # works off `dataframe`, i.e. the LAST file read by the loop above;
    # the files are assumed to share identical columns/row order.
    data_columns = dataframe.columns
    dates = OrderedDict()
    date_indices = OrderedDict()
    previous_keys = []
    for col_name in data_columns:
        # Columns that parse as m/d/y dates are data columns; the rest
        # (Country/Region, Lat, ...) are silently skipped.
        try:
            date = utl.str2date(col_name, '%m/%d/%y')
        except Exception as e:
            pass
        else:
            date_key = date.strftime('%d/%m/%Y')
            dates[date_key] = date
            previous_keys.append(col_name)
            date_indices[date_key] = len(dates) - 1
    # Number of dates
    ndates = len(dates)
    # Store dates in ws
    ws.dates = dates
    # Dictionary with the provinces for each country
    provinces = OrderedDict()
    nlocations = dataframe.shape[0]
    for i in range(nlocations):
        dfi = dataframe.loc[i]
        try:
            provinces[dfi['Country/Region']].append(dfi['Province/State'])
        except KeyError:
            provinces[dfi['Country/Region']] = [dfi['Province/State']]
    # Create new dataset that rearranges the data:
    # one row per (location, date) pair.
    columns = [
        'Country/Region', 'Province/State', 'Lat', 'Long', 'Date Key',
        'Date Value', 'confirmed', 'deaths', 'recovered'
    ]
    ds_new = pd.DataFrame(index=np.arange(nlocations * ndates),
                          columns=columns)
    # Set up the ds_new from the information in the last dataframe
    # j = 0
    end = -1
    for i in range(nlocations):
        dfi = dataframe.loc[i]
        # Each location occupies a contiguous block of ndates rows.
        start = end + 1
        end = start - 1 + ndates
        ds_new.loc[start:end, 'Country/Region'] = dfi['Country/Region']
        ds_new.loc[start:end, 'Province/State'] = dfi['Province/State']
        ds_new.loc[start:end, 'Lat'] = dfi['Lat']
        ds_new.loc[start:end, 'Long'] = dfi['Long']
        ds_new.loc[start:end, 'Date Key'] = list(dates.keys())
        ds_new.loc[start:end, 'Date Value'] = list(dates.values())
        for variable in ['confirmed', 'recovered', 'deaths']:
            ds_new.loc[
                start:end,
                variable] = data[variable].loc[i][previous_keys].tolist()
    # Show the time series for the whole world
    ws.dates_keys = list(dates.keys())
    ws.date_indices = date_indices
    ws.dates = dates
    # Save the dataframe in the workspace
    ws.data = ds_new
def _parse_flights(cls, node: ET.Element) -> List[dict]:
    """Parse the contents of a Flights tag.

    Returns a list of dicts of the form:
    [
        {
            'carrier_id': carrier id,
            'carrier': carrier name,
            'flight_number': flight number,
            'source': departure airport,
            'destination': arrival airport,
            'departure_timestamp': departure time,
            'arrival_timestamp': arrival time,
            'trip_class': travel class,
            'number_of_stops': number of stops,
            'fare_basis': FareBasis code,
            'warning_text': warning,
            'ticket_type': ticket type,
        },
        ...
    ]
    """
    ts_format = DateFormat.TIMESTAMP.value
    parsed = []
    for flight in node.iter('Flight'):
        carrier_node = flight.find('Carrier')
        departure_node = flight.find('DepartureTimeStamp')
        # Some xml files contain a typo in this tag name.
        if departure_node is None:
            departure_node = flight.find('DeartureTimeStamp')
        parsed.append({
            'carrier_id': carrier_node.attrib.get('id'),
            'carrier': carrier_node.text,
            'flight_number': str2int(flight.find('FlightNumber').text),
            'source': flight.find('Source').text,
            'destination': flight.find('Destination').text,
            'departure_timestamp': str2date(departure_node.text,
                                            date_format=ts_format),
            'arrival_timestamp': str2date(flight.find('ArrivalTimeStamp').text,
                                          date_format=ts_format),
            'trip_class': flight.find('Class').text,
            'number_of_stops': str2int(flight.find('NumberOfStops').text),
            'fare_basis': flight.find('FareBasis').text.strip(),
            'warning_text': flight.find('WarningText').text,
            'ticket_type': flight.find('TicketType').text,
        })
    return parsed
def fit_with_forward_moneyness(dt, dates, money, vol, weight=True,
                               weight_cut=0.4, calendar_buffer=2.0e-4,
                               vol_scale=1.0, max_iter=10000):
    """Calibrate an SSVI volatility surface on forward-moneyness quotes.

    :param dt: valuation date (str, date or datetime).
    :param dates: expiry dates (strings are parsed with utils.str2date).
    :param money: moneyness grid shared by all expiries.
    :param vol: vol quotes, scaled by vol_scale.
    :param weight/weight_cut/calendar_buffer: fit weighting controls.
    :param max_iter: SLSQP iteration cap.
    :returns: a table [[None, rho, theta, eta, gamma], [date, *params], ...]
        if the user confirms the message box (answer == 6 / "Yes"),
        otherwise None.
    """
    # Idiom fix: isinstance() instead of `type(x) == T` comparisons.
    if isinstance(dates[0], str):
        dates = list(map(utils.str2date, dates))
    if isinstance(dt, datetime):
        dt = dt.date()
    if isinstance(dt, str):
        dt = utils.str2date(dt)
    vol = np.array(vol) * vol_scale
    # Same moneyness grid for every expiry slice.
    mult_money = [money for i in range(len(dates))]
    fitter = [
        ssvi.Ssvi([-0.3, 0.01], phi.QuotientPhi([0.4, 0.4]))
        for i in range(len(dates))
    ]
    surface = fitting.SurfaceFit(dt, dates, mult_money, vol, fitter,
                                 weight=weight, weight_cut=weight_cut,
                                 calendar_buffer=calendar_buffer)
    surface.calibrate(maxiter=max_iter, verbose=True, method='SLSQP')
    surface.visualize()
    # NOTE(review): the message below advertises a default
    # calendar_buffer of 0.0003 while the signature default is 2.0e-4 --
    # one of the two should be corrected.
    msg = "데이터베이스에 입력 하시겠습니까?\n\n"
    msg += "(원하지 않으실 경우 No를 누르시고,\n"
    msg += "weight_cut | calendar_buffer 값등을 조정해서 재 실행 해보시기 바랍니다. \n"
    msg += "default 값은 weight_cut=0.4 (=40%), calendar_buffer=0.0003 입니다. \n"
    msg += "외 가격의 비중을 줄이려면 weight_cut값을 늘리고 (e.g., 0.7), \n"
    msg += "calendar arbitrage 발생시 \n"
    msg += "calendar_buffer를 낮게 설정해 주시면 됩니다 (0.00015 미만은 권장되지 않음). \n"
    msg += "[예시: fit_with_forward_moneyness(dt, dates, money, vol, TRUE, 0.7, 0.0002)] \n"
    msg += "[다섯번째 변수가, FLASE일 경우 가중치 부여 없음] \n"
    msg += "8, 9 번째의 변수 값은 볼 스케일 값과 \n"
    msg += "(e.g., 데이터가 25.432등으로 들어올경우 0.01로 설정) \n"
    msg += "최적화 시 max iteration값 입니다 \n"
    msg += "(e.g., 결과가 만족스럽지 않다면 20000으로 설정). \n\n"
    msg += "주의! 엑셀 계산 옵션이 자동이라면 데이터 값 변경시 자동으로 실행 됩니다.\n"
    m_res = utils.Mbox("", msg, 4)
    if m_res == 6:  # 6 == "Yes" in the message box API
        # Header row of Greek-letter parameter names.
        rho = ud.lookup("GREEK SMALL LETTER RHO")
        theta = ud.lookup("GREEK SMALL LETTER THETA")
        eta = ud.lookup("GREEK SMALL LETTER ETA")
        gamma = ud.lookup("GREEK SMALL LETTER GAMMA")
        res = [[None, rho, theta, eta, gamma]]
        _params = surface.params
        for i, p in enumerate(_params):
            row = [dates[i]] + list(p)
            res = res + [row]
        return res
    else:
        return
def __doc_rvp(request, id, mode):
    '''
    Read/View/Print a stored Doc through its template module.
    :param mode:enum - mode (0: read, 1: html, 2: pdf)

    Dispatch order: a template-module override function wins; otherwise
    the doc's JSON payload is loaded, dates converted, and rendered
    either through a template-supplied page or the generic auto_* pages.
    '''
    __log_request(request)
    #print "__doc_rvp"
    item = models.Doc.objects.get(pk=id)
    uuid = item.type
    if (uuid not in moduledict):
        return 'Template not found'
    # else:
    tpl = moduledict[uuid]
    # If the template module defines its own READ/VIEW/PRINT handler,
    # delegate entirely to it.
    self_func = [K_T_F_READ, K_T_F_VIEW, K_T_F_PRINT][mode]
    if (self_func in tpl[K_V_MODULE].__dict__):
        return tpl[K_V_MODULE].__dict__[self_func](request, id)  # ???
    # else:
    data = json.loads(item.data)
    # auto date conversion: rehydrate the date fields listed by the
    # template (str2date mutates `data` in place here).
    if (K_V_DATES in tpl):
        for k in tpl[K_V_DATES]:
            utils.str2date(data, k)
    __try_to_call(tpl, K_T_F_POST_LOAD, data)
    # split 1: create data dict
    template_key = [K_T_T_READ, K_T_T_VIEW, K_T_T_PRINT][mode]
    if ((K_T_T in tpl[K_V_MODULE].DATA)
            and (template_key in tpl[K_V_MODULE].DATA[K_T_T])):
        # Template module ships its own page: pass raw data through.
        template = tpl[K_V_MODULE].DATA[K_T_T][template_key]
        context_dict = { 'data': data }
    else:
        # auto_*: generic rendering; data must be transformed first.
        #print "auto"
        template = ['auto_read.html', 'auto_view.html',
                    'auto_print.html'][mode]
        # transform data:
        # 1. single values:
        #    { key: value, } => [{ k: key, v: value, l: label, h: help }, ]
        datalist = list()
        for k, v in tpl[K_V_MODULE].DATA[K_T_FIELD].iteritems():
            datalist.append({
                'k': k,
                'v': data[k],
                'l': v[K_T_FIELD_A]['label'],
                'h': v[K_T_FIELD_A].get('help_text', None),
            })
        # 2. multivalues:
        #    { key: [{k: value,},],} => [{l: label, h: help, t: header,
        #    v: [[value,],],}]
        datasets = list()
        if (K_T_S in tpl[K_V_MODULE].DATA):
            for k, v in tpl[K_V_MODULE].DATA[K_T_S].iteritems():
                header = list()
                for i, j in v[K_T_FIELD_T].iteritems():
                    header.append(j[K_T_FIELD_A]['label'])
                dataset = list()  # all lines
                if k in data:  # skip empty multivalues
                    for rec in data[k]:  # one line in data - dict
                        dataset.append(rec.values())
                datasets.append({
                    't': header,
                    'v': dataset,
                    'l': v[K_T_FIELD_A]['label'],
                    'h': v[K_T_FIELD_A].get('help_text', None),
                })
        context_dict = {
            'pk': item.pk,
            'name': item.name,
            'type': tpl,
            'datalist': datalist,
            'datasets': datasets,
        }
    # split 2: call render
    if (mode < 2):  # READ, VIEW -> html
        __try_to_call(tpl, (K_T_F_PRE_READ, K_T_F_PRE_VIEW)[mode], data)
        #return render_to_response(template,
        #    context_instance=RequestContext(request, context_dict))
        return converter.html2html(request, context_dict, template)
    else:  # PRINT -> pdf
        __try_to_call(tpl, K_T_F_PRE_PRINT, data)
        return __doc_print(request, context_dict, template)
def __doc_acu(request, id, mode):
    '''
    Anon/Create/Update a Doc through its template module.
    :param id:int - uuid (anon/create) or doc id (update)
    :param mode:int (0: anon (print), 1: create, 2: update)

    POST with a valid form either renders view/print output (mode
    forced to 0 by the _action field) or saves the Doc; GET (and an
    invalid POST, which falls through) renders the form page.
    '''
    __log_request(request)
    if (mode == 2):
        item = models.Doc.objects.get(pk=id)  # Update only
        uuid = item.type
        if (uuid not in moduledict):
            return 'Template not found'
    else:
        uuid = id
    # A "print"/"view" _action on Create/Update is handled like Anon.
    if (request.method == 'POST') and (mode > 0) and \
            (request.POST.get('_action', None) in set(('print', 'view'))):
        mode = 0
    tpl = moduledict[uuid]
    # 1. check <pkg>.ANON/CREATE/UPDATE - template-module override wins
    self_func = [K_T_F_ANON, K_T_F_ADD, K_T_F_EDIT][mode]
    if (self_func in tpl[K_V_MODULE].__dict__):
        return tpl[K_V_MODULE].__dict__[self_func](request, id)
    # else:
    # 2. get FORM and FORMSETS
    formclass = tpl[K_T_FORM]
    formsetsclass = tpl[K_T_FORMSETS]  # SortedDict of dicts
    if request.method == 'POST':
        #pprint.pprint(request.POST['_action'])
        form = formclass(request.POST)
        if (mode == 0):  # ANON, Create/Update -> view/print: no doc name
            del form.fields[K_T_F_NAME]
        formlist = SortedDict()
        isvalid = form.is_valid()
        for k, formset in formsetsclass.iteritems():
            formlist[k] = formset(request.POST, prefix=k)
            isvalid = isvalid and formlist[k].is_valid()
        if isvalid:
            data = form.cleaned_data
            # inject formsets into data
            for k, v in formlist.iteritems():
                dataset = list()
                for i in v.cleaned_data:  # list of dicts
                    if i:  # reject empty dicts
                        dataset.append(i)
                if dataset:  # reject empty lists
                    data[k] = dataset  # inject datasets into data
            __try_to_call(tpl, K_T_F_POST_FORM, data)
            # split
            if (mode == 0):  # ANON > PRINT, C/U -> V/P
                if ((K_T_T in tpl[K_V_MODULE].DATA)
                        and (K_T_T_PRINT in tpl[K_V_MODULE].DATA[K_T_T])):
                    context_dict = {'data': data}
                    template = tpl[K_V_MODULE].DATA[K_T_T][K_T_T_PRINT]
                    if (request.POST.get('_action', None) == u'view'):
                        __try_to_call(tpl, K_T_F_PRE_VIEW, data)
                        # Create/Update -> View
                        return converter.html2html(request, context_dict,
                                                   template)
                    else:
                        # Anon/Create/Update -> PRINT
                        __try_to_call(tpl, K_T_F_PRE_PRINT, data)
                        return __doc_print(request, context_dict, template)
                else:
                    # tmp dummy: no print template defined
                    return redirect('dox.views.index')
            else:
                # CREATE/UPDATE -> SAVE
                if (mode == 1):  # CREATE
                    name = data[K_T_F_NAME]
                else:
                    item.name = data[K_T_F_NAME]
                del data[K_T_F_NAME]
                # convert dates back to strings for JSON storage
                if (K_V_DATES in tpl):
                    for k in tpl[K_V_DATES]:
                        utils.date2str(data, k)
                __try_to_call(tpl, K_T_F_PRE_SAVE, data)
                if (mode == 1):  # CREATE
                    # user, type, name, data
                    item = models.Doc(user=request.user, type=uuid,
                                      name=name,
                                      data=json.dumps(data, indent=1,
                                                      ensure_ascii=False))
                else:
                    item.data = json.dumps(data, indent=1,
                                           ensure_ascii=False)
                item.save()
                return redirect(doc_r, id=item.pk)
        # NOTE(review): invalid POST falls through to the form render
        # below with the bound (error-carrying) form/formlist.
    else:  # GET
        if (mode < 2):  # ANON, CREATE: empty forms
            form = formclass()
            if (mode == 0):  # ANON: no doc name field
                del form.fields[K_T_F_NAME]
            formlist = SortedDict()
            for k, formset in formsetsclass.iteritems():
                formlist[k] = formset(prefix=k)
        else:  # UPDATE: forms pre-filled from the stored JSON
            data = json.loads(item.data)
            data[K_T_F_NAME] = item.name  # inject name
            # restore dates after loading
            if (K_V_DATES in tpl):
                for k in tpl[K_V_DATES]:
                    utils.str2date(data, k)
            #pprint.pprint(data)
            __try_to_call(tpl, K_T_F_POST_LOAD, data)
            #pprint.pprint(data)
            __try_to_call(tpl, K_T_F_PRE_FORM, data)
            # split form and formsets
            # 1. eject formsets: flatten each stored list of dicts into
            #    the prefixed flat-key format Django formsets expect
            formlist = SortedDict([])
            for pfx, formset in formsetsclass.iteritems():
                # formsetsclass == SortedDict {name: FormSetClass}
                formset_data = dict()
                for i, l in enumerate(data.get(pfx, list())):
                    # l:str - formset name; l:
                    for k, v in l.iteritems():
                        formset_data[pfx+'-'+str(i)+'-'+k] = v
                formset_data.update({
                    pfx+'-TOTAL_FORMS': len(data[pfx]) if pfx in data else 1,
                    pfx+'-INITIAL_FORMS': u'0',
                    pfx+'-MAX_NUM_FORMS': u'',
                })
                formlist[pfx] = formset(formset_data, prefix=pfx)
                if (pfx in data):
                    del data[pfx]
            # 2. build the main form from the remaining single values
            form = formclass(data)
    return render_to_response(
        tpl[K_V_MODULE].DATA[K_T_T][K_T_T_FORM]
        if ((K_T_T in tpl[K_V_MODULE].DATA)
            and (K_T_T_FORM in tpl[K_V_MODULE].DATA[K_T_T]))
        else 'auto_form.html',
        context_instance=RequestContext(request, {
            'name': tpl[K_V_MODULE].DATA[K_T_NAME],
            'comments': tpl[K_V_MODULE].DATA[K_T_COMMENTS],
            'legend': tpl[K_V_MODULE].DATA.get(K_T_LEGEND, ''),
            'uuid': tpl[K_V_MODULE].DATA[K_T_UUID],
            'form': form,
            'formlist': formlist,
            'example': tpl[K_V_MODULE].DATA.get('example', None),
        }))