def instance_detail_admin(request):
    """Return the detail rows for one instance as a JSON HttpResponse.

    POST params: 'instance', 'filter_args_dict' (JSON-encoded dict of extra
    ORM filters), and 'start'/'end' as proleptic-Gregorian ordinal days.
    The response body is {'list': [...], 'account': <account_id>} where each
    row dict carries its date as an ordinal for easy client round-tripping.
    """
    user = str(request.user)
    account = Account.objects.filter(login=user)[0].account_id
    instance = request.POST['instance']
    filter_args_dict = json.loads(request.POST['filter_args_dict'])
    start = datetime.fromordinal(int(request.POST['start']))
    end = datetime.fromordinal(int(request.POST['end']))
    results = ReportData.objects.filter(instance_identifier=instance,
                                        date__gt=start,
                                        date__lt=end,
                                        **filter_args_dict)
    rtn = []
    try:
        for rd in results:
            copy = rd.to_dict()
            # Ship dates as ordinals so the client can round-trip them.
            copy['date'] = rd.date.toordinal()
            rtn.append(copy)
    except Exception:
        # BUG FIX: was `_LOG.error(sys.exc_info())[0]`, which subscripted the
        # logger's return value (None) and raised its own TypeError instead
        # of logging the exception type/value.
        _LOG.error(sys.exc_info()[0])
        _LOG.error(sys.exc_info()[1])
    response_data = {}
    response_data['list'] = rtn
    response_data['account'] = account
    try:
        response = HttpResponse(simplejson.dumps(response_data))
    except Exception:
        _LOG.error(sys.exc_info()[0])
        _LOG.error(sys.exc_info()[1])
        raise
    return response
def run(self):
    """Fetch quotes for this worker's stock symbol and persist any new rows.

    Rows whose date is already stored are skipped; the commit runs under the
    shared lock so concurrent workers do not interleave commits.
    """
    logger.info(u'id=%s' % self.id)
    # Dates already stored for this symbol; used below to skip duplicates.
    dates = get_his_date_by_id(self.id)
    if self.update:
        # Incremental mode: only fetch the last 30 days.
        now_date = datetime.now().date()
        start_date = now_date - timedelta(days=30)
        datas = get_stock_data(self.id+'.'+self.location, start_date, now_date)
    else:
        # Full mode: from the last stored date (or 1995-01-01) up to today.
        start_date = (stockinfo.get_start_time_from_db(self.id) or datetime(1995, 1, 1))
        datas = get_stock_data(self.id+'.'+self.location, start_date, datetime.now().date())
    # import pdb;pdb.set_trace()
    if not datas:
        return
    for data in datas:
        # data[0] appears to be an ordinal day number — presumably what the
        # feed returns; TODO confirm against get_stock_data.
        if datetime.fromordinal(int(data[0])) in dates:
            continue
        logger.debug(u'id=%s date=%s' % (self.id, datetime.fromordinal(int(data[0]))))
        table = format_stock_data(self.id, self.stockname, data)
        self.session.add(table)
    # NOTE(review): the lock is not released in a finally block, so an
    # exception in commit() would leave it held — confirm intent.
    self.lock.acquire()
    self.session.commit()
    self.lock.release()
    self.session.close()
def detailed_report_admin(request):
    """Summarize, per instance, how many ReportData rows fall in a window.

    POST params: 'filter_args_dict' (JSON-encoded dict of extra ORM filters)
    and 'start'/'end' as ordinal day numbers.  Returns a JSON HttpResponse
    with the per-instance counts, the echoed window and the filter used.
    """
    login_name = str(request.user)
    account = Account.objects.filter(login=login_name)[0].account_id
    extra_filters = json.loads(request.POST['filter_args_dict'])
    window_start = datetime.fromordinal(int(request.POST['start']))
    window_end = datetime.fromordinal(int(request.POST['end']))

    # One entry per distinct instance seen inside the window.
    instance_ids = ReportData.objects.filter(
        date__gt=window_start,
        date__lt=window_end,
        **extra_filters).distinct('instance_identifier')

    results = [
        {'instance': instance_id,
         'count': ReportData.objects.filter(
             instance_identifier=instance_id,
             date__gt=window_start,
             date__lt=window_end,
             **extra_filters).count()}
        for instance_id in instance_ids
    ]

    response_data = {
        'list': results,
        'start': window_start.toordinal(),
        'end': window_end.toordinal(),
        'this_filter': json.dumps(extra_filters),
    }
    try:
        response = HttpResponse(simplejson.dumps(response_data))
    except:
        _LOG.error(sys.exc_info()[0])
        _LOG.error(sys.exc_info()[1])
        raise
    return response
def to_datetime(var):
    """Coerce strings, Qt date/time objects, numbers and date/time objects
    to a datetime; returns None for unsupported types.

    Numeric values are fractional ordinals: the integer part is the
    proleptic-Gregorian ordinal day, the fraction is the time of day.
    """
    if isinstance(var, str):
        try:
            var = float(var)
        except ValueError:
            # Not numeric: parse as a timestamp string.
            return datetime.strptime(var, '%Y-%m-%d %H:%M:%S')
    if type(var) == QDate:
        var = var.toPyDate()
    if type(var) == QTime:
        var = var.toPyTime()
    if isinstance(var, QDateTime):
        return var.toPyDateTime()
    elif isinstance(var, float):
        # Ordinal 0 is invalid; clamp to day 1 (the original's
        # `datetime()` fallback branch was unreachable and would have
        # raised TypeError anyway).
        d = int(var) or 1
        dt = datetime.fromordinal(d)
        # Successively scale the fraction into hour, minute, second.
        t = []
        frac = var
        for k in (24, 60, 60):
            t.append(int(frac * k) % k)
            frac *= k
        return dt.replace(hour=t[0], minute=t[1], second=t[2])
    elif isinstance(var, int):
        if var:
            return datetime.fromordinal(var)
    elif type(var) in (date, datetime):
        # BUG FIX: was timetuple()[:7], which passed tm_wday (0-6) as the
        # microsecond argument; only the first six fields are wanted.
        return datetime(*var.timetuple()[:6])
    elif isinstance(var, time):
        return datetime(1, 1, 1, var.hour, var.minute, var.second)
    return None
def fromUSec(self, usec2):
    """Convert a microsecond count (measured from the ordinal epoch) to a
    datetime.

    Returns None when the value is less than one full day, because ordinal
    day 0 is not a valid date.
    """
    usec = int(usec2)
    # BUG FIX: use floor division via divmod; under Python 3 the original
    # `/` produced floats, which datetime.fromordinal() rejects.
    days, rem = divmod(usec, 86400 * 1000000)
    seconds, misec = divmod(rem, 1000000)
    if days >= 1:
        # (The original also compared against fromordinal(1), which is
        # always satisfied once days >= 1.)
        return datetime.fromordinal(days) + timedelta(seconds=seconds,
                                                      microseconds=misec)
    return None
def make_daily_breakpoint(df,threshold=0.0,interp=False): '''interpolate the series from breakpoint to daily avg threshold = minimum significant opening ''' #--apply threshold dbkey = df.columns.tolist()[0] df[df[dbkey]<threshold] = threshold #--group by date #grouped = df.groupby(lambda x: x.toordinal()) #groups = grouped.groups #ord_days = groups.keys() dt = np.array(df.index.tolist()) data = np.array(df[dbkey].values.tolist()) data = np.vstack((dt,data)).transpose() #--strip out the nans data = data[~np.isnan(data[:,1].astype(np.float64))] ord_days = [] for d in data[:,0]: ord_days.append(d.toordinal()) ord_days = np.array(ord_days) #--process each day #last_entry = df[dbkey][0] try: last_entry = data[-1,1] except: return pandas.DataFrame() rec = [] for day in range(ord_days[0],ord_days[-1]): #--if this day has some entries, calc the time-weighted average if day in ord_days: entries = data[np.where(ord_days==day),:][0] avg_day_value = calc_time_avg(last_entry,entries) last_entry = entries[-1,1] #--otherwise, use the last entry for the day else: avg_day_value = last_entry rec.append([datetime.fromordinal(day)+timedelta(hours=12),avg_day_value]) #--create a new pandas dataframe of daily average values rec = np.array(rec) df = pandas.DataFrame({dbkey:rec[:,1].astype(np.float64)},index=rec[:,0]) start = datetime.fromordinal(ord_days[0]) + timedelta(hours=12) end = datetime.fromordinal(ord_days[-1]) + timedelta(hours=12) df_range = pandas.DataFrame({dbkey:np.NaN},index=pandas.date_range(start,end)) df_range = df_range.combine_first(df) if interp: df_range[dbkey] = df_range[dbkey].interpolate() return df_range.dropna()
def main():
    """End-to-end warehouse-simulation demo (Python 2 script).

    Builds the schema, generates SKUs/orders/items, lays out a two-by-one
    warehouse, assigns slots, builds the travel graph, and solves a TSP
    pick route per order.
    """
    # Warehouse geometry parameters (units per the whousedesign module).
    warehouse_width = 200
    warehouse_lengh = 400
    node_distance = 10
    center_aisle_width = 20
    bottom_aisle_width = 15
    aisle_width = 10
    aisle_angle_degree = 60
    slots_per_node = 10
    #starting the database
    tlbx.run_sql('./tables.sql');
    #Creating the SKU list and uploading it
    sku_list = generator.sku(5000)
    tlbx.upload_sku_to_database(sku_list)
    #Creating the order list and uploading it
    # Hard-coded ordinals span roughly a one-year window (circa 2014-2015).
    start_date = datetime.fromordinal(735500)
    end_date = datetime.fromordinal(735850)
    avg_order_per_day = 10
    pick_date_deviation = 3
    order_list = generator.order_normal_datebound(avg_order_per_day,3,start_date,end_date,pick_date_deviation)
    tlbx.upload_order_to_database(order_list)
    #Creating the item list and uploading it
    sku_id_list = [sku[0] for sku in sku_list]
    order_id_list = [order[0] for order in order_list]
    item_list = generator.line_item_fixn (5,3,sku_id_list,order_id_list)
    tlbx.upload_item_to_database(item_list)
    #creating a 2by1 warehouse with the specified parameters and getting the matrices of nodes, arcs, and slots
    arc_list,node_list,slot_list = whousedesign.twobyone(warehouse_width,warehouse_lengh,node_distance,center_aisle_width,bottom_aisle_width,aisle_width,aisle_angle_degree,slots_per_node)
    tlbx.upload_whouse_to_database(arc_list,node_list,slot_list)
    #whousedesign.draw_whouse(arc_list,node_list,slot_list) #Drawing the warehouse based on the nodes, arcs, and slots table
    #Updating the Slot list based on the Location Assigning Problem (LocAP)
    slot_list = locap.random(slot_list,sku_id_list)
    tlbx.upload_slot_to_database(slot_list)
    #Creating the Graph and visualizing it
    G = graph.nx_create(arc_list,node_list,"Undirected")
    graph.nx_draw_graph(G)
    #Creating the SKU pick list and later transform the list into the pick node list
    #sku_pick_list = pickseq.order_in_one_all(item_list,order_list)
    sku_pick_list = pickseq.order_in_one_all_from_db()
    #change to visitation
    depot_node_id = 1
    # Solve one routing problem per pick list.
    for sku_pick in sku_pick_list:
        #route = tsp.google_sku(sku_pick,slot_list,G,depot_node_id)
        route,obj_value = tsp.gurobi_sku(sku_pick,slot_list,G,depot_node_id)
        print route
        print obj_value
    print "All Done!"
def to_datetime(date_in):
    """Convert strings, numpy string scalars, or MATLAB datenums to datetime.

    String inputs are parsed as MM/DD/YYYY.  Numeric inputs are MATLAB
    datenums (days since year 0, with a fractional time-of-day part), hence
    the 366-day epoch shift.  Non-positive numbers map to datetime.min's day.
    """
    if isinstance(date_in, str):
        return datetime.strptime(date_in, "%m/%d/%Y")
    # 0-d numpy string arrays also hold a date string.  BUG FIX: guard the
    # .dtype access so plain floats/ints no longer raise AttributeError, and
    # use np.bytes_/np.str_ (np.string_/np.unicode_ were removed in numpy 2).
    if hasattr(date_in, 'dtype') and date_in.dtype.type in (np.bytes_, np.str_):
        return datetime.strptime(date_in[()], "%m/%d/%Y")
    if date_in > 0:
        return (datetime.fromordinal(int(date_in))
                + timedelta(days=date_in % 1)
                - timedelta(days=366))
    return datetime.fromordinal(1)
def ordinal2datetime(ord_):
    """Converts an ordinal (or an iterable of ordinals) to datetime objects.

    Scalar input returns a single datetime; iterable input returns a numpy
    object array of datetimes.

    >>> ordinal2datetime(744730)
    datetime.datetime(2040, 1, 1, 0, 0)
    """
    try:
        return np.array([datetime.fromordinal(o) for o in ord_])
    except TypeError:
        # Scalar input is not iterable.
        return datetime.fromordinal(ord_)
def instance_report(request):
    """Render the detail page for one instance within a date window.

    GET params: 'instance', 'filter_args_dict' (JSON-encoded dict of extra
    ORM filters), and 'start'/'end' as ordinal day numbers.
    """
    account = Account.objects.filter(login=str(request.user))[0].account_id
    instance = request.GET['instance']
    extra_filters = json.loads(request.GET['filter_args_dict'])
    window_start = datetime.fromordinal(int(request.GET['start']))
    window_end = datetime.fromordinal(int(request.GET['end']))
    rows = ReportData.objects.filter(instance_identifier=instance,
                                     date__gt=window_start,
                                     date__lt=window_end,
                                     **extra_filters)
    return TemplateResponse(request,
                            'create_report/instance_details.html',
                            {'list': rows, 'account': account})
def test_add_days(self):
    """Story.add_days defaults to one day, honors `days`, and defaults a
    missing `dt` to today."""
    dt = datetime.fromordinal(10)
    dt = Story.add_days(dt=dt)
    self.assertEqual(11, dt.toordinal())

    dt = datetime.fromordinal(10)
    dt = Story.add_days(dt=dt, days=2)
    self.assertEqual(12, dt.toordinal())

    # BUG FIX: the return value was discarded, leaving dt == None and
    # crashing on dt.toordinal(); capture the result instead.
    dt = Story.add_days(None)
    self.assertEqual(datetime.today().toordinal(), dt.toordinal())
def federation_charts(request, federation_slug=None):
    """Render entity-statistics charts for one federation (or site-wide).

    GET shows the empty ChartForm; a valid POST renders service-type and
    protocol charts for the [fromDate, toDate] window; an invalid POST
    re-renders the form with an error message.
    """
    if federation_slug is None:
        federation = None
    else:
        federation = get_object_or_404(Federation, slug=federation_slug)
    if request.method == 'POST':
        form = ChartForm(request.POST, request.FILES, instance=federation)
        if form.is_valid():
            # Chart configuration comes from the STATS settings blob.
            stats_config_dict = getattr(settings, "STATS")
            service_terms = stats_config_dict['statistics']['entity_by_type']['terms']
            protocol_terms = stats_config_dict['statistics']['entity_by_protocol']['terms']
            protocols = stats_config_dict['protocols']
            # Widen the window to whole days and force UTC awareness.
            from_time = datetime.fromordinal(form.cleaned_data['fromDate'].toordinal())
            if timezone.is_naive(from_time):
                from_time = pytz.utc.localize(from_time)
            # +1 day so that toDate itself is fully included.
            to_time = datetime.fromordinal(form.cleaned_data['toDate'].toordinal() + 1)
            if timezone.is_naive(to_time):
                to_time = pytz.utc.localize(to_time)
            service_stats = EntityStat.objects.filter( federation=federation \
                , feature__in = service_terms \
                , time__gte = from_time \
                , time__lte = to_time).order_by("time")
            protocol_stats = EntityStat.objects.filter( federation=federation \
                , feature__in = protocol_terms \
                , time__gte = from_time \
                , time__lte = to_time).order_by("time")
            s_chart = stats_chart(stats_config_dict, request, service_stats, 'entity_by_type')
            p_chart = stats_chart(stats_config_dict, request, protocol_stats, 'entity_by_protocol', protocols)
            return render_to_response('metadataparser/federation_chart.html',
                                      {'form': form,
                                       'statcharts': [s_chart, p_chart],
                                      }, context_instance=RequestContext(request))
        else:
            messages.error(request, _('Please correct the errors indicated' ' below'))
    else:
        form = ChartForm(instance=federation)
    # Fallthrough: GET, or POST with an invalid form.
    return render_to_response('metadataparser/federation_chart.html',
                              {'settings': settings,
                               'form': form},
                              context_instance=RequestContext(request))
def test_CCDCesque_changedates(record):
    """Check start, end, and break dates of the first two segments."""
    fmt = '%Y-%m-%d'
    expected = [
        ('1984-06-04', '1999-06-30', '1999-07-16'),
        ('1999-07-16', '2003-07-11', '2003-07-27'),
    ]
    for segment, (start_s, end_s, break_s) in zip(record, expected):
        assert dt.fromordinal(segment['start']) == dt.strptime(start_s, fmt)
        assert dt.fromordinal(segment['end']) == dt.strptime(end_s, fmt)
        assert dt.fromordinal(segment['break']) == dt.strptime(break_s, fmt)
def at(self, _from, to=None):
    """Return True if any work time is logged within the given period.

    `_from`/`to` may be Unix timestamps or `date` objects; a `date` is
    widened to cover its whole day(s), and a missing `to` means "same
    moment/day as `_from`".
    """
    if to is None:
        to = _from
    if isinstance(_from, date):
        _from = int(datetime.fromordinal(_from.toordinal()).timestamp())
    if isinstance(to, date):
        # End of the day: midnight of the following day.
        to = int(datetime.fromordinal(to.toordinal() + 1).timestamp())
    # timetable holds (start, duration) pairs.
    return any(_from < start + length and start <= to
               for start, length in self.timetable)
def get_week(time="None"):
    """Return (sunday_start_str, saturday_end_str, iso_week_number).

    With the default sentinel the week containing "six days ago" is used;
    otherwise `time` is parsed as MM/DD/YYYY and its own week is returned.
    """
    if time == "None":
        anchor = datetime.now().toordinal() - 6
    else:
        anchor = datetime.strptime(time, '%m/%d/%Y').toordinal()
    # Ordinal day 7 was a Sunday, so ordinal % 7 == 0 marks Sundays.
    sunday = anchor - (anchor % 7)
    saturday = sunday + 6
    sat_dt = datetime.fromordinal(saturday)
    return (datetime.fromordinal(sunday).strftime('%Y-%m-%d 00:00:00'),
            sat_dt.strftime('%Y-%m-%d 23:59:59'),
            sat_dt.isocalendar()[1])
def visualize_feedback():
    """Render a per-day count of feedback entries since 2013-07-01.

    Builds one {'number', 'time'} row per day and hands the list to the
    panel template.
    """
    condition_usage = []
    start_date = datetime.fromordinal(date(2013, 7, 1).toordinal())
    end_date = datetime.fromordinal(date.today().toordinal())
    # NOTE(review): `!=` excludes today itself from the report — confirm.
    while start_date != end_date:
        # NOTE(review): the year is hard-coded to 2013 even after start_date
        # advances past 2013-12-31 — probably should be start_date.year.
        feedbacks = get_list_feedbacks(2013, start_date.month, start_date.day, 9)
        row = {}
        row['number'] = len(feedbacks);
        row['time'] = start_date.strftime("%Y-%m-%d %H:%M:%S");
        condition_usage.append(row)
        start_date = start_date + timedelta(days=1)
    return render_template("panel.html", usage=condition_usage)
def include_more_actions(day):
    """Build template context for prev/next day navigation around `day.day`."""
    current_day = day.day
    previous_day = datetime.fromordinal(current_day.toordinal() - 1).date()
    # No "next day" link beyond today.
    if current_day < datetime.today().date():
        next_day = datetime.fromordinal(current_day.toordinal() + 1).date()
    else:
        next_day = None
    return {
        'current_day': current_day,
        'previous_day': previous_day,
        'next_day': next_day,
        'nice_month': datetime.strftime(current_day, "%B"),
    }
def test_default(self):
    """Expects shortdatetime."""
    sample = datetime.fromordinal(733900)
    expected = format_datetime(sample, format='short', locale=u'en_US')
    rendered = datetimeformat(self.context, sample)
    eq_(pq(rendered)('time').text(), expected)
def jd_to(jd):
    """Convert a Julian day number to a Gregorian (year, month, day) tuple."""
    # Fast path: dates representable by datetime (years 1..9999).
    # datetime(9999, 12, 31).toordinal() == 3652059
    ordinal = int(jd) - 1721425
    if 0 < ordinal < 3652060:
        d = datetime.fromordinal(ordinal)
        return (d.year, d.month, d.day)
    # Slow path: pure arithmetic on the proleptic Gregorian calendar.
    qc, dqc = divmod(jd - epoch, 146097)   # 400-year cycles
    cent, dcent = divmod(dqc, 36524)       # centuries
    quad, dquad = divmod(dcent, 1461)      # 4-year cycles
    yindex = dquad // 365
    year = qc * 400 + cent * 100 + quad * 4 + yindex + (cent != 4 and yindex != 4)
    yearday = jd - to_jd(year, 1, 1)
    if jd < to_jd(year, 3, 1):
        leapadj = 0
    elif isLeap(year):
        leapadj = 1
    else:
        leapadj = 2
    month = ((yearday + leapadj) * 12 + 373) // 367
    day = jd - to_jd(year, month, 1) + 1
    return int(year), int(month), int(day)
def todays_games():
    """Games whose matchtime falls within the current calendar day."""
    start = datetime.fromordinal(date.today().toordinal())  # today, midnight
    end = start + timedelta(days=1)
    return Game.all().filter('matchtime >=', start).filter('matchtime <=', end)
def dailystats(metric_root, appid, at, gp):
    """
    Retrieve daily stats of an app based on appid.  Only the data of a
    complete day — in other words, yesterday — is stored.
    """
    yesterday_ord = datetime.today().toordinal() - 1
    timestamp = time.mktime(datetime.fromordinal(yesterday_ord).timetuple())
    apps = at.get_apps()
    app_name = apps[appid]['appName']
    # If we want to stop tracking a metric, remove it below.
    tracked = ['crashPercent', 'mau', 'dau', 'rating', 'appLoads',
               'crashes', 'affectedUsers', 'affectedUserPercent']
    for metric in tracked:
        path = [metric_root, app_name, 'daily', metric]
        # errorMonitoring/graph returns an incomplete value for the running
        # day, so request two days' worth and keep only yesterday's point.
        stat = at.errorMonitoringGraph(appid=appid, metric=metric, duration=2880)
        try:
            value = stat['data']['series'][0]['points'][0]
        except LookupError:
            log.exception('No data for metric: %s app: %s', metric, app_name)
        else:
            gp.submit(path, value, timestamp)
    gp.flush()
def waveform2python(self, w, yn_to_tf=True):
    """Populate this object from a (MATLAB-style) waveform `w`.

    `w.start` is a fractional ordinal; string yes/no flags in the misc
    fields are optionally mapped to Python booleans.
    Returns self so calls can be chained.
    """
    self.network = str(w.scnl.network)
    self.station = str(w.scnl.station)
    self.location = str(w.scnl.location)
    self.channel = str(w.scnl.channel)
    self.sampling_rate = w.Fs
    # Whole-day part of the fractional ordinal start time.
    ordinal_time = int(w.start)
    # NOTE(review): delta_time is computed but never used — confirm.
    delta_time = ordinal_time - w.start
    # Subtracting the (negative) remainder adds the fractional day back.
    self.starttime = (datetime.fromordinal(ordinal_time) - timedelta(ordinal_time - w.start))
    self.data = w.data
    self.units = w.units
    self.version = 0
    self.misc_fields = dict(zip(w.misc_fields, w.misc_values))
    word_to_bool = {'true': True, 'yes': True, 'false': False, 'no': False}
    if yn_to_tf:
        for k, v in self.misc_fields.items():
            # NOTE(review): membership tests don't raise AttributeError;
            # this guard looks like a leftover from a former v.lower() call
            # (an unhashable v would raise TypeError instead) — confirm.
            try:
                if v in word_to_bool:
                    self.misc_fields[k] = word_to_bool[v]
            except AttributeError:
                pass
    self.history = []
    return self  # for chaining. feels kludgy
def dn_to_dt(dn):
    """Convert a MATLAB datenum to a Python datetime.

    MATLAB counts days from year 0, hence the 366-day shift; a tiny
    roundoff error (about 5e-6 s) comes from the fractional-day arithmetic.
    """
    whole_days = int(dn)
    frac = np.mod(dn, 1)
    return (datetime.fromordinal(whole_days)
            + timedelta(days=frac)
            - timedelta(days=366))
def detail(request, ticker):
    """Stock detail view; lazily creates the Stock row and its default plot
    on first request for a ticker."""
    symbol = ticker.lower()
    try:
        stock = Stock.objects.get(ticker=symbol)
    except Stock.DoesNotExist:
        # First time we see this ticker: fetch history and build a plot.
        plot_days = 200
        end_time = datetime.now()
        start_time = datetime.fromordinal(end_time.toordinal() - plot_days)
        try:
            frame = web.DataReader(symbol, 'yahoo', start_time, end_time)
        except pandas_datareader._utils.RemoteDataError:
            raise Http404("Ticker symbol not found: {}".format(ticker))
        stock = Stock.objects.create(ticker=symbol)
        fig = candlestick.Candlestick(frame)
        plotly_filename = "stock/{}".format(symbol)
        url = plot(fig, filename=plotly_filename, auto_open=False)
        stock.default_plot.create(
            plot_stock=stock,
            title="Movement of {} over last {} days".format(ticker.upper(), plot_days),
            start_date=start_time,
            end_date=end_time,
            pub_date=datetime.now(),
            edit_date=datetime.now(),
            plotly_url=url,
            plotly_filename=plotly_filename)
    return render(request, 'stock/detail.html',
                  {"stock": stock, 'plots': stock.default_plot.all()})
def matlabdntodatetime(matlab_dn):
    """Convert a MATLAB datenum (fractional days since year 0) to datetime."""
    # Decimal keeps the fractional part exact before converting to float.
    fract_days = Decimal(matlab_dn) % 1
    whole_days = int(matlab_dn)
    return (datetime.fromordinal(whole_days)
            + timedelta(days=float(fract_days))
            - timedelta(days=366))
def test_date(self):
    """Expects date format."""
    sample = datetime.fromordinal(733900)
    expected = format_date(sample, locale=self.locale)
    rendered = datetimeformat(self.context, sample, format='date')
    eq_(pq(rendered)('time').text(), expected)
def makedate(self, arg):
    """Coerce a 'YYYY-MM-DD' string, an ordinal int, or a list of either
    into datetime objects; unsupported inputs yield None."""
    if isinstance(arg, str) and arg:
        return datetime.strptime(arg, '%Y-%m-%d')
    if isinstance(arg, int):
        return datetime.fromordinal(arg)
    if type(arg) is list:
        return [self.makedate(item) for item in arg]
def init(self, *args, **kwargs):
    """Component initializer (circuits-style `init`, not `__init__`).

    Schedules a log rotation at the next midnight.
    """
    super(Logger, self).init(*args, **kwargs)
    # Midnight of tomorrow, as a datetime.
    interval = datetime.fromordinal((
        date.today() + timedelta(1)
    ).toordinal())
    # NOTE(review): rotate() is *called* here — presumably it constructs an
    # event object that the Timer fires at `interval` (circuits convention);
    # confirm it is not meant to be passed as a callable.
    Timer(interval, rotate(), self.channel).register(self)
def convert_to_datetime(input, timezone, arg_name):
    """
    Converts the given object to a timezone aware datetime object.

    A timezone aware datetime is returned unmodified.  A naive datetime is
    given the specified timezone.  A date becomes midnight of that day.  A
    string is parsed as a datetime; accepted forms are date only (Y-m-d),
    date with time (Y-m-d H:M:S), or date+time with microseconds
    (Y-m-d H:M:S.micro).

    :rtype: datetime
    """
    if isinstance(input, datetime):
        datetime_ = input
    elif isinstance(input, date):
        datetime_ = datetime.fromordinal(input.toordinal())
    elif isinstance(input, string_types):
        m = _DATE_REGEX.match(input)
        if not m:
            raise ValueError('Invalid date string')
        # Missing groups (e.g. no time part) default to zero.
        datetime_ = datetime(**{k: int(v or 0) for k, v in m.groupdict().items()})
    else:
        raise TypeError('Unsupported input type: %s' % type(input))

    if datetime_.tzinfo is not None:
        return datetime_
    if timezone is None:
        raise ValueError('The "timezone" argument must be specified if %s has no timezone information' % arg_name)
    if isinstance(timezone, string_types):
        timezone = gettz(timezone)
    return datetime_.replace(tzinfo=timezone)
def parse_date_time(d, t, network, dateOnly=False):
    """
    Parse date and time string into local time.

    :param d: date string (ordinal day number)
    :param t: time string
    :param network: network to use as base
    :return: datetime object containing local time
    """
    hr = 0
    m = 0
    parsed = time_regex.search(t)
    network_tz = get_network_timezone(network, load_network_dict())
    if parsed:
        hr = tryInt(parsed.group("hour"))
        m = tryInt(parsed.group("minute"))
        meridiem = parsed.group("meridiem")
        meridiem = meridiem[0].lower() if meridiem else ""
        # 12-hour clock corrections.
        if meridiem == "a" and hr == 12:
            hr -= 12
        elif meridiem == "p" and hr != 12:
            hr += 12
        # Clamp out-of-range values to zero.
        if not (0 <= hr <= 23):
            hr = 0
        if not (0 <= m <= 59):
            m = 0
    result = datetime.fromordinal(max(tryInt(d), 1))
    if dateOnly:
        return result.replace(tzinfo=network_tz)
    return result.replace(hour=hr, minute=m, tzinfo=network_tz)
def reducer():
    """Sum tab-separated records per ordinal date read from stdin and emit
    one `YYYY-MM-DD<TAB>count` line per date.

    Input must arrive sorted/grouped by date (the Hadoop-streaming
    contract); malformed lines are skipped.
    """
    count = 0
    old_date = None
    for line in sys.stdin:
        data = line.strip().split("\t")
        if len(data) != 2:
            continue  # malformed record
        this_date, _ = data
        if old_date and old_date != this_date:
            # BUG FIX: emit the *completed* group's date (old_date), not the
            # date that just started.
            date_str = datetime.fromordinal(int(old_date)).date()
            print("{}\t{}".format(date_str, count))
            count = 0
        old_date = this_date
        count += 1
    # BUG FIX: flush the final group, which the original silently dropped.
    if old_date:
        date_str = datetime.fromordinal(int(old_date)).date()
        print("{}\t{}".format(date_str, count))
def fit_population(age_distribution, population_size, time_points, data, guess=None):
    """
    Fit R0, reported and logInitial to specified reported cases.

    Returns None when there are too few death data points to fit; otherwise
    a dict with the fitted params, initial cases, start date and fit error.
    """
    # Need more than five death data points for a meaningful fit.
    if data is None or len(data[Sub.D]) <= 5:
        return None
    if guess is None:
        guess = {"R0": 3.0, "reported": 0.3, "logInitial": 1}
    param, init_cases, err = fit_params(age_distribution, population_size,
                                        time_points, data, guess)
    t_min = datetime.strftime(datetime.fromordinal(time_points[0]), '%Y-%m-%d')
    return {'params': param,
            'initialCases': init_cases,
            'tMin': t_min,
            'data': data,
            'error': err}
def __update_data_history_from_yahoo(self):
    """Load cached Yahoo history for this ticker and append the delta up to
    yesterday (Python 2 code).

    The pickle cache lives in ./data/<ticker>.txt; a missing cache triggers
    a full download, otherwise only rows after the last cached date are
    fetched and appended.
    """
    data_folder = "./data"
    data_ticker = data_folder + "/" + self.name + ".txt"
    start = "1950-01-01"
    # yesterday — the last *complete* trading day.
    end = datetime.fromordinal(datetime.today().toordinal() - 1).strftime("%Y-%m-%d")
    # NOTE(review): `today` is computed but never used — confirm.
    today = datetime.today().strftime("%Y-%m-%d")
    # Use ticker as filename to store historical data into a text
    # file. only update the delta up to today
    print "Update %s historical data..." % self.name
    if not os.path.exists(data_folder):
        print "The data folder %s dose not exist!" % data_folder
        print "Create %s..." % data_folder
        os.mkdir(data_folder)
    if not os.path.exists(data_ticker):
        # No cache yet: full download, stored oldest-first.
        print "Create %s historical data from yahoo..." % self.name
        self.data_history_yahoo = self.yahoo.get_historical(start, end)
        self.data_history_yahoo.reverse()
        pickle.dump(self.data_history_yahoo, open(data_ticker, "wb"))
        return
    self.data_history_yahoo = pickle.load(open(data_ticker, "rb"))
    if not self.data_history_yahoo:
        print "Cannot get history data!"
        return
    prev_date = datetime.strptime(self.data_history_yahoo[-1]["Date"], "%Y-%m-%d").strftime("%Y-%m-%d")
    # String comparison is valid because both are ISO-formatted dates.
    if end > prev_date:
        print "Update %s data from %s to %s" % (self.name, prev_date, end)
        delta_history = self.yahoo.get_historical(prev_date, end)
        delta_history.reverse()
        self.data_history_yahoo += delta_history
        pickle.dump(self.data_history_yahoo, open(data_ticker, "wb"))
    else:
        print "Already up-to-date"
def _from_ordinal(x, tz: tzinfo | None = None) -> datetime:
    """Convert a fractional proleptic-Gregorian ordinal to a datetime.

    The integer part selects the day; the fraction is expanded into
    hour/minute/second/microsecond, with small corrections for float
    rounding at both ends of the microsecond range.
    """
    whole = int(x)
    base = datetime.fromordinal(whole)
    frac = float(x) - whole
    hour, frac = divmod(24 * frac, 1)
    minute, frac = divmod(60 * frac, 1)
    second, frac = divmod(60 * frac, 1)
    microsecond = int(1_000_000 * frac)
    if microsecond < 10:
        microsecond = 0  # compensate for rounding errors
    result = datetime(base.year, base.month, base.day,
                      int(hour), int(minute), int(second), microsecond)
    if tz is not None:
        result = result.astimezone(tz)
    if microsecond > 999990:  # compensate for rounding errors
        result += timedelta(microseconds=1_000_000 - microsecond)
    return result
def test_days_ago(self):
    """days_ago() counts back from today's midnight and honors the
    hour/minute/second/microsecond offsets."""
    midnight = datetime.fromordinal(datetime.today().date().toordinal())
    self.assertTrue(dates.days_ago(0) == midnight)
    self.assertTrue(dates.days_ago(100) == midnight - timedelta(days=100))
    for unit in ('hour', 'minute', 'second', 'microsecond'):
        offset = timedelta(**{unit + 's': 3})
        self.assertTrue(dates.days_ago(0, **{unit: 3}) == midnight + offset)
def load_metadata(path, metadata):
    """Build {filename: {'age', 'gender'}} from an IMDB-WIKI style .mat file.

    :param path: path to the .mat file
    :param metadata: name of the struct inside the file (e.g. 'imdb')
    Age is the photo-taken year minus the birth year decoded from the
    MATLAB-datenum date of birth.
    """
    content = sio.loadmat(path)
    imdb = content[metadata][0][0]
    date_of_birth = imdb[0][0]
    photo_taken_year = imdb[1][0]
    full_path = imdb[2][0]
    gender = imdb[3][0]
    filename_to_metadata = {}
    # BUG FIX: `range(0, size - 1)` silently dropped the last record.
    for i in range(date_of_birth.size):
        birth_year = datetime.fromordinal(int(date_of_birth[i])).year
        file_name = full_path[i][0].split('/')[-1]
        age = photo_taken_year[i] - birth_year
        filename_to_metadata[file_name] = {'age': age, 'gender': gender[i]}
    return filename_to_metadata
def mat_loader(path):
    """Map each wiki_crop photo path to the subject's age when photographed.

    `path` is a wiki.mat file from the IMDB-WIKI dataset; dates of birth are
    MATLAB datenums, hence the 366-day epoch shift.
    """
    # matfile = '/home/jzhao/Desktop/wiki_crop/wiki.mat'
    wiki_struct = scipy.io.loadmat(path)['wiki'][0][0]
    dob = wiki_struct[0][0]          # MATLAB datenums of birth dates
    photo_taken = wiki_struct[1][0]  # year each photo was taken
    photo_path = wiki_struct[2][0]
    label_dict = {}
    for datenum, taken, rel_path in zip(dob, photo_taken, photo_path):
        birth = (datetime.fromordinal(int(datenum))
                 + timedelta(days=int(datenum % 1))
                 - timedelta(days=366))
        label_dict[rel_path[0]] = taken - birth.year
    return label_dict
def test_user_timezone(self):
    """Shows time in user timezone."""
    sample = datetime.fromordinal(733900)
    # A user whose profile timezone differs from the site default.
    user = User.objects.get(username='******')
    self.context['request'].user = user
    # Localize to the site default, then convert into the user's zone.
    default_tz = timezone(settings.TIME_ZONE)
    user_tz = user.get_profile().timezone
    converted = user_tz.normalize(default_tz.localize(sample).astimezone(user_tz))
    expected = format_datetime(converted, format='long', locale=u'en_US')
    rendered = datetimeformat(self.context, sample, format='longdatetime')
    eq_(pq(rendered)('time').text(), expected)
def __init__(self):
    """Initialize per-asset weights, prices and rebalance bookkeeping.

    Presumably a backtrader Strategy __init__: self.datas are the data
    feeds and self.p.assets is a list of (name, percent) pairs — TODO
    confirm against the strategy's params declaration.
    """
    self.weight_chg = {}
    self.weights = {}
    self.last_weights = None
    # fromdate is a fractional ordinal; the int() truncation keeps the day.
    self.date = datetime.fromordinal(int(self.datas[0].fromdate))
    self.day = 0
    self.prices = {}
    self.rebalance_dict = dict()
    for i, d in enumerate(self.datas):
        self.rebalance_dict[d] = dict()
        self.rebalance_dict[d]['rebalanced'] = False
        # Match this feed against the configured asset list by name.
        for asset in self.p.assets:
            if asset[0] == d._name:
                # Percentages are stored as fractions of 1.
                self.weights[d._name] = asset[1] / 100
                self.weight_chg[d._name] = asset[1] / 100
                self.prices[d._name] = pd.Series(self.datas[i].close.array)
                # start_dt = dt.fromordinal(int(self.datas[i].fromdate))
                # end_dt = dt.fromordinal(int(self.datas[i].todate))
                # self.prices[asset].set_index(pd.date_range(start=start_dt, end=end_dt))
                self.rebalance_dict[d]['target_percent'] = asset[1]
def get_metadata(self):
    """Read file attributes and the date list from the HDF5 file into
    self.metadata, adding START/END dates and the raster LENGTH/WIDTH.

    Returns the populated metadata dict.
    """
    with h5py.File(self.file, 'r') as f:
        self.metadata = dict(f.attrs)
        # 'dates' holds ordinal day numbers; render them as YYYYMMDD.
        dateList = [
            dt.fromordinal(int(i)).strftime('%Y%m%d')
            for i in f['dates'][:].tolist()
        ]
        for key, value in self.metadata.items():
            try:
                self.metadata[key] = value.decode('utf8')
            # BUG FIX: was a bare `except:` (which even swallowed
            # KeyboardInterrupt); only decode failures are expected here —
            # the value is already a str, or holds non-UTF-8 bytes.
            except (AttributeError, UnicodeDecodeError):
                self.metadata[key] = value
        self.metadata['START_DATE'] = dateList[0]
        self.metadata['END_DATE'] = dateList[-1]
        # size
        self.get_size()
        self.metadata['LENGTH'] = str(self.length)
        self.metadata['WIDTH'] = str(self.width)
    return self.metadata
def _coerce_to_bson_compatible(value):
    """
    Ensure that any types which cannot be encoded into BSON are converted
    appropriately.

    BSON cannot handle:
      * dates    -> converted to datetime (midnight of the same day)
      * decimals -> converted to float
    Django model instances are cast to strings.
    """
    if isinstance(value, Decimal):
        return float(value)
    if isinstance(value, date) and not isinstance(value, datetime):
        return datetime.fromordinal(value.toordinal())
    if type(value) in get_models():
        return str(value)
    return value
def submission_dates(self):
    """
    Create data for the "submission date" chart.

    The "submission date" chart shows the selected submissions over time.
    Returns a list of (formatted-date, count) pairs — one per day between
    the least and most recent submission, with 0 for empty days.
    """
    all_dates = [s.submission_date.date() for s in self.solutions]
    if not all_dates:
        return []
    all_dates.sort()
    # Pad with every day in the covered range so empty days appear in the
    # chart; each padded day contributes exactly one extra occurrence,
    # which is subtracted again below.
    all_dates += date_range_in_between(all_dates[0], all_dates[-1])
    # BUG FIX: itertools.groupby only merges *consecutive* equal keys, so
    # the list must be re-sorted after the padding range is appended.
    all_dates.sort()
    dates_grouped = groupby(all_dates, key=datetime.toordinal)
    return [(datetime.fromordinal(k).strftime(self.date_format),
             len(list(v)) - 1)
            for k, v in dates_grouped]
def get_weekly_updates(cls, ref_date=None):
    """
    Gets all weekly updates for the week the given timestamp belongs to.

    The week is anchored at CUTOFF_DAY; `ref_date` defaults to today.
    """
    if ref_date is None:
        ref_date = date.today()
    offset = ref_date.weekday() - CUTOFF_DAY
    if offset <= 0:
        # Wrapped around the week: reach back into the previous week.
        back = timedelta(offset + 6)
    else:
        # Subtract one day so we don't overlap last week by a day.
        back = timedelta(offset - 1)
    start_datetime = datetime.fromordinal(ref_date.toordinal()) - back
    query = cls.all()
    # Only updates received after the week start, earliest first.
    query.filter('datetime_received_at >=', start_datetime)
    query.order('datetime_received_at')
    return query
def __init__(self, datetimes, init_obs, band):
    """Harmonic-regression model state for one band's time series.

    `datetimes` are ordinal day numbers; three angular frequencies (one,
    two and three cycles per year) are precomputed for the design matrix.
    """
    self.T = 365.25  # days per (tropical) year
    self.pi_val_simple = (2 * np.pi) / self.T
    self.pi_val_advanced = (4 * np.pi) / self.T
    self.pi_val_full = (6 * np.pi) / self.T
    self.datetimes = datetimes
    self.band = band
    # Day-of-year for every observation date.
    self.doy = np.array([
        datetime.fromordinal(x.astype(int)).timetuple().tm_yday
        for x in self.datetimes
    ])
    # Fit results, populated later.
    self.lasso_model = None
    self.residuals = None
    self.RMSE = None
    self.coefficients = None
    self.predicted = None
    self.start_val = None
    self.end_val = None
    self.init_obs = init_obs
def generate_dates(year_back):
    """Return sorted date strings and timestamps for every day from
    January 1 of (current year - year_back) through today.

    Keys: "ISO" (YYYYMMDD), "FR" (DDMMYYYY), "OTHER" (MMDDYYYY),
    "TS" (Unix timestamps at local midnight).
    """
    values = {"ISO": [], "FR": [], "OTHER": [], "TS": []}
    end_date = date.today()
    # Start at Dec 31 so the first emitted day is 01/01 of the target year.
    day = date(year=end_date.year - (year_back + 1), month=12, day=31)
    for _ in range((end_date - day).days):
        day += timedelta(days=1)
        values["ISO"].append(day.strftime("%Y%m%d"))
        values["FR"].append(day.strftime("%d%m%Y"))
        values["OTHER"].append(day.strftime("%m%d%Y"))
        values["TS"].append(int(datetime.fromordinal(day.toordinal()).timestamp()))
    for bucket in values.values():
        bucket.sort()
    return values
def get_date12_list(self):
    """Derive master/slave date pairs from the Jmat design matrix.

    Each Jmat row encodes one interferogram: +1 marks the master date's
    column, -1 the slave date's column.  Populates dateList, numDate,
    date12List, mDates and sDates; returns date12List.
    """
    with h5py.File(self.file, 'r') as f:
        self.dateList = [
            dt.fromordinal(int(i)).strftime('%Y%m%d')
            for i in f['dates'][:].tolist()
        ]
        self.numDate = len(self.dateList)
        dates = np.array(self.dateList)
        Jmat = f['Jmat'][:]
    mDates = []
    sDates = []
    for row in Jmat:
        mDates.append(dates[row == 1][0])
        sDates.append(dates[row == -1][0])
    self.date12List = ['{}_{}'.format(m, s) for m, s in zip(mDates, sDates)]
    self.mDates = mDates
    self.sDates = sDates
    return self.date12List
def restore(dct):
    """Inverse of a jsonpickle-style encoding: rebuild Python, numpy and
    pandas objects from their 'py/...'-tagged dict representation.

    Untagged dicts are returned unchanged (object_hook convention).
    """
    if "py/dict" in dct:
        return dict(dct["py/dict"])
    if "py/tuple" in dct:
        return tuple(dct["py/tuple"])
    if "py/set" in dct:
        return set(dct["py/set"])
    if "py/pandas.tseries.index.DatetimeIndex" in dct:
        payload = dct["py/pandas.tseries.index.DatetimeIndex"]
        return pd.tseries.index.DatetimeIndex(payload["values"], dtype=payload["dtype"])
    if "py/pandas.DataFrame" in dct:
        return pd.DataFrame(dct["py/pandas.DataFrame"]["data"])
    if "py/numpy.ndarray" in dct:
        payload = dct["py/numpy.ndarray"]
        return np.array(payload["values"], dtype=payload["dtype"])
    if "py/datetime" in dct:
        return datetime.fromordinal(dct["py/datetime"]["ordinal"])
    return dct
def test_user_timezone(self):
    """Shows time in user timezone."""
    sample = datetime.fromordinal(733900)
    # A user whose profile timezone differs from the site default.
    user = self.user_model.objects.get(username='******')
    self.context['request'].user = user
    # Localize to the site default, then convert into the user's zone.
    site_tz = pytz.timezone(settings.TIME_ZONE)
    user_tz = pytz.timezone(user.timezone)
    converted = user_tz.normalize(site_tz.localize(sample).astimezone(user_tz))
    expected = format_datetime(converted, format='long', locale='en_US')
    returned = datetimeformat(self.context, sample,
                              format='longdatetime', output='json')
    assert expected == returned
async def _inspect_node(self, node, timestamp):
    """Probe one node and refresh its entry in self.nodes.

    Reachable nodes get their peek data expanded and online=True.
    A previously-seen node (has 'node_id') that is now unreachable gets
    placeholder offline values; an unreachable *new* node is ignored.

    :param node: key into self.known
    :param timestamp: collection time stored on the node record
    """
    node_info = self.known[node]
    peeked = await self._peek_node(node)
    if peeked == {}:  # node unreachable
        # Idiom fix: membership test directly on the dict
        # (was: 'node_id' in list(node_info.keys())).
        if 'node_id' in node_info:
            # Known node went dark: record dummy/offline data.
            node_info['n_peers'] = 0
            node_info['val_addr'] = ''
            # datetime.fromordinal(1) acts as the "never" sentinel.
            node_info['latest_block_time'] = datetime.fromordinal(1)
            node_info['latest_block_height'] = 0
            node_info['catching_up'] = False
            node_info['elapsed'] = 0
            node_info['online'] = False
        else:
            return  # ignore new node that is already unreachable
    else:
        node_info.update(self._expand_node(peeked))
        node_info['online'] = True
    node_info['timestamp'] = timestamp
    self.nodes[node] = node_info
def __getitem__(self, key):
    """Dict-style access to measurement fields, each rendered as displayable text.

    Non-string keys fall through and yield None (preserving the original
    implicit behavior); unknown string keys raise a bare KeyError.
    """
    if type(key) is not str:
        return None
    getters = {
        "machine_name": lambda: self.machine_name,
        "sensor_type": lambda: self.sensor_type,
        "date_measurement": lambda: dt.fromordinal(
            self.date_measurement).strftime("%Y-%m-%d"),
        "start_timestamp": lambda: dt.fromtimestamp(
            self.start_timestamp).strftime('"%Y-%m-%d %H:%M:%S.%f"'),
        "end_timestamp": lambda: dt.fromtimestamp(
            self.end_timestamp).strftime('"%Y-%m-%d %H:%M:%S.%f"'),
        "realvalue": lambda: str(self.realvalue),
        "unit": lambda: self.unit,
    }
    if key not in getters:
        # Match the original: KeyError with no message argument.
        raise KeyError()
    return getters[key]()
def create_ktv_order(db, order_id, params):
    """Create KTV (karaoke room) bookings for every KTV goods item on an order.

    Parses the Taobao SKU properties string, sets a 7-day coupon expiry on the
    matching item coupons, inserts one ktv_order row per KTV order item, and
    returns a list of dicts with the shop name, manager mobile and parsed SKU.
    Returns [] when the order carries no sku_properties.
    """
    # Example sku_properties value (room type; sing time; date):
    # "包厢房型:小包;欢唱时间:17点至20点;日期:6月1日(周三)"
    results = []
    if 'sku_properties' not in params:
        return results

    sku = TaobaoSku('', None, 0, 0, 0, 0, 0)
    props = re.split(r'[:;]', params.sku_properties)
    sku.parse_taobao_property(props[1].encode('utf-8'),
                              props[3].encode('utf-8'),
                              props[5].encode('utf-8'))

    # Coupons expire 7 days after the booked day (last second of day 7).
    expire_at = datetime.fromordinal(sku.date.toordinal()) + timedelta(days=7, seconds=-1)

    items = db.query('select * from order_item where order_id=%s', order_id)
    for item in items:
        is_ktv = db.get('select 1 from ktv_product_goods where goods_id=%s',
                        item.goods_id)
        if not is_ktv:
            continue
        db.execute('update item_coupon ic, item i set ic.expire_at=%s where ic.item_id=i.id and i.order_item_id=%s',
                   expire_at, item.id)
        product_goods = db.get('select kp.shop_id, kp.product_id, ss.name shop_name, ss.manager_mobile '
                               'from ktv_product_goods kp left join supplier_shop ss '
                               'on kp.shop_id = ss.id where kp.goods_id=%s',
                               item.goods_id)
        db.execute('insert into ktv_order set created_at=NOW(), deal_at=NOW(), '
                   'room_type=%s, scheduled_day=%s, scheduled_time=%s, status="DEAL",'
                   'goods_id=%s, order_item_id=%s, shop_id=%s, product_id=%s',
                   sku.room_type, sku.date, sku.start_time,
                   item.goods_id, item.id,
                   product_goods.shop_id, product_goods.product_id)
        results.append({
            'shop_name': product_goods.shop_name,
            'manager_mobile': product_goods.manager_mobile,
            'sku': sku,
        })
    return results
def create_tickets_csv(connection):
    """Generate a large synthetic ticket dataset and dump it to CSV.

    Fetches employee ids from PostgreSQL, then writes NUM_TICKETS rows of
    (random barcode, random 2019 date, random employee id) to
    ./Our_Database/GeneratedCSV/ticket_data.csv (';'-separated, no index).

    :param connection: open psycopg2 connection
    """
    NUM_TICKETS = 5000000  # was repeated as a magic literal three times

    cursor = connection.cursor()
    list_employees = []
    try:
        cursor.execute("select employee.id from employee;")
        list_employees = cursor.fetchall()
    except (Exception, psycopg2.Error) as error:
        print("Error while connecting to PostgreSQL", error)

    start_date = dt(2019, 1, 1).toordinal()
    end_date = dt(2019, 12, 31).toordinal()
    # One random day ordinal per ticket ('_' replaces the original loop var
    # 'iter', which shadowed the builtin).
    list_of_days = [rd.randint(start_date, end_date) for _ in range(NUM_TICKETS)]
    # NOTE(review): shuffling an already-random list is redundant; kept to
    # preserve the original's RNG consumption.
    rd.shuffle(list_of_days)

    list_to_csv = []
    for _ in range(NUM_TICKETS):
        random_day = dt.fromordinal(list_of_days.pop()).strftime('%Y-%m-%d')
        id_employee = rd.choice(list_employees)
        list_to_csv.append({
            'Barcode_item': rd.randint(100, 10000),
            'Date': random_day,
            'Employee_id': id_employee[0],
        })

    df = pd.DataFrame.from_dict(list_to_csv)
    df.to_csv('./Our_Database/GeneratedCSV/ticket_data.csv',
              sep=';', index=False, encoding="utf-8")
def predict_price(request):
    """Predict the next day's opening price of the Russell 2000 index (RUT)
    by fitting a linear regression of open price against date ordinal.

    :param request: Django request
    :return: rendered 'stock_api/new_day_wise_predict.html' with the
             predicted price, model coefficient/intercept and target date.
    """
    import datetime as dt_module
    from datetime import datetime

    template = 'stock_api/new_day_wise_predict.html'
    # RUSSELL 2000 INDEX
    qs = StockData.objects.filter(companyAbbr='RUT')
    df = read_frame(qs)
    price = df['open']

    # sklearn needs numeric features, so convert dates to ordinals.
    df['date'] = pd.to_datetime(df['date'])
    dates = df['date'].map(dt_module.datetime.toordinal)

    # TODO: derive the target day dynamically instead of hard-coding it.
    tomorrow_date_in_ordinal = 736559

    linear_mod = linear_model.LinearRegression()
    dates = np.reshape(dates.values, (len(dates), 1))    # n x 1 feature matrix
    prices = np.reshape(price.values, (len(price), 1))
    linear_mod.fit(dates, prices)
    # predict() requires a 2-D array; the original passed the bare ordinal,
    # which fails on current scikit-learn.
    predicted_price = linear_mod.predict(np.array([[tomorrow_date_in_ordinal]]))

    # Human-readable date for the template ('dt' context key kept for the
    # template; the local no longer shadows the datetime module alias).
    readable_date = datetime.fromordinal(tomorrow_date_in_ordinal)
    context = {
        "predicted_price": predicted_price[0][0],
        "coefficient": linear_mod.coef_[0][0],
        "constant": linear_mod.intercept_[0],
        "dt": readable_date,
    }
    return render(request, template, context)
def compute_age():
    """Interactively prompt for a date of birth and print the person's age.

    Loops until the user enters '0'. Fixes in this revision:
    - parse format now matches the advertised prompt ('dd-mm-yyyy'; the
      original parsed '%m-%d-%Y'),
    - age is computed as whole years with a birthday check (the original
      fromordinal/strftime('%Y') trick was off by one at year boundaries),
    - removed the tautological `if answer == dob` guard.
    """
    from datetime import datetime

    while True:
        dob = input("Please enter your date of birth this way 'dd-mm-yyyy': ")
        try:
            born = datetime.strptime(dob, "%d-%m-%Y")
        except ValueError:
            # '0' is the documented quit sentinel; anything else re-prompts.
            if dob == "0":
                print("You ended the program yourself by pressing '0' ")
                break
            continue
        today = datetime.today()
        # Count a year only once the birthday has passed this year.
        age = today.year - born.year - ((today.month, today.day) < (born.month, born.day))
        print(f"You are {age} years old.")
def __init__(self, file_dir):
    """Load the first sheet of an Excel workbook into self.title / self.record.

    self.title is the header row; self.record is one dict per data row with
    date/time columns converted to datetime/time objects and other floats
    truncated to int.
    """
    sheet = xlrd.open_workbook(file_dir).sheet_by_index(0)

    # Header row -> column titles.
    headers = [sheet.cell_value(0, col) for col in range(sheet.ncols)]
    self.title = headers

    date_like = ['Date', 'Time', 'Booking Date', 'Time Depart', 'Time Arrive']
    rows = []
    for row_idx in range(1, sheet.nrows):
        entry = {}
        for col in range(sheet.ncols):
            value = sheet.cell_value(row_idx, col)
            header = headers[col]
            if header in date_like:
                if header == 'Time':
                    # Excel stores times as a fraction of a day.
                    seconds = int(value * 24 * 3600)
                    value = time(seconds // 3600, (seconds % 3600) // 60,
                                 seconds % 60)
                else:
                    # Excel serial date -> datetime; the -2 compensates for
                    # Excel's 1900 epoch and its phantom 1900-02-29.
                    value = int(value) if type(value) is float else value
                    value = datetime.fromordinal(
                        datetime(1900, 1, 1).toordinal() + value - 2)
            else:
                value = int(value) if type(value) is float else value
            entry[header] = value
        rows.append(entry)
    self.record = rows
def inserDataToMySql(self, cursor, filename, teamname):
    """Bulk-load one Excel purchase-order sheet into MySQL.

    Reads the first worksheet of *filename*, builds one parameter list per
    spreadsheet row that has a PO number in column 3, and runs one
    ``INSERT IGNORE`` per such row against the table named by ``sqltablename``
    (a name defined outside this method -- presumably module-level; confirm).
    When the whole sheet is processed the file is renamed to
    ``<filename>.done``.

    :param cursor: open MySQL cursor used for the inserts
    :param filename: path of the .xls file to import
    :param teamname: appended as the final column value of every row
    """
    # Open the workbook and define the worksheet
    book = xlrd.open_workbook(filename)
    sheet = book.sheet_by_index(0)
    # The INSERT lists 65 data columns plus teamname; never read past col 65.
    colrange = sheet.ncols
    if colrange > 65:
        colrange = 65
    # Create the INSERT INTO sql query
    query = """INSERT IGNORE INTO """+sqltablename +""" (Plant_Name,Plant_ID,LD,PO_No,User_Purchaser_Name,Warehouse,PO_Document_Date,PO_Received_Date,Customer_Due_Delivery_Date,CPN_Code,Material_Description_Short_Text,Material_Description_Long_Text,PO_Status,Qty,UOM,SP_Unit_in_PO,TAX_percent,Total_Pre_tax_value,Total_Customer_Invoice_Value,Customer_Freight_Terms,Buyers_PO_punching_date,Buyers_customer_item_id,Sourcing_Allocation,Customer_LPP_Contract_1_Price,Customer_LPP_Contract_2_Price,Deviation_from_LPP_Contract_Price,Sourcing_KAM_Remarks,Moglix_Supplier,Supplier_ID,HSN,HSN_TAX,Brand,List_Price,Discpercent_Received,TP_Unit,Supplier_Freight_Terms,Supplier_Advance_Credit,Supplier_Credit_days_in_days,Supplier_Pick_up_due_date,MSN,MSN_Description,Supplier_PO_ID,EMS_Item_ID,Supplier_PO_raised_date,OPS_Allocation,OPS_team_remarks,Supplier_committed_pick_up_date,Actual_pick_up_date_MRN_date,Inbound_freight_cost,Invoice_Number,Invoice_Date,Delivered_date,Outbound_Freight_Cost,CN_Raised,Customer_Credit_Days,GMpercent,GM_mul_percent,PO_date_vs_PO_recived_TAT,PO_recvd_vs_PO_Punching_TAT,PO_Punching_vs_Supplier_PO_TAT,Committed_vs_Actual_Pick_UP_Date_TAT,Pick_Up_Date_vs_Invoice_Date_TAT,Invoice_Date_vs_Delivery_Date_TAT,Delivery_TAT,Moglix_ETA, teamname) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
    # print(query)
    Val_ist = []
    # Create a For loop to iterate through each row in the XLS file, starting at row 2 to skip the headers
    for r in range(1, sheet.nrows):
        # print(sheet.ncols)
        # Column 3 holds the PO number; rows without one are skipped.
        ponum = sheet.cell(r, 3).value
        if ponum:
            for l in range(0, colrange):
                val = sheet.cell(r, l).value
                if not val:
                    # Empty (falsy) cells are stored as SQL NULL.
                    val = None
                elif l in [6,7,8,20,38,43,46,47,50,51,64]:
                    # These column indices hold Excel serial dates; convert to
                    # "Y-M-D" text. The -2 compensates for Excel's 1900 epoch
                    # and its phantom 1900-02-29 leap day.
                    try:
                        excel_date = int(val)
                        dt = datetime.fromordinal(datetime(1900, 1, 1).toordinal() + excel_date - 2)
                        val = str(dt.year) + "-" + str(dt.month) + "-" + str(dt.day)
                    except Exception:
                        # Non-numeric content in a date column -> NULL.
                        val=None
                #print(val)
                Val_ist.append(val)
            #print(Val_ist)
            # One execute per row; teamname is the 66th and last parameter.
            Val_ist.append(teamname)
            cursor.execute(query, Val_ist)
            # Clear in place so the same list object is reused for the next row.
            del Val_ist[:]
    # Print results
    print("")
    print("All Done for this file!")
    print("")
    # Mark the file as processed so it is not imported again.
    os.rename(filename, filename + ".done")
def parse_date_time(d, t, network, dateOnly=False):
    """
    Parse an ordinal date and a clock-time string into a timezone-aware datetime.

    :param d: date as an ordinal number (string or int)
    :param t: time string, e.g. "8:00 PM"
    :param network: network whose timezone is applied
    :param dateOnly: when True, keep midnight instead of the parsed time
    :return: datetime localized to the network timezone
    """
    if not network_dict:
        load_network_dict()

    network_tz = get_network_timezone(network)
    hour = minute = 0
    match = time_regex.search(t)
    if match:
        hour = tryInt(match.group('hour'))
        minute = tryInt(match.group('minute'))
        meridiem = match.group('meridiem')
        meridiem = meridiem[0].lower() if meridiem else ''
        # 12 AM -> 0h, 1-11 PM -> +12h; 12 PM stays 12.
        if meridiem == 'a' and hour == 12:
            hour -= 12
        elif meridiem == 'p' and hour != 12:
            hour += 12
        # Clamp out-of-range values back to 0.
        if not 0 <= hour <= 23:
            hour = 0
        if not 0 <= minute <= 59:
            minute = 0

    base = datetime.fromordinal(max(tryInt(d), 1))
    if dateOnly:
        return base.replace(tzinfo=network_tz)
    return base.replace(hour=hour, minute=minute, tzinfo=network_tz)
def read_timeseries_yx(timeseries_file, y, x, ref_yx=None):
    """Read time-series displacement on point (y,x) from timeseries_file
    Inputs:
        timeseries_file : string, name/path of timeseries hdf5 file
        y/x : int, row/column number of point of interest
        ref_yx : tuple of 2 int, optional (row, col) of reference point whose
                 series is subtracted from the point of interest
    Output:
        dis_ts : 1D array of float, displacement time-series of the point
    """
    atr = readfile.read_attribute(timeseries_file)
    k = atr['FILE_TYPE']
    if k in ['GIANT_TS']:
        # Use a context manager so the file is closed even on error.
        with h5py.File(timeseries_file, 'r') as h5:
            dname = [i for i in ['rawts', 'recons'] if i in list(h5.keys())][0]
            dis_ts = h5[dname][:, y, x]
            if ref_yx is not None:
                # BUGFIX: subtract the reference pixel's series. The original
                # assigned it (dis_ts = h5[dname][:, ref_yx[0], ref_yx[1]]),
                # silently discarding the point of interest.
                dis_ts -= h5[dname][:, ref_yx[0], ref_yx[1]]
    else:
        box = (x, y, x + 1, y + 1)
        dis_ts = timeseries(timeseries_file).read(box=box, print_msg=False)
        if ref_yx is not None:
            ref_box = (ref_yx[1], ref_yx[0], ref_yx[1] + 1, ref_yx[0] + 1)
            dis_ts -= timeseries(timeseries_file).read(box=ref_box, print_msg=False)
    return dis_ts
def parse_reference(emitter, reference, rows):
    """Emit one LegalEntity plus its Sanction for a DFAT reference group.

    Consumes every row of the group: names/aliases, address, notes, sanction
    program, nationality and birth data, and the Excel-serial control date
    which becomes the modifiedAt of both entity and sanction.
    """
    entity = emitter.make('LegalEntity')
    entity.make_id(reference)
    entity.add('sourceUrl', URL)

    sanction = emitter.make('Sanction')
    sanction.make_id(entity.id)
    sanction.add(
        'authority',
        'Australian Department of Foreign Affairs and Trade Consolidated Sanctions'
    )
    sanction.add('entity', entity)

    # Excel day 1 == 1900-01-01; the -2 below absorbs the epoch offset and
    # Excel's phantom 1900-02-29 leap day.
    excel_epoch = datetime(1900, 1, 1).toordinal()

    for row in rows:
        if row.pop('type') == 'Individual':
            entity.schema = model.get('Person')
        name = row.pop('name_of_individual_or_entity', None)
        if row.pop('name_type') == 'aka':
            entity.add('alias', name)
        else:
            entity.add('name', name)
        entity.add('address', row.pop('address'))
        entity.add('notes', row.pop('additional_information'))
        sanction.add('program', row.pop('committees'))
        entity.add('nationality', normalize_country(row.pop('citizenship')),
                   quiet=True)
        entity.add('birthDate', row.pop('date_of_birth'), quiet=True)
        entity.add('birthPlace', row.pop('place_of_birth'), quiet=True)
        entity.add('status', row.pop('listing_information'), quiet=True)

        serial = int(row.pop('control_date'))
        listed = datetime.fromordinal(excel_epoch + serial - 2)
        sanction.add('modifiedAt', listed.date())
        entity.add('modifiedAt', listed.date())

    emitter.emit(entity)
    emitter.emit(sanction)
def convertExceltoDate(serialDate, dateFormat):
    """Convert an Excel serial date number to a formatted date.

    :param serialDate: Excel serial day number (anything int() accepts)
    :param dateFormat: 1 -> 'dd/mm/yyyy', 2 -> 'mm/dd/yyyy',
                       3 -> 'yyyy-mm-dd'; any other value returns the
                       datetime.date object itself (original fall-through).
    :return: formatted date string, or a date object for unknown formats
    """
    # The -2 offset compensates for Excel's 1900 epoch (serial 1 ==
    # 1900-01-01) and its phantom 1900-02-29 leap day.
    d = datetime.fromordinal(
        datetime(1900, 1, 1).toordinal() + int(serialDate) - 2).date()
    # strftime replaces the original replace/split/rejoin string surgery;
    # outputs are identical.
    if dateFormat == 1:
        return d.strftime('%d/%m/%Y')
    if dateFormat == 2:
        return d.strftime('%m/%d/%Y')
    if dateFormat == 3:
        return d.isoformat()
    return d