def get_text_labeled_dataset(from_date, to_date, binary=True):
    """Build a text/label dataset from daily Reddit comment CSV dumps.

    For every day in [from_date, to_date) that has both a comment CSV and
    a bitcoin price label, each comment body is added with that day's
    label (binary or multi-class depending on ``binary``).
    """
    dataset = {"text": [], "label": []}
    directory = "../../Data/Reddit_Data/btc/comments/"
    price_labels = get_labeled_bitcoin_price() if binary else get_multi_class_label()
    cursor = moment.date(from_date)
    from_date = moment.date(from_date)
    to_date = moment.date(to_date)
    while cursor < to_date:
        day = cursor.format('YYYY-MM-DD')
        csv_path = f"{directory}{day}.csv"
        if os.path.exists(csv_path) and day in price_labels:
            day_label = price_labels[day]
            frame = pd.read_csv(csv_path, sep=";")
            # Guard against NaN / non-string bodies before extending.
            frame["body"] = [str(body) for body in frame["body"]]
            dataset["text"].extend(frame["body"])
            dataset["label"].extend(day_label for _ in frame["body"])
        cursor.add(day=1)
    return pd.DataFrame(dataset)
def get_date_labeled_dataset(from_date, to_date, binary=True):
    """Build a per-day dataset of price labels plus calendar features.

    For every day in [from_date, to_date) the row holds the day's label,
    its year/month/day components, and the labels of the one and two
    preceding days (label_m1 / label_m2).

    Bug fix: year/month/day were previously assigned as scalars
    (``dataset["year"] = ...``) instead of appended, so every row ended
    up carrying the components of the last processed date.
    """
    dataset = {"label": [], "year": [], "month": [], "day": [],
               "label_m1": [], "label_m2": []}
    price_labels = get_labeled_bitcoin_price() if binary else get_multi_class_label()
    current_date = moment.date(from_date)
    # Seed the lag labels from the two days preceding the start date.
    label_m1 = price_labels[moment.date(from_date).add(day=-1).format("YYYY-MM-DD")]
    label_m2 = price_labels[moment.date(from_date).add(day=-2).format("YYYY-MM-DD")]
    to_date = moment.date(to_date)
    while current_date < to_date:
        date = current_date.format('YYYY-MM-DD')
        label = price_labels[date]
        dataset["label"].append(label)
        dataset["year"].append(int(date[0:4]))
        dataset["month"].append(int(date[5:7]))
        dataset["day"].append(int(date[8:10]))
        dataset["label_m2"].append(label_m2)
        dataset["label_m1"].append(label_m1)
        # Slide the one/two-day lag window forward.
        label_m2 = label_m1
        label_m1 = label
        current_date.add(day=1)
    return pd.DataFrame(dataset)
def process_ad_form(request):
    """Extract the ad-form fields from a POST request into a plain dict.

    The 'start_date' field is expected to contain a range split by '-'
    (start and end); opening/closing hours are parsed into time objects.
    """
    post = request.POST.get
    date_range = post('start_date').split('-')
    return {
        'subcategory': post('subcategory'),
        'title': post('title'),
        'description': post('description'),
        'start_date': moment.date(date_range[0]).date,
        'end_date': moment.date(date_range[1]).date,
        'state': post('state'),
        'zipcode': post('zipcode'),
        'address': post('address'),
        'address1': post('address1'),
        'open_at': parse_time(post('opening_hour')),
        'closed_at': parse_time(post('closing_hour')),
        'longitude': post('longitude'),
        'latitude': post('latitude'),
    }
def build_frame_to_send(self, client, timestamp, rssi, mac_id, ssid=None):
    """Record one probe request on the client's pending frame and, if a
    previous publish was skipped while the frame was busy, flush it over
    MQTT.

    Directed probes (ssid given) and null probes (no ssid) go into
    separate lists under ``client.frame_to_send.value['frame']['probes']``.

    NOTE(review): IS_PROCESSING is a plain global flag, not a lock —
    this is not atomic; confirm only one writer touches the frame.
    """
    global IS_PROCESSING
    global DID_NOT_SEND
    print(f'{timestamp} {rssi} {mac_id} {ssid}')
    # Set the flag to True to tell others using the global object to wait till processing is complete
    IS_PROCESSING = True
    if ssid is not None:
        # this means it is a directed probe request
        client.frame_to_send.value['frame']['probes']['directed'].append({'timestamp': str(
            moment.date(timestamp)), 'rssi': rssi, 'mac_id': mac_id, 'ssid': ssid.strip()})
    else:
        # this means it is a null probe request
        client.frame_to_send.value['frame']['probes']['null'].append({'timestamp': str(
            moment.date(timestamp)), 'rssi': rssi, 'mac_id': mac_id, 'ssid': None})
    # Set this flag back to False to tell others using the global object that processing is complete and the object is usable
    IS_PROCESSING = False
    # TODO: Handle errors!!!
    # IF Did not send because it was still processing, now send and reset back the frame object back to empty
    if DID_NOT_SEND:
        # add timestamp to the frame to help in creating files
        client.frame_to_send.value["timestamp"] = str(moment.utcnow())
        # add device mac_address
        client.frame_to_send.value["deviceMacID"] = self.device_mac_address
        self.mqtt_client.client.publish(
            'frame_topic', json.dumps(client.frame_to_send.value))
        # Reset to an empty frame skeleton for the next batch.
        client.frame_to_send.value = {
            'frame': {'probes': {'directed': [], 'null': []}}}
        DID_NOT_SEND = False
def messagesOld(mdf):
    """Print a random ~10% sample of year-old conversations.

    Expects a DataFrame with FROM, TO, DATETIME, FOLDER, CONTENT columns.
    Conversations are grouped by the non-"Krishna Mehra" participant;
    only groups with at least two messages where Krishna sent at least
    one are eligible.
    """
    #print(mdf)
    # OTHER = the conversation partner (whichever side isn't Krishna Mehra)
    mdf['OTHER'] = mdf.apply(lambda x: x['TO'] if x['FROM'] == 'Krishna Mehra' else x['FROM'], axis=1)
    # Window: between two years ago and one year ago.
    filteredStart = mdf[mdf['DATETIME'] < pd.to_datetime(moment.date("1 year ago").date)]
    filteredByDate = filteredStart[filteredStart['DATETIME'] > pd.to_datetime(
        moment.date("2 years ago").date)]
    # NOTE(review): fileredByFolder is computed but never used below —
    # grouping runs on filteredByDate, so the INBOX filter has no effect.
    fileredByFolder = filteredByDate[filteredByDate['FOLDER'] == 'INBOX']
    groupedConversations = filteredByDate.groupby('OTHER')
    #multipleConversations = groupedConversations.filter(lambda x: len(x) > 1)
    #print(multipleConversations)
    #sampleConversations = multipleConversations.sample(frac=0.1)
    for key, conversations in groupedConversations.groups.items():
        if len(conversations) < 2:
            continue
        # Count messages Krishna sent in this conversation.
        sent = 0
        for c in conversations:
            if mdf.iloc[c]['FROM'] == 'Krishna Mehra':
                sent = sent + 1
        if sent == 0:
            continue
        # Keep roughly 10% of the qualifying conversations.
        if random.random() > 0.1:
            continue
        print("\n===\n{}\n===".format(key))
        for c in conversations:
            print(" [{}] {}".format(mdf.iloc[c]['DATETIME'], mdf.iloc[c]['CONTENT']))
    return
def set_args(self, args):
    """ Return dict with parameters included in GET. """
    interval = self.validate_interval(args.get('interval'), self.valid_intervals)

    # Start: parse when present and non-empty, otherwise None.
    start = args.get('start', None)
    if start is None or start == '':
        start = None
    else:
        try:
            start = moment.date(start)
        except ValueError:
            start = None

    # End: derive from start + duration when absent, else parse it.
    end = args.get('end', None)
    if end is None:
        end = self.set_end(start, args.get('duration', None))
    else:
        end = moment.date(end)

    return {
        'interval': interval,
        'start': start,
        'end': end,
        'circuit': args.get('circuit', 'summary'),
        'base': args.get('base', '65'),
        'location': args.get('location', '0'),
    }
def correct_demand(data: list = None):
    """Fill in missing weekdays for a week of demand rows.

    Each row of ``data`` starts with an ISO date string followed by
    numeric values.  Missing weekdays (ISO 1..7) get a zero-filled row
    appended.  Returns (sorted rows, list of [date, weekday_index]).

    Bug fix: the default argument was a mutable ``[]`` shared across
    calls (and annotated ``tuple`` while being mutated as a list); it is
    now a fresh list per call.

    NOTE(review): rows appended inside the loop are themselves iterated
    by the outer ``for`` over the growing list — this mirrors the
    original behaviour; confirm it is intended.
    """
    if data is None:
        data = []
    week = {}
    dates = []
    for x in data:
        week[datetime.strptime(x[0], '%Y-%m-%d').isoweekday()] = x[0]
        num_datos = (len(x) - 1)
        for day_week in range(1, 8):
            aggregate = []
            if day_week not in week:
                if day_week > 1:
                    # Offset forward from the known Monday.
                    date = moment.date(week.get(1)).add(days=(day_week - 1)).format(ff)
                else:
                    # No Monday known yet: walk back from the earliest known day.
                    keys_id = list(week.keys())
                    date = moment.date(week.get(
                        keys_id[0])).subtract(days=(keys_id[0] - 1)).format(ff)
                week[datetime.strptime(date, '%Y-%m-%d').isoweekday()] = date
                aggregate.append(date)
                aggregate.extend([0] * num_datos)  # zero demand for the synthetic day
                data.append(aggregate)
                dates.append([date, day_week - 1])
            else:
                dates.append([week.get(day_week), day_week - 1])
    return sorted(data), dates
def sent_framework(start=None, end=None, day=None):
    """Query VW_RETENCAO_SMS_ENVIADO for rows sent between start and end.

    Defaults: both ``start`` and ``end`` fall back to yesterday.  When
    ``day`` is given it overrides both bounds with that single date.

    Fix: a second ``if end is None`` branch (defaulting end to today)
    was unreachable — the first default always assigns end — and has
    been removed without changing behaviour.

    NOTE(review): dates are interpolated into the SQL string; they come
    from moment formatting so injection risk is low, but parameterized
    queries would be preferable.
    """
    # start/end date defaults: yesterday
    if start is None:
        start = moment.now().subtract(days=1).format("YYYY/MM/DD")
    if end is None:
        end = moment.now().subtract(days=1).format("YYYY/MM/DD")
    # Explicit single day overrides both bounds.
    if day is not None:
        start = moment.date(day).format("YYYY/MM/DD")
        end = moment.date(start).format("YYYY/MM/DD")
    conn = db.connect_mssql()
    cur = conn.cursor()
    cur.execute(""" SELECT * FROM VW_RETENCAO_SMS_ENVIADO WHERE DATAENVIO between '{start}' AND '{end}' """.format(start=start, end=end))
    return list(cur)
def amanda_month():
    """Count TRUE/FALSE work-status cells in a Google Sheet for the
    request's date period.

    Reads the "date-period" query arg, assumed to be
    "<start:10 chars> <end:10 chars>" — TODO confirm format.
    Returns "<true_count> <false_count>" zero-padded to two digits, or
    None (implicitly) when the sheet has no data.
    """
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?' 'version=v4')
    service = discovery.build('sheets', 'v4', http=http, discoveryServiceUrl=discoveryUrl)
    spreadsheetId = '1CZRVoXjseU5jQlLeT_IhpFUA_1yPgvcXtMfgvinn1hM'
    rangeName = 'data!A2:100'
    result = service.spreadsheets().values().get(
        spreadsheetId=spreadsheetId, range=rangeName).execute()
    values = result.get('values', [])
    if not values:
        print('No data found.')
    else:
        work_period = request.args.get("date-period")
        work_month_start = moment.date(work_period[0:10])
        work_month_end = moment.date(work_period[11:21])
        work_status = []
        for row in values:
            # row[0] = date cell, row[3] = "TRUE"/"FALSE" status cell
            if moment.date(row[0]) >= work_month_start and moment.date(row[0]) <= work_month_end:
                print('%s %s' % (row[0], row[3]))
                work_status.append(row[3])
        print('%s %s' % (str(work_status.count("TRUE")).zfill(2), str(work_status.count("FALSE")).zfill(2)))
        status_true = str(work_status.count("TRUE")).zfill(2)
        status_false = str(work_status.count("FALSE")).zfill(2)
        return '%s %s' % (status_true, status_false)
def duration_str(self):
    """Human-readable span: full start stamp, short end time when the
    event ends on the same day/month as it starts."""
    full_fmt = "ddd DD/MM/YYYY HH:mm"
    same_day = self.start.month == self.end.month and self.start.day == self.end.day
    begin = moment.date(self.start).locale(settings.TIME_ZONE).format(full_fmt)
    finish = moment.date(self.end).locale(settings.TIME_ZONE).format(
        "HH:mm" if same_day else full_fmt)
    return f"{begin}-{finish}"
async def bean_change(self, session):
    """Collect fresh-bean balance changes and build a notification message.

    Pages through the bean detail feed summing today's and yesterday's
    income/expense, then stores the formatted report on ``self.message``.

    Bug fixes:
    - today's expense was assigned (``today_used = -amount``) instead of
      accumulated, so only the last expense of the day counted; it now
      mirrors the ``yesterday_used`` accumulation.
    - the empty-page guard compared ``len(...) < 0`` (always false); it
      now uses ``< 1`` like the sibling ``total_bean``.
    """
    total_bean = 0  # total beans
    yesterday = moment.date(moment.now().sub('days', 1)).zero
    today = moment.date(moment.now()).zero
    today_used = 0  # today's expenses
    today_income = 0  # today's income
    yesterday_income = 0  # yesterday's income
    yesterday_used = 0  # yesterday's expenses
    page = 1
    finished = False
    println('{}, 正在获取资产变动信息...'.format(self.account))
    while True:
        detail = await self.get_bean_detail(session, page)
        if not detail or 'evaluateList' not in detail or len(
                detail['evaluateList']) < 1:
            break
        total_bean = detail['points']
        item_list = detail['evaluateList']
        for item in item_list:
            day = moment.date(item['createTime'], '%H:%M:%S').zero
            amount = int(item['points'])
            if day.diff(yesterday).days == 0:
                if amount > 0:  # income
                    yesterday_income += amount
                else:  # expense
                    yesterday_used += -amount
            elif day.diff(yesterday).days >= 1:
                # Entries older than yesterday: stop paging.
                finished = True
                break
            if day.diff(today).days == 0:
                if amount > 0:
                    today_income += amount
                else:
                    today_used += -amount  # was "=": accumulate, don't overwrite
        if finished:
            break
        else:
            page += 1
    message = '\n【活动名称】赚鲜豆\n【活动入口】京东APP>京东到家->签到\n'
    message += '【京东账号】{}\n【活动昵称】{}\n'.format(self.account, self.nickname)
    message += '【连续签到】{}天\n'.format(self.already_sign_days)
    message += '【鲜豆总数】{}\n【今日收入】{}\n【今日支出】{}\n'.format(
        total_bean, today_income, today_used)
    message += '【昨日收入】{}\n【昨天支出】{}\n'.format(yesterday_income, yesterday_used)
    self.message = message
    println('{}, 获取资产变动信息完成...'.format(self.account))
def before_date(self, date):
    """Return True when this item's recorded date precedes ``date``.

    No cutoff date means everything passes; a missing own date fails.
    """
    own_date = str(self.meta.get('date'))
    if not date:
        return True
    if not own_date:
        return False
    return moment.date(own_date) < moment.date(date)
def price(path):
    """Total trip cost: flight price plus hotel nights between legs.

    For each consecutive pair of legs, charges (nights between the two
    departure dates, minus one) at the destination city's nightly rate.
    """
    rooms = 0
    for (f1, t1, d1), (f2, t2, d2) in zip(path[:-1], path[1:]):
        nights = abs(
            moment.date(d1).add(hours=5).diff(
                moment.date(d2).add(hours=5)).days) - 1
        rooms += nights * prices[cities[t1]]
    return flightPrice(path) + rooms
def handle_festival_names(self, string, check_past_tense):
    """ handles and returns timestamps for festival names in user input.

    Recognizes christmas (Dec 25), thanksgiving (Nov 22 — NOTE(review):
    the real holiday is the 4th Thursday of November and varies by year;
    confirm the fixed date is intended) and new year (Jan 1), rolling
    the year forward/backward based on next/previous phrasing or past
    tense.

    NOTE(review): if propertyFile.festival_names contains a festival not
    handled by the three branches below, ``day`` is never assigned and
    the set_json_time_stamp call raises NameError — confirm.
    """
    res_time_stamp = []
    # Festival names mentioned in the input (case-insensitive).
    fest_data = list([x for x in propertyFile.festival_names if x.lower() in string.lower()])
    if len(fest_data) > 0 and fest_data is not None:
        # Words indicating "next ..." / "last ..." phrasing.
        next_data = list([x for x in propertyFile.next_words if x.lower() in string.lower()])
        prev_data = list([x for x in propertyFile.previous_words if x.lower() in string.lower()])
        day_num = moment.now().day
        month = moment.now().month
        year = moment.now().year
        if 'christmas' in fest_data[0]:
            if len(next_data) > 0:
                # "next christmas": bump the year only once Dec 25 has passed.
                if (month == 12 and day_num > 25):
                    year_num = year + 1
                else:
                    year_num = year
            elif len(prev_data) > 0 or check_past_tense:
                # "last christmas": this year's if already past, else last year's.
                if (month == 12 and day_num > 25):
                    year_num = year
                else:
                    year_num = year - 1
            else:
                year_num = year
            day = str(moment.date(year_num, 12, 25, 0, 0, 0))
            # Drop the timezone-offset portion (text after '+').
            day = day.split('+')[0]
        elif 'thanksgiving' in fest_data[0]:
            if len(next_data) > 0:
                if ((month == 11 and day_num > 22) or month == 12):
                    year_num = year + 1
                else:
                    year_num = year
            elif len(prev_data) > 0 or check_past_tense:
                if ((month == 11 and day_num > 22) or month == 12):
                    year_num = year
                else:
                    year_num = year - 1
            else:
                year_num = year
            day = str(moment.date(year_num, 11, 22, 0, 0, 0))
            day = day.split('+')[0]
        elif 'new year' in fest_data[0]:
            # A bare "new year" mention defaults to the upcoming one.
            if len(next_data) > 0:
                year_num = year + 1
            elif len(prev_data) > 0 or check_past_tense:
                if (month == 1 and day_num == 1):
                    year_num = year - 1
                else:
                    year_num = year
            else:
                year_num = year + 1
            day = str(moment.date(year_num, 1, 1, 0, 0, 0))
            day = day.split('+')[0]
        res_time_stamp = self.set_json_time_stamp(day, string, 'festival')
    return res_time_stamp
def extract(self) -> Dict[str, str]:
    """Compute how long this Trello card spent in each list.

    Replays the card's move actions oldest-first and accumulates hours
    per list; the list the card currently sits in is charged up to
    "now".  Returns {"time in <list name>": "<n> h" or "<n> d"},
    optionally filtered to the list ids in self.arguments.
    """
    raw_actions: List[Dict] = self.get_data()
    raw_actions.reverse()  # API returns newest first; replay oldest first
    actions: List[MoveActions] = []
    # serialize move actions
    for action in raw_actions:
        if action["type"] == "createCard":
            # Creation counts as a move into the initial list (no source).
            actions.append(
                MoveActions(None, action["data"]["list"].get("id"), action["date"]))
        elif action["type"] == "updateCard" and action["data"].get(
                "listBefore"):
            actions.append(
                MoveActions(action["data"]["listBefore"]["id"],
                            action["data"]["listAfter"]["id"], action["date"]))
    # Without a creation event we cannot anchor the timeline.
    if len(actions) == 0 or raw_actions[0]["type"] != "createCard":
        return {}
    # calculate time in each list ( in hours )
    calculated_time: Dict[str, int] = dict()
    for (index, action) in enumerate(actions):
        if index == 0:
            continue
        if not calculated_time.get(action.from_list_name):
            calculated_time[action.from_list_name] = 0
        # Time spent in the source list = gap between consecutive moves.
        from_date = moment.date(actions[index - 1].date).datetime
        to_date = moment.date(action.date).datetime
        total = to_date - from_date
        calculated_time[
            action.from_list_name] += total.total_seconds() / (60 * 60)
    # last list time: charge the current list from its last move until now.
    action = actions[-1] if len(actions) > 1 else actions[0]
    if not calculated_time.get(action.to_list_name):
        calculated_time[action.to_list_name] = 0
    from_date = moment.date(action.date).datetime
    # Align tzinfo so the aware/naive subtraction is valid.
    to_date = datetime.now().replace(tzinfo=from_date.tzinfo)
    total = to_date - from_date
    calculated_time[action.to_list_name] += total.total_seconds() / (60 * 60)
    data = dict()
    for item in calculated_time:
        # Optional filtering by list ids passed as arguments.
        if len(self.arguments) > 0 and item not in self.arguments:
            continue
        # Resolve list id -> display name (falling back to a synthetic
        # List); < 24h rendered as hours, otherwise whole days.
        data["time in {0}".format(
            BaseData.idList_to_List_map.get(
                item, trello.List(
                    name=item, list_id=item, board=self.card.board)
            ).name)] = '{0} h'.format(calculated_time[item]) \
            if calculated_time[item] < 24 else \
            '{0} d'.format(calculated_time[item] // 24)
    return data
async def total_bean(self, session):
    """Summarize the JD bean balance and recent income/expense.

    Pages through the bean detail feed, accumulating today's and
    yesterday's income and expense, plus the expiring-bean record.

    Bug fix: today's expense was assigned (``today_used = -amount``)
    instead of accumulated, so only the last expense of the day counted;
    it now mirrors the ``yesterday_used`` accumulation.

    :param session: HTTP session used by the detail/amount fetchers.
    :return: dict with bean_amount, today/yesterday income & used, expire.
    """
    bean_amount = await self.get_bean_amount(session)  # current bean total
    expire_record = await self.get_expire_bean(session)  # expiring beans
    today_income = 0  # today's income
    today_used = 0  # today's expenses
    yesterday_income = 0  # yesterday's income
    yesterday_used = 0  # yesterday's expenses
    yesterday = moment.date(moment.now().sub('days', 1)).zero
    today = moment.date(moment.now()).zero
    page = 1
    finished = False
    println('{}, 正在获取京豆明细...'.format(self.account))
    while True:
        detail_list = await self.get_bean_detail(session, page)
        if len(detail_list) < 1:
            break
        for item in detail_list:
            day = moment.date(item['date'], '%H:%M:%S').zero
            amount = int(item['amount'])
            if day.diff(yesterday).days == 0:
                if amount > 0:  # income
                    yesterday_income += amount
                else:  # expense
                    yesterday_used += -amount
            elif day.diff(yesterday).days >= 1:
                # Entries older than yesterday: stop paging.
                finished = True
                break
            if day.diff(today).days == 0:
                if amount > 0:
                    today_income += amount
                else:
                    today_used += -amount  # was "=": accumulate, don't overwrite
        page += 1
        if finished:
            break
    return {
        'bean_amount': bean_amount,
        'today_income': today_income,
        'today_used': today_used,
        'yesterday_income': yesterday_income,
        'yesterday_used': yesterday_used,
        'expire': expire_record
    }
def months(self, origin, destination, date, adults):
    """Return cached FlightPrice entries for ``date``'s month, scraping
    Google Flights' calendar view (current + next month) when the cache
    is empty.

    NOTE(review): the re-query after scraping filters on the exact
    ``date`` rather than the scraped month — confirm that is intended.
    """
    # Known no-flight routes short-circuit immediately.
    if len(NoFlights.objects.filter(origin=origin, destination=destination)) > 0:
        return []
    entries = FlightPrice.objects.filter(
        origin=origin, destination=destination,
        date__year=moment.date(date).year,
        date__month=moment.date(date).month, adults=adults)
    if len(entries) == 0:
        browser = self.browser()
        browser.visit(
            'https://www.google.nl/flights/#search;f=' + origin + ';t=' + destination +
            ';d=' + date + ';tt=o;ti=t0800-2000;px=' + adults + ";s=0")
        el = browser.find_by_css('.OMOBOQD-G-q')
        el.first.click()  # open the calendar/month view
        time.sleep(3)  # wait for the calendar to render
        # First table: the month containing ``date``; skip the header row.
        table = browser.find_by_css('.OMOBOQD-p-j').first
        trs = [tr for tr in table.find_by_css('tr')][1:6]
        count = 0
        for tr in trs:
            for td in tr.find_by_css('td'):
                sp = td.text.split("\n")
                if len(sp) == 2:  # cell holds "day\nprice"
                    day = sp[0]
                    price = sp[1]
                    price = int(price.strip('€ ').replace('.', ''))
                    fdate = moment.date(date).replace(days=int(day)).strftime("%Y-%m-%d")
                    fp = FlightPrice(origin=origin, destination=destination,
                                     date=fdate, adults=adults, price=price)
                    fp.save()
                    count += 1
        # Second table: the following month.
        fdate = moment.date(date).replace(days=1).add(months=1)
        table = browser.find_by_css('.OMOBOQD-p-o').first
        trs = [tr for tr in table.find_by_css('tr')][1:6]
        for tr in trs:
            for td in tr.find_by_css('td'):
                sp = td.text.split("\n")
                if len(sp) == 2:
                    day = sp[0]
                    price = sp[1]
                    price = int(price.strip('€ ').replace('.', ''))
                    fdate = moment.date(fdate).replace(days=int(day)).strftime("%Y-%m-%d")
                    fp = FlightPrice(origin=origin, destination=destination,
                                     date=fdate, adults=adults, price=price)
                    fp.save()
                    count += 1
        if count == 0:
            # Nothing scraped: remember this route has no flights.
            NoFlights(origin=origin, destination=destination).save()
        entries = FlightPrice.objects.filter(origin=origin, destination=destination,
                                             date=date, adults=adults)
    return entries
def computeMatchPosts(uid, post_content, mydb):
    """Find posts matching ``post_content`` by type, calendar day of its
    time range and (optionally) proximity.

    :param uid: requesting user's id — currently unused (the
        related_member exclusion below is commented out).
    :param post_content: dict with optional "type", "time_range"
        (epoch start/end seconds) and "location" keys.
    :param mydb: DB facade exposing selectCollection/getData.
    :return: returnHelper-wrapped sorted document list, or the raw
        error response when a DB call fails.
    """
    res = mydb.selectCollection("xmatePost")
    if(res['status']):
        # selectCollection failed; propagate the error response as-is.
        return res
    dis_threshold = 2.0  # max distance for a location match (units per calculateDistance)
    match_list = {}
    docu_list = []
    if(post_content["type"] == None):
        pass
    else:
        match_list["type"] = post_content["type"]
    if(post_content["time_range"] == None):
        pass
    else:
        # Normalize the requested start time to the whole calendar day:
        # match posts whose range starts after 00:00 and ends before
        # 23:59 of the request's start day.
        #st = datetime.fromtimestamp(post_content["time_range"]["start_time"])
        st = moment.unix(post_content["time_range"]["start_time"])
        #en = datetime.fromtimestamp(post_content["time_range"]["end_time"])
        en = moment.unix(post_content["time_range"]["end_time"])
        #nst = datetime(st.year, st.month, st.day, 0)
        nst = moment.date(st.year, st.month, st.day, 0).epoch()
        #nen = datetime(st.year, st.month, st.day, 23,59)
        nen = moment.date(st.year, st.month, st.day, 23, 59).epoch()
        match_list["time_range.start_time"] = {'$gt': nst}
        #match_list["time_range.end_time"] = {'$lt': datetime.timestamp(nen)}
        match_list["time_range.end_time"] = {'$lt': nen}
    res = mydb.getData(match_list)
    if(res["status"]):
        return res
    cursor = res["content"]
    if(post_content["location"] == None):
        # No location filter: newest posts first.
        for doc in cursor:
            docu_list.append(doc)
        docu_list.sort(key=lambda postd: postd["post_datetime"], reverse=True)
    else:
        for doc in cursor:
            # if(doc["related_member"].count(uid) > 0):
            #     continue
            dist = calculateDistance(doc["location"], post_content["location"])
            if(dist < dis_threshold):
                doc["diff"] = dist
                docu_list.append(doc)
        docu_list.sort(key=lambda postd: (postd["post_datetime"], postd["diff"]))
    return returnHelper(content=docu_list)
def incremet_month(date: str, num_month: int):
    """Shift ``date`` forward by ``num_month`` months and return that
    month's first and last day (formatted via ``ff``), with a month
    count of 1."""
    start = moment.date(date).add(months=num_month).replace(day=1).format(ff)
    anchor = moment.date(start)
    last_month_day = calendar.monthrange(
        int(anchor.format(fy)), int(anchor.format(fm)))[1]
    end = moment.date(start).replace(day=last_month_day).format(ff)
    return {'start': start, 'end': end, 'months': 1}
def adminChargeMonthcard(self, face, user_id=None, user_qq=None, administrator_id=None, administrator_qq=None):
    """
    Charge (extend) a user's month card by one month.

    Returns:
        date object: end date of the newly charged period
        1: face not valid
        2: user not exist
        100: system error
    """
    card_item = self.getMonthcard(face)
    if not card_item:
        return 1
    if not self.long_connect:
        self.connect()
    user = self.getUser(user_id, user_qq, no_insert=True)
    administrator = self.getUser(administrator_id, administrator_qq)
    if not user:
        if not self.long_connect:
            self.close(False)
        return 2
    if not administrator:
        logging.warning("<detected error> administrator not found")
        if not self.long_connect:
            self.close(False)
        return 100
    now_timestamp = self.timestamp()
    # Default start: midnight of today.
    today = self.datetime(now_timestamp)
    today = moment.date(today.year, today.month, today.day).date
    start_timestamp = self.timestamp(today)
    # If the user's most recent card ends in the future, the new period
    # starts where that one ends instead.
    if self.cur.execute(
            'SELECT time_end FROM monthcard WHERE user_id = {0} AND face = "{1}" ORDER BY time_end DESC LIMIT 1'
            .format(user.id, face)):
        record = self.cur.fetchone()
        if record[0] > start_timestamp:
            start_timestamp = record[0]
    # Snap the start to midnight and add one month for the end.
    start_time = self.datetime(start_timestamp)
    start_time = moment.date(start_time.year, start_time.month, start_time.day)
    end_time = start_time.clone().add(months=1).date
    start_time = start_time.date
    start_timestamp = self.timestamp(start_time)
    end_timestamp = self.timestamp(end_time)
    if not self.cur.execute(
            'INSERT INTO monthcard (user_id, administrator_id, face, time_register, time_start, time_end) VALUES ({0}, {1}, "{2}", {3}, {4}, {5})'
            .format(user.id, administrator.id, face, now_timestamp, start_timestamp, end_timestamp)):
        logging.warning(
            "<detected error> insert monthcard record not found")
        if not self.long_connect:
            self.close(False)
        return 100
    if not self.long_connect:
        self.close(True)
    # Timestamps are in milliseconds — hence the /1000.
    return datetime.date.fromtimestamp(end_timestamp / 1000)
def imuafool(priceData, mrqData, dfMry=None, startDate='2018-05-08', endDate='2018-06-11'):
    """Print a fundamentals summary report for one ticker.

    :param priceData: daily price DataFrame (indexed by date, with
        'close', 'year', 'quarter' columns) — assumed; TODO confirm.
    :param mrqData: most-recent-quarter fundamentals DataFrame.
    :param dfMry: unused in this function.
    :param startDate: window start for min/max market cap.
    :param endDate: window end for min/max market cap.
    """
    print('---------------------------- Imuafool ---------------------------------')
    # Per-quarter price aggregates joined onto the fundamentals.
    maxMonthPrice = priceData.groupby(['year', 'quarter'])['close'].agg(['mean', 'max', 'min', 'last']).reset_index()
    totalQuarterlyData = mrqData.merge(maxMonthPrice, on=['year', 'quarter'])
    totalQuarterlyData['fcf_ttm'] = totalQuarterlyData['fcf'].rolling(4).sum()
    totalQuarterlyData['mc_max'] = totalQuarterlyData['max'] * totalQuarterlyData['sharesbas']
    totalQuarterlyData['mc_min'] = totalQuarterlyData['min'] * totalQuarterlyData['sharesbas']
    totalQuarterlyData['mc_last'] = totalQuarterlyData['last'] * totalQuarterlyData['sharesbas']
    # Right now data - relies on up-to-date price data
    startDate = moment.date(startDate)
    endDate = moment.date(endDate)
    maxPrice = priceData.loc[startDate.date:endDate.date].max()['close']
    sharesbas_ = mrqData['sharesbas'][-1:].sum()
    maxMarketCap = maxPrice * sharesbas_
    minPrice = priceData.loc[startDate.date:endDate.date].min()['close']
    minMarketCap = minPrice * sharesbas_
    lastPrice = priceData['close'].iat[-1]
    lastMarketCap = lastPrice * sharesbas_
    lastEightQuarters = mrqData[-8:]
    startDateFmt = startDate.format('MMM D, YYYY')
    endDateFmt = endDate.format('MMM D, YYYY')
    change = periodChange(['revenue', 'netinc', 'eps', 'workingcapital'], lastEightQuarters)
    print(change.to_string())
    # print("Market Cap (min, max, last):", minMarketCap, maxMarketCap, lastMarketCap)
    # print("Current share price", lastPrice)
    print("52 week low/high", "???")
    ev = mrqData['ev'].iat[-1]
    print("EV/EBITDA (mrq)", ev / mrqData['ebitdausd'].iat[-1])
    print("EV/Sales (ttm)", ev / change['revenue_ttm'].iat[-1])
    print("Fwd P/E", "???")  # needs estimates
    # NOTE(review): this string literal contains a raw line break in the
    # source — almost certainly an extraction artifact of "\n"; as
    # written it is not valid Python. Confirm the original literal.
    print("Revenue. 
Net Income and Earnings")
    # print(source[['quarter','revenue']].to_string(formatters={'revenue':'${:,.0f}'.format}))
    # print(constructSentence("Fiscal 20XX Revenue", source['revenue'][-4:].sum(),source['revenue'][-9:-5].sum()))
    print("Revenue")
    print(lastEightQuarters[['quarter', 'grossmargin', 'ebitdamargin', 'netmargin']].to_string())
    print("Margins")
    # NOTE(review): the quarter-margin table is printed twice (here and
    # just above under "Revenue") — possibly a copy/paste slip.
    print(lastEightQuarters[['quarter', 'grossmargin', 'ebitdamargin', 'netmargin']].to_string())
    print(lastEightQuarters[['year', 'grossmargin', 'ebitdamargin', 'netmargin']].to_string())
    # print("TrendLine",trendline(source['revenue']))
    print("Free Cash Flow")
    print(lastEightQuarters[['year', 'fcf']].to_string())
    print("Capital structure")
    # NOTE(review): assigning into the mrqData slice triggers pandas'
    # SettingWithCopy behaviour; a .copy() would be safer.
    lastEightQuarters['debtEquity'] = (lastEightQuarters['debtusd'] / lastEightQuarters['equity'])
    print(lastEightQuarters[
        ['cashnequsd', 'workingcapital', 'debtusd', 'equity', 'debtEquity']].dropna().transpose().to_string())
def split_by_month(collection_name):
    """Split a collection's documents into month-sized chunks.

    Walks [min_date, max_date) in one-month steps and returns a list of
    (documents, month_start_date) tuples, documents sorted by date.

    Improvement: the "one month after min_date" bound was computed three
    times per iteration; it is now computed once per loop pass.
    """
    min_date = find_min_date(collection_name)
    max_date = find_max_date(collection_name)
    output = []
    while True:
        next_month = moment.date(min_date).add(months=1).date
        if next_month >= max_date:
            break
        chunk = list(ngs.find({"$and": [{"date": {"$lt": next_month}},
                                        {"date": {"$gte": min_date}}]}).sort("date", 1))
        output.append((chunk, min_date))
        min_date = next_month
    # TODO: return an object tying each chunk to its month rather than a bare list
    return output
def test_devices_post_existing(self, mock_moment):
    """POST /0/dispositivos for an already-registered device returns 200
    with the stored record, and fecha_registro keeps its original value
    rather than the (mocked) current date."""
    # Freeze "now" at 2015-06-22 in the configured timezone.
    mock_datetime = moment.utc('2015-06-22', '%Y-%m-%d').timezone(CONFIG['moment']['timezone'])
    mock_moment.side_effect = lambda: mock_datetime
    expected_device = {
        'tipo': 'gcm',
        'id': 'dummy',
        'fecha_registro': moment.date('2015-06-21', '%Y-%m-%d').isoformat()
    }
    Device.insert_one(self.mongo_db, 'gcm', 'dummy')
    response = self.app.post('/0/dispositivos', data={'tipo': 'gcm', 'id': 'dummy'})
    self.assertEqual('application/json', response.mimetype)
    self.assertEqual(200, response.status_code)
    data = json.loads(response.data.decode())
    self.assertEqual(dict, type(data))
    # Registration date is preserved from the first insert, so it must
    # NOT equal the 06-21 value built above; compare the rest field-wise.
    self.assertNotEqual(expected_device['fecha_registro'], data['fecha_registro'])
    del expected_device['fecha_registro']
    del data['fecha_registro']
    self.assertEqual(expected_device, data)
def batchTasks(username, repository, service):
    """
    Fetches all issues for the given user in a repository
    from the specified service

    Python 2 code (print statement below).  For GitHub, keeps only
    issues assigned to ``username`` and maps milestone due dates to a
    Todoist-style date string; other services delegate to
    batchBitbucketTasks.
    """
    if service == "github":
        # NOTE(review): OAuth client credentials are hard-coded in the URL.
        issues = requests.get(
            "https://api.github.com/repos/" + username + "/" + repository + "/issues?client_id=f4c46f537e5abec0d5b0&client_secret=53ba628c38e4f8adca7d467573a13989b4546743"
        )
        data = json.loads(issues.text)
        print len(data)
        # print 'https://api.github.com/repos/' + username + '/' + repository + '/issues?state=all&client_id=f4c46f537e5abec0d5b0&client_secret=53ba628c38e4f8adca7d467573a13989b4546743'
        # Store all the User data (will be posted to Todoist)
        users = []
        for datum in data:
            user = {}
            if datum["assignee"] != None:
                # Only issues assigned to the requesting user themselves.
                if datum["assignee"]["login"] == username:
                    user["username"] = datum["assignee"]["login"]
                    user["title"] = datum["title"]
                    if datum["milestone"] != None:
                        # Milestone due date -> Todoist-style date string.
                        m = moment.date(datum["milestone"]["due_on"], "%Y-%m-%dT%H:%M:%SZ")
                        user["due"] = m.format("YYYY-M-D H:M")
                    users.append(user)
        return users
    else:
        data = batchBitbucketTasks(username, repository)
        return data
def parse_date(date):
    """ Validates and transform date string from user """
    # Weekday aliases map straight onto moment's weekday replace().
    weekday_aliases = {
        'su': 0, 'sun': 0, 'sunday': 0,
        'mo': 1, 'mon': 1, 'monday': 1,
        'tu': 2, 'tue': 2, 'tuesday': 2,
        'we': 3, 'wed': 3, 'wednesday': 3,
        'th': 4, 'thu': 4, 'thursday': 4,
        'fr': 5, 'fri': 5, 'friday': 5,
        'sa': 6, 'sat': 6, 'saturday': 6,
    }
    if date in ('today', 'now'):
        parsed = moment.now()
    elif date == 'yesterday':
        parsed = moment.now().subtract(days=1)
    elif date in weekday_aliases:
        parsed = moment.now().replace(weekday=weekday_aliases[date])
    else:
        # Anything else is handed to moment's free-form parser.
        parsed = moment.date(date)
    return parsed.format("YYYY-MM-DD")
def determinate_previous_start_month_ff(annual: bool, lastMonth: ElastMonth = None):
    """Determine the previous reporting window's start date and length.

    Annual mode starts 25 months back (first of month); otherwise the
    window starts the month after ``lastMonth``.  The month count is
    derived from the gap up to the module-level previous_end_month.
    """
    if annual is True:
        start = moment.now().subtract(years=2, months=1).replace(day=1)
    else:
        start = moment.date(lastMonth.date_month_end).add(months=1).replace(day=1)
    date_start = date(int(start.format(fy)), int(start.format(fm)), int(start.format(fd)))
    date_end = date(int(previous_end_month.format(fy)),
                    int(previous_end_month.format(fm)),
                    int(previous_end_month.format(fd)))
    diff = rdelta.relativedelta(date_end, date_start)
    # A full year maps to 13 months; a zero-month gap still counts as 1.
    if diff.years == 1:
        months = 13
    elif diff.months == 0:
        months = 1
    else:
        months = diff.months
    return {
        'start': start.format(ff),
        'end': previous_end_month_ff,
        'months': months
    }
def test_models_device_insert_existing(self, mock_moment):
    """Device.insert_one on an existing device keeps the original
    registration date, does not duplicate the document, and returns the
    stored data with status 'ok'."""
    # Freeze "now" at 2015-06-22 for the first insert.
    mock_datetime = moment.utc('2015-06-22', '%Y-%m-%d').timezone(CONFIG['moment']['timezone'])
    mock_moment.side_effect = lambda: mock_datetime
    expected_data = {
        'tipo': 'email',
        'id': '*****@*****.**',
        'fecha_registro': mock_datetime.isoformat()
    }
    Device.insert_one(self.mongo_db, 'email', '*****@*****.**')
    # Mock new date
    mock_datetime = moment.date('2015-06-23', '%Y-%m-%d')
    mock_moment.side_effect = lambda: mock_datetime
    response = Device.insert_one(self.mongo_db, 'email', '*****@*****.**')
    # Still a single document: the re-insert must not duplicate.
    self.assertEqual(1, self.mongo_db.devices.count())
    device_in_db = self.mongo_db.devices.find_one({'tipo': 'email', 'id': '*****@*****.**'}, {'_id': 0})
    self.assertEqual(expected_data, device_in_db)  # Keep previous data
    self.assertEqual('ok', response['status'])
    self.assertEqual(expected_data, response['data'])
def parse_178_html(html):
    """Parse a 178.com list page into item dicts (img, href, id, title,
    created_at, text).

    Python 2 syntax (``except Exception, e``; ``md5.new``).

    NOTE(review): the trailing ``finally:`` clause has no body in this
    source — the function appears truncated and will not compile as-is;
    recover the missing suite before use.
    """
    soup = BeautifulSoup(html, 'lxml')
    dls = soup.find_all('div', attrs={'class': 'list-section'})
    items = []
    for dl in dls:
        try:
            data = {}
            simgtag = dl.find('div', attrs={'class': 'list-section-image'})
            if simgtag:
                data['img'] = simgtag.find('a').attrs['href']
            ctag = dl.find('div', attrs={'class': 'list-section-contents'})
            if ctag:
                htag = ctag.find('h2')
                atag = htag.find('a')
                data['href'] = atag.attrs['href']
                # id = md5 digest of the article URL.
                data['id'] = md5.new(data['href']).hexdigest()
                data['title'] = atag.attrs['title']
                # h5 text holds the date; the part before '.' is the timestamp.
                data['created_at'] = moment.date(ctag.find('h5').text.split('.')[0], 'YYYY-MM-DD HH:mm:ss').format('YYYY-MM-DDThh:mm:ss')
                data['text'] = ctag.find('p').text
            items.append(data)
        except Exception, e:
            raise
        else:
            pass
        finally:
def add_game_details_data():
    """ 更新 GameDetails 数据 """
    # (Update GameDetails rows: load games from JSON_DATA and persist one
    # GameDetails record per game with scores, points and goal difference.)
    with open(JSON_DATA, "r", encoding="utf8") as f:
        games = json.load(f)
    for game in games:
        score1 = int(game["Score1"])
        score2 = int(game["Score2"])
        # League points: 3 for a win, 0 each for a draw per this scheme.
        if score1 > score2:
            integral1, integral2 = 3, 0
        elif score1 == score2:
            integral1, integral2 = 0, 0
        else:
            integral1, integral2 = 0, 3
        goal_difference1 = score1 - score2
        goal_difference2 = score2 - score1
        GameDetails(
            g_score1=score1,
            g_score2=score2,
            g_team1=game["Team1"],
            g_team2=game["Team2"],
            g_group=game["group"],
            g_date=moment.date("{} {}".format(game["date"], game["time"])).datetime,
            g_integral1=integral1,
            g_integral2=integral2,
            g_goal_difference1=goal_difference1,
            g_goal_difference2=goal_difference2,
        )
    # NOTE(review): commit placement is ambiguous in the flattened source;
    # a single commit after the loop is assumed — confirm it should not
    # be per-game.
    orm.commit()
def test_from_date_string(self):
    """Humanized from_date strings across the full range of offsets."""
    d = moment.date(2015, 1, 1)
    # (subtract kwargs, expected humanized string) — checked in order.
    cases = [
        (dict(seconds=10), "a few seconds ago"),
        (dict(seconds=45), "a minute ago"),
        (dict(minutes=5), "5 minutes ago"),
        (dict(minutes=45), "an hour ago"),
        (dict(hours=10), "10 hours ago"),
        (dict(hours=22), "a day ago"),
        (dict(days=15), "15 days ago"),
        (dict(days=26), "a month ago"),
        (dict(days=120), "4 months ago"),
        (dict(days=360), "a year ago"),
        (dict(years=4), "4 years ago"),
    ]
    for delta, expected in cases:
        other = d.copy().subtract(**delta)
        self.assertEquals(d.from_date(other), expected)
def get_all_data():
    """Aggregate expense/earning data for the 'from'..'to' query range.

    Returns the raw expense and earning aggregates plus derived
    percentage-spent/earned and revenue figures, and the list of dates.

    NOTE(review): ``file`` in the 'dates' entry is not defined in this
    function — presumably a module-level DataFrame whose index holds the
    timestamps; verify (and rename: it shadows a builtin name in py2).
    """
    from_date = request.args.get('from')
    to_date = request.args.get('to')
    # calculate these values if found
    percentage_spent =0
    percentage_earned =0
    revenue=0
    # calculate the data
    expenses_data = get_expenses_data(from_date, to_date)
    earnings_data = get_earnings_data(from_date, to_date)
    # calculate additional fields
    total_expenses = expenses_data['total_expenses']
    total_income = earnings_data['total_income']
    if total_expenses > 0 and total_income > 0:
        percentage_spent = round((total_expenses/total_income) * 100)
        percentage_earned = 100-percentage_spent
    if total_income > 0:
        revenue = total_income-total_expenses
    return {
        'expenses_data': expenses_data,
        'earnings_data': earnings_data,
        'percentage_earned': percentage_earned,
        'percentage_spent': percentage_spent,
        'revenue': revenue,
        'dates': [moment.date(timestamp).format('YYYY-MM-DD') for timestamp in list(file.index)]
    }
def manage(request):
    """Paginated plan-management view.

    Annotates each plan on the current page with its completion percent,
    a formatted creation date and a weekday label, then renders
    manage.html.

    Bug fix: ``map()`` returns a lazy iterator on Python 3 and cannot be
    indexed with ``precent_list[i]``; the percent list is now
    materialized with a list comprehension.
    """
    page = request.GET.get('page', 1)
    per_page = request.GET.get('PerPage', 10)
    plan_list = Plan.objects.filter(user=request.user).order_by('-created_at')
    paginator = Paginator(plan_list, per_page)
    counts = plan_list.count()
    next_per_page = int(per_page) + 10
    # Once everything fits on one page there is no "load more" step.
    if int(per_page) > counts:
        next_per_page = 0
    try:
        plans = paginator.page(page)
    except PageNotAnInteger:
        plans = paginator.page(1)
    except EmptyPage:
        plans = paginator.page(paginator.num_pages)
    precent_list = [clac_plan_percent(plan) for plan in plans]
    for i, plan in enumerate(plans):
        plan.precent = precent_list[i]
        to_moment = moment.date(plan.created_at)
        plan.format_time = to_moment.format("YYYY.MM.DD")
        plan.weekday = get_format_weekday(to_moment.weekday)
    return render(request, 'manage.html', {'title': '管理规划', 'plans': plans, 'next_per_page': next_per_page})
def resolve(self, text):
    """Extract date/time mentions from ``text`` and resolve them to timestamps.

    :type text: object (only ``str`` input is processed)
    :return: list of mapping dicts carrying 'baseEntity', 'tag', start/end
        positions and a 'resolvedTo' payload of parsed timestamp components;
        [] for non-string input or on any internal error.
    """
    try:
        resolved_mappings = []
        # BUG FIX: the original condition tested ``type(text) is str`` twice —
        # a leftover from a Python 2 str/unicode check. A single isinstance()
        # test expresses the intent and also accepts str subclasses.
        if isinstance(text, str):
            dateobj = DateUtils()
            result = dateobj.parse_date(str(text))
            for result_each in result:
                timestamp = result_each['timestamp']
                temp = []
                resp = {'baseEntity': result_each['words'],
                        'tag': 'TIMESTAMP',
                        'start': result_each['position'][0],
                        'end': result_each['position'][1],
                        'entity': result_each['words'],
                        'resolvedTo': {'values': []}}
                for timestamp_each in timestamp:
                    # Only the first 19 chars ('YYYY-MM-DD HH:MM:SS') are
                    # parsed; anything after (tz offset etc.) is ignored.
                    moment_obj = moment.date(datetime.datetime.strptime(timestamp_each[0:19], '%Y-%m-%d %H:%M:%S'))
                    day = moment_obj.format('DD')
                    month = moment_obj.format('MM')
                    year = moment_obj.format('YYYY')
                    hour = moment_obj.format('h')
                    minute = moment_obj.format('m')
                    second = moment_obj.format('s')
                    temp.append({'timestamp': timestamp_each[0:19],
                                 'year': year,
                                 'month': month_map[month],
                                 'day': day,
                                 'hour': hour,
                                 'minute': minute,
                                 'second': second})
                resp['resolvedTo']['values'] = temp
                # Only keep resolutions whose surface form actually occurs
                # in the input text.
                if result_each['words'] in text:
                    resolved_mappings.append(resp)
        return resolved_mappings
    except Exception as ex:
        logger.exception(ex, exc_info=True)
        logger.error(traceback.format_exc())
        return []
def fetch_coindesk_stats():
    """Scrape coindesk.com headlines, score their sentiment, and cache them.

    Writes {"error": bool, "data": [[date, headline, sentiment, url], ...]}
    to store/headlines.json; on any failure an error payload is written
    instead so readers of the file always get valid JSON.
    """
    with open(os.path.join(DIR_PATH, "../store/headlines.json"), 'w') as headlines_json:
        try:
            html = requests.get("https://www.coindesk.com/")
            soup = BeautifulSoup(html.text, "html.parser")
            top_articles = soup.find_all('div', class_="card-text-block")
            below_list = soup.find_all('div', class_="list-item-card post")
            headlines = []
            for i in top_articles + below_list:
                date_container = i.find("span", class_="card-date")
                if date_container is None:  # i.e. below_list entries use <time>
                    date_container = i.find("time")
                date_published = moment.date(
                    date_container.get_text()).format("M-D")
                headline_container = i.find("h4") if i.find("h4") else i.find(
                    "h2")
                headline = headline_container.get_text().strip()
                headlines.append(
                    (headline, date_published, i.find("a", class_="")["href"]))
            # NOTE(review): "M-D" strings sort lexicographically, so this
            # ordering is only approximate across month boundaries.
            ordered_headlines = sorted(headlines, key=lambda h: h[1],
                                       reverse=True)
            processed_headlines = []
            for headline in ordered_headlines:
                headline_str = headline[0].split('\n')[0]
                date_published = headline[1]
                sentiment = TextBlob(headline_str).sentiment.polarity
                if sentiment > 0:
                    sentiment = "POS"
                elif sentiment == 0:
                    # BUG FIX: the original tested int(sentiment) == 0, which
                    # truncates every polarity in (-1, 0) to 0 and mislabelled
                    # all but the most negative headlines as neutral.
                    sentiment = "NEUT"
                else:
                    sentiment = "NEG"
                processed_headlines += [[
                    date_published, headline_str, sentiment, headline[2]
                ]]
            headlines_json.write(
                json.dumps({
                    "error": False,
                    "data": processed_headlines
                }, indent=2))
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
            # still propagate; any scrape/parse failure yields an error payload.
            headlines_json.write(
                json.dumps({
                    "error": True,
                    "data": []
                }, indent=2))
def weight_on_day(d):
    """Return the first recorded weight matching day 'd', or 0.0 if none."""
    matches = [w for ts, w in weights if day_matches(moment.date(ts).date, d)]
    return matches[0] if matches else 0.0
def resolve_edad(self: Persona, info):
    """GraphQL resolver: approximate age, computed by subtracting the birth
    date's components from today's date via moment arithmetic."""
    today_str = datetime.date.today().strftime('%Y-%m-%d')
    birth = self.fecha_nacimiento
    delta = moment.date(today_str, 'YYYY-MM-DD').subtract(
        years=birth.year, months=birth.month, days=birth.day)
    return delta.year
def get_last_date(export_from, filename):
    """Return the most recent 'date' value found in the CSV at ``filename``.

    Falls back to ``export_from`` when the file is missing, empty, or a
    date fails to parse.
    """
    try:
        with open(filename, mode="r") as fp:
            parsed = (moment.date(row["date"]).date for row in csv.DictReader(fp))
            return max(parsed)
    except (ValueError, FileNotFoundError):
        return export_from
def get_scatter_data(timespan):
    """Build JSON scatter-plot data of per-tweet sentiment for the session's ticker.

    NOTE(review): ``timespan`` is accepted but never used — confirm whether
    the hard-coded 24-hour window below was meant to honour it.
    """
    print "In our JSON route" + session.get("ticker")
    ticker = session.get("ticker")
    current_stock = Stock.query.get(ticker)
    tweets = current_stock.get_tweets()
    stocks = Stock.query.all()
    # tweets_json = json.dumps(tweets, default=lambda o: o.__dict__)
    # now = moment.utcnow().timezone("US/Eastern")
    result = []
    s = Sentiment(stocks)
    sentiment = None
    # Sentiment is plotted as a pseudo-random bucket value:
    # low strings for negative, high strings for positive.
    negative = ['0.0', '0.1', '0.2', '0.3', '0.4', '0.5']
    positive = ['0.6', '0.7', '0.8', '0.9', '1.0']
    for tweet in tweets:
        # create a moment that represents now - 24 hours
        day_ago = moment.utcnow().timezone("US/Eastern").subtract(hours=24)
        # convert unicode created_at to string
        created_at = unicodedata.normalize('NFKD', tweet.created_at).encode('ascii', 'ignore')
        # format created_at string to ISO 8601
        created_at_str = time.strftime('%Y-%m-%d %H:%M:%S', time.strptime(created_at, '%a %b %d %H:%M:%S +0000 %Y'))
        # create a moment from the string
        created_at = moment.date(created_at_str, 'YYYY-MM-DD HH:mm:ss')
        # convert timezone of moment from UTC to Eastern time
        created_at_final = created_at.utcnow().timezone("US/Eastern")
        print created_at_final > day_ago
        # Keep only original (non-retweet) tweets mentioning exactly one
        # cashtag that were posted within the last 24 hours.
        if tweet.text.count('$') == 1 and tweet.retweeted_status is None and created_at_final > day_ago:
            # Convert tweet text from unicode to text
            tweet_text = unicodedata.normalize('NFKD', tweet.text).encode('ascii', 'ignore')
            # Get the sentiment of the tweet, returned as 'positive' or 'negative'
            sentiment_str = s.get_tweet_sentiment(tweet_text)
            if sentiment_str == 'positive':
                sentiment = random.choice(positive)
            if sentiment_str == 'negative':
                sentiment = random.choice(negative)
            # Example raw value: Sun Jun 05 17:09:07 +0000 2016
            created_at = unicodedata.normalize('NFKD', tweet.created_at).encode('ascii', 'ignore')
            created_at_str = time.strftime('%Y-%m-%d %H:%M:%S', time.strptime(created_at, '%a %b %d %H:%M:%S +0000 %Y'))
            # Below 4 lines returned duplicate timestamps...
            # need a way to convert to US/EST timezone
            # create a moment from the string
            # created_at = moment.date(created_at_str, 'YYYY-MM-DD HH:mm:ss')
            # convert timezone of moment from UTC to Eastern time
            # created_at_final = created_at.utcnow().timezone("US/Eastern")
            print "&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&"
            print created_at_str
            print "&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&"
            result.append({'datetime': created_at_str, 'sentiment': sentiment})
    # sort list of dicts chronologically by datetime string
    sorted_result = sorted(result, key=lambda k: k['datetime'])
    return json.dumps(sorted_result)
def reports_get_query():
    """Build a query dict from the optional 'fecha' request argument.

    Returns an empty dict when no date was supplied, and None when the
    supplied value cannot be parsed.
    """
    query = {}
    raw_date = request.args.get('fecha')
    if raw_date is not None:
        try:
            query['fecha'] = moment.date(raw_date.strip(), '%Y-%m-%d').format('YYYY-M-D')
        except ValueError:
            return None
    return query
def show_search_results():
    """Search Twitter and return a dictionary of results."""
    # Get values from search-box via AJAX
    current_keyword = request.form.get('search').lower()
    print "**********************"
    print current_keyword
    print "**********************"
    tweets = get_tweets_by_api(term=current_keyword)
    result = []
    for tweet in tweets:
        # Exclude retweets since they appear as duplicates to the end user
        if tweet.retweeted_status is None:
            tweet_id = tweet.id
            # Convert tweet text from unicode to text
            text = unicodedata.normalize('NFKD', tweet.text).encode('ascii', 'ignore')
            # Find URL in text and bind to url
            # url = re.search('((?:http|https)(?::\\/{2}[\\w]+)(?:[\\/|\\.]?)(?:[^\\s"]*))', text)
            url = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', text)
            # Remove URL from text
            # NOTE(review): this pattern is anchored with ^ (per line via
            # MULTILINE), so only URLs that start a line are stripped —
            # embedded URLs remain in tweet_text. Confirm intent.
            text_wo_url = re.sub(r'^https?:\/\/.*[\r\n]*', '', text, flags=re.MULTILINE)
            # Handle / screen name
            user = unicodedata.normalize('NFKD', tweet.user.screen_name).encode('ascii', 'ignore')
            # Count of favorites
            favorite_count = tweet.favorite_count
            # Build a Counter of hashtags: hashtag -> number of occurrences
            if tweet.hashtags:
                # Convert hashtags from unicode to string
                ht_list = []
                for hashtag in tweet.hashtags:
                    ht_str = unicodedata.normalize('NFKD', hashtag.text).encode('ascii', 'ignore')
                    ht_list.append(ht_str.lower())
                hashtags = Counter(ht_list)
            else:
                hashtags = tweet.hashtags
            created_at = tweet.created_at
            # format created_at string ('%a %b %d %H:%M:%S +0000 %Y') to ISO 8601
            created_at_str = time.strftime('%Y-%m-%d %H:%M:%S', time.strptime(created_at, '%a %b %d %H:%M:%S +0000 %Y'))
            # create a moment from the string
            # NOTE(review): this moment object is never used — the response
            # carries the plain created_at_str string.
            created_at = moment.date(created_at_str, 'YYYY-MM-DD HH:mm:ss')
            result.append({'created_at': created_at_str, 'tweet_text': text_wo_url, 'user': user, 'favorite_count': favorite_count, 'hashtags': hashtags, 'url': url, 'tweet_id': tweet_id})
    print "&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&"
    print result
    print "&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&"
    return jsonify(result=result)  # , tweets
def adminChargeMonthcard(self, face, user_id = None, user_qq = None, administrator_id = None, administrator_qq = None):
    """
    Charge one month of monthcard ``face`` to a user on behalf of an administrator.

    Returns:
        date object: end date of the (possibly extended) card period
        1: face not valid
        2: user not exist
        100: system error
    """
    card_item = self.getMonthcard(face)
    if not card_item:
        return 1
    if not self.long_connect:
        self.connect()
    # The charged user must already exist (no_insert); the administrator
    # record is created on demand.
    user = self.getUser(user_id, user_qq, no_insert = True)
    administrator = self.getUser(administrator_id, administrator_qq)
    if not user:
        if not self.long_connect:
            self.close(False)
        return 2
    if not administrator:
        logging.warning("<detected error> administrator not found")
        if not self.long_connect:
            self.close(False)
        return 100
    now_timestamp = self.timestamp()
    # Default start: midnight of today.
    today = self.datetime(now_timestamp)
    today = moment.date(today.year, today.month, today.day).date
    start_timestamp = self.timestamp(today)
    # If the user already holds a card of this face ending later, start the
    # new month from that end so periods never overlap.
    # NOTE(review): SQL is built via str.format — safe only while user.id is
    # an int and face comes from getMonthcard; prefer parameterized queries.
    if self.cur.execute('SELECT time_end FROM monthcard WHERE user_id = {0} AND face = "{1}" ORDER BY time_end DESC LIMIT 1'.format(user.id, face)):
        record = self.cur.fetchone()
        if record[0] > start_timestamp:
            start_timestamp = record[0]
    # Normalize the start to midnight and add exactly one calendar month.
    start_time = self.datetime(start_timestamp)
    start_time = moment.date(start_time.year, start_time.month, start_time.day)
    end_time = start_time.clone().add(months = 1).date
    start_time = start_time.date
    start_timestamp = self.timestamp(start_time)
    end_timestamp = self.timestamp(end_time)
    if not self.cur.execute('INSERT INTO monthcard (user_id, administrator_id, face, time_register, time_start, time_end) VALUES ({0}, {1}, "{2}", {3}, {4}, {5})'.format(user.id, administrator.id, face, now_timestamp, start_timestamp, end_timestamp)):
        logging.warning("<detected error> insert monthcard record not found")
        if not self.long_connect:
            self.close(False)
        return 100
    if not self.long_connect:
        self.close(True)
    # Timestamps are stored in milliseconds, hence the /1000.
    return datetime.date.fromtimestamp(end_timestamp/1000)
def processvote(self, answer_selected, request):
    """Record a vote for ``answer_selected``, attributing it to the logged-in
    user when one is authenticated, and stamp it with a millisecond epoch
    for midnight of its creation day.

    Returns ``self`` so callers can chain.
    """
    new_vote = Vote.objects.create(voter=None, answer=answer_selected)
    if request.user.is_authenticated():
        user = request.user
        new_vote.voter = user
        # answer_selected.selected_by.add(user)
        # self.answered_by.add(user)
    # Truncate the creation time to midnight and convert to a Unix epoch...
    new_vote.date = moment.date(datetime.datetime(new_vote.created.year,new_vote.created.month,new_vote.created.day)).epoch()
    # ...then append "000" to express it in milliseconds (Python 2 ``long``).
    new_vote.date = long(str(long(new_vote.date))+"000")
    new_vote.save()
    answer_selected.save()
    self.save()
    return self
def is_good_tweet(tweet): """Ignore tweets with more than 1 $ symbol, retweets, and anything older than 1 day""" #create a moment that represents now - 24 hours day_ago = moment.utcnow().timezone("US/Eastern").subtract(hours=24) # convert unicode created_at to string created_at = unicodedata.normalize('NFKD', tweet.created_at).encode('ascii', 'ignore') # format created_at string to ISO 8610 created_at_str = time.strftime('%Y-%m-%d %H:%M:%S', time.strptime(created_at, '%a %b %d %H:%M:%S +0000 %Y')) # create a moment from the string created_at = moment.date(created_at_str, 'YYYY-MM-DD HH:mm:ss') # convert timezone of moment from UTC to Eastern time created_at_final = created_at.utcnow().timezone("US/Eastern") print created_at_final > day_ago return tweet.text.count('$') == 1 and tweet.retweeted_status is None and created_at_final > day_ago
def details(request, plan_id):
    """Render the detail page for a single plan."""
    plan = Plan.objects.get(id=plan_id)
    created = moment.date(plan.created_at)
    context = {
        'title': '详情',
        'details': PlanDetail.objects.filter(plan=plan),
        'note': plan.note,
        'date': created.format("YYYY.MM.DD"),
        'precent': clac_plan_percent(plan),
        'weekday': get_format_weekday(created.weekday),
    }
    return render(request, 'details.html', context)
def test_devices_post_ok(self, mock_moment):
    """POST /0/dispositivos registers a device and echoes it back with a
    registration timestamp frozen via the mocked moment."""
    frozen = moment.date('2015-06-22', '%Y-%m-%d')
    mock_moment.side_effect = lambda: frozen
    payload = {'tipo': 'gcm', 'id': 'dummy'}
    response = self.app.post('/0/dispositivos', data=payload)
    self.assertEqual('application/json', response.mimetype)
    self.assertEqual(200, response.status_code)
    data = json.loads(response.data.decode())
    self.assertEqual(dict, type(data))
    payload['fecha_registro'] = frozen.isoformat()
    self.assertEqual(payload, data)
def parse_date(datestr, date_formats):
    '''Try every format in ``date_formats`` in order and return a Moment for
    the first one that successfully parses ``datestr``.

    If the matching format contains no year ('Y'), the current year is
    assigned to the returned date (instead of the strptime default of 1900).

    Returns:
        Moment object, or None when no format matches.
    '''
    assert datestr
    assert date_formats
    for date_format in date_formats:
        date_format = date_format.strip()
        try:
            date = moment.date(datestr, date_format)
            if date_format.find('Y') == -1:
                # format doesn't contain a year — substitute the current one
                current_year = datetime.date.today().year
                return date.replace(year=current_year)
            else:
                return date
        except ValueError:
            # this format didn't match; try the next one
            pass
    return None
def charts(request):
    """Return per-day success/fail counts of the current user's face
    recognition activities as JSON for the charts page."""
    # IMPROVEMENT: the original fetched ExtUser twice (one query for the
    # activities, another for the created_at values); fetch once and derive
    # both querysets from it.
    user_activities = ExtUser.objects.get(user=request.user).activities.all()
    created_at_list = user_activities.values('created_at')
    results = []
    for created_at in created_at_list:
        charts_item = {}
        activity_time = created_at['created_at']
        # All activities sharing this exact timestamp form one bucket.
        one_day_activities = user_activities.filter(created_at=activity_time)
        fail = one_day_activities.filter(is_ok=False).count()
        success = one_day_activities.filter(is_ok=True).count()
        charts_item['time'] = moment.date(activity_time).format("MM-DD")
        charts_item['success'] = success
        charts_item['fail'] = fail
        results.append(charts_item)
    return JsonResponse({'faceRecognitionCounts': results}, status=200)
def batchTasks(username, repository):
    """
    Fetches all Github issues for the given user in a repository.

    Returns a list of {'username', 'title', 'due'} dicts (later posted to
    Todoist) for issues assigned to the hard-coded account below.
    """
    # NOTE(review): OAuth client credentials are embedded in the URL — they
    # are visible to anyone with repo access and should move to configuration.
    issues = requests.get('https://api.github.com/repos/' + username + '/' + repository + '/issues?state=all&client_id=f4c46f537e5abec0d5b0&client_secret=53ba628c38e4f8adca7d467573a13989b4546743')
    data = json.loads(issues.text)
    # Store all the User data (will be posted to Todoist)
    users = []
    for datum in data:
        user = {}
        if datum['assignee'] != None:
            # NOTE(review): only issues assigned to 'DrkSephy' are collected —
            # confirm this hard-coded filter is intentional.
            if datum['assignee']['login'] == 'DrkSephy':
                user['username'] = datum['assignee']['login']
                user['title'] = datum['title']
                # Parse the milestone due date (ISO 8601 with trailing Z).
                m = moment.date(datum['milestone']['due_on'], '%Y-%m-%dT%H:%M:%SZ')
                # NOTE(review): 'H:M' are unusual moment format tokens —
                # verify the intended output ('HH:mm'?).
                user['due'] = m.format('YYYY-M-D H:M')
                users.append(user)
    return users
def parse_youku_html(html):
    # Parse a Youku listing page into a list of item dicts
    # ({created_at, img, title, href, id}). Python 2 code (md5.new,
    # ``except Exception, e`` syntax).
    soup = BeautifulSoup(html, "lxml")
    yks = soup.find_all('div', attrs={'class': 'yk-col4'})
    items = []
    for dl in yks:
        try:
            data = {}
            # 'c_time' attribute holds the creation time string.
            data['created_at'] = moment.date(dl.attrs['c_time'], 'YYYY-MM-DD HH:mm:ss').format('YYYY-MM-DDThh:mm:ss')
            imgtag = dl.find('img')
            data['img'] = imgtag.attrs['src']
            data['title'] = imgtag.attrs['title']
            htag = dl.find('a')
            data['href'] = htag.attrs['href']
            # Stable id derived from the item's link URL.
            data['id'] = md5.new(data['href']).hexdigest()
            items.append(data)
        except Exception, e:
            raise
        else:
            pass
        finally:
            # NOTE(review): the finally body and the function's return are
            # missing in this chunk of the source — block appears truncated.
def parse_carry6_html(html):
    # Parse a carry6 listing page into a list of item dicts
    # ({href, id, title, author, created_at, text}). Python 2 code.
    soup = BeautifulSoup(html, 'lxml')
    dls = soup.find_all('div', attrs={'class': 'container_list'})
    items = []
    for dl in dls:
        try:
            data = {}
            atag = dl.find('a', attrs={'class': 'list_pic'})
            data['href'] = atag.attrs['href']
            # Stable id derived from the item's link URL.
            data['id'] = md5.new(data['href']).hexdigest()
            data['title'] = atag.attrs['title']
            itemtag = dl.find('div', attrs={'class': 'items-info'})
            # First <b> holds the author, second the creation date.
            btag = itemtag.find_all('b')
            data['author'] = btag[0].text
            data['created_at'] = moment.date(btag[1].text, 'YY-MM-DD').format('YYYY-MM-DDThh:mm:ss')
            data['text'] = dl.find('p').text
            items.append(data)
        except Exception, e:
            raise
        else:
            pass
        finally:
            # NOTE(review): the finally body and the function's return are
            # missing in this chunk of the source — block appears truncated.
def getTweetsOverTime(listOfTweets):
    # Bucket consecutive tweets that share the same [year, month, day, hour,
    # minute] timestamp; return up to 21 [timestamp, count] pairs.
    aaa = []
    # "previous timestamp" sentinel that cannot match any real timestamp
    pts = [-1,0,0,0,0,0]
    index = -1
    #timeStamps.append("Time")
    for tweet in listOfTweets:
        #timeStamps.append(tweet['Tweet Created At'])
        timestring = tweet['Tweet Created At']
        #print timestring
        m = moment.date(timestring, '%Y-%m-%d %H:%M:%S')
        print str(m.month) + " " + str(m.day) + " " + str(m.hour) + " " + str(m.minute) + " " + str(m.second)
        #ts = [m.month, m.day, m.hour, m.minute, m.second]
        # month is made zero-based — presumably for a JavaScript Date on the
        # client; confirm the consumer expects that.
        ts = [m.year, m.month - 1, m.day, m.hour, m.minute]
        if pts == ts:
            # Same minute as the previous tweet: bump the current bucket.
            aaa[index][1] += 1
        else:
            # New minute: open a new bucket, stopping after 21 buckets.
            aaa.append([ts, 1])
            index += 1
            if index >= 20:
                break
        pts = ts
    print aaa
    return aaa
def index(request):
    """Dashboard view: the user's profile info plus one page (5 per page) of
    their activities, newest first, with prev/next pagination flags."""
    account = request.user.username
    real_name = request.user.user_ext.name
    student_number = request.user.user_ext.student_number
    xclass = request.user.user_ext.xclass
    now_user_activities = request.user.user_ext.activities.order_by('-created_at')
    # Pre-format each activity's timestamp for the template.
    for ac in now_user_activities:
        ac.format_time = moment.date(ac.created_at).format("YYYY-MM-DD hh:mm:ss A")
    paginator = Paginator(now_user_activities, 5)
    page = request.GET.get('page', 1)
    try:
        activities = paginator.page(page)
    except PageNotAnInteger:
        # Non-numeric page parameter: fall back to the first page.
        activities = paginator.page(1)
    first = False
    try:
        previous_page = activities.previous_page_number()
    except EmptyPage:
        # Already on the first page.
        first = True
        previous_page = 1
    last = False
    try:
        next_page = activities.next_page_number()
    except EmptyPage:
        # Already on the last page.
        last = True
        next_page = paginator.num_pages
    return render(request, 'index.html', {
        'account': account,
        'username': real_name,
        'xclass': xclass,
        'number': student_number,
        'activities': activities.object_list,
        'previous_page': previous_page,
        'next_page': next_page,
        'first': first,
        'last': last})
def test_moment_can_subtract_another_moment(self):
    """Subtracting one moment from another yields a truthy delta."""
    later = moment.date((2012, 12, 19))
    earlier = moment.date((2012, 12, 18))
    self.assertTrue(later - earlier)
def test_moment_can_transfer_between_datetime_and_moment(self):
    """A moment survives a round trip through its datetime representation."""
    as_datetime = moment.now().to_date()
    round_tripped = moment.date(as_datetime).to_date()
    self.assertEquals(as_datetime, round_tripped)
def format_date(date):
    """Format *date* as a long, human-readable string, e.g. 'March 01, 2020'."""
    as_moment = moment.date(date)
    return as_moment.strftime('%B %d, %Y')
def test_subtract_with_keywords(self):
    """subtract() accepts several units in one keyword call."""
    actual = moment.date((2012, 12, 19, 1, 2, 3))
    actual.subtract(hours=1, minutes=2, seconds=3)
    self.assertEquals(actual, moment.date((2012, 12, 19)))
def test_simple_chaining_commands(self):
    """replace() mutates the moment in place to the expected datetime."""
    actual = moment.date([2012, 12, 18])
    expected = moment.date((2012, 12, 18, 1, 2, 3)).done()
    actual.replace(hours=1, minutes=2, seconds=3)
    self.assertEqual(actual, expected)
def test_date_property(self):
    """The .date property exposes the underlying datetime."""
    underlying = moment.date(2012, 12, 18).date
    self.assertEquals(underlying, datetime(2012, 12, 18))
def test_a_datetime_can_subtract_a_moment(self):
    """A plain datetime minus a moment yields a truthy delta."""
    as_moment = moment.date((2012, 12, 18))
    delta = datetime(2012, 12, 19) - as_moment
    self.assertTrue(delta)
def test_moment_unix_command(self):
    """moment.unix() converts a millisecond epoch to the expected UTC date."""
    from_epoch = moment.unix(1355788800000, utc=True)
    self.assertEquals(from_epoch, moment.date((2012, 12, 18)))