def extract_dataframe_peruser(alldata):
    rows = []
    install_id_to_first_condition = {}
    max_timestamp = get_global_max_timestamp(alldata)
    for install_id, experiment_info_with_sessions in alldata.items():
        userid = ''
        last_condition_for_user = ''
        firstlast_info = get_firstlast_info(experiment_info_with_sessions)
        first_localepoch = firstlast_info['first_localepoch']
        last_localepoch = firstlast_info['last_localepoch']
        first_timestamp = firstlast_info['first_timestamp']
        last_timestamp = firstlast_info['last_timestamp']
        if last_timestamp is None or first_timestamp is None:
            continue
        #days_kept_installed = (moment.unix(last_timestamp) - moment.unix(first_timestamp)).days
        days_kept_installed = last_localepoch - first_localepoch
        attritioned = (moment.unix(max_timestamp) - moment.unix(last_timestamp)).days > 2
        first_condition_for_user = None
        first_conditionduration_for_user = None
        for experiment_info in experiment_info_with_sessions:
            for condition_info in experiment_info['condition_info_list']:
                condition = condition_info['condition']
                conditionduration = condition_info['conditionduration']
                if first_condition_for_user is None:
                    first_condition_for_user = condition
                if first_conditionduration_for_user is None:
                    first_conditionduration_for_user = conditionduration
                for day_info in condition_info['day_info_list']:
                    for session_info in sorted(day_info['session_info_list'],
                                               key=lambda k: k['timestamp']):
                        last_condition_for_user = condition
                        userid = session_info['userid']
                        domain = session_info['domain']
        if first_condition_for_user is None or first_conditionduration_for_user is None:
            continue
        completed_first_condition = days_kept_installed >= first_conditionduration_for_user
        attritioned_during_first_condition = (not completed_first_condition) and attritioned
        rows.append({
            'userid': userid,
            'install_id': install_id,
            'last_condition_for_user': last_condition_for_user,
            'attritioned': int(attritioned),
            'days_kept_installed': days_kept_installed,
            'attritioned_during_first_condition': int(attritioned_during_first_condition),
            'completed_first_condition': int(completed_first_condition),
            'first_condition_for_user': first_condition_for_user,
            'first_conditionduration_for_user': first_conditionduration_for_user,
        })
    return pd.DataFrame(rows)
def clean_timestamps(stock_quotes, timespan):
    """Transform Epoch timestamp to %Y-%m-%d %H:%M:%S"""
    bar = {
        # "2016-07-16 09:30:00": 250
    }
    num_results = len(stock_quotes["series"])
    unsorted_stock_quotes = []
    for i in range(num_results):
        stock_quote = stock_quotes["series"][i]
        unix_timestamp = stock_quote["Timestamp"]
        # Create moment from unix_timestamp, convert to EST timezone, and format
        clean_timestamp = moment.unix(unix_timestamp, utc=True).timezone("US/Eastern").format("YYYY-M-D HH:mm:ss")
        # For each item in "series", create clean_stock_quote for table display in JINJA
        clean_stock_quote = {"Timestamp": clean_timestamp,
                             "close": stock_quote["close"],
                             "volume": stock_quote["volume"]}
        # Append clean_stock_quote to unsorted_stock_quotes list
        unsorted_stock_quotes.append(clean_stock_quote)
        # Define prefix for 30 minute intervals
        prefix = moment.unix(unix_timestamp, utc=True).timezone("US/Eastern").strftime('%Y-%m-%d %H:')
        if timespan == '1':
            bar[clean_timestamp] = stock_quote["volume"]
        elif timespan == '6':
            bar[clean_timestamp] = stock_quote["volume"]
        elif timespan == '12':
            bar[clean_timestamp] = stock_quote["volume"]
        elif timespan == '24':
            # If minute is less than 30, take prefix and concatenate minutes,
            # which becomes the bar key
            if int(moment.unix(unix_timestamp, utc=True).timezone("US/Eastern").strftime('%M')) < 30:
                unix_timestamp = prefix + '00:00'
            else:
                unix_timestamp = prefix + '30:00'
            # Sum the volume within the interval: if the interval key is
            # already in bar, add to it; otherwise create it.
            if unix_timestamp in bar:
                bar[unix_timestamp] = bar[unix_timestamp] + stock_quote["volume"]
            else:
                bar[unix_timestamp] = stock_quote["volume"]
    # Sort clean_stock_quotes by Timestamp
    clean_stock_quotes = sorted(unsorted_stock_quotes, key=lambda k: k["Timestamp"])
    return clean_stock_quotes, bar
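# A minimal usage sketch for clean_timestamps, using a hypothetical payload
# shaped like the "series" structure the function expects.
sample_quotes = {
    "series": [
        {"Timestamp": 1468675800, "close": 99.5, "volume": 250},
        {"Timestamp": 1468677600, "close": 99.8, "volume": 300},
    ]
}
quotes, volume_bars = clean_timestamps(sample_quotes, timespan='24')
# With timespan '24', volumes are summed into 30-minute buckets keyed by
# a '%Y-%m-%d %H:' prefix plus '00:00' or '30:00'.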
def getDateUnix(inputString):
    start = getDate(inputString)
    if start is None:
        return None
    try:
        start = int(start.timestamp())
        end = getCurrentTime()
        print(moment.unix(start), moment.unix(end))
        return (start, end)
    except Exception:  # narrowed from a bare except
        return None
def computeMatchPosts(uid, post_content, mydb):
    res = mydb.selectCollection("xmatePost")
    if res['status']:
        return res
    dis_threshold = 2.0
    match_list = {}
    docu_list = []
    if post_content["type"] is not None:
        match_list["type"] = post_content["type"]
    if post_content["time_range"] is not None:
        #st = datetime.fromtimestamp(post_content["time_range"]["start_time"])
        st = moment.unix(post_content["time_range"]["start_time"])
        #en = datetime.fromtimestamp(post_content["time_range"]["end_time"])
        en = moment.unix(post_content["time_range"]["end_time"])
        #nst = datetime(st.year, st.month, st.day, 0)
        nst = moment.date(st.year, st.month, st.day, 0).epoch()
        #nen = datetime(st.year, st.month, st.day, 23, 59)
        nen = moment.date(st.year, st.month, st.day, 23, 59).epoch()
        match_list["time_range.start_time"] = {'$gt': nst}
        #match_list["time_range.end_time"] = {'$lt': datetime.timestamp(nen)}
        match_list["time_range.end_time"] = {'$lt': nen}
    res = mydb.getData(match_list)
    if res["status"]:
        return res
    cursor = res["content"]
    if post_content["location"] is None:
        for doc in cursor:
            docu_list.append(doc)
        docu_list.sort(key=lambda postd: postd["post_datetime"], reverse=True)
    else:
        for doc in cursor:
            # if doc["related_member"].count(uid) > 0:
            #     continue
            dist = calculateDistance(doc["location"], post_content["location"])
            if dist < dis_threshold:
                doc["diff"] = dist
                docu_list.append(doc)
        docu_list.sort(key=lambda postd: (postd["post_datetime"], postd["diff"]))
    return returnHelper(content=docu_list)
def render(self):
    """ """
    # Get all the active attacks
    buildings = session.query(Building).order_by(Building.collection_time).all()
    friends = session.query(Player).filter(Player.is_friend == 1).all()
    neighbours = session.query(Player).filter(Player.is_neighbor == 1).all()
    guild = session.query(Player).filter(Player.is_guild_member == 1).all()
    resources = session.query(Resources).first()
    now = moment.unix(time.time(), utc=True).format('HH:mm:ss')
    self.screen.addstr(self.line, 0, "Time: %s | Running: %ss | Update in: %ss" % (now, int(self.running), 100))
    self.screen.addstr(self.line, 0, self.SEPERATOR)
    self.screen.addstr(self.line, 0, "Coins: %s | Supplies: %s" % ("{:,}".format(resources.money), "{:,}".format(resources.supplies)))
    self.screen.addstr(self.line, 0, "Friends: %s | Neighbours: %s | Guild: %s" % (len(friends), len(neighbours), len(guild)))
    self.screen.addstr(self.line, 0, "Buildings: %s" % (len(buildings)))
    self.screen.addstr(self.line, 0, self.SEPERATOR)

    MAPPER = {
        'residential': curses.color_pair(1),
        'production': curses.color_pair(2),
        'goods': curses.color_pair(3),
    }

    self.screen.addstr(self.line, 0, "ID | Building | Type | State | Collection Time | Remaining |")
    self.screen.addstr(self.line, 0, self.SEPERATOR)

    for building in buildings[:35]:
        colour = MAPPER.get(building.type, '')
        remaining = "%0.0f" % round(building.collection_time - time.time())
        collection = moment.unix(building.collection_time, utc=True).format('HH:mm:ss')
        self.screen.addstr(self.line, 0,
                           "%s | %s | %s | %s | %s | %s" % (self.fixed(building.id, 3),
                                                            self.fixed(building.cityentity_id, 30),
                                                            self.fixed(building.type, 12),
                                                            self.fixed(building.state, 23),
                                                            self.fixed(collection, 15),
                                                            self.fixed(remaining, 3)),
                           colour)
    return
async def get_expire_bean(self, session, timeout=0.5):
    """
    :param timeout:
    :param session:
    :return:
    """
    try:
        println('{}, fetching soon-to-expire JD bean data...'.format(self.account))
        await asyncio.sleep(timeout)
        session.headers.add(
            'Referer', 'https://wqs.jd.com/promote/201801/bean/mybean.html')
        session.headers.add('Host', 'wq.jd.com')
        session.headers.add('Content-Type', 'application/x-www-form-urlencoded')
        url = 'https://wq.jd.com/activep3/singjd/queryexpirejingdou?_={}&g_login_type=1&sceneval=2'.\
            format(int(time.time() * 1000))
        response = await session.get(url=url)
        text = await response.text()
        data = json.loads(text[23:-13])
        res = []
        for item in data['expirejingdou']:
            amount = item['expireamount']
            if amount == 0:
                continue
            msg = '[JD beans expiring on {}]: {}'.format(
                moment.unix(item['time']).zero.format('YYYY-M-D'), amount)
            res.append(msg)
        return res
    except Exception as e:
        println('{}, failed to fetch soon-to-expire JD bean data: {}.'.format(self.account, e.args))
        return []
def get_timestamp(timestamp=None, utc=True):
    if timestamp is None:
        timestamp = time.time()
    # https://stackoverflow.com/a/52606421/9919772
    #dt = datetime.datetime.fromtimestamp(timestamp).astimezone()
    dt = moment.unix(timestamp, utc=utc)
    dt = dt.timezone(current_tzname())
    return dt.strftime("%m-%d-%Y %I:%M:%S%p %Z")
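# Usage sketch for get_timestamp, assuming current_tzname() (defined elsewhere
# in this module) returns a zone name such as "US/Pacific".
print(get_timestamp())            # the current time, formatted
print(get_timestamp(1355788800))  # a fixed epoch-seconds value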
def format_millis(time):
    """Formats a unix milliseconds timestamp to user readable string"""
    try:
        time = float(time)
        formatted_time = moment.unix(time)
        click.echo(formatted_time)
    except ValueError:
        click.echo("The value that was entered is not valid")
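# Usage sketch for format_millis. The moment library appears to fall back to
# a milliseconds interpretation when the value is too large to be epoch
# seconds (compare test_moment_unix_command below), so both calls print output.
format_millis("1355788800000")  # epoch milliseconds
format_millis("not-a-number")   # prints the validation message instead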
def main():
    for sub in reddit.subreddit('AdvancedRunning').new(limit=None):
        dt = moment.unix(sub.created_utc).format('YYYY-MM-DD')
        if dt not in commentsByDate:
            commentsByDate[dt] = 0
        commentsByDate[dt] += sub.num_comments
    for dt in sorted(commentsByDate.keys()):  # dict.iterkeys() is Python 2 only
        print(dt + ' ' + str(commentsByDate[dt]))
def send_message(number, timestamp, location):
    text = 'Looks like a great day to dive in %s on ' % location
    date = moment.unix(timestamp, utc=True).format('dddd, MMMM D')
    try:
        message = client.messages.create(to=number, from_=TWILIO_PHONE_NUM, body=text + date)
    except Exception as e:
        print("Unknown Exception: ", e)  # Python 3 print; Exception has no .msg
def timestamp_to_epoch(timestamp):
    start_of_epoch = moment.now().timezone("US/Pacific").replace(
        years=2016, months=1, days=1, hours=0, minutes=0, seconds=0,
        milliseconds=0, microseconds=0)
    return moment.unix(timestamp).timezone("US/Pacific").diff(
        start_of_epoch).days
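# Usage sketch: days elapsed since the 2016-01-01 US/Pacific "epoch" defined
# above, assuming moment's diff(...) returns a timedelta-like object with a
# .days attribute, as the function expects.
print(timestamp_to_epoch(1468675800))  # mid-July 2016, roughly 197 days in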
def dateCheck(item):
    itemTime = os.path.getatime(item)
    # itemTime = * 1000
    # make moments
    itemMoment = moment.unix(itemTime)
    nowMoment = moment.now()
    agedSeven = moment.unix(itemTime).add(days=7)
    # the item is safe if its access time is less than seven days old
    itemSafe = agedSeven > nowMoment
    print("item: ", item)
    print("atime: ", itemMoment)
    print("aged: ", agedSeven)
    print("safe: ", itemSafe)
    print("-----------------")
def makeUsefulData(args, dirName):
    folders = listdir(dirName)
    for folderName in folders:
        filtedData = []
        files = listdir(join(dirName, folderName))
        files = sorted(files)
        operation_num = 1
        running_time = 0
        save_dir = join("datasets", "usefulData", folderName)
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        for fileName in files:
            df = pd.read_csv(join(dirName, folderName, fileName))
            titles = np.array(df.columns)
            datas = df.iloc[:, :].to_numpy(dtype=float)
            datas, operation_num, running_time = filterData(args, datas, operation_num, running_time)
            # print(join(dirName, folderName, fileName), np.sum(datas[:,:], axis=0) / datas.shape[0])
            # break
            # Open the output CSV file
            if datas.shape[0] > 0:
                operation_num += 1
                # if args.bp:
                #     datas[:,14:-1] = meanfilter(datas[:,14:-1])
                print(moment.unix(datas[0, 0]))
                with open(join(save_dir, fileName.split('.')[0] + '.csv'), 'w', newline='') as csvfile:
                    # Create the CSV writer
                    writer = csv.writer(csvfile)
                    # Write the data
                    titles = titles.tolist()
                    titles.append('開機次數')  # power-on count
                    titles.append('運轉時間')  # running time
                    writer.writerow(titles)
                    for r in range(datas.shape[0]):
                        # if datas[r,3] < 24:
                        #     print(datas[r,0], moment.unix(datas[r,0]), "==")
                        writer.writerow(datas[r, :])
                        filtedData.append(datas[r, :])
        filtedData = np.array(filtedData)
        if args.bp:
            print(filtedData.shape)
            drawPlot(filtedData, folderName)
        else:
            drawPlot2(filtedData, folderName)
def get_proccessed_data(self, exported_data):
    logger.info("Processing ranking data")
    proccessed_data = {}
    for data in exported_data:
        platform = config.SENSORTOWER_APPS[str(data["app_id"])]
        country = data["country"].lower()
        category = data["category"]
        chart_type = data["chart_type"]
        key = f"{country}_{category}_{chart_type}"
        for timestamp, ranking, _ in data["ranking_history"]:
            date = moment.unix(timestamp, utc=True).format(config.MOMENT_DATE_FORMAT)
            proccessed_data.setdefault((date, platform), {})[key] = int(ranking)
    return proccessed_data
def get_game_details() -> Iterator[Dict[str, str]]:  # a generator, not a single dict
    games: List = get_upcoming_games()
    for game in games:
        try:
            date: str = moment.unix(game['globalReleaseDate']).strftime('%Y%m%d')
            name: str = game['title']
            stamp: str = '{}T000000Z'.format(date)
            yield {
                'date': date,
                'name': name,
                'stamp': stamp,
            }
        except TypeError:
            continue
def __getTransfers(self, timeRange):
    startTime = timeRange["startTime"]
    endTime = timeRange["endTime"]
    print("Syncing BEP2 transfers until " + moment.unix(endTime).format("DD.MM.YYYY @ HH:mm") + " ...")
    transfers = self.__binanceApi.getTransfers(startTime=startTime, endTime=endTime)
    data = []
    for transfer in transfers:
        data.append({
            "from": transfer["fromAddr"],
            "to": transfer["toAddr"],
            "amount": transfer["value"],
            "timestamp": transfer["timeStamp"],
            "txHash": transfer["txHash"],
        })
    return data
def reset_password(params):
    if not ("code" in params and "password" in params and "confirm" in params):
        raise WebException("Please fill out all the fields.")
    if params["password"] != params["confirm"]:
        raise WebException("Your passwords don't match, silly!")
    db = api.common.db_conn()
    ticket = db.password_recovery.find_one({"code": params["code"], "active": True})
    if ticket is None:
        raise WebException("That code doesn't seem to be right.")
    now = moment.utcnow().date.timestamp()
    expiredate = moment.unix(ticket["expire"], utc=True).date.timestamp()
    if now > expiredate:
        raise WebException("Your code expired!")
    db.password_recovery.update_one(
        {"code": params["code"], "active": True},
        {"$set": {"active": False}})
    phash = hash_password(params["password"])
    db.users.update_one(
        {"email": ticket["email"].lower()},
        {"$set": {"password": phash}})
    return
def build_location_weather():
    dataset = "./C-B0024-002.json"
    time_list = []
    temp_list = []
    m_list = []
    with open(dataset, 'r', encoding="utf-8") as f:
        json_data = json.loads(f.read())
        data = json_data["cwbopendata"]["dataset"]["location"][0]
        print(data["locationName"])
        timeStr = ""
        for location_data in data["weatherElement"][0]["time"]:
            print(location_data["obsTime"])
            try:
                timeArray = time.strptime(location_data["obsTime"], "%Y-%m-%d %H:%M")
                timestamp_temp = int(time.mktime(timeArray))
                timeStr = location_data["obsTime"]
            except Exception:
                # Unparseable obsTime: assume the next hourly observation
                timestamp_temp += 3600
                timeStr = moment.unix(timestamp_temp).format('YYYY-M-D HH:m')
            if timestamp_temp >= 1543593600:
                time_list.append(timeStr)
                temp_list.append(location_data["weatherElement"][1]["elementValue"]["value"])
                m_list.append(location_data["weatherElement"][2]["elementValue"]["value"])
                # print(location_data["obsTime"], location_data["weatherElement"][1]["elementValue"]["value"], location_data["weatherElement"][2]["elementValue"]["value"])
    new_df = pd.DataFrame({'time': time_list, 'temp': temp_list, 'humidity': m_list})
    new_df.to_csv("Banqiao.csv", index=False)
    return
def quotes():
    try:
        url = request.args.get('url')
        if url:
            content = loop.run_until_complete(get_csv(url))
            soup = BeautifulSoup(content, 'lxml')
            ind_titles = soup.findAll(attrs={"class": "pane-legend-line"})
            _ind_titles = []
            _ind_values = []
            for ind in ind_titles:
                name = ind.find(attrs={"class": "pane-legend-title__description"})
                values = ind.findAll(attrs={"class": "pane-legend-item-value-wrap"})
                _loc_values = []
                for val in values:
                    _loc_values.append(val.get_text())
                _ind_values.append(' '.join(list(map(str, _loc_values))))
                _ind_titles.append(name.get_text())
            json_string = soup.find(attrs={"class": "js-chart-view"})['data-options']
            parsed_string = json.loads(json_string)
            parsed_string = json.loads(parsed_string['content'])['panes']
            main = None
            indicators = []
            for item in parsed_string:
                for item2 in item['sources']:
                    if item2['type'] == 'MainSeries':
                        main = item2
                    elif item2['type'] == 'Study':
                        indicators.append(item2)
            r = '\nno data\n'
            if main:
                title = soup.find(attrs={"class": "pane-legend-title__container"}).get_text()
                d = main['bars']['data']
                columns = ['time', 'open', 'high', 'low', 'close', 'vol', '%', 'id', 'timestamp']
                if len(indicators):
                    for i in indicators:
                        _name = i['metaInfo']['shortDescription']
                        # Drop indicator columns that only ever hold 0/1 flags
                        for _n in range(len(i['data']['data'][0]['value'])):
                            _s = 1
                            for _i in i['data']['data']:
                                if _i['value'][_n] != 0 and _i['value'][_n] != 1:
                                    _s = 0
                                    break
                            if _s == 1:
                                for _i in i['data']['data']:
                                    _i['value'].pop(_n)
                                break
                        # Match the indicator against the scraped legend titles
                        for name in _ind_titles:
                            s = 1
                            if not re.match('^' + str(_name), name):
                                s = 0
                            if 'source' in i['state']['inputs'] and not re.search(
                                    str(i['state']['inputs']['source']), name):
                                s = 0
                            if 'length' in i['state']['inputs'] and not re.search(
                                    str(i['state']['inputs']['length']), name):
                                s = 0
                            if 'increment' in i['state']['inputs'] and not re.search(
                                    str(i['state']['inputs']['increment']), name):
                                s = 0
                            if 'max value' in i['state']['inputs'] and not re.search(
                                    str(i['state']['inputs']['max value']), name):
                                s = 0
                            if 'start' in i['state']['inputs'] and not re.search(
                                    str(i['state']['inputs']['start']), name):
                                s = 0
                            for n in range(12):
                                if 'in_' + str(n) in i['state']['inputs'] and not re.search(
                                        str(i['state']['inputs']['in_' + str(n)]), name):
                                    s = 0
                            if s:
                                _name = name
                                break
                        count_columns = len(i['data']['data'][0]['value']) - 1
                        for number in range(count_columns):
                            columns.append('"' + _name + '"')
                r = ','.join(columns) + '\n'
                i = 0
                close = None
                for item in d:
                    item['value'] = item['value'][0:6]
                    data_ind = []
                    procent = None
                    for item2 in indicators:
                        for item3 in item2['data']['data']:
                            if item['value'][0] == item3['value'][0]:
                                item3['value'] = list(map(str, item3['value']))
                                item3['value'].pop(0)
                                data_ind.append(','.join(item3['value']))
                                break
                    if close:
                        procent = round((item['value'][-2] - close) / (close / 100), 2)
                    close = item['value'][-2]
                    r += ','.join(list(map(str, item['value']))) + ',' + str(procent) + \
                         ',' + str(i) + ',' + \
                         moment.unix(item['value'][0] * 1000, utc=True).format("YYYY-MM-DD HH:mm:ss") + \
                         ',' + ','.join(data_ind) + '\n'
                    i += 1
            return Response(r, mimetype="text/csv",
                            headers={"Content-disposition": "attachment; filename= " + title + ".csv"})
        return '{"error":"no url parameter"}'
    except Exception as e:
        print('---> ', e)
        return '{"error":"' + str(e) + '"}'
def test_moment_unix_command(self):
    d = moment.unix(1355788800000, utc=True)  # epoch milliseconds
    expected = moment.date((2012, 12, 18))
    self.assertEqual(d, expected)  # assertEquals is a deprecated alias
# (fragment: the Elasticsearch aggregation `body` dict that closed above this
#  point was not captured in the source)
res = es.search(index=fromIndex, search_type="count", body=body)
buckets = res["aggregations"]["hist"]["buckets"]
# prettyPrint(buckets)
bulkActions = []
for bucket in buckets:
    m = moment.unix(bucket['key']).date  ##.add(hours=4).date
    doc = {"@timestamp": bucket['key_as_string']}
    if "the_count" in bucket:
        doc["the_count"] = bucket["the_count"]["value"]
    if "holt1" in bucket:
        doc["holt1"] = bucket["holt1"]["value"]
    if "holt2" in bucket:
        doc["holt2"] = bucket["holt2"]["value"]
    if "holt3" in bucket:
        doc["holt3"] = bucket["holt3"]["value"]
    if "the_count" in doc and "holt1" in doc and (doc["the_count"] > doc["holt1"] * 1.4):
        doc["surprise1"] = doc["the_count"]
    if "the_count" in doc and "holt2" in doc and (doc["the_count"] > doc["holt2"] * 1.4):
        doc["surprise2"] = doc["the_count"]  # (the source fragment ends mid-statement here)
def extract_dataframe_daily(alldata, day_filter_funcs=[], user_filter_funcs=[]):
    if callable(day_filter_funcs):
        day_filter_funcs = [day_filter_funcs]  # was [filter_funcs], an undefined name
    rows = []
    install_id_to_first_condition = {}
    max_timestamp = get_global_max_timestamp(alldata)
    for install_id, experiment_info_with_sessions in alldata.items():
        accept_user = True
        if len(experiment_info_with_sessions) == 0:
            continue
        for user_filter_func in user_filter_funcs:
            if not user_filter_func(experiment_info_with_sessions):
                accept_user = False
        if not accept_user:
            continue
        first_condition_for_user = None
        for experiment_info in experiment_info_with_sessions:
            for condition_info in experiment_info['condition_info_list']:
                condition = condition_info['condition']
                if first_condition_for_user is None:
                    first_condition_for_user = condition
        first_conditionduration_for_user = None
        for experiment_info in experiment_info_with_sessions:
            for condition_info in experiment_info['condition_info_list']:
                conditionduration = condition_info['conditionduration']
                if first_conditionduration_for_user is None:
                    first_conditionduration_for_user = conditionduration
        install_id_to_first_condition[install_id] = first_condition_for_user
        firstlast_info = get_firstlast_info(experiment_info_with_sessions)
        first_localepoch = firstlast_info['first_localepoch']
        last_localepoch = firstlast_info['last_localepoch']
        last_timestamp = firstlast_info['last_timestamp']
        user_saw_both_same_and_random = get_did_user_experience_both_same_and_random(
            experiment_info_with_sessions)
        for experiment_info in experiment_info_with_sessions:
            num_days_in_same_condition = 0
            num_days_in_same_condition_and_saw_intervention = 0
            intervention_to_num_days_seen_at_least_once = Counter()
            for condition_info in experiment_info['condition_info_list']:
                condition = condition_info['condition']
                conditionduration = condition_info['conditionduration']
                for day_info in condition_info['day_info_list']:
                    domain_to_num_samples = get_domain_to_num_samples(
                        day_info['session_info_list'])
                    #is_day_with_just_one_sample = 0
                    #if len(day_info['session_info_list']) < 2:
                    #    is_day_with_just_one_sample = 1
                    domain_to_total_time_spent = get_domain_to_total_time_spent(
                        day_info['session_info_list'])
                    domain_to_last_timestamp = get_domain_to_last_timestamp(
                        day_info['session_info_list'])
                    day_intervention = 'random'
                    domain_to_num_impressions_on_day = {}
                    last_timestamp_on_day = None
                    days_since_install = None
                    days_until_last_day = None
                    domain_to_last_timestamp = {}
                    saw_intervention_today_same = False
                    interventions_seen_today = set()
                    for session_info in sorted(day_info['session_info_list'],
                                               key=lambda k: k['timestamp']):
                        domain = session_info['domain']
                        if domain not in domain_to_num_impressions_on_day:
                            domain_to_num_impressions_on_day[domain] = 0
                        else:
                            domain_to_num_impressions_on_day[domain] += 1
                        userid = session_info['userid']
                        time_spent = session_info['time_spent']
                        timestamp = session_info['timestamp']
                        if domain not in domain_to_last_timestamp:
                            domain_to_last_timestamp[domain] = timestamp
                        else:
                            domain_to_last_timestamp[domain] = max(
                                timestamp, domain_to_last_timestamp[domain])
                        if last_timestamp_on_day is None:
                            last_timestamp_on_day = timestamp
                        last_timestamp_on_day = max(last_timestamp_on_day, timestamp)
                        intervention = session_info['intervention']
                        interventions_seen_today.add(intervention)
                        if condition == 'same':
                            saw_intervention_today_same = True
                            day_intervention = intervention
                        localepoch = session_info['localepoch']
                        days_since_install = localepoch - first_localepoch
                        days_until_last_day = last_localepoch - localepoch
                        #if domain != 'www.facebook.com':
                        #    continue
                    if len(day_info['session_info_list']) == 0:
                        continue
                    is_last_day = int(last_timestamp_on_day == last_timestamp)
                    domain_to_attritioned = {}
                    if is_last_day:
                        for domain, last_timestamp_for_domain in domain_to_last_timestamp.items():
                            if last_timestamp == last_timestamp_for_domain:
                                # user attritioned on this domain
                                days_until_final_impression = (
                                    moment.unix(max_timestamp) -
                                    moment.unix(last_timestamp)).days
                                domain_to_attritioned[domain] = days_until_final_impression > 2
                    attritioned_today = 0
                    if len(day_info['session_info_list']) > 0 and is_last_day:
                        if (moment.unix(max_timestamp) -
                                moment.unix(last_timestamp)).days > 2:
                            attritioned_today = 1
                    for domain, total_time_spent in domain_to_total_time_spent.items():
                        attritioned = 0
                        if domain in domain_to_attritioned:
                            attritioned = int(domain_to_attritioned[domain])
                        row = {
                            'conditionduration': conditionduration,
                            'days_since_install': days_since_install,
                            'days_until_last_day': days_until_last_day,
                            'user_saw_both_same_and_random': int(user_saw_both_same_and_random),
                            'num_visits_to_domain_today': domain_to_num_samples[domain],
                            'is_day_with_just_one_sample': int(domain_to_num_samples[domain] == 1),
                            'attritioned': attritioned,
                            'attritioned_today': attritioned_today,
                            'is_last_day': is_last_day,
                            'first_condition_for_user': first_condition_for_user,
                            'first_conditionduration_for_user': first_conditionduration_for_user,
                            'intervention': day_intervention,
                            'num_impressions_on_day': domain_to_num_impressions_on_day[domain],
                            'log_time_spent': log(total_time_spent),
                            'time_spent': total_time_spent,
                            'install_id': install_id,
                            'userid': userid,
                            'condition': condition,
                            'domain': domain,
                            'num_days_in_same_condition': num_days_in_same_condition,
                            'num_days_in_same_condition_and_saw_intervention':
                                num_days_in_same_condition_and_saw_intervention,
                        }
                        row['num_days_saw_intervention_for_same_intervention'] = 0
                        if condition == 'same':
                            row['num_days_saw_intervention_for_same_intervention'] = \
                                intervention_to_num_days_seen_at_least_once[day_intervention]
                        accept = True
                        for day_filter_func in day_filter_funcs:
                            if not day_filter_func(row):
                                accept = False
                        if accept:
                            rows.append(row)
                    for intervention in interventions_seen_today:
                        intervention_to_num_days_seen_at_least_once[intervention] += 1
                    if condition == 'same':
                        num_days_in_same_condition += 1
                        if saw_intervention_today_same:
                            num_days_in_same_condition_and_saw_intervention += 1
    print(Counter(install_id_to_first_condition.values()))
    return pd.DataFrame(rows)
def unix_time_to_date(text):
    try:
        date = moment.unix(int(text)).format("YYYY-MM-DD")
    except Exception:  # narrowed from a bare except
        date = text
    return date
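# Usage sketch for unix_time_to_date; the fallback returns the input unchanged.
print(unix_time_to_date("1355788800"))   # "2012-12-18" (local time, near UTC)
print(unix_time_to_date("not a stamp"))  # echoed back as-is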
def to_utc_date(timestamp):
    return moment.unix(timestamp, utc=True)
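# Usage sketch: epoch 0 is the start of the Unix era in UTC.
print(to_utc_date(0))  # 1970-01-01 00:00:00 UTC as a moment object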
def get_hour(utc_time, tz_id):
    momentObj = moment.unix(utc_time, utc=True).timezone(tz_id)
    return momentObj.format('HH')
def get_dayOfWeek(utc_time, tz_id):
    momentObj = moment.unix(utc_time, utc=True).timezone(tz_id)
    return momentObj.format('dddd')
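# Usage sketch for the two helpers above with a fixed timestamp:
# 1355788800 is 2012-12-18 00:00 UTC, i.e. 19:00 the previous evening
# in US/Eastern.
print(get_hour(1355788800, "US/Eastern"))       # "19"
print(get_dayOfWeek(1355788800, "US/Eastern"))  # "Monday" (Dec 17 local)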
def get_file_modify_time(filepath):
    return moment.unix(int(os.stat(filepath).st_mtime * 1000))
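# Usage sketch: st_mtime is scaled to milliseconds here, relying on moment.unix
# falling back to a milliseconds interpretation for values too large to be
# epoch seconds.
print(get_file_modify_time(__file__))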
def transform_data(args, dirName):
    wc = WaterChiller()
    cops = []
    outputDatas = []
    PP = []
    files = listdir(dirName)
    files = sorted(files)
    save_dir = os.path.join("datasets", "copData")
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    for fileName in files:
        print("loading...")
        df = pd.read_csv(os.path.join(dirName, fileName, fileName + '.csv'), encoding='utf-8')
        titles = np.array(df.columns)
        datas = df.iloc[:, :].to_numpy(dtype=float)
        n = datas.shape[0]
        if n == 0:
            continue
        row = 0
        while row < n:
            outputData = np.zeros((9))
            Tevwi = datas[row, 1]
            Tcdwi = datas[row, 3]
            operation_num = datas[row, -2]
            running_time = datas[row, -1]
            if args.bp:
                Pev = datas[row, 15]
                Pcd = datas[row, 18]
                Tevo = datas[row, 21]
                Thg = datas[row, 16]
                Tcdo = datas[row, 20]
                freq = datas[row, 13]
                W = datas[row, 19]
            else:
                Pev = datas[row, 6]
                Pcd = datas[row, 9]
                Tevo = datas[row, 12]
                Thg = datas[row, 7]
                Tcdo = datas[row, 11]
                freq = 60
                W = datas[row, 10]
            if freq == 0:
                print(moment.unix(datas[row, 0]), "=")
            h1, h2, h3, h4 = wc.getEnthalpy(Pcd, Pev, Tevo, Thg, Tcdo)
            outputData[0] = datas[row, 0]
            outputData[1] = h1 / 1000
            outputData[2] = h2 / 1000
            outputData[3] = h3 / 1000
            outputData[4] = (h1 - h3) / (h2 - h1)
            outputData[5] = Tevwi
            outputData[6] = Tcdwi
            outputData[7] = operation_num
            outputData[8] = running_time
            outputDatas.append(outputData)
            row += 1
        outputDatas = np.array(outputDatas)
        print("outputDatas : ", outputDatas.shape)
        # titles = np.array(df.columns)
        # titles[-1] = "cop"
        '''
        "Tevo" : evaporator outlet temperature
        "Tsh"  : superheat
        "Thg"  : discharge temperature
        "Tcdo" : condenser outlet temperature
        "Tsc"  : subcooling
        "Tca"  : cooling-water temperature difference
        "Tea"  : chilled-water temperature difference
        '''
        titles = np.array([
            "time", "h1", "h2", "h3", "C.O.P", "Tevwi", "Tcdwi",
            "operation_num", "running_time"
        ])
        saveCSV(outputDatas, titles, fileName.split('.')[0])
        drawDiagram(outputDatas, titles, fileName.split('.')[0])
with open(source_html, 'r') as file:
    source_contents = file.read()

if not check_source_contains_leadership_chart(source_contents):
    raise ValueError("The source does not contain the Leadership Chart")

users = parse_for_competent_leader_award(source_contents)
users_found_count = 0
for user in users:
    if user_name_matched(user_name_arg, user.name) or user_name_arg == "":
        print("-------------------------------")
        users_found_count += 1
        print(user.name)
        # find the latest date of a role taken in each Leadership Chart column
        latest_date = moment.unix(0).date
        latest_evaluated_column = 1
        for role in user.roles_collection:
            date = moment.date(role.date, '%d-%m-%Y')  # %m (month), not %M (minute)
            if role.column_number > latest_evaluated_column:
                print(latest_date.strftime("%m/%d/%Y"))
                latest_date = date
                latest_evaluated_column = role.column_number
            elif is_second_date_later(latest_date, date):
                latest_date = date
            print(role)
        print(latest_date.strftime("%m/%d/%Y"))

if user_name_arg != "" and users_found_count == 0:
    print("No matching user found: " + user_name_arg)
    sys.exit(1)
def extract_dataframe(alldata, filter_funcs=[], user_filter_funcs=[]):
    if callable(filter_funcs):
        filter_funcs = [filter_funcs]
    rows = []
    max_timestamp = get_global_max_timestamp(alldata)
    for install_id, experiment_info_with_sessions in alldata.items():
        accept_user = True
        for user_filter_func in user_filter_funcs:
            if not user_filter_func(experiment_info_with_sessions):
                accept_user = False
        if not accept_user:
            continue
        first_condition_for_user = None
        for experiment_info in experiment_info_with_sessions:
            for condition_info in experiment_info['condition_info_list']:
                condition = condition_info['condition']
                if first_condition_for_user is None:
                    first_condition_for_user = condition
        first_conditionduration_for_user = None
        for experiment_info in experiment_info_with_sessions:
            for condition_info in experiment_info['condition_info_list']:
                conditionduration = condition_info['conditionduration']
                if first_conditionduration_for_user is None:
                    first_conditionduration_for_user = conditionduration
        intervention_to_num_impressions = {}
        intervention_to_num_days_seen_at_least_once = {}
        #epoch = day_info['epoch']
        #if install_id not in install_id_to_first_epoch:
        #    install_id_to_first_epoch[install_id] = 0
        #else:
        #    install_id_to_first_epoch[install_id] = min(install_id_to_first_epoch[install_id], epoch)
        firstlast_info = get_firstlast_info(experiment_info_with_sessions)
        first_timestamp = firstlast_info['first_timestamp']
        last_timestamp = firstlast_info['last_timestamp']
        first_localepoch = firstlast_info['first_localepoch']
        last_localepoch = firstlast_info['last_localepoch']
        for experiment_info in experiment_info_with_sessions:
            for condition_info in experiment_info['condition_info_list']:
                condition = condition_info['condition']
                conditionduration = condition_info['conditionduration']
                for day_info in condition_info['day_info_list']:
                    #is_day_with_just_one_sample = 0
                    #if len(day_info['session_info_list']) < 2:
                    #    is_day_with_just_one_sample = 1
                    domain_to_num_samples = get_domain_to_num_samples(
                        day_info['session_info_list'])
                    intervention_to_num_impressions_today = {}
                    intervention_to_seen_today_at_least_once = {}
                    for session_info in sorted(day_info['session_info_list'],
                                               key=lambda k: k['timestamp']):
                        domain = session_info['domain']
                        userid = session_info['userid']
                        time_spent = session_info['time_spent']
                        timestamp = session_info['timestamp']
                        intervention = session_info['intervention']
                        if intervention not in intervention_to_num_impressions:
                            intervention_to_num_impressions[intervention] = 0
                        else:
                            intervention_to_num_impressions[intervention] += 1
                        if intervention not in intervention_to_num_impressions_today:
                            intervention_to_num_impressions_today[intervention] = 0
                        else:
                            intervention_to_num_impressions_today[intervention] += 1
                        if intervention not in intervention_to_seen_today_at_least_once:
                            intervention_to_seen_today_at_least_once[intervention] = True
                        num_days_intervention_seen_at_least_once = 0
                        if intervention in intervention_to_num_days_seen_at_least_once:
                            num_days_intervention_seen_at_least_once = \
                                intervention_to_num_days_seen_at_least_once[intervention]
                        #if domain != 'www.facebook.com':
                        #    continue
                        timestamp = session_info['timestamp']
                        is_last_intervention_for_user = timestamp == last_timestamp
                        attritioned = False
                        if is_last_intervention_for_user:
                            if (moment.unix(max_timestamp) -
                                    moment.unix(last_timestamp)).days > 2:
                                attritioned = True
                        days_since_install = round(
                            (timestamp - first_timestamp) / (24 * 3600 * 1000))
                        days_until_last_day = floor(
                            (last_timestamp - timestamp) / (24 * 3600 * 1000))
                        localepoch = session_info['localepoch']
                        days_since_install = localepoch - first_localepoch
                        days_until_last_day = last_localepoch - localepoch
                        #print(days_until_last_day)
                        is_first_visit_of_day = \
                            intervention_to_num_impressions_today[intervention] == 0
                        row = {
                            'first_condition_for_user': first_condition_for_user,
                            'first_conditionduration_for_user': first_conditionduration_for_user,
                            'attritioned': int(attritioned),
                            'conditionduration': conditionduration,
                            'days_since_install': days_since_install,
                            'days_until_last_day': days_until_last_day,
                            'num_days_intervention_seen_at_least_once':
                                num_days_intervention_seen_at_least_once,
                            'timestamp': timestamp,
                            'is_day_with_just_one_sample': int(domain_to_num_samples[domain] == 1),
                            'impression_idx': intervention_to_num_impressions[intervention],
                            'is_first_visit_of_day': int(is_first_visit_of_day),
                            'impression_idx_within_day':
                                intervention_to_num_impressions_today[intervention],
                            'log_time_spent': log(time_spent),
                            'time_spent': time_spent,
                            'install_id': install_id,
                            'userid': userid,
                            'condition': condition,
                            'intervention': intervention,
                            'domain': domain,
                        }
                        accept = True
                        for filter_func in filter_funcs:
                            if not filter_func(row):
                                accept = False
                        if accept:
                            rows.append(row)
                    for intervention in intervention_to_seen_today_at_least_once.keys():
                        if intervention not in intervention_to_num_days_seen_at_least_once:
                            intervention_to_num_days_seen_at_least_once[intervention] = 1
                        else:
                            intervention_to_num_days_seen_at_least_once[intervention] += 1
    return pd.DataFrame(rows)
def utc_unix_to_readable(utc_time, tz_id):
    momentObj = moment.unix(utc_time, utc=True).timezone(tz_id)
    time_string = momentObj.format("h:mm A")
    return time_string
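# Usage sketch: 1355788800 (2012-12-18 00:00 UTC) rendered in a local zone.
print(utc_unix_to_readable(1355788800, "US/Eastern"))  # "7:00 PM"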
def sync(self):
    self.from_fs()
    self.from_sp()
    c = self.conn.cursor()
    c.execute(sync_query)
    for row in c.fetchall():
        sync = (row[1], Moment(row[2]).locale('UTC') if row[2] else None)
        sp = (row[3], Moment(row[4]) if row[4] else None)
        fs = (row[5], unix(row[6], utc=True) if row[6] else None)
        # Figure out which version is newest and sync that version to the other
        local_p = self.path / row[0]
        # print(row[0], sync, sp, fs)
        if sync[0] is None:
            if sp[0] is None and fs[0] is not None:
                # The file only exists on the FS and hasn't previously been seen,
                # so sync it to the server
                self.sync_to_sp(row)
            elif sp[0] is not None and fs[0] is None:
                # The file only exists on the server and hasn't previously been seen.
                # Sync it to the FS
                self.sync_to_fs(row)
            elif sp[0] is not None and fs[0] is not None:
                # The file has appeared on both sides since the last sync.
                print(' *** Error: file {} conflict'.format(row[0]))
            else:
                # The file has been deleted on both sides since the last sync.
                print(' --- Deleted from Both: {}'.format(row[0]))
                self.remove_from_sync(row)
        else:
            if sp[0] is None or fs[0] is None:
                if sp[0] is not None:
                    self.unlink_from_sp(row)
                if fs[0] is not None:
                    self.unlink_from_fs(row)
                if sp[0] is None and fs[0] is None:
                    print(' --- Deleted from Both: {}'.format(row[0]))
                self.remove_from_sync(row)
            else:
                if sync[1] >= sp[1] and sync[1] >= fs[1]:
                    # Both sides are older than the last sync
                    if sp[0]:
                        print(' Up to Date Folder: {}'.format(row[0]))
                    else:
                        print(' Up to Date: {}'.format(row[0]))
                elif sp[0] and fs[0]:
                    # Both sides are folders. Leave them be.
                    print(' Up to Date Folder: {}'.format(row[0]))
                elif sync[1] < sp[1] and sync[1] < fs[1]:
                    print(' *** Error: file {} conflict'.format(row[0]))
                    resp = ''
                    while len(resp) == 0 or (resp[0] != 'r' and resp[0] != 'l'):
                        resp = input(' Take [R]emote or [L]ocal? ').lower()
                    if resp[0] == 'l':
                        self.sync_to_sp(row)
                    else:
                        self.sync_to_fs(row)
                elif sp[1] >= sync[1] and sp[1] >= fs[1]:
                    # SP version is newer
                    self.sync_to_fs(row)
                elif fs[1] >= sync[1] and fs[1] >= sp[1]:
                    # Local version is newer
                    self.sync_to_sp(row)
    self.conn.commit()
def date_str(time_stamp):
    moment_obj = moment.unix(time_stamp)
    return moment_obj.format("YYYY-M-D")
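# Usage sketch: date_str uses local time (no utc=True), so the result can
# shift by a day depending on the machine's timezone.
print(date_str(1355788800))  # "2012-12-18" on a UTC machine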