def main():
    default_start = common.format_time_to_hour(time.time() - 3600 * 2)
    default_end = common.format_time_to_hour(time.time())
    args = {
        'cluster': getQS('cluster', 'cluster0'),
        'query': getQS('query', 'qps'),
        'period': getQS('period', 'min'),
        'start': getQS('start', default_start),
        'end': getQS('end', default_end),
    }
    if args['period'] == 'min' and (utils.parse_time(args['end']) -
                                    utils.parse_time(args['start'])) / 3600 > 10:
        print ''
        hours = (utils.parse_time(args['end']) - utils.parse_time(args['start'])) / 3600
        print 'please use period=hour for %d hours of data' % hours
        return
    start_list = [
        common.format_time_to_hour(time.time() - 3600 * 24 * 7),
        common.format_time_to_hour(time.time() - 3600 * 24),
        common.format_time_to_hour(time.time() - 3600 * 2),
    ]
    start_list = '\n'.join(['<a id="%s" href=""> %s </a>|' % (i, i) for i in start_list])
    print "Content-Type: text/html"
    print ""
    head, tail = html.split('[new Date(2014, 6, 18), 250, ],')
    print head
    json_data(args)
    print tail.replace('{start_list}', start_list)
def set_now_playing_info_from_speaker(self, speaker):
    track = speaker.get_current_track_info()
    BASIC_DATA = ("title", "artist", "album")
    playing_track = track['uri']
    track['volume'] = speaker.volume
    for key in BASIC_DATA:
        label = self.now_playing_widget[key]
        text = track.get(key) if track.get(key) else self.empty_info
        label.config(text=text)
    self.clear('album_art')
    art = track.get("album_art")
    if art:
        self.set_album_art(art, track_uri=playing_track)
    else:
        self.set_album_art(None)
    volume = track.get("volume")
    if volume:
        self.now_playing_widget["volume"].set(volume)
    duration = track.get("duration", "0:00:0")
    position = track.get("position", "0:00:0")
    duration = parse_time(duration)
    position = parse_time(position)
    self.now_playing_widget["duration"].config(text=duration)
    self.now_playing_widget["position"].config(text=position)
    logging.info("Set track info")
def get_time_window_from_entry(entry):
    if "start time" not in entry:
        raise ValueError("Start time not in entry.")
    if "end time" not in entry:
        raise ValueError("End time not in entry.")
    start_time = entry["start time"]
    end_time = entry["end time"]
    return TimeWindow(
        utils.time_to_seconds(utils.parse_time(start_time)),
        utils.time_to_seconds(utils.parse_time(end_time)))
def correct_time(finish_string, start_string, si):
    '''
    Adjusts time of SI 5 card, which only supports 12-hour format.
    '''
    finish = parse_time(finish_string, strip_milliseconds=True)
    start = parse_time(start_string)
    noon = parse_time('12:00:00')
    if is_sportident_5(si) and finish < noon and not (start <= finish < noon):
        finish += timedelta(hours=12)
    return format_time(finish)
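# A minimal usage sketch of correct_time() above (not from the original project): it
# assumes parse_time()/format_time() round-trip "HH:MM:SS" strings and that
# is_sportident_5() recognises SI 5 cards by their card number. An SI 5 card only
# reports 12-hour times, so a finish read back as 01:05:00 after an 11:50:00 start is
# really 13:05:00 and gets 12 hours added; other card types are returned unchanged.
if __name__ == '__main__':
    print(correct_time('01:05:00', '11:50:00', si='123456'))  # hypothetical SI 5 card number -> '13:05:00'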
def get_current_payment_number(self):
    """
    Return the number of the current payment, calculated as the number of
    30-day periods between issueDate and lastPaymentDate.
    """
    issueDate = parse_time(self.issueDate)
    lastPaymentDate = parse_time(self.lastPaymentDate)
    if not lastPaymentDate:
        return 0
    payment_no = (lastPaymentDate - issueDate).days / 30
    if lastPaymentDate and not payment_no:
        return 1
    else:
        return payment_no
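# A standalone sketch of the arithmetic above with hypothetical dates (not from the
# original model): 2020-01-01 to 2020-04-05 is 95 days, and 95 // 30 gives payment
# number 3; a last payment inside the first 30 days falls back to 1, and a missing
# lastPaymentDate returns 0.
from datetime import datetime

issue_date = datetime(2020, 1, 1)
last_payment_date = datetime(2020, 4, 5)
payment_no = (last_payment_date - issue_date).days // 30  # -> 3
print(payment_no)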
def __init__(
        self, source, message, user, requested_date, target_date=None, db_id=None,
        time_string=None, count_duplicates=0, thread_id=None, defaulted=False, timezone=None
):
    self.source = source
    self.message = message
    self.user = user
    self.requested_date = requested_date
    self.count_duplicates = count_duplicates
    self.thread_id = thread_id
    self.timezone = timezone
    self.result_message = None
    self.valid = True
    self.defaulted = defaulted

    if target_date is not None:
        self.target_date = target_date
    elif time_string is not None:
        self.target_date = utils.parse_time(time_string, requested_date, timezone)
        if self.target_date is not None and self.target_date < self.requested_date:
            self.result_message = f"This time, {time_string.strip()}, was interpreted as " \
                f"{utils.get_datetime_string(self.target_date)}, which is in the past"
            log.info(self.result_message)
            self.valid = False
    else:
        self.target_date = None

    if self.target_date is None:
        if time_string is None:
            self.result_message = "Could not find a time in message, defaulting to one day"
        else:
            self.result_message = f"Could not parse date: \"{time_string.strip()}\", defaulting to one day"
        log.info(self.result_message)
        self.defaulted = True
        self.target_date = utils.parse_time("1 day", requested_date, None)

    self.db_id = db_id
def __cmp__(self, obj):
    time_a = utils.parse_time(self.start_time)
    time_b = utils.parse_time(obj.start_time)
    idx_a = int(self.idx)
    idx_b = int(obj.idx)
    if time_a == time_b:
        if idx_a < idx_b:
            return -1
        if idx_a == idx_b:
            return 0
        if idx_a > idx_b:
            return 1
    if time_a < time_b:
        return -1
    return 1
def add_command(self, pars, args):
    """Command for adding a trip to the table."""
    test = self.test(pars, [0, 1], args, [2, 3], ["c"])
    if test:
        return test
    if "c" in pars:
        id = self.table.add_trip_consumption(utils.parse_time(args[2]),
                                             utils.parse_time(args[-1]),
                                             float(args[0]), float(args[1]))
    else:
        id = self.table.add_trip(utils.parse_time(args[1]),
                                 utils.parse_time(args[-1]), float(args[0]))
    return "{} \nAdded.".format(self.table.trips_table[id].get_print())
async def remind(self, ctx, limit, *, reminder):
    '''Set a reminder. Reminders are capped at 6 months to prevent abuse.\n
    **Example:```yml\n♤remind 2h clean room\n♤remind 1h fix bugs```**
    '''
    time_, delta = utils.parse_time(limit)
    now = datetime.datetime.utcnow()
    SEMIYEAR = 1555200
    if delta.total_seconds() > SEMIYEAR:
        raise commands.BadArgument

    timeout = now + delta
    self.bot.loop.create_task(self._remind(ctx, reminder, timeout))
    async with self.bot.pool.acquire() as con:
        query = 'INSERT INTO reminders VALUES ($1, $2, $3, $4)'
        await con.execute(query, ctx.author.id, ctx.channel.id, timeout, reminder)

    files = [
        File('assets/dot.png', 'unknown.png'),
        File('assets/clock.png', 'unknown1.png')
    ]
    embed = Embed(description=f'>>> {reminder}', color=utils.Color.sky)
    embed.set_author(name='Reminder', icon_url='attachment://unknown.png')
    embed.set_footer(text=time_, icon_url='attachment://unknown1.png')
    embed.timestamp = timeout

    await ctx.reply(files=files, embed=embed, mention_author=False)
def parse_data(data):
    final_data = {}
    for year, call_log in data.items():
        print(f'Getting call data for {year}')
        # UNCOMMENT THIS FOR CREATING A YEAR KEY FOR EACH YEAR (1 OF 3)
        # final_data[year] = {}
        for month, calls in call_log.items():
            # UNCOMMENT THIS FOR CREATING A MONTH KEY FOR EACH MONTH (2 OF 3)
            # month = parse_month(month=month, abbreviated=False)
            # final_data[year][month] = {}
            for call in calls:
                call_data = call.split("-")
                call_date = utils.parse_date(call_data[0].split("_")[0])
                call_time = utils.parse_time(call_data[0].split("_")[1])
                call_number = utils.parse_call_number(call_data[1])
                call_direction = call_data[2].split(".")[0]
                # UNCOMMENT THIS FOR CREATING A MONTH KEY FOR EACH MONTH (3 OF 3)
                # final_data[year][month].setdefault(call_date, {})
                # final_data[year][month][call_date].setdefault(call_direction, [])
                # final_data[year][month][call_date][call_direction].append(f'{call_time} - {call_number}')
                final_data.setdefault(call_date, [])
                final_data[call_date].append(
                    f'{call_direction} - {call_time} - {call_number}')
    return final_data
def get_email_file_rejected_body(self, proj, spec, db_file):
    return (' \
        <h3>A file has been rejected for "%s"</h3> \
        <div id="wrap" style="border-top-style:solid;border-top-width:1px;border-top-color:#666666;border-bottom-style:solid;border-bottom-width:1px;border-bottom-color:#666666">\
        <div style="float:left;margin: 15 15 15 15"> \
        <table> \
        <tr><td>Client:</td><td>%s</td></tr> \
        <tr><td>Project Name:</td><td>%s</td></tr> \
        <tr><td>Project Id:</td><td>%s</td></tr> \
        <tr><td>Specification Name:</td><td>%s</td></tr> \
        <tr><td>Specification Id:</td><td>%s</td></tr> \
        <tr><td>Specification Reference:</td><td>%s</td></tr> \
        <tr><td>Original File Name:</td><td>%s</td></tr> \
        <tr><td>Production Name:</td><td>%s</td></tr> \
        <tr><td>Size:</td><td>%s bytes</td></tr> \
        <tr><td>Upload Time:</td><td>%s</td></tr> \
        <tr><td>Pre-flight Status:</td><td>%s</td></tr> \
        <tr><td>View HTML Report:</td><td><a href="%s" target="_blank">here</a></td></tr> \
        <tr><td>View Text_Report Report:</td><td><a href="%s" target="_blank">here</a></td></tr> \
        <tr><td>View PDF Report:</td><td><a href="%s" target="_blank">here</a></td></tr> \
        </table> \
        </div> \
        <div style="float: left;margin-bottom:15;margin-top:15;border-style:solid;border-width:1px;border-color:#666666"> \
        <div style="margin:15 15 15 15"><a href="%s"><img src="%s"></a></div> \
        </div><br style="clear:both" /></div>' \
        % (spec['spec_name'], proj['client_account'], proj['project_name'], str(proj['project_id']), \
           spec['spec_name'], str(spec['spec_id']), str(spec['reference_number']), \
           db_file['original_name'], db_file['dam_name'], str(db_file['size']), \
           utils.parse_time(db_file['upload_time'], utils.STYLE_DATE_TIME_SHORT_MONTH), db_file['preflight_info'], \
           db_file['preflight_report_html'], db_file['preflight_report_text'], db_file['preflight_report_mask'], \
           db_file['preflight_report_html'], db_file['thumbnail_url']))
def parse_comment(body, trigger, comment_created):
    time_string = utils.find_reminder_time(body, trigger)
    time_string = time_string.strip() if time_string is not None else None

    target_date = None
    if time_string is not None:
        target_date = utils.parse_time(time_string, comment_created, None)

    return time_string, target_date
def _set_period(self, from_time='', to_time=''):
    '''
    parse time specs and set period
    '''
    from_dt = to_dt = None
    if from_time:
        from_dt = utils.parse_time(from_time)
        if not from_dt:
            return False
    if to_time:
        to_dt = utils.parse_time(to_time)
        if not to_dt:
            return False
    if to_dt and to_dt <= from_dt:
        common_err("%s - %s: bad period" % (from_time, to_time))
        return False
    return crm_report().set_period(from_dt, to_dt)
def json_data(args):
    ''' history monitor info of the cluster '''
    start_ts = utils.parse_time(args['start'])
    end_ts = utils.parse_time(args['end']) + 3600
    for t in range(start_ts, end_ts, 3600):
        timestr = common.format_time_to_hour(t)
        f = 'data/%s/statlog.%s' % (args['cluster'], timestr)
        for line in file(f):
            try:
                __print_statlog_line(line, args)
                if (args['period']) == 'hour':
                    break
            except:
                pass
def _set_period(self, from_time='', to_time=''):
    '''
    parse time specs and set period
    '''
    from_dt = to_dt = None
    if from_time:
        from_dt = utils.parse_time(from_time)
        if not from_dt:
            return False
    if to_time:
        to_dt = utils.parse_time(to_time)
        if not to_dt:
            return False
    if to_dt and to_dt <= from_dt:
        common_err("%s - %s: bad period" % (from_time, to_time))
        return False
    return crm_report.set_period(from_dt, to_dt)
def gas_command(self, pars, args):
    """Command for calculating spent gasoline."""
    test = self.test(pars, [0, 2], args, [1, 2], ["s", "a", "b"])
    if test:
        return test
    strict = True if 's' in pars else False
    if "a" in pars and "b" not in pars:
        res = self.table.calculate_gasoline_after_date(
            utils.parse_time(args[0]), strict)
    elif "b" in pars and "a" not in pars:
        res = self.table.calculate_gasoline_before_date(
            utils.parse_time(args[0]), strict)
    elif "a" not in pars and "b" not in pars and len(args) == 2:
        res = self.table.calculate_gasoline_between_dates(
            utils.parse_time(args[0]), utils.parse_time(args[1]), strict)
    else:
        return "Invalid arguments."
    return "Total gasoline: {}".format(res)
def get_lessons(self):
    lessons = []
    joined_list = [
        x for y in self.data['elementPeriods'].values() for x in y
    ]
    for element in joined_list:
        id = element['id']
        text = element['lessonText']
        date = parse_date(element['date'])
        start = parse_time(element['startTime'])
        end = parse_time(element['endTime'])
        teachers = []
        subject = None
        rooms = []
        exam = element['is']['exam'] if 'is' in element and 'exam' in element['is'] else False
        substitution = element['is']['substitution'] if 'is' in element and 'substitution' in element['is'] else False
        additional = element['is']['additional'] if 'is' in element and 'additional' in element['is'] else False
        for info in element['elements']:
            info_id = info['id']
            if info['type'] == 1:
                pass
            elif info['type'] == 2:
                teachers.append(self.get_object(info_id, Teacher))
            elif info['type'] == 3:
                subject = self.get_object(info_id, Subject)
            elif info['type'] == 4:
                rooms.append(self.get_object(info_id, Room))
        lesson = Lesson(id, text, teachers, subject, rooms, start, end, date,
                        exam, substitution, additional)
        lessons.append(lesson)
    return lessons
def list_command(self, pars, args):
    """Command for listing trips from the table."""
    test = self.test(pars, [0, 2], args, [1, 2], ["s", "a", "b"])
    if test:
        return test
    if len(args) == 1 and args[0] == "all":
        return self.gen_string(self.table.list_all())
    strict = True if 's' in pars else False
    if "a" in pars and "b" not in pars:
        res = self.table.list_trips_after_date(utils.parse_time(args[0]),
                                               strict)
    elif "b" in pars and "a" not in pars:
        res = self.table.list_trips_before_date(utils.parse_time(args[0]),
                                                strict)
    elif "a" not in pars and "b" not in pars and len(args) == 2:
        res = self.table.list_trips_between_dates(
            utils.parse_time(args[0]), utils.parse_time(args[1]))
    else:
        return "Invalid arguments."
    return self.gen_string(res)
def search_command(self, pars, args):
    """Command for searching for a trip in the table."""
    test = self.test(pars, [0], args, [1, 2], [])
    if test:
        return test
    if len(args) == 1:
        date = utils.parse_time(args[0])
    else:
        date = utils.parse_time_with_format(args[0], args[1])
    res = self.table.search_trips_by_date(date)
    return self.gen_string(res)
def volume_since(self, time, trades, side=None):
    volume = 0
    total = 0
    i = 0
    recent = utils.parse_time(trades[i]['datetime']) > time
    not_mine = trades[i]['tradeid'] not in self.my_trade_ids
    not_control = trades[i]['tradeid'] != self.control_trade_id
    while recent and not_mine and not_control:
        if side == None or trades[i]['initiate_ordertype'] == side:
            volume += float(trades[i]['quantity'])
            total += float(trades[i]['total'])
        i += 1
        if i >= len(trades):
            break
        recent = utils.parse_time(trades[i]['datetime']) > time
        not_mine = trades[i]['tradeid'] not in self.my_trade_ids
        not_control = trades[i]['tradeid'] != self.control_trade_id
    return volume, total
def send_reminders(reddit, database):
    timestamp = utils.datetime_now()
    count_reminders = database.get_count_pending_reminders(timestamp)
    counters.queue.set(count_reminders)

    reminders_sent = 0
    if count_reminders > 0:
        reminders = database.get_pending_reminders(
            utils.requests_available(count_reminders), timestamp)
        for reminder in reminders:
            reminders_sent += 1
            counters.notifications.inc()
            counters.queue.dec()
            log.info(
                f"{reminders_sent}/{len(reminders)}/{count_reminders}: Sending reminder to u/{reminder.user.name} : "
                f"{reminder.id} : {utils.get_datetime_string(reminder.target_date)}"
            )
            bldr = utils.get_footer(reminder.render_notification())
            result = reddit.send_message(reminder.user.name, "RemindMeBot Here!", ''.join(bldr))
            if result in [ReturnType.INVALID_USER, ReturnType.USER_DOESNT_EXIST]:
                log.info(f"User doesn't exist: u/{reminder.user.name}")
            if result in [ReturnType.NOT_WHITELISTED_BY_USER_MESSAGE]:
                log.info(f"User blocked notification message: u/{reminder.user.name}")

            if reminder.recurrence is not None:
                if reminder.user.recurring_sent > static.RECURRING_LIMIT:
                    log.info(
                        f"User u/{reminder.user.name} hit their recurring limit, deleting reminder {reminder.id}"
                    )
                    database.delete_reminder(reminder)
                else:
                    new_target_date = utils.parse_time(reminder.recurrence,
                                                       reminder.target_date,
                                                       reminder.user.timezone)
                    log.info(
                        f"{reminder.id} recurring from {utils.get_datetime_string(reminder.target_date)} to "
                        f"{utils.get_datetime_string(new_target_date)}")
                    reminder.target_date = new_target_date
                    reminder.user.recurring_sent += 1
            else:
                log.debug(f"{reminder.id} deleted")
                database.delete_reminder(reminder)

            database.commit()
    else:
        log.debug("No reminders to send")

    return reminders_sent
def run(self):
    inputfile = self.inputEdit.displayText()
    for index, segment in enumerate(self.segments):
        timestamps = {}
        filenames = {}
        filenames['input'] = inputfile
        filenames['output'] = segment['name'].displayText()
        try:
            timestamps['start'] = utils.parse_time(
                segment['start'].displayText())
            timestamps['end'] = utils.parse_time(
                segment['end'].displayText())
        except ValueError:
            QMessageBox.about(self, 'Error',
                              'Segment %d is invalid.' % (index + 1))
            return
        FFMpeg(filenames, timestamps, path=self.ffmpeg_path).run()
def run(self):
    cmd_args = [
        item for sublist in self._get_command() for item in sublist
    ]
    proc = subprocess.Popen(['ffmpeg', '-hide_banner', '-y', *cmd_args],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
    while True:
        line = proc.stdout.readline()
        if not line:
            break
        line = str(line.rstrip())
        if self._duration is None:
            match = re.search(r'Duration: ([^,]+),', line)
            if match:
                self._duration = parse_time(match[0])
        # if self._frames is None:
        #     match = re.search(r', ([\d.]+) tbr', line, flags=re.IGNORECASE)
        #     if match:
        #         self._frames = float(match[1])
        if line.startswith('frame'):
            status = re.search(
                r'frame=([\d\s]+).*size=([\d\skmB]+).*time=([\d:.]+).*speed=([\s\d.]+)x',
                line,
                flags=re.IGNORECASE)
            time = self._get_fixed_time_on_run(parse_time(status[3]))
            speed = float(status[4].strip())
            print_progressbar(time, self._duration, suffix=f"(speed: {speed}x)")
    print()
def get_available_lesson(grade, semester, time_table, lesson_type):
    # get specified lessons
    lessons = {}
    # sort out all lessons
    for row in range(2, lesson_number[lesson_type]):
        if lesson_type == 1:
            lesson = elective_sheet[row]
        else:
            lesson = general_sheet[row]
        # confirm that there is no overlapping class
        parse_result = utils.parse_time(lesson[3].value)
        flag = False
        for time in parse_result:
            for i in time["period"]:
                if time_table[time["day"]][i] != 0:
                    flag = True
        if lesson_type != 2 and (grade != int(lesson[1].value[0]) or not (
                (semester == 1 and lesson[1].value[1] == '+') or
                (semester == 2 and lesson[1].value[1] == '-'))):
            continue
        if flag:
            continue
        if lesson_type == 2 and lesson[4].value == 0:
            continue
        target_lesson = {
            "type": lesson_type,
            "grade": grade,
            "semester": "+" if semester == 1 else "-",
            "name": lesson[0].value,
            "time": utils.parse_time(lesson[3].value),
            "love": lesson[4].value,
            "easy": lesson[5].value,
            "credit": lesson[2].value
        }
        lessons[str(uuid.uuid1())] = target_lesson
    return lessons
def test_date_parsing():
    base_time = utils.datetime_force_utc(
        datetime.strptime("2019-01-01 01:23:45", "%Y-%m-%d %H:%M:%S"))
    pairs = [
        ["1 day", "2019-01-02 01:23:45"],
        ["365 days", "2020-01-01 01:23:45"],
        ["2 weeks", "2019-01-15 01:23:45"],
        ["3 years", "2022-01-01 01:23:45"],
        ["3 months", "2019-04-01 01:23:45"],
        ["24 hours", "2019-01-02 01:23:45"],
        ["5 hrs", "2019-01-01 06:23:45"],
        ["20 minutes", "2019-01-01 01:43:45"],
        ["5 seconds", "2019-01-01 01:23:50"],
        ["tomorrow", "2019-01-02 01:23:45"],
        ["Next Thursday at 4pm", "2019-01-03 16:00:00"],
        ["Tonight", "2019-01-01 21:00:00"],
        ["2 pm", "2019-01-01 14:00:00"],
        ["eoy", "2019-12-31 09:00:00"],
        ["eom", "2019-01-31 09:00:00"],
        ["eod", "2019-01-01 17:00:00"],
        ["2022-01-01", "2022-01-01 00:00:00"],
        ["10/15/19", "2019-10-15 00:00:00"],
        ["April 9, 2020", "2020-04-09 00:00:00"],
        ["January 13th, 2020", "2020-01-13 00:00:00"],
        ["January 5th 2020", "2020-01-05 00:00:00"],
        ["June 2nd", "2019-06-02 00:00:00"],
        ["November 2", "2019-11-02 00:00:00"],
        ["August 25, 2018, at 4pm", "2018-08-25 16:00:00"],
        ["September 1, 2019 14:00:00", "2019-09-01 14:00:00"],
        ["august", "2019-08-01 00:00:00"],
        ["September", "2019-09-01 00:00:00"],
        ["2025", "2025-01-01 00:00:00"],
        ["2pm", "2019-01-01 14:00:00"],
        ["7:20 pm", "2019-01-01 19:20:00"],
        ["72hr", "2019-01-04 01:23:45"],
        ["1d", "2019-01-02 01:23:45"],
        ["1yr", "2020-01-01 01:23:45"],
        ["7h", "2019-01-01 08:23:45"],
        ["35m", "2019-01-01 01:58:45"],
        ["2 weeks with a test string", "2019-01-15 01:23:45"],
        ["3 years with a second date 2014", "2022-01-01 01:23:45"],
    ]

    for time_string, expected_string in pairs:
        result_date = utils.parse_time(time_string, base_time, "UTC")
        expected_date = utils.datetime_force_utc(
            datetime.strptime(expected_string, "%Y-%m-%d %H:%M:%S"))
        assert result_date == expected_date, f"`{time_string}` as `{result_date}` != `{expected_date}`"
def evaluate_alerts():
    time.sleep(10)
    dc = DataController()
    tables = ["filesystem"]
    # Evaluate alerts runs in a separate dedicated thread, so it must be always running.
    while True:
        for table in tables:
            records = dc.retrieve_records(table, "timeinsertion")
            # This needs to be updated/done
            # Currently just left the variable as a reminder
            track_already_sent_alerts = {"filesystem": []}
            if table == "filesystem":
                print("Retrieving the records from file system table.")
                for record in records:
                    print("Record for evaluation")
                    print(record)
                    current_time_tokens = parse_time(get_current_time())
                    # This try might not be needed, since the table won't accept records with a missing 7th index
                    try:
                        record_time_tokens = parse_time(record[7])
                        if current_time_tokens["YearMonthDay"] == record_time_tokens["YearMonthDay"] and\
                                current_time_tokens["Hour"] == record_time_tokens["Hour"]:
                            if int(current_time_tokens["Minutes"]) - int(
                                    record_time_tokens["Minutes"]) == 1:
                                print("OK I would send an alert now")
                                # alert
                                send_alert("FileSystem Alert", record)
                    except IndexError:
                        pass
def _parse(self, quests):
    reader = csv.reader(quests, delimiter=',')
    headers = next(reader)
    if headers:
        self.quest_headers = headers[10::3]
    for (name, surname, _, _, _, _, time, _, _, _, *answers) in reader:
        quests = []
        item = [parse_name(name, surname), parse_time(time)]
        for (quest, answer, correct) in group_n(answers, 3):
            quests.append(quest)
            item.append(answer.strip() == correct.strip())
        self.quests = self.quests or quests
        self.answers.append(item)
def imap_append(self, tag, argparser):
    name = argparser.parse()
    args = []
    while len(argparser._arg) > 0:
        nextarg = argparser.parse()
        args.append(nextarg)
    msg = args[-1]
    flags = []
    timestamp = datetime.datetime.now()
    # TODO: implement time parser function
    if len(args) > 1:
        if isinstance(args[0], list):
            flags = args[0]
            if len(args) == 3:
                timestamp = utils.parse_time(args[1])
        else:
            timestamp = utils.parse_time(args[0])
    # TODO: set timestamp
    # TODO: set flags
    name = utils.normpath(name)
    if not self._account.get_mailmanager().folder_exists(name):
        return 'NO [TRYCREATE] APPEND failed; folder does not exist'
    self._account.get_mailmanager().add_message(name, msg)
    return 'OK APPEND completed'
def config_required_lesson(grade, semester, time_table, current_credit):
    # configure the current semester's required lessons in the time table
    for row in range(2, lesson_number[0]):
        lesson = required_sheet[row]
        if grade != int(lesson[0].value[0]) or not (
                (semester == 1 and lesson[0].value[1] == '+') or
                (semester == 2 and lesson[0].value[1] == '-')):
            continue
        # update current credit
        current_credit += int(lesson[2].value)
        # add the required lesson to the time table
        result = utils.parse_time(lesson[3].value)
        for time in result:
            for i in time["period"]:
                # the lesson name marks slots that cannot be chosen at that time
                time_table[time["day"]][i] = lesson[1].value
def handle_mentions(api, chainer):
    mentions = grab_mentions(api=api)
    # Generate replies
    for mention in mentions:
        reply_to_id = mention.id
        mention_text = mention.full_text
        post_time = utils.parse_time(mention.created_at)
        if datetime.now() - post_time > timedelta(
                hours=settings.REPLY_INTERVAL):  # not in past x hours
            continue
        else:
            rtn_bool, *other_rtns = chainer.new_phrase()
            if rtn_bool:
                if settings.REPLY_TO_MENTIONS:
                    msg = other_rtns[0]
                    reply_to_mention(api, message=msg, reply_to_id=reply_to_id)
def loadAndStore(self, currency, fromutc=None, toutc=None):
    reverse = False
    if currency == "USDT" and self._quoteCoin == "BTC":
        reverse = True
    if fromutc == None:
        last = self._database.maxUtcstamp(currency)
        if last == None:
            last = utils.parse_time(self._config["startDate"])
        if reverse:
            ticker = self._quoteCoin + "" + currency
        else:
            ticker = currency + "" + self._quoteCoin
    else:
        last = fromutc
    if toutc == None:
        toutc = time.time()
    logging.info("Downloading " + currency + " from " + utils.format_time(last))
    interval = self._config["tradeInterval"]
    if interval == 3600:
        klines = self._client.get_historical_klines2(
            ticker, Client.KLINE_INTERVAL_1HOUR, (last + 1) * 1000,
            int(toutc) * 1000)
    elif interval == 1800:
        klines = self._client.get_historical_klines2(
            ticker, Client.KLINE_INTERVAL_30MINUTE, (last + 1) * 1000,
            int(toutc) * 1000)
    else:
        print("unsupported interval " + interval)
        exit(-1)
    if len(klines) > 0:
        df = self.createDataFrame(klines, reverse)
        print(df)
        self._database.storeDataFrame(currency, df)
        return max(df.index)
    else:
        return last
def scrape_price_data(eventId):
    """
    Function is called when a pundit creates an event. Will attempt to periodically
    retrieve and scrape price/odds data and changes from multiple sources until the
    event off time. Data is used to graph on the front end.
    """
    event = Event.query.filter_by(id=eventId).first()
    event_time = event.meta_data["Start Time"]
    info_tuple = parse_time(event_time)
    if info_tuple[0] is False:
        return
    target_url = build_scrape_url(event, race_time=info_tuple[1])
    if target_url is None:
        runner = get_betfair_market_prices(event)
        if runner is not None:
            runner["time"] = info_tuple[2]
            event.pricedata = json.loads(event.pricedata)
            event.pricedata.append(runner)
            event.pricedata = json.dumps(event.pricedata)
            db.session.commit()
        scrape_price_data.apply_async(args=[eventId], countdown=60)
    else:
        try:
            page = requests.get(target_url)
        except:
            scrape_price_data.apply_async(args=[eventId], countdown=10)
        print ("URL", target_url)
        soup = BeautifulSoup(page.text, "html.parser")
        scraped = soup.find(id="t1")
        runner = betfair_client.get_current_prices(event.market_id, event.selection_id)
        if scraped is not None and runner is not None:
            selection = event.meta_data["Selection"].lower()
            for child in scraped.children:
                if selection in child["data-bname"].lower():
                    try:
                        runner["price"] = float(child["data-best-dig"])
                        runner["time"] = info_tuple[2]
                        runner["bookmakers"] = get_bookmakers(child["data-best-bks"])
                        event.pricedata = json.loads(event.pricedata)
                        event.pricedata.append(runner)
                        event.pricedata = json.dumps(event.pricedata)
                        db.session.commit()
                    except KeyError, e:
                        print ("KeyError in scraped data", e)
        scrape_price_data.apply_async(args=[eventId], countdown=60)
def check(self, arg):
    """
    Parses the time argument and returns it as seconds since the epoch.

    @param arg: the time argument
    @type  arg: string

    @returns: the number of seconds since the epoch
    @rtype: int

    @raise ParserException: if the arg is not a valid time
    """
    time = utils.parse_time(arg)
    if time != None:
        return time
    else:
        raise ParserException, "Invalid time specified %s" % (arg,)
def inform_flowdock_rejected(self, accesstoken, proj, spec, db_file):
    subject = 'File rejected for ' + proj['client_account'] + ' - Project ' + proj['project_name'] + ' (' + str(proj['project_id']) + ')'
    message = (' \
        <h3>A file has been rejected for "%s"</h3> \
        <div id="wrap" style="border-top-style:solid;border-top-width:1px;border-top-color:#666666;border-bottom-style:solid;border-bottom-width:1px;border-bottom-color:#666666">\
        <div style="float:left;margin: 15 15 15 15"> \
        <table> \
        <tr><td>Client:</td><td>%s</td></tr> \
        <tr><td>Project Name:</td><td>%s</td></tr> \
        <tr><td>Project Id:</td><td>%s</td></tr> \
        <tr><td>Specification Name:</td><td>%s</td></tr> \
        <tr><td>Specification Id:</td><td>%s</td></tr> \
        <tr><td>Specification Reference:</td><td>%s</td></tr> \
        <tr><td>Original File Name:</td><td>%s</td></tr> \
        <tr><td>Production Name:</td><td>%s</td></tr> \
        <tr><td>Size:</td><td>%s bytes</td></tr> \
        <tr><td>Upload Time:</td><td>%s</td></tr> \
        <tr><td>Pre-flight Status:</td><td>%s</td></tr> \
        <tr><td>View HTML Report:</td><td><a href="%s" target="_blank">here</a></td></tr> \
        <tr><td>View Text_Report Report:</td><td><a href="%s" target="_blank">here</a></td></tr> \
        <tr><td>View PDF Report:</td><td><a href="%s" target="_blank">here</a></td></tr> \
        </table> \
        </div> \
        <div style="float: left;margin-bottom:15;margin-top:15;border-style:solid;border-width:1px;border-color:#666666"> \
        <div style="margin:15 15 15 15"><a href="%s"><img src="%s"></a></div> \
        </div><br style="clear:both" /></div>' \
        % (spec['spec_name'], proj['client_account'], proj['project_name'], str(proj['project_id']), \
           spec['spec_name'], str(spec['spec_id']), str(spec['reference_number']), \
           db_file['original_name'], db_file['dam_name'], str(db_file['size']), \
           utils.parse_time(db_file['upload_time'], utils.STYLE_DATE_TIME_SHORT_MONTH), db_file['preflight_info'], \
           db_file['preflight_report_html'], db_file['preflight_report_text'], db_file['preflight_report_mask'], \
           db_file['preflight_report_html'], db_file['thumbnail_url']))

    post_url = 'https://api.flowdock.com/v1/messages/team_inbox/' + accesstoken
    data = {
        "source": "PrintFlow 2 Service",
        "from_address": "*****@*****.**",
        "subject": subject,
        "content": message,
        "project": str(proj['project_id']),
        "tags": ["@all", "#project", str(proj['project_id']), str(spec['spec_id'])]
    }
    d = json.dumps(data)
    headers = {"Content-Type": "application/json"}
    response = requests.post(post_url, data=d, headers=headers)
    if response.status_code == 200:
        # now link to category
        r = response.json()
def loop_jobs(jobs, driver, days, year, month):
    for day in range(days - 1):
        driver.execute_script("submitday(" + str(day + 1) + ")")
        time.sleep(2)
        notEmpty = True
        try:
            driver.find_element_by_tag_name("h2")
            notEmpty = False
        except NoSuchElementException:
            notEmpty = True
        if notEmpty:
            all_jobs = driver.find_element_by_xpath("/html/body/form/div/center/table/tbody")
            trs = all_jobs.find_elements(By.TAG_NAME, "tr")
            for index, i in enumerate(trs):
                if index > 0 and index < len(trs) - 1:
                    tds = trs[index].find_elements(By.TAG_NAME, "td")
                    if len(tds) > 0:
                        job = {}
                        details = tds[1].text.split('\n')
                        job_num = details[0]
                        job_num = [num for num in job_num if num.isdigit()]
                        job_num.insert(0, "R")
                        phase = parse_phase_name(details[1])
                        _2ndTab = tds[2].text.split('\n')
                        start_hour, start_minutes, end_hour, end_minutes = utils.parse_time(_2ndTab)
                        job['job_number'] = "".join(job_num)
                        job['start'] = datetime.datetime(year, month, day + 1, int(start_hour), int(start_minutes))
                        job['end'] = datetime.datetime(year, month, day + 1, int(end_hour), int(end_minutes))
                        job['phase'] = phase
                        jobs.append(job)
        driver.execute_script("window.history.go(-1)")
def cmd_set_time(message):
    global time
    # Check if timezone already set
    # I don't remember this if-clause to fire
    if not offset_storage.exists(str(message.chat.id)):
        'No offset storage'
        logger.warning('Whoa! It looks like {0!s} hasn\'t set offset yet! What a shame!'.format(
            str(message.from_user.username) + ' (' + str(message.chat.id) + ')'))
        bot.send_message(message.chat.id, config.lang.s_error_timezone_not_set)
        set_new_state(message.chat.id, StateMachine.States.STATE_SETTING_TIMEZONE_FOR_ALARM)
        return None

    timezone = offset_storage.get(str(message.chat.id))
    time = None
    global error_msg
    error_msg = None
    try:
        time = utils.parse_time(message.text, int(timezone))
    except utils.PastDateError as ex:
        error_msg = str(ex)
    except utils.ParseError as ex:
        error_msg = str(ex)
    else:
        pass

    # If there was an error getting time
    if time is None:
        logger.warning(
            'User {0!s} set incorrect time: {1!s}'.format(
                str(message.from_user.username) + ' (' + str(message.chat.id) + ')',
                message.text))
        if error_msg is None:
            # "Could not recognize timezone. Please try again"
            bot.send_message(message.chat.id, config.lang.s_error_time_not_recognized)
        else:
            bot.send_message(message.chat.id, error_msg)
        set_new_state(message.chat.id, StateMachine.States.STATE_SETTING_TIME)
    else:
        logger.debug('User {0!s} set time: {1!s}'.format(
            str(message.from_user.username) + ' (' + str(message.chat.id) + ')', time))
        utils.get_time_storage().save(str(message.chat.id), time, force_save=True)
        set_new_state(message.chat.id, StateMachine.States.STATE_SETTING_TEXT)
        bot.send_message(message.chat.id, config.lang.s_common_is_time_correct.format(time))
    pass
def build_file_web_page(self, proj, spec, db_file):
    html = ' \
        <html><head><title>%s File Information</title></head><body> \
        <h3>A file has been uploaded for "%s"</h3> \
        <div id="wrap" style="border-top-style:solid;border-top-width:1px;border-top-color:#666666;border-bottom-style:solid;border-bottom-width:1px;border-bottom-color:#666666">\
        <div style="float:left;margin: 15 15 15 15"> \
        <table> \
        <tr><td>Client:</td><td>%s</td></tr> \
        <tr><td>Project Name:</td><td>%s</td></tr> \
        <tr><td>Project Id:</td><td>%s</td></tr> \
        <tr><td>Specification Name:</td><td>%s</td></tr> \
        <tr><td>Specification Id:</td><td>%s</td></tr> \
        <tr><td>Specification Reference:</td><td>%s</td></tr> \
        <tr><td>Original File Name:</td><td>%s</td></tr> \
        <tr><td>Production Name:</td><td>%s</td></tr> \
        <tr><td>Size:</td><td>%s bytes</td></tr> \
        <tr><td>Upload Time:</td><td>%s</td></tr> \
        <tr><td>Pre-flight Status:</td><td>%s</td></tr> \
        <tr><td>View HTML Report:</td><td><a href="%s" target="_blank">here</a></td></tr> \
        <tr><td>View Text_Report Report:</td><td><a href="%s" target="_blank">here</a></td></tr> \
        <tr><td>View PDF Report:</td><td><a href="%s" target="_blank">here</a></td></tr> \
        </table> \
        </div> \
        <div style="float: left;margin-bottom:15;margin-top:15;border-style:solid;border-width:1px;border-color:#666666"> \
        <div style="margin:15 15 15 15"><a href="%s"><img src="%s"></a></div> \
        </div><br style="clear:both" /></div>' \
        % (db_file['original_name'], spec['spec_name'], proj['client_account'], proj['project_name'], str(proj['project_id']), \
           spec['spec_name'], str(spec['spec_id']), str(spec['reference_number']), \
           db_file['original_name'], db_file['dam_name'], str(db_file['size']), \
           utils.parse_time(db_file['upload_time'], utils.STYLE_DATE_TIME_SHORT_MONTH), db_file['preflight_info'], \
           db_file['preflight_report_html'], db_file['preflight_report_text'], db_file['preflight_report_mask'], \
           db_file['preflight_report_html'], db_file['thumbnail_url'])

    index_folder = utils.get_guid_path(self.web_rootdir, db_file['guid'])
    index_file = os.path.join(index_folder, 'index.html')
    f = open(index_file, 'w')
    f.write(html)
    f.write(utils.get_email_company_footer(self.mail_signature_logo, self.company_web_address))
    f.write('</body></html>')
    f.close()
def timestamp(self):
    ''' Return a parsed timestamp '''
    return parse_time(self._timestamp)
def sleep(irc, source, msgtarget, args):
    after = re.match(time_expression, args)
    if after:
        after = parse_time(after.group(1))
        time.sleep(after)
        irc.msg(msgtarget, "{} seconds passed".format(after))
print
from utils import parse_timespan
_pass_fail("parse_timespan 1", parse_timespan("1h"), 3600)
_pass_fail("parse_timespan 2", parse_timespan("1m"), 60)
_pass_fail("parse_timespan 3", parse_timespan("1s"), 1)
_pass_fail("parse_timespan 4", parse_timespan("1h2m3s"), 3723)
_pass_fail("parse_timespan 5", parse_timespan("17"), 17)
_pass_fail("parse_timespan 6", parse_timespan("5h"), 3600 * 5)

print
# FIXME - these always fail because we don't get the precision right.
# not sure what to do about that.
from utils import parse_time
_pass_fail("parse_time 1", int(parse_time("4:20p")), 1029878400)
_pass_fail("parse_time 2", int(parse_time("4m")), 1029796956)
_pass_fail("parse_time 3", int(parse_time("9")), 1029796725)
_pass_fail("parse_time 4", int(parse_time("1:17:34a")), 1029824254)

print
from utils import expand_placement_vars
# these are lyntin mode tests
_pass_fail("expand_placement_vars 1",
           expand_placement_vars("#test 1 2 3", "#test"),
           "#test 1 2 3")
_pass_fail("expand_placement_vars 2",
           expand_placement_vars("#test 1 2 3", "#test %1 %2"),
           "#test 1 2")
_pass_fail("expand_placement_vars 3",
def main(args):
    if len(args) != 5:
        print 'Usage: <trace> <segment> <user_ids> <output>'
        return

    f = open(args[1], 'r')
    requests = []
    for line in f:
        request = json.loads(line)
        requests.append(request)
    f.close()

    f = open(args[2], 'r')
    segments = []
    for line in f:
        request = json.loads(line)
        segments.append(request)
    f.close()

    user_requests = {}
    f = open(args[3], 'r')
    for line in f:
        line = line[:-1]
        user_requests[line] = []
    f.close()

    start_date = segments[0]['start_time'].split()[0]
    is_full = set()
    cnt = 0
    for request in requests:
        uid = request['uid']
        if uid in user_requests and uid not in is_full:
            cnt += 1
            # each request has an unique id
            request['idx'] = str(int(segments[-1]['idx']) + cnt)
            # avoid using same expt_idx in segment
            request['expt_idx'] = 'zfeng' + request['expt_idx']
            request['uid'] = 'zfeng' + uid
            req_list = user_requests[uid]
            if len(req_list) == 0:
                splits = request['start_time'].split()
                # new_time = replace date
                request['new_time'] = start_date + ' ' + splits[1]
            else:
                start_time = utils.parse_time(req_list[0]['start_time'])
                cur_time = utils.parse_time(request['start_time'])
                diff = cur_time - start_time
                if diff.days >= 31:
                    is_full.add(uid)
                    continue
                start_new_time = utils.parse_time(req_list[0]['new_time'])
                cur_new_time = start_new_time + diff
                # new_time = new_time of first request + difference
                request['new_time'] = utils.format_time(cur_new_time)
            req_list.append(request)

    f = open(args[4], 'w')
    # output requests in segment
    for request in segments:
        f.write(json.dumps(request) + '\n')
    # output requests in users
    for uid in user_requests.keys():
        req_list = user_requests[uid]
        print uid, len(req_list)
        for request in req_list:
            f.write(json.dumps(request) + '\n')
    f.close()
def parseCondition(condition, user, owner=None):
    '''
    condition: {
        value: "war"
    }
    or
    condition: {
        key: "year",
        value: [1970, 1980],
        operator: "="
    }
    ...
    '''
    k = condition.get('key', '*')
    k = {'id': 'itemId'}.get(k, k)
    if not k:
        k = '*'
    v = condition['value']
    op = condition.get('operator')
    if not op:
        op = '='
    if op.startswith('!'):
        op = op[1:]
        exclude = True
    else:
        exclude = False

    facet_keys = models.Item.facet_keys + ['title']
    key_type = settings.CONFIG['keys'].get(k, {'type': 'string'}).get('type')
    if isinstance(key_type, list):
        key_type = key_type[0]
    key_type = {
        'title': 'string',
        'person': 'string',
        'text': 'string',
        'year': 'string',
        'length': 'string',
        'layer': 'string',
        'list': 'list',
    }.get(key_type, key_type)
    if k == 'list':
        key_type = ''

    if v == '{me}' and op == '==':
        if not owner:
            owner = user
        if k == 'user':
            v = owner.username
        elif k == 'groups':
            q = Q(groups__in=owner.groups.all())
            if exclude:
                q = ~q
            return q

    if (not exclude and op == '=' or op in ('$', '^')) and v == '':
        return Q()
    elif k == 'filename' and (user.is_anonymous() or \
            not user.get_profile().capability('canSeeMedia')):
        return Q(id=0)
    elif k == 'oshash':
        return Q(files__oshash=v)
    elif k == 'rendered':
        return Q(rendered=v)
    elif k == 'resolution':
        q = parseCondition({'key': 'width', 'value': v[0], 'operator': op}, user) \
            & parseCondition({'key': 'height', 'value': v[1], 'operator': op}, user)
        if exclude:
            q = ~q
        return q
    elif isinstance(v, list) and len(v) == 2 and op == '=':
        q = parseCondition({'key': k, 'value': v[0], 'operator': '>='}, user) \
            & parseCondition({'key': k, 'value': v[1], 'operator': '<'}, user)
        if exclude:
            q = ~q
        return q
    elif k in ('canplayvideo', 'canplayclips'):
        level = user.is_anonymous() and 'guest' or user.get_profile().get_level()
        allowed_level = settings.CONFIG['capabilities'][{
            'canplayvideo': 'canPlayVideo',
            'canplayclips': 'canPlayClips'
        }[k]][level]
        if v:
            q = Q(level__lte=allowed_level)
        else:
            q = Q(level__gt=allowed_level)
        if exclude:
            q = ~q
        return q
    elif key_type == 'boolean':
        q = Q(**{'find__key': k, 'find__value': v})
        if exclude:
            q = ~q
        return q
    elif key_type == "string":
        in_find = not k.startswith('itemId')
        if in_find:
            value_key = 'find__value'
        else:
            value_key = k
        if k in facet_keys:
            in_find = False
            facet_value = 'facets__value%s' % {
                '==': '__iexact',
                '>': '__gt',
                '>=': '__gte',
                '<': '__lt',
                '<=': '__lte',
                '^': '__istartswith',
                '$': '__iendswith',
            }.get(op, '__icontains')
            v = models.Item.objects.filter(**{'facets__key': k, facet_value: v})
            value_key = 'id__in'
        else:
            value_key = '%s%s' % (value_key, {
                '==': '__iexact',
                '>': '__gt',
                '>=': '__gte',
                '<': '__lt',
                '<=': '__lte',
                '^': '__istartswith',
                '$': '__iendswith',
            }.get(op, '__icontains'))
        k = str(k)
        value_key = str(value_key)
        if isinstance(v, unicode):
            v = unicodedata.normalize('NFKD', v).lower()
        if k == '*':
            q = Q(**{value_key: v})
        elif in_find:
            q = Q(**{'find__key': k, value_key: v})
        else:
            q = Q(**{value_key: v})
        if exclude:
            q = ~q
        return q
    elif k == 'list':
        q = Q(id=0)
        l = v.split(":")
        if len(l) == 1:
            vqs = Volume.objects.filter(name=v, user=user)
            if vqs.count() == 1:
                v = vqs[0]
                q = Q(files__instances__volume__id=v.id)
        elif len(l) >= 2:
            l = (l[0], ":".join(l[1:]))
            lqs = list(List.objects.filter(name=l[1], user__username=l[0]))
            if len(lqs) == 1 and lqs[0].accessible(user):
                l = lqs[0]
                if l.query.get('static', False) == False:
                    data = l.query
                    q = parseConditions(data.get('conditions', []),
                                        data.get('operator', '&'),
                                        user, l.user)
                else:
                    q = Q(id__in=l.items.all())
                if exclude:
                    q = ~q
            else:
                q = Q(id=0)
        return q
    elif key_type == 'date':
        def parse_date(d):
            while len(d) < 3:
                d.append(1)
            return datetime(*[int(i) for i in d])

        # using sort here since find only contains strings
        v = parse_date(v.split('-'))
        vk = 'sort__%s%s' % (k, {
            '==': '__exact',
            '>': '__gt',
            '>=': '__gte',
            '<': '__lt',
            '<=': '__lte',
        }.get(op, ''))
        vk = str(vk)
        q = Q(**{vk: v})
        if exclude:
            q = ~q
        return q
    else:
        # integer, float, list, time
        # use sort table here
        if key_type == 'time':
            v = int(utils.parse_time(v))
        vk = 'sort__%s%s' % (k, {
            '==': '__exact',
            '>': '__gt',
            '>=': '__gte',
            '<': '__lt',
            '<=': '__lte',
        }.get(op, ''))
        vk = str(vk)
        q = Q(**{vk: v})
        if exclude:
            q = ~q
        return q
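# A hypothetical usage sketch of parseCondition() (not from the source): it assumes a
# key named "duration" is configured in settings.CONFIG['keys'] with type 'time', so
# the value is converted to seconds via utils.parse_time() and matched against the sort
# table, yielding roughly Q(sort__duration__gt=5400).
condition = {'key': 'duration', 'value': '01:30:00', 'operator': '>'}
q = parseCondition(condition, user=some_user)  # some_user: any authenticated User instance
items = models.Item.objects.filter(q)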
def days_since_last_pmt(self):
    if not self.lastPaymentDate:
        return None
    return (get_time_now() - parse_time(self.lastPaymentDate)).days