def persentageOfTypingWhileLookingAtKeyboard(gazeTypeList, rt_gaze, rt_kb, y_ratio):
    """Return the fraction of keyboard events that occur while the gaze
    is in a type-2 chunk (looking down at the keyboard).

    gazeTypeList -- per-sample gaze type labels, consumed chunk by chunk
    rt_gaze      -- relative timestamps of the gaze samples
    rt_kb        -- relative timestamps of the keyboard events
    y_ratio      -- kept for interface compatibility (no longer read here;
                    the original bound two values from it to unused locals)
    """
    returnNumber = 0
    lastIndex_kb = 0
    # Walk the gaze stream chunk by chunk.
    lastChunk_b = -1
    while lastChunk_b < len(gazeTypeList) - 1:
        chunk_f, chunk_b, c_type = DataPreprocessing.nextChunk(
            lastChunk_b, gazeTypeList)
        if c_type == 2:  # chunk where the user looks down at the keyboard
            rt_f = rt_gaze[chunk_f]
            rt_b = rt_gaze[chunk_b]
            # Locate the keyboard events bounding this chunk's time span.
            index_kb_f = Time.Time().findPositionInFloatList(
                rt_f, rt_kb, lastIndex_kb)
            if index_kb_f >= len(rt_kb):
                break
            lastIndex_kb = index_kb_f
            index_kb_b = Time.Time().findPositionInFloatList(
                rt_b, rt_kb, lastIndex_kb - 1)
            if index_kb_b >= len(rt_kb):
                break
            lastIndex_kb = index_kb_b
            # Events inside [rt_f, rt_b] count as typed-while-looking.
            returnNumber += (index_kb_b - index_kb_f)
        # Advance past this chunk.
        lastChunk_b = chunk_b
    return float(returnNumber) / float(len(rt_kb))
def __init__(self, test, job_id):
    """Initialize state for executing a single test job.

    test   -- the testcase object to execute
    job_id -- identifier of the job this run belongs to
    """
    self.now_exec_testcase = test
    self.time = Time()
    self.job_id = job_id
    self.test_info = TestInfo()
    # Accumulates test entries; empty until populated elsewhere.
    self.test = []
    # Destination file path; set later by the caller/workflow.
    self.dst_file_path = ""
def SlidingMovingWindow(self):
    """Advance the sliding window over the gaze stream and compute the
    gaze movement within the current window.

    Returns (window midpoint time, sum of x differences, sum of y
    differences). Sets self.reahEndFlag when the window reaches the end
    of the gaze data.
    """
    current_rt_f = self.gaze_rts[self.WT_index_front]
    current_rt_b = current_rt_f + self.windowLength
    # Check whether the window back edge runs past the last sample.
    print(str(current_rt_b) + ' ' + str(self.gaze_rts[-1]))
    if current_rt_b >= self.gaze_rts[-1]:
        self.reahEndFlag = True
        current_rt_b = self.gaze_rts[-1]
    self.WT_index_back = Time.Time().findPositionInFloatList(
        current_rt_b, self.gaze_rts, self.foundIndex_WT)
    # Clamp the back index to the list length.
    if self.WT_index_back > len(self.gaze_rts):
        self.WT_index_back = len(self.gaze_rts)
    self.foundIndex_WT = self.WT_index_back
    # Compute gaze movement within the current window.
    sum_x_diff, sum_y_diff = self.computeGM()
    # Slide the window front forward by self.delta for the next call.
    at_f = Time.Time().addByNms(self.gaze_ats[self.WT_index_front],
                                self.delta)
    self.WT_index_front = Time.Time().findPositionInTimeArray(
        at_f, self.gaze_ats, self.foundIndex_atF) - 1
    self.foundIndex_atF = self.WT_index_front
    return (current_rt_f + current_rt_b) / 2.0, sum_x_diff, sum_y_diff
def checkExpTime(entry):
    """Check whether output occurred within the expected timing window
    relative to entry.timeBase; record a failure if not.

    "*" in entry.earliest / entry.latest means that bound is unchecked.
    """
    import Files
    import Timer
    import Time
    clock = Timer.timeQueue.wallClock
    passed = False
    #import pdb; pdb.set_trace()
    if entry.timeBase == clock:
        passed = True
    if entry.timeBase < clock:
        # Output is late: allowed if no earliest bound, or still within it.
        if entry.earliest == "*":
            passed = True
        else:
            if entry.timeBase + entry.earliest >= clock:
                passed = True
    else:
        # Output is early: allowed if no latest bound, or still within it.
        if entry.latest == "*":
            passed = True
        else:
            if entry.timeBase - entry.latest <= clock:
                passed = True
    if not passed:
        # NOTE(review): this assigns a local only — presumably meant to set
        # a module/global failure flag; confirm against the callers.
        expectFailed = True
        Global.results.putFailure("Timing : output expected at %s, output occurred at %s" % (
            Time.timeString(entry.timeBase), Time.timeString(Timer.timeQueue.wallClock)))
def locations(self, school_id, day, time):
    """Collect routing inputs (depot, drivers, passengers) for a school
    at a given day/time and build the index/vehicle structures.

    Returns (vehicle_data, location_data, drivers_indices,
    passengers_indices, temp1, temp2) where temp1/temp2 come from
    self.get_user_indices.
    """
    # NOTE(review): queries are built with str.format — SQL-injection risk
    # if day/school_id can come from untrusted input; parameterize if the
    # underlying DB API supports it.
    depot = self.select_all_locations(
        "SELECT street, streetNumber, locality, region, zipcode, country FROM schools WHERE id={}"
        .format(school_id))
    # Drivers: timetable rows with a non-NULL seats column.
    drivers = self.select_all_locations(
        "SELECT street, streetNumber, locality, region, zipcode, country FROM users, timetable WHERE {}=\"{}\" AND school_id={} AND users.id=timetable.id AND timetable.status=1 AND seats IS NOT NULL GROUP BY users.id"
        .format(day, Time.new_time_string_for_time(time), school_id))
    # Passengers: timetable rows with a NULL seats column.
    passengers = self.select_all_locations(
        "SELECT street, streetNumber, locality, region, zipcode, country FROM users, timetable WHERE {}=\"{}\" AND school_id={} AND users.id=timetable.id AND timetable.status=1 AND seats IS NULL GROUP BY users.id"
        .format(day, Time.new_time_string_for_time(time), school_id))
    depot_index = []
    drivers_indices = []
    passengers_indices = []
    # The depot is always location index 0; drivers follow, then passengers.
    depot_index.append(0)
    for x in range(1, len(drivers) + len(depot)):
        drivers_indices.append(x)
    for z in range((len(drivers) + len(depot)),
                   (len(passengers) + len(drivers) + len(depot))):
        passengers_indices.append(z)
    logger_5.info(depot_index)
    logger_5.info(drivers_indices)
    logger_5.info(passengers_indices)
    locations_indices = depot_index + drivers_indices + passengers_indices
    logger_5.info(locations_indices)
    location_data = {
        'num': len(locations_indices),
        'starts': drivers_indices
    }
    vehicle_data = {
        'num': len(drivers_indices),
        'capacities': self.select_capacities(school_id, day, time)
    }
    temp1, temp2 = self.get_user_indices(school_id, day, time)
    return vehicle_data, location_data, drivers_indices, passengers_indices, temp1, temp2
def Generate(self):
    """Generate transaction text for every posting date up to and
    including the parent's target date.

    Normally this yields zero or one transactions, but after a
    processing delay several may be produced at once. Each returned
    string starts with a newline so transactions stay separated by a
    blank line in the output ledger.
    """
    transactions = []
    target = self._Parent._TargetDateObject
    while self._DateObject <= target:
        transactions.append('\n' + self.TransactionText())
        # Step to the next posting date; a non-advancing period would
        # loop forever, so treat it as a configuration error.
        next_date = Time.DateAddPeriod(self._DateObject, self._Period)
        if next_date <= self._DateObject:
            _Throw("Period is negative or zero!")
        self._DateObject = next_date
        self._DateStr = Time.DateToText(self._DateObject)
    return transactions
def run(self):
    """PID control loop driving self.pResponse_ toward self.pRef_.

    Reference: http://www.codeproject.com/Articles/36459/PID-process-control-a-Cruise-Control-example
    Runs until self.done_ is set; each cycle appends a
    (timestamp, response) sample to self.D_ under self.lock_.
    """
    last_ts = Time.currentTime()
    prev_error = 0
    integral = 0
    # PID gains.
    kp = 1.2
    ki = 0.003
    kd = 1
    pv = 0.0
    noise = 0.0
    while not self.done_:
        self.lock_.acquire()
        now = Time.currentTime()
        dt = now - last_ts
        error = self.pRef_ - self.pResponse_
        integral = integral + error * dt
        derivative = (error - prev_error) / dt
        output = kp * error + ki * integral + kd * derivative
        prev_error = error
        # Plant model: apply controller output with decay plus noise.
        pv = pv + (output * 0.20) - (pv * 0.10) + noise
        dp = pv * dt
        self.pResponse_ = self.pResponse_ + dp
        last_ts = now
        self.D_.append((Time.currentTime(), self.pResponse_))
        self.lock_.release()
        time.sleep(ControlLoopCycle)
def Download(url, folder="./", file_name=None):
    """Download *url* over HTTP(S) and write the body to disk.

    folder    -- destination directory (must end with a separator)
    file_name -- override for the output name; defaults to the last
                 path segment of the URL
    Returns the response headers.
    """
    parsed_url = urllib.parse.urlparse(url)
    if parsed_url.scheme == "https":
        conn = http.client.HTTPSConnection(parsed_url.netloc)
    else:
        conn = http.client.HTTPConnection(parsed_url.netloc)
    path_query = parsed_url.path
    if parsed_url.query != "":
        path_query += "?" + parsed_url.query
    method = "GET"
    conn.request(method, path_query, None, {})
    # Fix: was a bare GetTime() — made consistent with the other calls.
    print("{} Request Start: {}".format(Time.GetTime(), url))
    resp = conn.getresponse()
    resp_header = resp.getheaders()
    resp_body = resp.read()
    conn.close()
    print("{} Request End: {}, {}Bytes".format(Time.GetTime(), resp.status,
                                               len(resp_body)))
    # Fix: original referenced undefined 'foldr' (NameError) when
    # file_name was given; also use `is None` for the None test.
    if file_name is None:
        file_path = folder + parsed_url.path.split("/")[-1]
    else:
        file_path = folder + file_name
    with open(file_path, "wb") as f:
        print("{} File Write Start: {}".format(Time.GetTime(), file_path))
        f.write(resp_body)
        print("{} File Write End: {} Bytes".format(Time.GetTime(),
                                                   len(resp_body)))
    return resp_header
def Addbycost(begin_time, category, content):
    """Append one expense/time record to data1.csv.

    begin_time -- raw start time; converted via t.Getstr into a mapping
                  with at least 'YMD' and 'timestamp' keys
    category   -- category label for the record
    content    -- free-text description
    """
    df = pd.read_csv('./data1.csv', index_col=0)
    # NOTE(review): 'over_time' is not defined in this function — it must
    # be a module-level global; confirm where it is set.
    sub = t.SubtractionTime(begin_time, over_time)
    # Convert the raw time into its string/timestamp representation.
    begin_time = t.Getstr(begin_time)
    print(sub)
    # NOTE(review): DataFrame.append is deprecated (removed in pandas 2.x);
    # pd.concat would be the modern replacement.
    df = df.append([{'Date': '{}'.format(begin_time['YMD']),
                     'Category': '{}'.format(category),
                     'Time': '{}'.format(sub['name']),
                     'Content': '{}'.format(content),
                     'Timestamp': int(begin_time['timestamp']),
                     'Rank': sub["Rank"]}], ignore_index=True)
    df.to_csv('data1.csv', index=False, encoding='utf-8')
def getDuractionTime(year, duractionTime):
    """Build human-readable duration messages from a suspend/resume log map.

    duractionTime maps event names ("PM: suspend of", "PM: resume of",
    wakelock names, ...) to their timestamp strings; returns a list of
    formatted message strings.
    """
    massing = []
    startLockTime = 0
    startLockName = ""
    # NOTE(review): 'ren' is computed but never used.
    ren = len(duractionTime)
    if "PM: suspend of" in duractionTime.keys(
    ) and "PM: resume of" in duractionTime.keys():
        # Both markers present: report the awake span between resume and suspend.
        totalityTime = Time.DuractionTime(year,
                                          duractionTime["PM: resume of"],
                                          duractionTime["PM: suspend of"])
        massing.append("唤醒持续时长:" + str(totalityTime) + "秒" + "=====" +
                       "开始时间:" + duractionTime["PM: resume of"] + ">>>>" +
                       "结束时间:" + duractionTime["PM: suspend of"] + "\n")
    else:
        # NOTE(review): this branch still reads "PM: resume of" — it raises
        # KeyError when that key is the one missing; confirm intent.
        massing.append("本次唤醒之后没有休眠或者log终结唤醒时间:" +
                       duractionTime["PM: resume of"] + "\n")
    # Pair up consecutive lock entries to report per-lock durations.
    for key, address in duractionTime.items():
        if not key == "PM: resume of":
            if startLockTime != 0 and startLockName != "":
                lockTime = Time.DuractionTime(year, startLockTime, address)
                massing.append(startLockName + "持续了:" + str(lockTime) +
                               "秒" + "\n")
                startLockName = key
                startLockTime = address
            else:
                startLockName = key
                startLockTime = address
    return massing
def __init__(self, id, pref1, pref2, pref3, pref4, pref5, lockedIn,
             address, city, state, zipCode, bussing):
    """Initialize a participant record with preferences, address and
    bussing/lock-in flags; placement fields start as dummy values.
    """
    self.id = id
    self.address = address
    self.city = city
    self.state = state
    self.zipCode = zipCode
    # Preferences are stored as strings regardless of input type.
    self.pref1 = str(pref1)
    self.pref2 = str(pref2)
    self.pref3 = str(pref3)
    self.pref4 = str(pref4)
    self.pref5 = str(pref5)
    # will need to be changed to something more universal
    # NOTE(review): bussing compares lowercase 'x' but lockedIn compares
    # uppercase 'X' — confirm this asymmetry matches the input format.
    if (bussing == 'x'):
        self.bussing = True
    else:
        self.bussing = False
    # will need to be changed to something more universal
    if (lockedIn == 'X'):
        self.lockedIn = True
    else:
        self.lockedIn = False
    # Set variables not passed as parameters to dummy data to show
    # that they are unset.
    self.timeOfDay = ""
    self.placed = False
    self.placementName = ""
    self.placementId = -1
    self.school = None
    self.busTime = Time.Time(0)
    self.busDropoffTime = Time.Time(0)
    self.busRoute = -1
    self.longitude = -1
    # NOTE(review): 'latitue' looks like a typo for 'latitude'; renaming
    # would break external readers, so it is kept.
    self.latitue = -1
    self.distanceMatrixPosition = -1
def parse_to(inaccuracy, args):
    """Create a SNTP packet with modified fields (time and flags)"""
    try:
        li = args[0]
        version = 4
        mode = 4
        # Pack leap indicator, version and mode into the first byte.
        LIVNMODE = (li << 6) + (version << 3) + mode
        stratum = 3
        poll = args[4]
        precision = random.randint(-20, -6)
        root_delay = random.randint(-2, 512)
        # NOTE(review): root_disp is packed with unsigned 'I' below, but
        # randint(-2, 512) can be negative — struct.error would be caught
        # by the blanket except and the function would return None. Confirm
        # the intended range.
        root_disp = random.randint(-2, 512)
        reference_identifier = 0
        # Fold the 4 bytes of ID into a 32-bit big-endian identifier.
        shifter = 24
        i = 0
        while shifter >= 0:
            reference_identifier += (ID[i] << shifter)
            shifter -= 8
            i += 1
        time = Time.get_time(inaccuracy)
        # NTP timestamps: seconds in the high 32 bits, fraction in the low.
        reference_timestamp = int((int(math.floor(time)) << 32) +
                                  random.random())
        originate_timestamp = (args[12][0] << 32) + args[12][1]
        time = Time.get_time(inaccuracy)
        recieved_timestamp = int((int(math.floor(time)) << 32) +
                                 random.random())
        transmit_timestamp = int((int(math.floor(time)) << 32) +
                                 random.random())
        return struct.pack("!4biII4Q", LIVNMODE, stratum, poll, precision,
                           root_delay, root_disp, reference_identifier,
                           reference_timestamp, originate_timestamp,
                           recieved_timestamp, transmit_timestamp)
    except Exception:
        # NOTE(review): swallows all errors and implicitly returns None.
        print("Can't make a package from a data, received")
def deliver_packages(Packed_Trucks, Truck_List):
    """Simulate each truck delivering its manifest via nearest-package
    routing, updating package statuses and delivery times.
    """
    Package_List = Packages.get_package_list()
    truck_counter = 1;
    for truck in Packed_Trucks:
        # First truck gets fed in; repeatedly deliver the nearest package
        # until the manifest is empty.
        while truck.package_manifest:
            delivered_package = find_nearest_package(truck.package_manifest,
                                                     truck.current_loc, truck)
            truck.set_current_loc(delivered_package.address)
            Package_List.search(delivered_package.id).set_time_left_hub(
                truck.departing_time)
            Package_List.search(delivered_package.id).update_status('Delivered')
            Package_List.search(delivered_package.id).set_time_delivered(
                Time.get_time(truck.departing_time, truck.distance_traveled))
            #print("Package delivered id and time: ", delivered_package.id, delivered_package.delivery_time, len(truck.package_manifest))
            truck.remove_package(delivered_package)
        # After the first truck finishes, the third truck departs at that
        # truck's finishing time.
        if truck_counter == 1:
            Truck_List[2].departing_time = Time.get_time(
                truck.departing_time, truck.distance_traveled)
        truck_counter += 1
        print('Truck: ', truck.id, truck.departing_time)
        print('Total Distance Traveled: ', truck.distance_traveled,
              'Time taken: ', Time.int_to_hours(truck.distance_traveled))
        print('Time finished = ',
              Time.get_time(truck.departing_time, truck.distance_traveled))
        print('************ \n')
def main():
    """Poll timezones forever; once per weekday, around the 20:00 local
    deadline, launch a worker thread for each timezone's schools.
    """
    # Maps today's weekday name to the schedule day to process; Friday and
    # Saturday have no following processing day.
    days = {}
    days['Sunday'] = 'monday'
    days['Monday'] = 'tuesday'
    days['Tuesday'] = 'wednesday'
    days['Wednesday'] = 'thursday'
    days['Thursday'] = 'friday'
    days['Friday'] = None
    days['Saturday'] = None
    deadline = datetime.time(20, 0, 0)
    while True:
        one = SQLHandler()
        threads = []
        timezones = one.build_timezone_pool()
        already_run = False
        logger.debug("here")
        for t in timezones:
            logger.info(t)
            time_in_timezone = Time.add_timezone(deadline, t)
            # Fire only within the [20:00, 20:02] window, once per pass.
            if days[datetime.date.today().strftime(
                    "%A")] is not None and Time.time_in_range(
                        deadline, datetime.time(20, 2, 0),
                        time_in_timezone) and already_run is False:
                day = days[datetime.date.today().strftime("%A")]
                schools = one.build_school_pool(t)
                thread = threading.Thread(target=run_thread,
                                          args=(day, schools))
                thread.start()
                threads.append(thread)
                already_run = True
def __init__(self, coin, timeLength):
    """Initialize per-coin analysis state.

    coin       -- coin symbol used to derive the table names
    timeLength -- time window length for the analysis
    """
    self.coin = coin
    self.timeLength = timeLength
    # Database table names derived from the coin symbol.
    self.INDICATOR_TABLE_NAME = "{}_indicators".format(self.coin)
    self.PRICE_TABLE_NAME = "{}_price_data".format(self.coin)
    self.time = Time()
    self.sma = SMA()
def startDose(doseLength, waitTime):
    """Drive the dosing relay (Phidget digital output 0) high for the
    dose duration, then low again.

    NOTE(review): 'shouldDose' and 'timeToDose' are not defined locally —
    presumably module-level globals; the 'doseLength' and 'waitTime'
    parameters are never read. Confirm which is intended.
    """
    if(shouldDose == True):
        # NOTE(review): assignments to doseLock here are local and do not
        # affect any module-level lock; confirm a `global` was intended.
        doseLock = True
        t0 = Utility.currentTimeMillis()
        # Set digital output pin high to start dosing.
        try:
            ik.interfaceKit.setOutputState(0, 1)
        except PhidgetException as e:
            doseLock = False
            ik.interfaceKit.setOutputState(0, 0)
            print("Phidget Exception %i: %s" % (e.code, e.details))
            print("Exiting....")
            exit(1)
        print("setting dig output to 1")
        # Dose until the dose time is met or something else releases the lock.
        # NOTE(review): with `or doseLock != True`, releasing the lock keeps
        # the loop running instead of ending it; `and doseLock == True`
        # looks like the intent — confirm before changing.
        while((Utility.currentTimeMillis() - t0) < timeToDose * 1000
              or doseLock != True):
            Time.sleep(0.1)
        # Drop the output pin to stop dosing.
        try:
            ik.interfaceKit.setOutputState(0, 0)
            doseLock = False
            print("dosed for %i second out put changed to 0" % (timeToDose))
        except PhidgetException as e:
            doseLock = False
            print("Phidget Exception %i: %s" % (e.code, e.details))
            print("Exiting....")
            exit(1)
        doseLock = False
def obterCadencia(linhas_arq):
    """Compute the cadence (steps per minute) from the activity log lines."""
    total_steps = obterUltimoPasso(linhas_arq)
    total_duration = Time.obterDuracaoTotal(linhas_arq)
    duration_minutes = Time.ConverterDuracaoMinuto(total_duration)
    return total_steps / duration_minutes
def userInput():
    """Replay the next recorded input from Save.inputs.

    When the last recorded input is consumed, switch off loading-game
    mode and reset the replay counters.
    """
    index = Save.counter
    Save.counter = index + 1
    recorded = Save.inputs[index]
    if Save.counter == len(Save.inputs):
        # Replay exhausted: leave loading mode and rewind counters.
        ti.setLoadingGame(False)
        Save.counter = 0
        d.SavedRolls.counter = 0
    return recorded
def I106_Decode_TimeF1(header, msg_buffer):
    """Decode an IRIG-106 Time Format 1 packet via the native DLL.

    Returns (ret_status, irig_time) where irig_time is a Python-side
    Time.IrigTime built from the native structure.
    """
    # Native out-parameter filled by the DLL call.
    native_irig_time = Time._ctIrig106Time()
    ret_status = Packet.IrigDataDll.enI106_Decode_TimeF1(
        ctypes.byref(header), ctypes.byref(msg_buffer),
        ctypes.byref(native_irig_time))
    irig_time = Time.IrigTime()
    irig_time.set_from_ctIrig106Time(native_irig_time)
    return ret_status, irig_time
def JSON(data, file_name, directory="./"):
    """Serialize *data* as JSON to <directory><file_name>.json.

    Fix: the file is now opened with an explicit UTF-8 encoding —
    ensure_ascii=False emits non-ASCII characters, which could fail or
    vary under a platform-dependent default encoding.
    """
    file_path = directory + file_name + ".json"
    with open(file_path, "wt", encoding="utf-8") as f:
        print("{} File Write Start: {}".format(Time.GetTime(), file_path))
        content = json.dumps(data, ensure_ascii=False)
        f.write(content)
        print("{} File Write End: {} Bytes".format(Time.GetTime(),
                                                   len(content)))
    return
def __init__(self):
    """Register the 'edit' command with help text for each argument."""
    Command.__init__(self,
                     name="edit",
                     footer=[
                         "ID " + ID.help_text(),
                         "PRIORITY " + Priority.help_text(),
                         "START " + Time.help_text(),
                         "END " + Time.help_text()
                     ])
def adjustEDAAtList(EDA_at_list, diffGME):
    """Shift every EDA absolute timestamp by diffGME.

    A non-negative diffGME shifts forward; a negative one shifts
    backward by its magnitude. Returns a new list; the input is not
    modified.
    """
    def _shift(at):
        if diffGME >= 0:
            return Time.Time().addByNms(at, diffGME)
        return Time.Time().substractByNms(at, abs(diffGME))

    return [_shift(at) for at in EDA_at_list]
def Open_Csv(self, filename):
    """Parse a tab-separated export into self.details (header key/value
    section) and self.participants (session rows).

    The file has a key/value header section, a blank line, a column
    header row, then one row per session; repeated participants get
    additional start/end/duration entries appended.
    """
    self.filename = filename
    self.details.clear()
    self.participants.clear()
    dets = False
    firstRow = True
    with open(filename, 'r', encoding='utf-8') as f:
        for x in f:
            # A blank line separates the details header from the data rows.
            if str(x) == '\n':
                dets = True
                continue
            data = x.split('\t')
            if not dets:
                if len(data) == 1:
                    continue
                self.details.update({data[0]: data[1]})
                continue
            if firstRow:
                # Dismiss the column-header line.
                firstRow = False
                continue
            # Derive the participant key from the e-mail local part,
            # stripping the 'int_'/'thl_' style prefixes.
            am = data[4].split('@')
            if am[0].startswith('int'):
                primary_key = am[0][4:]
            elif am[0].startswith('thl'):
                primary_key = am[0][5:]
            print(primary_key)
            # NOTE(review): `in` / `.index(primary_key)` on a list of
            # participant objects only works if participant defines
            # equality against the key string — confirm in the prt class.
            if primary_key in self.participants:
                starttime = data[1].split(',')
                starttime_type = starttime[1].split(' ')
                self.participants[self.participants.index(
                    primary_key)].Append_STime(starttime_type[1])
                endtime = data[2].split(',')
                endtime_type = endtime[1].split(' ')
                self.participants[self.participants.index(
                    primary_key)].Append_ETime(endtime_type[1])
                self.participants[self.participants.index(
                    primary_key)].Append_Dur(t.ToSeconds(data[3]))
                continue
            # First occurrence: build a fresh participant record.
            name = data[0]
            date = data[1].split(',')[0]
            starttime = data[1].split(',')[1].split(' ')[1]
            endtime = data[2].split(',')[1].split(' ')[1]
            duration = t.ToSeconds(data[3])
            email = data[4]
            am = data[4].split('@')[0]
            if am.startswith('int'):
                am = am[4:]
            elif am.startswith('thl'):
                am = am[5:]
            role = data[5]
            aparticipant = prt.participant(name, date, role, am, email)
            aparticipant.Append_STime(starttime)
            aparticipant.Append_ETime(endtime)
            aparticipant.Append_Dur(duration)
            self.participants.append(aparticipant)
def validate(self):
    """Validate the subtitle fields and build its Time objects.

    Mandatory before use because it generates time_begin/time_end and
    the mt position marker, then drops the raw begin/end strings.
    Returns True on success, False if any mandatory field is missing.

    Fix: the regex patterns are now raw strings — '\\W' in a plain
    literal is an invalid escape sequence (DeprecationWarning, and an
    error in future Python versions).
    """
    if self.number == 0 or self.begin == '' or self.end == '' or self.text is None:
        return False
    zbegin = re.split(r'\W+', self.begin)
    zend = re.split(r'\W+', self.end)
    self.time_begin = Time(int(zbegin[0]), int(zbegin[1]), int(zbegin[2]),
                           int(zbegin[3]))
    self.time_end = Time(int(zend[0]), int(zend[1]), int(zend[2]),
                         int(zend[3]))
    self.set_mt()
    # The raw strings are no longer needed once the Time objects exist.
    del self.begin
    del self.end
    return True
def readNonEditingIntervalsCSV(fileName):
    """Read a CSV of non-editing intervals into (start, end) Time pairs.

    Each line holds a start timestamp and an end timestamp separated by
    a comma; returns the parsed list in file order.
    """
    intervals = []
    with open(fileName, 'r') as handle:
        for row in handle:
            fields = row.split(',')
            start_at = Time.Time(fields[0])
            end_at = Time.Time(fields[1])
            intervals.append((start_at, end_at))
    return intervals
def reload():
    """Recompute the module-level scheduling timestamps from the current
    settings in sp (publish interval, publish data, insert data)."""
    global nextTimePubData
    global nextTimeInsertData
    global nextTimePubImage
    global lastTimePubImage
    # Next image publish = last publish + configured interval in hours
    # (3600 seconds per hour of timestamp).
    nextTimePubImage = int(
        int(lastTimePubImage) + (int(sp.get(fqPImage)) * int(3600)))
    nextTimePubData = t.getNextTimePubData(sp.get(fqPData))
    nextTimeInsertData = t.getNextTimeInsertData(sp.get(fqIData))
def misc_setup(rps):
    """Set default change ownership flag, umask, relay regexps"""
    # Python 2 octal literal (0o77): mask group/other permission bits on
    # files this process creates.
    os.umask(077)
    Time.setcurtime(Globals.current_time)
    SetConnections.UpdateGlobal("client_conn", Globals.local_connection)
    Globals.postset_regexp('no_compression_regexp',
                           Globals.no_compression_regexp_string)
    # Install signal handlers and hardlink tracking on every connection.
    for conn in Globals.connections:
        conn.robust.install_signal_handlers()
        conn.Hardlink.initialize_dictionaries()
def graph_run(bt_max):
    """Plot the wait time at the bus point for each starting minute in
    [0, bt_max], against the identity line, and show the figure."""
    minutes = np.arange(0, bt_max + 1, 1)
    waits = []
    for minute in range(bt_max + 1):
        wait = Time.sec_in_min(
            runtime(start_x, start_y, buspoint(Time.min_in_sec(minute))))
        waits.append(wait)
        print("Start at 7:{:.1f}".format(30 + minute - wait))
    plt.plot(minutes, waits)
    plt.plot(minutes, minutes)
    plt.show()
def manageTime(self):
    """Create or restart the countdown clock thread for this pool."""
    t = Time()
    if self.clock == None:
        # First use: create the clock thread with the current time and the
        # pool's allotted wait time, and wire its timeout signal.
        self.clock = ClockThread(t.getCurrentTime(), self.pool.getTime())
        self.connect(self.clock, SIGNAL("timeOver()"), self.timeEnding)
    else:
        # NOTE(review): on restart only the start time is reset — confirm
        # whether the maximum wait time should be refreshed here too.
        self.clock.setStartTime(t.getCurrentTime())
    # Launch the thread that will watch the elapsed time.
    self.clock.start()
def generate2CSV(self, fileName):
    """Write self.nonEditingIntervals to *fileName* as CSV, one
    "start,end" line per interval.

    Fix: the original evaluated `f.closed` (an attribute) instead of
    calling `f.close()`, leaking the file handle; a `with` block now
    guarantees the file is closed.
    """
    with open(fileName, 'w') as f:
        for interval in self.nonEditingIntervals:
            line = ''
            line += Time.Time().toString(interval[0])
            line += ','
            line += Time.Time().toString(interval[1])
            f.write(line)
            f.write('\n')
def estadisticaFormulaUno(lista, h, m, s):
    """Return how many lap times in *lista* are faster than the
    reference time built from (h, m, s).

    Fix: the original abused a list comprehension for its
    `listaMejoresTiempos.append` side effect and took the length of the
    resulting list of Nones; this counts the matches directly with the
    same result.
    """
    # Reference time built from the parameter values.
    tiempoPatron = Time(h, m, s)
    limite = tiempoPatron.time_to_int()
    return sum(1 for item in lista if item.time_to_int() < limite)
def fill_driver_data(user_id, url, duration, time):
    """Build the driver-data payload from the JSON form template.

    user_id, url, duration are copied into the payload; pick_up is set
    to `duration + 300` before the given time (units assumed seconds —
    confirm against Time.subtract_time).
    """
    # NOTE(review): hard-coded absolute path breaks on any other machine;
    # consider moving it to configuration.
    data = load_json(
        "/Users/oskarhaeter/PycharmProjects/PythonServer/files/json/json_form_driver_data.json"
    )
    data["user_id"] = user_id
    data["url"] = url
    data["duration"] = duration
    data["pick_up"] = Time.new_time_string_for_time(
        Time.subtract_time(Time.datetime_for_time(time),
                           duration + 300).time())
    return data
def manageTime(self): t=Time()#este objeto permite extraer el tiempo actual del sistema if self.clock==None: #se envia al hilo el tiempo actual y el maximo tiempo de espera self.clock=ClockThread(t.getCurrentTime(),self.pool.getTime())#obtiene el tiempo actual self.connect(self.clock,SIGNAL("timeOver()"),self.timeEnding) else: self.clock.setMaxTime(self.pool.getTime()) self.clock.setStartTime(t.getCurrentTime()) #manda a ejecutar un hilo que consultara el tiempo self.clock.start()
def get_timestats_string(self):
    """Return the portion of the statistics string dealing with time."""
    lines = []
    start = self.StartTime
    end = self.EndTime
    if start is not None:
        lines.append("StartTime %.2f (%s)\n" %
                     (start, Time.timetopretty(start)))
    if end is not None:
        lines.append("EndTime %.2f (%s)\n" % (end, Time.timetopretty(end)))
    # Report elapsed time when it is known, or derivable from both ends.
    if self.ElapsedTime or (start is not None and end is not None):
        if self.ElapsedTime is None:
            self.ElapsedTime = end - start
        lines.append("ElapsedTime %.2f (%s)\n" %
                     (self.ElapsedTime, Time.inttopretty(self.ElapsedTime)))
    return "".join(lines)
def select_all_addresses(self, school_id, day, time):
    """Return all routing addresses for a school at day/time, ordered
    depot first, then drivers, then passengers."""
    # NOTE(review): queries are built with str.format — SQL-injection risk
    # if the inputs can be untrusted; parameterize if the DB API allows.
    # Passengers: timetable rows with a NULL seats column.
    passengers = self.select_all_locations(
        "SELECT street, streetNumber, locality, region, zipcode, country FROM users, timetable WHERE {}=\"{}\" AND school_id={} AND users.id=timetable.id AND timetable.status=1 AND seats IS NULL GROUP BY users.id"
        .format(day, Time.new_time_string_for_time(time), school_id))
    # Drivers: timetable rows with a non-NULL seats column.
    drivers = self.select_all_locations(
        "SELECT street, streetNumber, locality, region, zipcode, country FROM users, timetable WHERE {}=\"{}\" AND school_id={} AND users.id=timetable.id AND timetable.status=1 AND seats IS NOT NULL GROUP BY users.id"
        .format(day, Time.new_time_string_for_time(time), school_id))
    depot = self.select_all_locations(
        "SELECT street, streetNumber, locality, region, zipcode, country FROM schools WHERE id={}"
        .format(school_id))
    locations = depot + drivers + passengers
    return locations
def calculation_All_Time_Current(startTime, all_Time, endTime, fd_main,
                                 strTitle, year):
    """Write the accumulated duration (suffixed with "秒" — seconds) to
    fd_main, closing any still-open interval first.

    Returns 0 so the caller can reset its running startTime.
    """
    if Time.isTime(startTime) and int(
            Time.TimeTransferTimestamp(str(startTime))) > 0:
        # An interval is still open: add its span up to endTime before writing.
        fd_main.writelines(strTitle + str(all_Time + Time.DuractionTime(
            year, startTime, Time.TimeTimestampTransfer(endTime))) + "秒" +
                           "\n")
        startTime = 0
    else:
        fd_main.writelines(strTitle + str(all_Time) + "秒" + "\n")
        startTime = 0
    return startTime
def obterTempos(linhas_arq):
    """Return the non-zero gaps (in seconds) between consecutive
    timestamps extracted from the log lines."""
    gaps = []
    timestamps = Geral.obterTimestamp(linhas_arq)
    previous = Time.obterHora(int(timestamps[0]))
    for raw in timestamps:
        current = Time.obterHora(int(raw))
        delta = (current - previous).total_seconds()
        # Zero gaps (including the first self-comparison) are skipped.
        if delta != 0:
            gaps.append(delta)
            previous = current
    return gaps
def yield_metadata():
    """Iterate rorps from metadata file, if any are available"""
    metadata.SetManager()
    metadata_iter = metadata.ManagerObj.GetAtTime(regress_time)
    if metadata_iter:
        return metadata_iter
    # No metadata at the regress time means regression cannot proceed.
    log.Log.FatalError("No metadata for time %s (%s) found,\ncannot regress" %
                       (Time.timetopretty(regress_time), regress_time))
def triple_to_line(triple):
    """Convert a (time, size, cumulative size) triple to a display string."""
    time, size, cum_size = triple
    pretty_time = Time.timetopretty(time)
    size_text = stat_obj.get_byte_summary_string(size)
    cum_text = stat_obj.get_byte_summary_string(cum_size)
    return "%24s %13s %15s" % (pretty_time, size_text, cum_text)
def Compare(compare_type, src_rp, dest_rp, compare_time = None):
    """Compare metadata in src_rp with metadata of backup session

    Prints to stdout whenever a file in the src_rp directory has
    different metadata than what is recorded in the metadata for the
    appropriate session.

    Session time is read from restore_timestr if compare_time is None.
    """
    global return_val
    dest_rp = require_root_set(dest_rp, 1)
    if not compare_time:
        try:
            compare_time = Time.genstrtotime(restore_timestr)
        except Time.TimeException, exc:
            Log.FatalError(str(exc))
    mirror_rp = restore_root.new_index(restore_index)
    inc_rp = Globals.rbdir.append_path("increments", restore_index)
    backup_set_select(src_rp)  # Sets source rorp iterator
    # Dispatch on the requested comparison depth.
    if compare_type == "compare":
        compare_func = compare.Compare
    elif compare_type == "compare-hash":
        compare_func = compare.Compare_hash
    else:
        assert compare_type == "compare-full", compare_type
        compare_func = compare.Compare_full
    return_val = compare_func(src_rp, mirror_rp, inc_rp, compare_time)
def describe_incs_human(incs, mirror_time, mirrorrp):
    """Return a string describing all the root increments"""
    # Sort the increments chronologically by their increment time.
    incpairs = [(inc.getinctime(), inc) for inc in incs]
    incpairs.sort()
    result = ["Found %d increments:" % len(incpairs)]
    if Globals.chars_to_quote:
        # Filenames are quoted on disk; unquote them for display.
        for time, inc in incpairs:
            result.append("    %s   %s" %
                          (FilenameMapping.unquote(inc.dirsplit()[1]),
                           Time.timetopretty(time)))
    else:
        for time, inc in incpairs:
            result.append("    %s   %s" %
                          (inc.dirsplit()[1], Time.timetopretty(time)))
    result.append("Current mirror: %s" % Time.timetopretty(mirror_time))
    return "\n".join(result)
def _writer_helper(self, prefix, flatfileclass, typestr, time):
    """Used in the get_xx_writer functions, returns a writer class

    The writer targets a new file named <prefix>.<timestr>.<typestr>
    inside Globals.rbdir; the file must not already exist.
    """
    if time is None:
        timestr = Time.curtimestr
    else:
        timestr = Time.timetostring(time)
    filename = '%s.%s.%s' % (prefix, timestr, typestr)
    rp = Globals.rbdir.append(filename)
    assert not rp.lstat(), "File %s already exists!" % (rp.path,)
    assert rp.isincfile()
    return flatfileclass(rp, 'w', callback = self.add_incrp)
def ListAtTime(rp):
    """List files in archive under rp that are present at restoretime"""
    rp = require_root_set(rp, 1)
    try:
        rest_time = Time.genstrtotime(restore_timestr)
    except Time.TimeException, exc:
        Log.FatalError(str(exc))
    mirror_rp = restore_root.new_index(restore_index)
    inc_rp = mirror_rp.append_path("increments", restore_index)
    # Print one index path per file present at the requested time.
    for rorp in rp.conn.restore.ListAtTime(mirror_rp, inc_rp, rest_time):
        print rorp.get_indexpath()
def ListChangedSince(rp):
    """List all the files under rp that have changed since restoretime"""
    rp = require_root_set(rp, 1)
    try:
        rest_time = Time.genstrtotime(restore_timestr)
    except Time.TimeException, exc:
        Log.FatalError(str(exc))
    mirror_rp = restore_root.new_index(restore_index)
    inc_rp = mirror_rp.append_path("increments", restore_index)
    for rorp in rp.conn.restore.ListChangedSince(mirror_rp, inc_rp,
                                                 rest_time):
        # This is a hack, see restore.ListChangedSince for rationale
        print rorp.index[0]
def Verify(dest_rp, verify_time = None):
    """Check the hashes of the regular files against mirror_metadata

    verify_time defaults to the session parsed from restore_timestr.
    The result is stored in the module-level return_val.
    """
    global return_val
    dest_rp = require_root_set(dest_rp, 1)
    if not verify_time:
        try:
            verify_time = Time.genstrtotime(restore_timestr)
        except Time.TimeException, exc:
            Log.FatalError(str(exc))
    mirror_rp = restore_root.new_index(restore_index)
    inc_rp = Globals.rbdir.append_path("increments", restore_index)
    return_val = dest_rp.conn.compare.Verify(mirror_rp, inc_rp, verify_time)
def Backup(rpin, rpout):
    """Backup, possibly incrementally, src_path to dest_path."""
    global incdir
    SetConnections.BackupInitConnections(rpin.conn, rpout.conn)
    backup_check_dirs(rpin, rpout)
    backup_set_rbdir(rpin, rpout)
    rpout.conn.fs_abilities.backup_set_globals(rpin, force)
    # Requote the destination path if filename quoting is in effect.
    if Globals.chars_to_quote:
        rpout = backup_quoted_rpaths(rpout)
    init_user_group_mapping(rpout.conn)
    backup_final_init(rpout)
    backup_set_select(rpin)
    backup_warn_if_infinite_regress(rpin, rpout)
    if prevtime:
        # A previous session exists: mirror plus increments.
        Time.setprevtime(prevtime)
        rpout.conn.Main.backup_touch_curmirror_local(rpin, rpout)
        backup.Mirror_and_increment(rpin, rpout, incdir)
        rpout.conn.Main.backup_remove_curmirror_local()
    else:
        # First backup: plain mirror, then record the mirror marker.
        backup.Mirror(rpin, rpout)
        rpout.conn.Main.backup_touch_curmirror_local(rpin, rpout)
def get_latest_t0_status(db_con, table_name, t0):
    """Return the effective t0: unchanged if no flood was recorded at the
    previous time step, otherwise the stored t0 reformatted to
    std_time_format.
    """
    sql_dump = "SELECT * FROM "+ table_name +" ORDER BY id DESC LIMIT 1"
    # NOTE(review): psql.read_frame and DataFrame.ix are long-deprecated
    # pandas APIs; this code targets an old pandas/Python 2 stack.
    df = psql.read_frame(sql_dump, db_con)
    # This condition might be occured at first time running
    if df.empty:
        print 'empty data frame..'
        return t0
    # Previous id_event = None, means there is no previous flood happened
    if (df.ix[0]['id_event'] == None):
        print 'No flood at previous time ... t0 is changing'
        return t0
    # Previous id_event != None, means there is flood at previous time
    else:
        print 'There is flood at previous time ... t0 is not changing'
        t0 = str(df.ix[0]['t0'])
        # Round-trip through a timestamp to convert between formats.
        t0_timestamp = t.formatted_date_to_timestamp(t0, psql_time_format)
        t0 = t.timestamp_to_formatted_date(t0_timestamp, std_time_format)
        return t0
def rot_check_time(time_string):
    """Check remove older than time_string, return time in seconds

    Returns None when there is nothing old enough to delete; aborts when
    several increments match and --force was not given.
    """
    try:
        time = Time.genstrtotime(time_string)
    except Time.TimeException, exc:
        Log.FatalError(str(exc))
    times_in_secs = [inc.getinctime() for inc in
            restore.get_inclist(Globals.rbdir.append_path("increments"))]
    times_in_secs = filter(lambda t: t < time, times_in_secs)
    if not times_in_secs:
        Log("No increments older than %s found, exiting." %
            (Time.timetopretty(time),), 3)
        return None
    times_in_secs.sort()
    inc_pretty_time = "\n".join(map(Time.timetopretty, times_in_secs))
    # Deleting several increments at once is destructive enough to
    # require an explicit --force.
    if len(times_in_secs) > 1 and not force:
        Log.FatalError("Found %d relevant increments, dated:\n%s"
            "\nIf you want to delete multiple increments in this way, "
            "use the --force." % (len(times_in_secs), inc_pretty_time))
    if len(times_in_secs) == 1:
        Log("Deleting increment at time:\n" + inc_pretty_time, 3)
    else:
        Log("Deleting increments at times:\n" + inc_pretty_time, 3)
    # Make sure we don't delete the current increment.
    return times_in_secs[-1]+1
def set_regress_time():
    """Set global regress_time to previous sucessful backup

    If there are two current_mirror increments, then the last one
    corresponds to a backup session that failed.
    """
    global regress_time, unsuccessful_backup_time
    manager = metadata.SetManager()
    curmir_incs = manager.sorted_prefix_inclist('current_mirror')
    assert len(curmir_incs) == 2, \
           "Found %s current_mirror flags, expected 2" % len(curmir_incs)
    # The first flag marks the failed session's mirror (to be removed);
    # the second marks the last successful backup (regress target).
    mirror_rp_to_delete = curmir_incs[0]
    regress_time = curmir_incs[1].getinctime()
    unsuccessful_backup_time = mirror_rp_to_delete.getinctime()
    log.Log("Regressing to " + Time.timetopretty(regress_time), 4)
    return manager, mirror_rp_to_delete
def get_inc(rp, typestr, time = None):
    """Return increment like rp but with time and typestr suffixes

    To avoid any quoting, the returned rpath has empty index, and the
    whole filename is in the base (which is not quoted).
    """
    if time is None:
        time = Time.prevtime

    def suffixed(name):
        # Append ".<timestring>.<typestr>" to a path component.
        return "%s.%s.%s" % (name, Time.timetostring(time), typestr)

    if rp.index:
        incrp = rp.__class__(rp.conn, rp.base,
                             rp.index[:-1] + (suffixed(rp.index[-1]),))
    else:
        dirname, basename = rp.dirsplit()
        incrp = rp.__class__(rp.conn, dirname, (suffixed(basename),))
    assert not incrp.lstat(), incrp
    return incrp
def get_incfile_info(basename):
    """Returns None or tuple of (is_compressed, timestr, type, and basename)

    Parses an increment filename of the form
    <base>.<timestring>.<ext>[.gz]; returns None when the name does not
    match that shape.
    """
    parts = basename.split(".")
    if parts[-1] == "gz":
        compressed = 1
        if len(parts) < 4:
            return None
        timestring, ext = parts[-3:-1]
        base = ".".join(parts[:-3])
    else:
        compressed = None
        if len(parts) < 3:
            return None
        timestring, ext = parts[-2:]
        base = ".".join(parts[:-2])
    if Time.stringtotime(timestring) is None:
        return None
    if ext not in ("snapshot", "dir", "missing", "diff", "data"):
        return None
    return (compressed, timestring, ext, base)
def recreate_meta(meta_manager):
    """Make regress_time mirror_metadata snapshot by patching

    We write to a tempfile first.  Otherwise, in case of a crash, it
    would seem we would have an intact snapshot and partial diff, not
    the reverse.
    """
    temprp = [TempFile.new_in_dir(Globals.rbdir)]
    def callback(rp):
        temprp[0] = rp
    writer = metadata.MetadataFile(temprp[0], 'w', check_path = 0,
                                   callback = callback)
    # Stream every metadata object at the regress time into the tempfile.
    for rorp in meta_manager.get_meta_at_time(regress_time, None):
        writer.write_object(rorp)
    writer.close()
    # Atomically move the finished tempfile to its final snapshot name.
    finalrp = Globals.rbdir.append("mirror_metadata.%s.snapshot.gz" %
                                   Time.timetostring(regress_time))
    assert not finalrp.lstat(), finalrp
    rpath.rename(temprp[0], finalrp)
    if Globals.fsync_directories:
        Globals.rbdir.fsync()
class Subtitle:
    """A single .srt subtitle: sequence number, time range and text lines."""

    def __init__(self, number=0, begin='', end='', text=None):
        # number: position of the subtitle in the .srt file (0 == unset).
        self.number = number
        # begin/end: raw time strings until validate() parses them into
        # Time objects.
        self.begin = begin
        self.end = end
        # None sentinel avoids a shared mutable default list.
        if text is None:
            self.text = []
        else:
            self.text = text

    # Used to validate the content of the subtitle.
    # Mandatory because it generates times.
    # Also generate the mt, which is used to describe the position of the
    # subtitle among others.
    # @return valid True if validate is successful, False otherwise
    def validate(self):
        if self.number == 0 or self.begin == '' or self.end == '' or self.text is None:
            return False
        # FIX: raw strings — '\W' in a plain literal is an invalid escape
        # sequence (SyntaxWarning in modern Python, slated to become an error).
        zbegin = re.split(r'\W+', self.begin)
        zend = re.split(r'\W+', self.end)
        self.time_begin = Time(int(zbegin[0]), int(zbegin[1]),
                               int(zbegin[2]), int(zbegin[3]))
        self.time_end = Time(int(zend[0]), int(zend[1]),
                             int(zend[2]), int(zend[3]))
        self.set_mt()
        # The raw strings are consumed; only the Time objects remain.
        del self.begin
        del self.end
        return True

    # Prints the entire subtitle at cout as if it was written in a .srt file
    # @return none
    def show(self):
        print(str(self.number))
        print(self.time_begin.get_formatted() + ' --> ' + self.time_end.get_formatted())
        for line in self.text:
            print(line, end='')
        print('')

    # Generate the mt based on the beginning time of the subtitle
    # @return none
    def set_mt(self):
        self.mt = self.time_begin.generate_mt()

    # Write the subtitle on the file provided
    # @param file The file to be written to
    # @return none
    def write(self, file):
        file.write(str(self.number) + "\n")
        file.write(self.time_begin.get_formatted() + ' --> ' +
                   self.time_end.get_formatted() + "\n")
        for line in self.text:
            file.write(line)
        file.write("\n")

    # Shift the subtitle by shift Time
    # @param shift The time to shift the subtitle
    def shift(self, shift):
        self.time_begin.shift(shift)
        self.time_end.shift(shift)
        self.set_mt()

    # Know if the subtitle begins after time Time
    # @param time The time to be compared to
    # @return boolean True if the subtitle begins after the time provided, False otherwise
    def begins_after(self, time):
        return self.time_begin.is_after(time)

    # Know if the subtitle begins before time Time
    # @param time The time to be compared to
    # @return boolean True if the subtitle begins before the time provided, False otherwise
    def begins_before(self, time):
        return self.time_begin.is_before(time)
def mainLoop():
    """Interactively plot the four actuator position traces plus the
    sequencer step while driving a home/load/ram firing sequence.

    Python 2 code (print statement); loops until the module-level
    donePlotting flag becomes true.
    """
    plt.ion()
    fig = plt.figure(1)
    # One strip chart per actuator, plus a fifth for the sequencer step.
    ax1 = fig.add_subplot(511)
    ax1.set_ylabel(loader_.name())
    ax2 = fig.add_subplot(512)
    ax2.set_ylabel(shuttle_.name())
    ax3 = fig.add_subplot(513)
    ax3.set_ylabel(flopTray_.name())
    ax4 = fig.add_subplot(514)
    ax4.set_ylabel(rammer_.name())
    ax5 = fig.add_subplot(515)
    ax5.set_ylabel("sequencer")
    # Seed each line with a dummy point; data is replaced every iteration.
    l1, = ax1.plot(100, 100, 'r-')
    l2, = ax2.plot(100, 100, 'r-')
    l3, = ax3.plot(100, 100, 'r-')
    l4, = ax4.plot(100, 100, 'r-')
    l5, = ax5.plot(100, 100, 'r-')
    global donePlotting
    seqD = []   # history of (time, step) samples for the sequencer chart
    step = 0    # current state of the sequencer state machine
    while (not donePlotting):
        # pList() appears to yield (time, position) pairs — refresh each trace.
        D1 = loader_.pList()
        T1 = [x[0] for x in D1]
        L1 = [x[1] for x in D1]
        l1.set_xdata(T1)
        l1.set_ydata(L1)
        ax1.set_ylim([0, 105])
        ax1.set_xlim([min(T1), max(T1)])
        D2 = shuttle_.pList()
        T2 = [x[0] for x in D2]
        L2 = [x[1] for x in D2]
        l2.set_xdata(T2)
        l2.set_ydata(L2)
        ax2.set_ylim([0, 95])
        ax2.set_xlim([min(T2), max(T2)])
        D3 = flopTray_.pList()
        T3 = [x[0] for x in D3]
        L3 = [x[1] for x in D3]
        l3.set_xdata(T3)
        l3.set_ydata(L3)
        ax3.set_ylim([0, 45])
        ax3.set_xlim([min(T3), max(T3)])
        D4 = rammer_.pList()
        T4 = [x[0] for x in D4]
        L4 = [x[1] for x in D4]
        l4.set_xdata(T4)
        l4.set_ydata(L4)
        ax4.set_ylim([0, 180])
        ax4.set_xlim([min(T4), max(T4)])
        seqD.append((Time.currentTime(), step))
        T5 = [x[0] for x in seqD]
        L5 = [x[1] for x in seqD]
        l5.set_xdata(T5)
        l5.set_ydata(L5)
        ax5.set_ylim([0, 15])
        # NOTE(review): x-limits here use T4 (rammer times) rather than T5 —
        # looks like a copy/paste slip; confirm whether T5 was intended.
        ax5.set_xlim([min(T4), max(T4)])
        #--update sequencer
        if (step == 0):
            #--move everything home
            loader_.moveTo(15)
            shuttle_.moveTo(0)
            flopTray_.moveTo(0)
            rammer_.moveTo(0)
            # Advance only once every axis reports it has reached home.
            if (loader_.position() < 16 and shuttle_.position() < 1 and
                    flopTray_.position() < 1 and rammer_.position() < 1):
                step = 1
        elif (step == 1):
            #--move propellant to flopTray
            shuttle_.moveTo(90)
            if (shuttle_.position() >= 89):
                step = 2
        elif (step == 2):
            #--move flopTray to receive projectile & move shuttle to grab projectile
            flopTray_.moveTo(30)
            shuttle_.moveTo(0)
            if (flopTray_.position() > 29 and shuttle_.position() < 1):
                step = 3
        elif (step == 3):
            #--move projectile to flopTray
            shuttle_.moveTo(90)
            if (shuttle_.position() >= 89):
                step = 4
        elif (step == 4):
            #--move loader to breech and home shuttle
            loader_.moveTo(90)
            shuttle_.moveTo(0)
            if (loader_.position() >= 89):
                step = 5
        elif (step == 5):
            #--ram projectile
            rammer_.moveTo(178)
            if (rammer_.position() >= 177):
                step = 6
        elif (step == 6):
            #--retract rammer
            rammer_.moveTo(0)
            if (rammer_.position() <= 1):
                step = 7
        elif (step == 7):
            #--shift propellant on flopTray
            flopTray_.moveTo(0)
            if (flopTray_.position() <= 1):
                step = 8
        elif (step == 8):
            #--ram propellant
            rammer_.moveTo(178)
            if (rammer_.position() >= 177):
                step = 9
        elif (step == 9):
            # Return the loader to its parked position before firing.
            loader_.moveTo(15)
            if (loader_.position() <= 16):
                step = 10
        elif (step == 10):
            print "BANG!!!"
            step = 0
        plt.draw()
        plt.pause(0.1)
def __sub__(self, Other):
    """Return a new 'Time' holding the difference between this instance
    and the given 'Time' instance."""
    delta = self.total_seconds - Other.total_seconds
    result = Time(0, 0)
    result.total_seconds = delta
    return result
def __add__(self, Other):
    """Return a new 'Time' holding the sum of this instance and the
    given 'Time' instance."""
    combined = self.total_seconds + Other.total_seconds
    result = Time(0, 0)
    result.total_seconds = combined
    return result
#!/usr/bin/python ''' record your infomathon ''' import Time deadline = Time.current_time() dic = '{"scope":"xxx","deadline":%d}' %deadline ak = 'xxxxxx' sk = 'xxxxx' Block_Size = 4194304 Chunk_Size = 1048576 filename = 'xxxx'
+ src_rp.path) restore_check_paths(src_rp, dest_rp, restore_as_of) try: dest_rp.conn.fs_abilities.restore_set_globals(dest_rp) except IOError, exc: if exc.errno == errno.EACCES: print "\n" Log.FatalError("Could not begin restore due to\n%s" % exc) else: raise init_user_group_mapping(dest_rp.conn) src_rp = restore_init_quoting(src_rp) restore_check_backup_dir(restore_root, src_rp, restore_as_of) inc_rpath = Globals.rbdir.append_path('increments', restore_index) if restore_as_of: try: time = Time.genstrtotime(restore_timestr, rp = inc_rpath) except Time.TimeException, exc: Log.FatalError(str(exc)) else: time = src_rp.getinctime() restore_set_select(restore_root, dest_rp) restore_start_log(src_rp, dest_rp, time) try: restore.Restore(restore_root.new_index(restore_index), inc_rpath, dest_rp, time) except IOError, exc: if exc.errno == errno.EACCES: print "\n" Log.FatalError("Could not complete restore due to\n%s" % exc) else: raise else: Log("Restore finished", 4)
def set_compatible_timestamps(self):
    """Enable the colon-free timestamp format when ':' is among the
    characters that must be quoted in filenames."""
    # Nothing to do unless ':' is a quoted character.
    if Globals.chars_to_quote.find(":") == -1:
        return
    SetConnections.UpdateGlobal('use_compatible_timestamps', 1)
    # Re-stamp the current time so Time.curtimestr is rebuilt on all conns.
    Time.setcurtime(Time.curtime)
    log.Log("Enabled use_compatible_timestamps", 4)