def Update(self):
    """Write the configured UTC date/time to /proc/settime and the system clock,
    then confirm with a message box."""
    # zero-pad hour/minute from the ConfigClock value; seconds come from "now"
    hh = str(config.plugins.TimeSet.UTCTim.value[0]).zfill(2)
    mm = str(config.plugins.TimeSet.UTCTim.value[1]).zfill(2)
    ss = strftime('%S', localtime())
    day_epoch = config.plugins.TimeSet.NDate.value
    import time
    # /proc/settime wants YYYYMMDDhhmmss; the `date` call wants a dotted variant
    proc_time = time.strftime('%Y%m%d', time.gmtime(day_epoch)) + hh + mm + ss
    rtc_time = time.strftime('%Y.%m.%d', time.gmtime(day_epoch)) + '-' + hh + ':' + mm + ':' + ss
    # extract the "+NN"/"-NN" hour offset from a zone label like "(GMT+01) ..."
    tz_desc = config.timezone.val.value
    open_idx = tz_desc.find('(GMT')
    close_idx = tz_desc.find(')')
    offset = tz_desc[open_idx + 4:close_idx]
    if len(offset) == 0:
        offset = '+00'
    tz_sign = offset[:1]
    tz_hours = str(int(offset[1:3]))
    proc_time = proc_time + tz_sign + tz_hours
    import os
    cmd = 'echo "' + str(proc_time) + '" > /proc/settime'
    os.system(cmd)
    cmd = 'date -u -s "' + str(rtc_time) + '"'
    os.system(cmd)
    self.session.openWithCallback(self.callback, MessageBox, _('RTC Update done! \n\nGUI Clock Update done!'), type=1, timeout=5)
def Page(self): plugin_path = '/usr/lib/enigma2/python/RTiTeam/TimeSet' print plugin_path before = 'Before: Local=' + strftime('%H:%M', localtime()) + ', UTC=' + strftime('%H:%M', gmtime()) cmd = str(plugin_path + '/ntpdate -t 20 0.debian.pool.ntp.org') res = popen(cmd).read() if res == '': cmd = 'ls -l %s%s' % (plugin_path, '/ntpdate') res = popen(cmd).read() if res[3] != 'x': cmd = 'chmod 755 %s%s' % (plugin_path, '/ntpdate') res = popen(cmd).read() self.session.open(MessageBox, _('ntpdate problem: attributes for ntpdate have not been correct! Fixed now! Try again!\n%s' % res), MessageBox.TYPE_INFO) else: self.session.open(MessageBox, _('ntpdate problem: Internet connection ok? Time server ok?'), MessageBox.TYPE_INFO) else: z1 = mktime(datetime.utcnow().timetuple()) config.plugins.TimeSet.NDate = ConfigDateTime(default = z1, formatstring = _('%d.%B %Y'), increment = 86400) config.plugins.TimeSet.UTCTim = ConfigClock(default = z1) self.list1 = [] self.list1.append(getConfigListEntry(_('UTC Time'), config.plugins.TimeSet.UTCTim)) self.list1.append(getConfigListEntry(_('Date'), config.plugins.TimeSet.NDate)) self['config'].setList(self.list1) self.selectionChanged() saat = str(config.plugins.TimeSet.UTCTim.value[0]) if len(saat) < 2: saat = '0' + saat minuti = str(config.plugins.TimeSet.UTCTim.value[1]) if len(minuti) < 2: minuti = '0' + minuti sekunde = strftime('%S', localtime()) pp = config.plugins.TimeSet.NDate.value import time TimeString = time.strftime('%Y%m%d', time.gmtime(pp)) + saat + minuti + sekunde RTCString = time.strftime('%Y.%m.%d', time.gmtime(pp)) + '-' + saat + ':' + minuti + ':' + sekunde TimeZoneS = config.timezone.val.value ipos1 = TimeZoneS.find('(GMT') ipos2 = TimeZoneS.find(')') tmp = TimeZoneS[ipos1 + 4:ipos2] if len(tmp) == 0: tmp = '+00' tzpredznak = tmp[:1] tzvalue = str(int(tmp[1:3])) TimeString = TimeString + tzpredznak + tzvalue import os cmd = 'echo "' + str(TimeString) + '" > /proc/settime' os.system(cmd) cmd = 'date -u -s "' + 
str(RTCString) + '"' os.system(cmd) self.session.openWithCallback(self.callback, MessageBox, _('RTC Update done! \n\nGUI Clock Update done!\n\n' + before + '\n\nntpdate done! ' + res + '\nAfter: Local=' + strftime('%H:%M', localtime()) + ', UTC=' + strftime('%H:%M', gmtime())), type = 1, timeout = 15)
def selectionChanged(self):
    """Refresh the informational labels whenever the highlighted config entry changes."""
    self['introduction'].setText(_('Your time = UTC Time + Your Time Zone'))
    self['vreme'].setText(_('*Current Your Time: ' + str(datetime.now().strftime('%H:%M:%S'))))
    hh = str(config.plugins.TimeSet.UTCTim.value[0]).zfill(2)
    mm = str(config.plugins.TimeSet.UTCTim.value[1]).zfill(2)
    ss = strftime('%S', localtime())
    day_epoch = config.plugins.TimeSet.NDate.value
    import time
    time_string = time.strftime('%Y%m%d', time.gmtime(day_epoch)) + hh + mm + ss
    # extract the "+NN"/"-NN" hour offset from a zone label like "(GMT+01) ..."
    tz_desc = config.timezone.val.value
    open_idx = tz_desc.find('(GMT')
    close_idx = tz_desc.find(')')
    offset = tz_desc[open_idx + 4:close_idx]
    if len(offset) == 0:
        offset = '+00'
    tz_sign = offset[:1]
    tz_hours = str(int(offset[1:3]))
    time_string = time_string + tz_sign + tz_hours
    self['timez'].setText(_('Time Zone : ' + str(tz_desc)))
    self['poraka'].setText(_('TimeString : ' + str(time_string)))
    # preview the local clock value that would result after applying the offset
    local_hh = str(int(hh) + int(tz_sign + tz_hours)).zfill(2)
    preview = local_hh + ':' + mm
    self['vreme'].setText(_('Your Time (After Setting): ' + str(preview)))
def format_date(format, date=None, language=None):
    """Format *date* via babel, accepting either a babel pattern or a
    (deprecated) '%'-style ustrftime format string."""
    if format is None:
        format = 'medium'
    if date is None:
        # If time is not specified, try to use $SOURCE_DATE_EPOCH variable
        # See https://wiki.debian.org/ReproducibleBuilds/TimestampsProposal
        source_date_epoch = os.getenv('SOURCE_DATE_EPOCH')
        date = (time.gmtime(float(source_date_epoch))
                if source_date_epoch is not None else datetime.now())
    if '%' not in format:
        # consider the format as babel's
        return babel_format_date(date, format, locale=language)
    warnings.warn('ustrftime format support will be dropped at Sphinx-1.5',
                  DeprecationWarning)
    # consider the format as ustrftime's and translate each %-token to babel's
    pieces = [babel_format_date(date, date_format_mappings.get(tok, ''), locale=language)
              if tok in date_format_mappings else tok
              for tok in re.split('(%.)', format)]
    return "".join(pieces)
def logRunningTime(self):
    """Log how long the server has been running plus the current job count.

    Formats ``self.startTime`` (a ``time.struct_time``) and appends the job
    count obtained from ``self.mgr``.
    """
    # NOTE(review): '-0007' is not a valid UTC offset -- it looks like a typo
    # for '-0700' (or '+0000'); confirm the intended zone before changing the
    # log format, since downstream parsers may rely on the current text.
    startTime = time.strftime("%a, %d %b %Y %H:%M:%S -0007", self.startTime)
    totalJobs = self.mgr.getJobCount()
    # (removed: an unused `currTime = time.gmtime()` call whose result was
    # never read)
    self.logprint('Spice Server running since ' + startTime + ". Total Jobs:%d" % totalJobs)
def wrapper(*args, **kargs): t1 = time.time() res = func(*args, **kargs) tel = time.time()-t1 timeformated = time.strftime( "%H:%M:%S",time.gmtime(tel)) print '-'*5 + '%s took %0.3f ms' % (func.func_name + str(kargs) + str(args), (tel)*1000.0) + '|' + timeformated + '|'+ '-'*10 return res
def format_date(format, date=None, language=None):
    """Render *date* with babel; '%'-style formats are translated token by token."""
    if format is None:
        format = 'medium'
    if date is None:
        # If time is not specified, try to use $SOURCE_DATE_EPOCH variable
        # See https://wiki.debian.org/ReproducibleBuilds/TimestampsProposal
        source_date_epoch = os.getenv('SOURCE_DATE_EPOCH')
        if source_date_epoch is not None:
            date = time.gmtime(float(source_date_epoch))
        else:
            date = datetime.now()
    if '%' not in format:
        # no %-tokens: treat the whole string as a babel pattern
        return babel_format_date(date, format, locale=language)
    warnings.warn('ustrftime format support will be dropped at Sphinx-1.5',
                  DeprecationWarning)
    # split on %-tokens and translate each one to its babel equivalent
    rendered = []
    for chunk in re.split('(%.)', format):
        if chunk in date_format_mappings:
            rendered.append(babel_format_date(date, date_format_mappings.get(chunk, ''),
                                              locale=language))
        else:
            rendered.append(chunk)
    return "".join(rendered)
def run(self): t0 = (2013, 3, 26, 23, 0, 0, 0, 1, -1) t0 = calendar.timegm(t0) t1 = time.time() announcement_frequency = 60*60 if t1 - self.last_run < announcement_frequency: return print 't1 - self.last_run:',t1 - self.last_run print 'self.last_run:',self.last_run self.last_run = int(t1- (int(t1) % announcement_frequency)) t = int(t1 - t0) days = t // (3600 * 24) #move it back one day days -= 1 date = time.gmtime(t0 + (days * (3600 * 24))) next_date = time.gmtime(t0 + ((days+1) * (3600 * 24))) response = 'NOTICE: Sefira for \x031,9YESTERDAY\x03, {date}: *{count}* Days of the omer.' \ + ' You can count until sunset on {next_date}.' \ + ' WARNING: THIS IS ALPHA, DOUBLE CHECK THIS YOURSELF (http://goo.gl/hzY2v)' response = response.format(date=time.strftime("%a NIGHT, %d %b %I:%M %p",date), next_date=time.strftime("%a",next_date), count=(days+1)) for bot in self.bot_factory.bots: bot.msg(bot.factory.channel, response)
def deleteDicc(self):
    """Drop every Dicc entry whose stored timestamp (index 1 of the value)
    is not later than the current UTC time."""
    formato = '%Y-%m-%d %H:%M:%S'
    expired = []
    for clave in self.Dicc:
        valor = self.Dicc[clave][1]
        print(valor)
        # struct_time instances compare element-wise, so <= orders them in time
        if time.strptime(valor, formato) <= time.gmtime(time.time()):
            expired.append(clave)
    for usuario in expired:
        del self.Dicc[usuario]
def tDistribution(self, tempG):
    """Print the range of edge timestamps of *tempG*.

    Counts how many edges carry each 't' timestamp, then prints the smallest
    and largest timestamp both raw and as ``time.struct_time``.

    tempG: a graph whose ``edges.data()`` yields (u, v, attrs) triples with an
    integer/float ``attrs['t']`` (networkx-style; assumption -- confirm caller).
    """
    timestamps = {}
    for item in tempG.edges.data():
        # tally edges per timestamp value
        t = item[2]['t']
        timestamps[t] = timestamps.get(t, 0) + 1
    # renamed from `min`/`max`: the original shadowed the builtins
    t_min = -1
    t_max = -1
    for k in timestamps:
        if k > t_max:
            t_max = k
        if t_min > k or t_min == -1:
            t_min = k
    print(t_min, t_max)
    print(time.gmtime(t_min), time.gmtime(t_max))
def __init__(self):
    # start logging
    MM_Common_Logging.MM_Common_Logging_Start('./log/MetaMan_Subprogram_Reactor_String')
    # record the (UTC-based) server start time and the user-name -> network map
    self.server_start_time = time.mktime(time.gmtime())
    self.users = {}  # maps user names to network instances
    # open the database using the 'DB Connections' settings from the config
    section = 'DB Connections'
    self.db = database_base.MM_Server_Database()
    self.db.MM_Server_Database_Open(
        Config.get(section, 'PostDBHost').strip(),
        Config.get(section, 'PostDBPort').strip(),
        Config.get(section, 'PostDBName').strip(),
        Config.get(section, 'PostDBUser').strip(),
        Config.get(section, 'PostDBPass').strip())
    # preload some data from database
    self.genre_list = self.db.MM_Server_Database_Metadata_Genre_List()
    logging.info("Ready for connections!")
def delete_old_entries(self):
    """Mark feed entries older than ``self.delete_interval`` days as deleted.

    Sets ``entry["deleted"]`` to True/False on every entry of
    ``self.raw_feed.entries``; entries without a usable ``published_parsed``
    are left untouched (best-effort, as in the original).
    """
    DAY_RANGE = self.delete_interval*24*60*60
    min_local_time = time.gmtime(time.time() - DAY_RANGE)
    for entry in self.raw_feed.entries:
        try:
            # struct_time comparison: older than the cutoff -> deleted
            entry["deleted"] = entry.published_parsed < min_local_time
        except (AttributeError, KeyError, TypeError):
            # narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit); behavior for bad entries is kept
            print("delete_old_entries.")
def format_date(format, date=None, language=None, warn=None):
    """Format *date*, accepting a deprecated LDML pattern or a ustrftime format."""
    if format is None:
        format = 'medium'
    if date is None:
        # If time is not specified, try to use $SOURCE_DATE_EPOCH variable
        # See https://wiki.debian.org/ReproducibleBuilds/TimestampsProposal
        source_date_epoch = os.getenv('SOURCE_DATE_EPOCH')
        if source_date_epoch is not None:
            date = time.gmtime(float(source_date_epoch))
        else:
            date = datetime.now()
    if re.match('EEE|MMM|dd|DDD|MM|WW|medium|YY', format):
        # consider the format as babel's
        warnings.warn('LDML format support will be dropped at Sphinx-1.5',
                      DeprecationWarning)
        return babel_format_date(date, format, locale=language, warn=warn,
                                 formatter=babel.dates.format_datetime)
    # consider the format as ustrftime's and try to convert it to babel's;
    # %x formats a date only and %X a time only, everything else a datetime
    specials = {'%x': babel.dates.format_date, '%X': babel.dates.format_time}
    rendered = []
    for token in re.split('(%.)', format):
        if token in date_format_mappings:
            formatter = specials.get(token, babel.dates.format_datetime)
            rendered.append(babel_format_date(date, date_format_mappings.get(token, ''),
                                              locale=language, formatter=formatter))
        else:
            rendered.append(token)
    return "".join(rendered)
def senseMotion():
    """Poll the PIR sensor on GPIO pin 26 forever; on motion, photograph the
    plate, upload it to S3 and run text detection on the image."""
    while True:
        if GPIO.input(26) == 1:
            print("Motion detected!")
            print("Taking Photo of License Plate")
            stamp = time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime())
            file_name = 'photo_' + stamp + '.jpg'
            takePhoto(file_path, file_name)
            print("Uploading to S3")
            uploadToS3(file_path, file_name, BUCKET, location)
            print("Detecting License Plate")
            text_count = detect_text(file_name, BUCKET)
            print("Text detected: " + str(text_count))
        # NOTE(review): the flattened original made the sleep's indentation
        # ambiguous; polling once per second on every iteration is assumed --
        # confirm it was not meant only after a detection.
        sleep(1)
def createvid(self):
    """Drain self.q into a new XVID video file until self.killi is set."""
    print("Start recording")
    # output file is named after the current UTC time
    out_name = time.strftime(RECORDINGS_PATH + "%Y-%m-%d %H:%M:%S", time.gmtime()) + '-output.avi'
    self.vidw = cv2.VideoWriter(out_name,
                                cv2.VideoWriter_fourcc(*'XVID'),
                                FRAMERATE - 13, (WIDTH, HEIGHT))
    while True:
        try:
            frame = self.q.pop()
            self.vidw.write(convertYUV(frame))
            self.isRecording = True
        except IndexError:
            # queue empty -- keep spinning until told to stop
            pass
        if self.killi:
            self.killi = False
            break
def main():
    """Run the pet-image classification pipeline and report total runtime.

    BUG FIX: the original measured runtime with ``timeit.timeit()``, which
    *benchmarks an empty statement* and returns that tiny duration -- it is
    not a wall-clock timestamp (and ``+ 5`` was a fudge on top of it).
    ``time.time()`` is the correct call for elapsed wall-clock time.
    """
    # 1. collect start time
    start_time = time.time()
    # 2. retrieve command line arguments
    in_arg = get_input_args()
    # 3. build pet image labels: key=filename, value=file label
    answers_dic = get_pet_labels()
    # 4. run the classifier and collect a results dictionary
    result_dic = classify_images()
    # 5. adjust results for 'a dog' / 'not a dog' classification
    adjust_results4_isadog()
    # 6. compute summary statistics of the run
    results_stats_dic = calculates_results_stats()
    # 7. print summary results
    print_results()
    # collect end time and report elapsed runtime in h:m:s
    end_time = time.time()
    tot_time = end_time - start_time
    print(
        "\n** Total Elapsed Runtime:",
        str(int((tot_time / 3600))) + ":" + str(int(
            (tot_time % 3600) / 60)) + ":" + str(int((tot_time % 3600) % 60)))
def selectMedia(self, count, options, server):
    """Present a selection screen when a movie has several playable files,
    otherwise play the single option directly."""
    printl("", self, "S")
    self.options = options
    self.server = server
    self.dvdplayback = False
    if not self.options:
        # nothing playable -- surface the last Plex response as an error
        response = Singleton().getPlexInstance().getLastResponse()
        self.session.open(MessageBox, (_("Error:") + "\n%s") % response, MessageBox.TYPE_INFO)
    else:
        if count > 1:
            printl("we have more than one playable part ...", self, "I")
            functionList = []
            for indexCount, items in enumerate(self.options):
                printl("item: " + str(items), self, "D")
                if items[1] is not None:
                    name = items[1].split('/')[-1]
                else:
                    # this is the case when there is no information of the real file name
                    size = convertSize(int(items[3]))
                    duration = time.strftime('%H:%M:%S', time.gmtime(int(items[4])))
                    name = items[0] + " (" + items[2] + " / " + size + " / " + duration + ")"
                printl("name " + str(name), self, "D")
                functionList.append((name, indexCount,))
            self.session.openWithCallback(self.setSelectedMedia, ChoiceBox, title=_("Select media to play"), list=functionList)
        else:
            self.setSelectedMedia()
    printl("", self, "C")
def format_date(format, date=None, language=None, warn=None):
    """Format *date* with babel, handling deprecated LDML patterns and
    '%'-style ustrftime tokens."""
    if format is None:
        format = 'medium'
    if date is None:
        # fall back to $SOURCE_DATE_EPOCH for reproducible builds
        # (https://wiki.debian.org/ReproducibleBuilds/TimestampsProposal)
        source_date_epoch = os.getenv('SOURCE_DATE_EPOCH')
        if source_date_epoch is not None:
            date = time.gmtime(float(source_date_epoch))
        else:
            date = datetime.now()
    if re.match('EEE|MMM|dd|DDD|MM|WW|medium|YY', format):
        # LDML-style (babel) pattern -- deprecated
        warnings.warn('LDML format support will be dropped at Sphinx-1.5',
                      DeprecationWarning)
        return babel_format_date(date, format, locale=language, warn=warn,
                                 formatter=babel.dates.format_datetime)
    # ustrftime-style: translate each %-token to its babel equivalent
    out = []
    for piece in re.split('(%.)', format):
        if piece not in date_format_mappings:
            out.append(piece)
            continue
        # %x formats a date only, %X a time only; anything else a datetime
        if piece == '%x':
            fmt_func = babel.dates.format_date
        elif piece == '%X':
            fmt_func = babel.dates.format_time
        else:
            fmt_func = babel.dates.format_datetime
        out.append(babel_format_date(date, date_format_mappings.get(piece, ''),
                                     locale=language, formatter=fmt_func))
    return "".join(out)
def make_timeseries_by_day(df, interpolation=None):
    """Spread yearly values of *df*'s first column over days of the year.

    df: DataFrame whose first column holds a year (e.g. '2000'); rewritten
    in place to datetime-like values.
    interpolation: pair whose second element enables the spreading; returns
    *df* unchanged when disabled.

    BUG FIX: the original unconditionally evaluated ``interpolation[1]``,
    so calling with the documented default ``interpolation=None`` raised
    TypeError.  None is now treated as "interpolation disabled".
    """
    # keep `== False` (not `not ...`) to preserve the original truthiness rules
    if interpolation is None or interpolation[1] == False:
        return df
    time_day = []
    year = int(str(df.iloc[0, 0])[0:4])
    if df.shape[0] == 1:
        # single row: pin it to mid-year
        print('row in')
        ori_time = str(df.iloc[0, 0]) + "-06-15 12:00"
        ori_time = datetime.datetime.strptime(ori_time, '%Y-%m-%d %H:%M')
        df.iloc[0, 0] = ori_time
        return df
    elif df.shape[0] == 2:
        if df.iloc[0, 0] == df.iloc[1, 0]:
            # two rows of the same year: spread to March and September
            ori_time = str(df.iloc[0, 0]) + "-03-15 12:00"
            ori_time = datetime.datetime.strptime(ori_time, '%Y-%m-%d %H:%M')
            df.iloc[0, 0] = ori_time
            ori_time = str(df.iloc[1, 0]) + "-09-15 12:00"
            ori_time = datetime.datetime.strptime(ori_time, '%Y-%m-%d %H:%M')
            df.iloc[1, 0] = ori_time
            return df
    # spread the rows of each value evenly over the days of its month
    div_cnt = df[df.columns[0]].value_counts().sort_index().tolist()
    for i in range(len(div_cnt)):
        month = calendar.monthrange(year, i + 1)
        for j in range(div_cnt[i]):
            time_val = datetime.timedelta(days=(month[1] / div_cnt[i] * j) + 1)
            time_day.append(
                '-' + str(time_val.days) + ' ' +
                str(time.strftime('%H:00:00', time.gmtime(time_val.seconds))))
    df.iloc[:, 0] = df.iloc[:, 0] + time_day
    return df
def backupTable(self, **kwargs):
    """Copy kwargs['table'] into <table>_<timestamp> via CREATE TABLE ... AS SELECT.

    Returns the (flag, message) pair from self.sqlExecute.
    """
    method = sys._getframe().f_code.co_name
    if DEBUG:
        print('Class {classname} Method {method} Arguments {args}'.format(classname=self.class_name, method=method, args=kwargs))
    stamp = time.strftime("%Y-%m-%d-%s", time.gmtime(time.time()))
    table_name = kwargs.get('table', None)
    table_back = table_name + "_" + stamp
    ddl = "CREATE TABLE `{table_back}` as SELECT * from `{table}`;".format(table_back=table_back, table=table_name)
    if DEBUG:
        print('{classname}:{method}:from{table} to:{table_back}'.
              format(classname=self.class_name,
                     method=method,
                     table=table_name,
                     table_back=table_back))
    return self.sqlExecute(sql=ddl)
def get_static_file( filename, root ):
    """Read a static file below *root* and build HTTP-style response headers
    (type, encoding, length, last-modified, cache control)."""
    import mimetypes, time
    root = os.path.abspath(root) + os.sep
    # normalise the requested path and keep it anchored below root
    filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
    mimetype, encoding = mimetypes.guess_type(filename)
    headers = {}
    if mimetype:
        headers['Content-Type'] = mimetype
    if encoding:
        headers['Content-Encoding'] = encoding
    st = os.stat(filename)
    headers['Content-Length'] = st.st_size
    from core.core import locale_date
    last_mod = locale_date("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(st.st_mtime), 'en_US.UTF-8')
    headers['Last-Modified'] = str(last_mod)
    headers['Cache-Control'] = 'max-age=604800'
    with open(filename, 'rb') as fh:
        body = fh.read()
        fh.close()  # redundant under `with`; kept to mirror the original
    return body, headers
def get_static_file(filename, root):
    """Return (content, headers) for a static file resolved beneath *root*."""
    import mimetypes, time
    root = os.path.abspath(root) + os.sep
    filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
    headers = {}
    # content type / encoding guessed from the file extension
    guessed_type, guessed_enc = mimetypes.guess_type(filename)
    if guessed_type:
        headers['Content-Type'] = guessed_type
    if guessed_enc:
        headers['Content-Encoding'] = guessed_enc
    stat_info = os.stat(filename)
    headers['Content-Length'] = stat_info.st_size
    from core.core import locale_date
    modified = locale_date("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stat_info.st_mtime), 'en_US.UTF-8')
    headers['Last-Modified'] = str(modified)
    headers['Cache-Control'] = 'max-age=604800'
    with open(filename, 'rb') as stream:
        payload = stream.read()
        stream.close()  # no-op after read; preserved from the original
    return payload, headers
def selectMedia(self, count, options, server ):
    """Let the user pick one of several playable parts of a movie, or start
    playback directly when only one exists."""
    printl("", self, "S")
    self.options = options
    self.server = server
    self.dvdplayback = False
    if not self.options:
        # no options at all -> surface the last Plex error to the user
        response = Singleton().getPlexInstance().getLastResponse()
        self.session.open(MessageBox, (_("Error:") + "\n%s") % response, MessageBox.TYPE_INFO)
    elif count > 1:
        printl("we have more than one playable part ...", self, "I")
        choices = []
        position = 0
        for entry in self.options:
            printl("item: " + str(entry), self, "D")
            if entry[1] is not None:
                label = entry[1].split('/')[-1]
            else:
                # no real file name known -- build a descriptive label instead
                size = convertSize(int(entry[3]))
                duration = time.strftime('%H:%M:%S', time.gmtime(int(entry[4])))
                label = entry[0] + " (" + entry[2] + " / " + size + " / " + duration + ")"
            printl("name " + str(label), self, "D")
            choices.append((label, position,))
            position += 1
        self.session.openWithCallback(self.setSelectedMedia, ChoiceBox, title=_("Select media to play"), list=choices)
    else:
        self.setSelectedMedia()
    printl("", self, "C")
def single_csv(filename=None, lines=None, owner=None):
    """Build a one-column DataFrame of GitHub star counts for owner/repo pairs.

    filename: optional file of repo names, used only when *lines* is not given.
    lines: iterable of repo names; owner: parallel iterable of owner names.
    Uses the module-level GitHub client ``g`` and sleeps through rate limits.

    Fixed the non-idiomatic ``== None`` / ``!= None`` comparisons to identity
    tests (behavior unchanged).
    """
    if lines is None and filename is not None:
        with open(filename, 'r') as f:
            # deduplicate the repo list
            lines = list(set(f.readlines()))
        print(len(lines))
    csv_dict = {"stars": []}
    for i, j in zip(owner, lines):
        try:
            print(i, "/", j)
            data = g.get_repo(i + "/" + j)
            csv_dict["stars"].append(data.stargazers_count)
            print(data.stargazers_count)
        except StopIteration:
            break  # loop end
        except RateLimitExceededException:
            search_rate_limit = g.get_rate_limit().search
            print('search remaining: {}'.format(search_rate_limit.remaining))
            reset_timestamp = calendar.timegm(search_rate_limit.reset.timetuple())
            # add 10 seconds to be sure the rate limit has been reset
            sleep_time = reset_timestamp - calendar.timegm(time.gmtime()) + 10
            time.sleep(sleep_time)
            continue
        except Exception:
            # best-effort: skip any repo that cannot be fetched
            continue
    print(csv_dict)
    df = pd.DataFrame(data=csv_dict)
    return df
batch_size=config['training']['batch'], epochs=config['training']['epochs'], validation_data=(test_x, test_y), verbose=1, callbacks=cbacks) # verbose=verbose -- he canviat a verbose=1 ############################################ # Store Plots import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt # poso timestamp als plots d'acc i loss import time ts = time.gmtime() timestamp = time.strftime("%Y%m%d_%H%M%S", ts) # Loss plot plt.plot(history.history['loss']) plt.plot(history.history['val_loss']) plt.title('model loss') plt.ylabel('loss') plt.xlabel('epoch') plt.legend(['train', 'test'], loc='upper left') plt.savefig('music_loss_' + timestamp + '.pdf') plt.close() # Results # if args.best:
def create(self, **kwargs):
    '''Create (or re-create) the SQLite database file in WAL mode.

    PARAMS
    ------
    overwrite = [True | False] -- delete an existing database file first
    backup    = [True | False] -- copy the file to <name>_<timestamp>.<ext>
                 before anything else (default True)

    RETURN
    ------
    (flag, message): flag is False when a backup/delete step failed,
    otherwise True; message accumulates a description of the actions taken.
    '''
    method = sys._getframe().f_code.co_name
    message = self.database + " "
    flag = True
    bak_name = ""
    stamp = time.strftime("%Y-%m-%d-%s", time.gmtime(time.time()))
    overwrite = kwargs.get('overwrite', False)
    backup = kwargs.get('backup', True)
    if DEBUG:
        print('Class {classname} Method {method} Arguments {args}'.format(classname=self.class_name, method=method, args=kwargs))
    db_exists = self.name()[0]
    if DEBUG and db_exists:
        print('Database {database} exists'.format(database=self.database))
    if backup:
        if DEBUG:
            # NOTE(review): printed before bak_name is computed, so it shows
            # an empty name -- preserved from the original ordering
            print("Database backed up to filename is {}".format(bak_name))
        dir_name = os.path.dirname(self.database)
        base_name = os.path.basename(self.database)
        file_name, file_ext = base_name.rsplit('.', 1)
        bak_name = dir_name + "/" + file_name + "_" + stamp + "." + file_ext
        try:
            copyfile(self.database, bak_name)
            message += "- File was backed up"
        except IOError as e:
            message += "- Error file was not backed up {error}".format(error=e)
            return False, message
    if overwrite and db_exists:
        try:
            os.remove(self.database)
            message += "- database file deleted"
        except Error as e:
            message += "- Error file was not deleted {error}".format(error=e)
            return False, message
    # open the database and switch it to write-ahead-log journaling
    self.connect()
    cur = self.db_connect.cursor()
    cur.execute("PRAGMA journal_mode=WAL;")
    results = cur.fetchall()
    message += "- " + str(sqlite3.version) + ":" + str(results)
    if db_exists and not overwrite:
        message += "- Wal mode is enabled"
    else:
        message += "- New Database created with WAL mode enabled"
    self.db_connect.close()
    return flag, message
import threading import subprocess import datetime # import text_extractor import sys from time import time import json import calendar import time # old_stdout = sys.stdout # log_file = open("message.log","w") # sys.stdout = log_file # self.provided_dir = sys.argv[1].split(str(os.sep))[:-1].join(str(os.sep)) # self.provided_dir = sys.argv[0] # print(self.provided_dir) current_time = str(calendar.timegm(time.gmtime())) table_name = 'video_track_points_local' + current_time import json def dump_file(path): # path = 'MyFile.txt' f = open(path, 'r') flag = False for i in f: if flag is False: temp = i temp = json.loads(temp) flag = True else:
def run(self):
    """
    this function will be called every self.dt_set seconds

    Creates scheduled ExportTasks once per day change, starts pending tasks
    via a Timer, and prunes tasks older than 60 days.

    request data
    tm_wday 0=Monday
    tm_yday
    """
    today = date.today()
    # only start new jobs after the day changed
    if self._currend_day != gmtime().tm_yday:
        self._currend_day = gmtime().tm_yday
        for job in ScheduledExportTask.objects.filter(active=1):  # get all active jobs
            add_task = False
            if job.export_period == 1:  # daily
                start_time = '%s %02d:00:00' % (
                    (today - timedelta(1)).strftime('%d-%b-%Y'),
                    job.day_time)  # "%d-%b-%Y %H:%M:%S"
                start_time = mktime(
                    datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S").timetuple())
                filename_suffix = 'daily_export_%d_%s' % (job.pk, job.label)
                add_task = True
            elif job.export_period == 2 and time.gmtime(
            ).tm_yday % 2 == 0:  # on even days (2,4,...)
                start_time = '%s %02d:00:00' % (
                    (today - timedelta(2)).strftime('%d-%b-%Y'),
                    job.day_time)  # "%d-%b-%Y %H:%M:%S"
                start_time = mktime(
                    datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S").timetuple())
                filename_suffix = 'two_day_export_%d_%s' % (job.pk, job.label)
                add_task = True
            elif job.export_period == 7 and time.gmtime(
            ).tm_wday == 0:  # on every monday
                start_time = '%s %02d:00:00' % (
                    (today - timedelta(7)).strftime('%d-%b-%Y'),
                    job.day_time)  # "%d-%b-%Y %H:%M:%S"
                start_time = mktime(
                    datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S").timetuple())
                filename_suffix = 'weekly_export_%d_%s' % (job.pk, job.label)
                add_task = True
            elif job.export_period == 14 and time.gmtime(
            ).tm_yday % 14 == 0:  # on every second monday
                start_time = '%s %02d:00:00' % (
                    (today - timedelta(14)).strftime('%d-%b-%Y'),
                    job.day_time)  # "%d-%b-%Y %H:%M:%S"
                start_time = mktime(
                    datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S").timetuple())
                filename_suffix = 'two_week_export_%d_%s' % (job.pk, job.label)
                add_task = True
            elif job.export_period == 30 and time.gmtime(
            ).tm_yday % 30 == 0:  # on every 30 days
                start_time = '%s %02d:00:00' % (
                    (today - timedelta(30)).strftime('%d-%b-%Y'),
                    job.day_time)  # "%d-%b-%Y %H:%M:%S"
                start_time = mktime(
                    datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S").timetuple())
                filename_suffix = '30_day_export_%d_%s' % (job.pk, job.label)
                add_task = True
            if job.day_time == 0:
                end_time = '%s %02d:59:59' % (
                    (today - timedelta(1)).strftime('%d-%b-%Y'), 23
                )  # "%d-%b-%Y %H:%M:%S"
            else:
                end_time = '%s %02d:59:59' % (
                    today.strftime('%d-%b-%Y'), job.day_time - 1
                )  # "%d-%b-%Y %H:%M:%S"
            end_time = mktime(
                datetime.strptime(end_time, "%d-%b-%Y %H:%M:%S").timetuple())
            # create ExportTask
            if add_task:
                # fall back to a 5 minute mean-value period when unset
                if job.mean_value_period == 0:
                    mean_value_period = 5
                else:
                    mean_value_period = job.mean_value_period
                et = ExportTask(
                    label=filename_suffix,
                    time_max=end_time,
                    time_min=start_time,
                    filename_suffix=filename_suffix,
                    mean_value_period=mean_value_period,
                    file_format=job.file_format,
                    start=end_time + 60)
                et.save()
                et.variables.add(*job.variables.all())
    ## iter over all Export Tasks
    wait_time = 1  # wait one second to start the job
    for job in ExportTask.objects.filter(
            done=False, busy=False, failed=False,
            start__lte=time()):  # get all jobs
        log.debug(' started Timer %d' % job.pk)
        Timer(wait_time, _export_handler, [job, today]).start()
        job.busy = True
        job.save()
    ## delete all done jobs older than 60 days
    # BUG FIX: the original filtered start__gte=time() + 60 days, i.e. jobs
    # starting 60+ days in the *future*, which can never match -- finished
    # jobs were therefore never cleaned up.
    for job in ExportTask.objects.filter(done=True,
                                         busy=False,
                                         start__lte=time() - 60 * 24 * 60 * 60):
        job.delete()
    ## delete all failed jobs older than 60 days (same direction fix)
    for job in ExportTask.objects.filter(failed=True,
                                         start__lte=time() - 60 * 24 * 60 * 60):
        job.delete()
    return None  # because we have no data to store
def date_gmt(value):
    """Render an epoch timestamp as an RFC-1123-style GMT date string."""
    gmt_struct = time.gmtime(value)
    return time.strftime('%a, %d %b %Y %H:%M:%S GMT', gmt_struct)
def getCompilationTS(pe):
    """Return the PE compile timestamp (UTC) formatted as 'YYYY-MM-DD HH:MM:SS'."""
    built_at = time.gmtime(pe.FILE_HEADER.TimeDateStamp)
    return time.strftime("%Y-%m-%d %H:%M:%S", built_at)
def run(self):
    """
    this function will be called every self.dt_set seconds

    Creates scheduled ExportTasks once per day change, supervises the running
    task (or starts the next one), and prunes tasks older than 60 days.

    request data
    tm_wday 0=Monday
    tm_yday
    """
    today = date.today()
    # only start new jobs after the day changed
    if self._currend_day != gmtime().tm_yday:
        self._currend_day = gmtime().tm_yday
        for job in ScheduledExportTask.objects.filter(active=1):  # get all active jobs
            add_task = False
            if job.export_period == 1:  # daily
                start_time = '%s %02d:00:00' % (
                    (today - timedelta(1)).strftime('%d-%b-%Y'),
                    job.day_time)  # "%d-%b-%Y %H:%M:%S"
                start_time = mktime(
                    datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S").timetuple())
                filename_suffix = 'daily_export_%d_%s' % (job.pk, job.label)
                add_task = True
            elif job.export_period == 2 and time.gmtime(
            ).tm_yday % 2 == 0:  # on even days (2,4,...)
                start_time = '%s %02d:00:00' % (
                    (today - timedelta(2)).strftime('%d-%b-%Y'),
                    job.day_time)  # "%d-%b-%Y %H:%M:%S"
                start_time = mktime(
                    datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S").timetuple())
                filename_suffix = 'two_day_export_%d_%s' % (job.pk, job.label)
                add_task = True
            elif job.export_period == 7 and time.gmtime(
            ).tm_wday == 0:  # on every monday
                start_time = '%s %02d:00:00' % (
                    (today - timedelta(7)).strftime('%d-%b-%Y'),
                    job.day_time)  # "%d-%b-%Y %H:%M:%S"
                start_time = mktime(
                    datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S").timetuple())
                filename_suffix = 'weekly_export_%d_%s' % (job.pk, job.label)
                add_task = True
            elif job.export_period == 14 and time.gmtime(
            ).tm_yday % 14 == 0:  # on every second monday
                start_time = '%s %02d:00:00' % (
                    (today - timedelta(14)).strftime('%d-%b-%Y'),
                    job.day_time)  # "%d-%b-%Y %H:%M:%S"
                start_time = mktime(
                    datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S").timetuple())
                filename_suffix = 'two_week_export_%d_%s' % (job.pk, job.label)
                add_task = True
            elif job.export_period == 30 and time.gmtime(
            ).tm_yday % 30 == 0:  # on every 30 days
                start_time = '%s %02d:00:00' % (
                    (today - timedelta(30)).strftime('%d-%b-%Y'),
                    job.day_time)  # "%d-%b-%Y %H:%M:%S"
                start_time = mktime(
                    datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S").timetuple())
                filename_suffix = '30_day_export_%d_%s' % (job.pk, job.label)
                add_task = True
            if job.day_time == 0:
                end_time = '%s %02d:59:59' % (
                    (today - timedelta(1)).strftime('%d-%b-%Y'), 23
                )  # "%d-%b-%Y %H:%M:%S"
            else:
                end_time = '%s %02d:59:59' % (
                    today.strftime('%d-%b-%Y'), job.day_time - 1
                )  # "%d-%b-%Y %H:%M:%S"
            end_time = mktime(
                datetime.strptime(end_time, "%d-%b-%Y %H:%M:%S").timetuple())
            # create ExportTask
            if add_task:
                et = ExportTask(
                    label=filename_suffix,
                    datetime_max=datetime.fromtimestamp(end_time, UTC),
                    datetime_min=datetime.fromtimestamp(start_time, UTC),
                    filename_suffix=filename_suffix,
                    mean_value_period=job.mean_value_period,
                    file_format=job.file_format,
                    datetime_start=datetime.fromtimestamp(end_time + 60, UTC))
                et.save()
                et.variables.add(*job.variables.all())
    ## check running tasks and start the next Export Task
    running_jobs = ExportTask.objects.filter(busy=True, failed=False)
    if running_jobs:
        for job in running_jobs:
            if time() - job.start() < 30:
                # only check Task when it has been running longer than 30s
                continue
            if job.backgroundtask is None:
                # if the job has no backgroundtask associated mark as failed
                job.failed = True
                job.save()
                continue
            if time() - job.backgroundtask.timestamp < 60:
                # if the backgroundtask has been updated in the past 60s wait
                continue
            if job.backgroundtask.pid == 0:
                # if the job has no valid pid mark as failed
                job.failed = True
                job.save()
                continue
            # check if process is alive
            try:
                os.kill(job.backgroundtask.pid, 0)
            except OSError:
                job.failed = True
                job.save()
                continue
            if time() - job.backgroundtask.timestamp > 60 * 20:
                # if there is no update in the last 20 minutes terminate
                # the process and mark as failed
                os.kill(job.backgroundtask.pid, 15)
                job.failed = True
                job.save()
                continue
    else:
        # start the next Export Task
        wait_time = 1  # wait one second to start the job
        job = ExportTask.objects.filter(
            done=False,
            busy=False,
            failed=False,
            datetime_start__lte=datetime.now(UTC)).first()  # get all jobs
        if job:
            log.debug(' started Timer %d' % job.pk)
            Timer(wait_time, _export_handler, [job, today]).start()
            if job.datetime_start is None:  # idiom fix: identity test for None
                job.datetime_start = datetime.now(UTC)
            job.busy = True
            job.save()
    ## delete all done jobs older than 60 days
    # BUG FIX: the original filtered datetime_start__gte on a timestamp 60
    # days in the *future*, which can never match -- finished jobs were
    # therefore never cleaned up.
    for job in ExportTask.objects.filter(
            done=True,
            busy=False,
            datetime_start__lte=datetime.fromtimestamp(
                time() - 60 * 24 * 60 * 60, UTC)):
        job.delete()
    ## delete all failed jobs older than 60 days (same direction fix)
    for job in ExportTask.objects.filter(
            failed=True,
            datetime_start__lte=datetime.fromtimestamp(
                time() - 60 * 24 * 60 * 60, UTC)):
        job.delete()
    return None  # because we have no data to store