def create_statement(self, cr, uid, line_invoice, partner, amount, journal, date_bank=None, account_id=None):
    """Create a one-line bank statement and reconcile it (old OpenERP API).

    Builds a bank statement on *journal* with a single 'payment' line for
    *partner* over *amount*, then runs the reconciliation, optionally against
    *line_invoice*.

    :param line_invoice: move line to reconcile against, or a falsy value to
        book a plain 'cash flow' counterpart instead.
    :param date_bank: statement/line date; defaults to July 1st of the
        current year.
    :param account_id: optional account forced on the counterpart values.
    :return: the move lines produced by the reconciliation.
    """
    bank_stmt_id = self.acc_bank_stmt_model.create(
        cr, uid, {"journal_id": journal, "date": date_bank or time.strftime("%Y") + "-07-01"}
    )
    bank_stmt_line_id = self.acc_bank_stmt_line_model.create(
        cr,
        uid,
        {
            "name": "payment",
            "statement_id": bank_stmt_id,
            "partner_id": partner,
            "amount": amount,
            "date": date_bank or time.strftime("%Y") + "-07-01",
        },
    )
    # a positive amount is booked as credit, a negative one as debit
    val = {
        "credit": amount > 0 and amount or 0,
        "debit": amount < 0 and amount * -1 or 0,
        "name": line_invoice and line_invoice.name or "cash flow",
    }
    if line_invoice:
        val.update({"counterpart_move_line_id": line_invoice.id})
    if account_id:
        val.update({"account_id": account_id})
    self.acc_bank_stmt_line_model.process_reconciliation(cr, uid, bank_stmt_line_id, [val])
    move_line_ids_complete = self.acc_bank_stmt_model.browse(cr, uid, bank_stmt_id).move_line_ids
    return move_line_ids_complete
def recommender( recom_count = 25, test_times = 100, hotNode_degree = 60, year_sta = 2011):
    '''Run the recommendation experiment; results are stored in text files.

    (translated from Chinese)
    @edge_del number of randomly deleted edges
    @recom_count size of the recommendation list
    @test_times number of experiment runs
    @hotNode_degree minimum neighbour count that defines a hot node
    '''
    file_input = open('/home/zhenchentl/out.txt','w+')
    file_input_re = open('/home/zhenchentl/out_re.txt','w+')
    # record the experiment parameters at the top of the trace file
    file_input.write('recom_count:' + str(recom_count) + '\n')
    file_input.write('test_times:' + str(test_times) + '\n')
    file_input.write('hotNode_degree:' + str(hotNode_degree) + '\n')
    file_input.write('befor get graph time:' + time.strftime('%Y-%m-%d-%H-%M-%S', \
        time.localtime(time.time())) + '\n')
    print 'befor get graph time:' + time.strftime('%Y-%m-%d-%H-%M-%S', \
        time.localtime(time.time()))
    '''get the graph based on the coauhtor relationship'''
    mD = DigraphByYear()
    mDigraph = mD.getDigraph()
    getGraphAttr(mDigraph, file_input)
    file_input.write('after get graph time:' + time.strftime('%Y-%m-%d-%H-%M-%S', \
        time.localtime(time.time())) + '\n')
    print 'after get graph time:' + time.strftime('%Y-%m-%d-%H-%M-%S', \
        time.localtime(time.time()))
    # NOTE(review): the recom_count argument is overwritten here — the loop
    # sweeps list sizes 5, 10, ..., 100 regardless of the caller's value.
    recom_count = 5
    while(recom_count <= 100):
        exp_recom(mDigraph, file_input,file_input_re,recom_count)
        recom_count += 5
    file_input.close()
    file_input_re.close()
def saveVerbrauchsData(v_wp, v_sz, zs_wp, zs_sz, interval):
    """Append one heat-pump consumption sample to today's log file.

    Writes a single line of the form
    "<timestamp> <v_wp> <v_sz> <zs_wp> <zs_sz> <interval>" to
    /var/lib/heatpumpMonitor/verbrauch.YYYY-MM-DD.
    """
    # take the time once so the filename and the record agree even across
    # a midnight boundary
    now = time.localtime()
    y = time.strftime('%Y', now)
    m = time.strftime('%m', now)
    d = time.strftime('%d', now)
    # BUG FIX: the original ended with `f.close` (no parentheses), which
    # never closed the file; a with-block guarantees closure.
    with open("/var/lib/heatpumpMonitor/verbrauch.%s-%s-%s" % (y, m, d), 'a') as f:
        # NOTE(review): the '%H %H:%M:%S' format repeats the hour field —
        # preserved as-is; confirm whether that duplication is intended.
        f.write("%s %04d %04d %d %d %d\n" %
                (time.strftime('%Y %m %d %a %H %H:%M:%S', now),
                 v_wp, v_sz, zs_wp, zs_sz, interval))
def delete(self, thema, id, beitragID=None):
    """Toggle the 'deleted' status of a discussion or of one post in it.

    Loads ./data/themen/<thema>/<id>.json, flips the ``Status`` field
    between "deleted" and " " — on the discussion itself when *beitragID*
    is None, otherwise on the post whose ``ID`` matches — records the
    current session user and a timestamp as editor, then writes the file
    back with indent=4.
    """
    discussionpath = "./data/themen/" + thema + "/" + id + ".json"
    with open(discussionpath, "r") as discussionfile:
        discussion = json.load(discussionfile)
    if beitragID == None:
        # toggle the deletion state of the whole discussion
        if discussion["Status"] == "deleted":
            discussion["Status"] = " "
        else:
            discussion["Status"] = "deleted"
        discussion["Bearbeiter"] = cherrypy.session["Benutzername"]
        discussion["Bearbeitet"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    else:
        # toggle the deletion state of a single post inside the discussion
        for post in discussion["Beitraege"]:
            if post["ID"] == beitragID:
                if post["Status"] == "deleted":
                    post["Status"] = " "
                else:
                    post["Status"] = "deleted"
                post["Bearbeiter"] = cherrypy.session["Benutzername"]
                post["Bearbeitet"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    with open(discussionpath, "w") as discussionfile:
        json.dump(discussion, discussionfile, indent=4)
def run_once(self, test_name):
    """Run the fio filesystem test matrix once, then collect the results.

    Skips the 'setup' phase, and refuses to run on hosts not listed in
    ``self.valid_clients`` because the test formats ``self.dev`` and is
    destructive.
    """
    if test_name == 'setup':
        return
    #
    # We need to be sure we run this on the right target machines
    # as this is really quite destructive!
    #
    if not os.uname()[1] in self.valid_clients:
        return
    date_start = time.strftime("%Y-%m-%d")
    time_start = time.strftime("%H%M")
    output = ''
    #
    # Test 3 different I/O schedulers:
    #
    for iosched in ['cfq', 'deadline', 'noop']:
        #
        # Test 5 different file systems, across 20+ tests..
        #
        os.chdir(self.fio_tests_dir)
        cmd = './test.sh'
        cmd += ' -d ' + self.dev + '1 -m 8G -S -s ' + iosched + ' -f ext2,ext3,ext4,xfs,btrfs'
        cmd += ' -D ' + date_start + ' -T ' + time_start
        output += utils.system_output(cmd, retain_output=True)
    #
    # Move the results from the src tree into the autotest results tree where it will automatically
    # get picked up and copied over to the jenkins server.
    #
    os.rename(os.path.join(self.srcdir, 'fs-test-proto'), os.path.join(self.resultsdir, 'fs-test-proto'))
def createTestWorkspace(self):
    """ Create a workspace for testing against with ideal log values.

    Builds a 1-spectrum TOF matrix workspace, stamps a 'run_start' sample
    log, and attaches three float time-series logs (SensorA/B/C) sampled
    one second apart with values i*i, 2*i*i and 3*i*i.
    """
    from mantid.simpleapi import CreateWorkspace
    from mantid.simpleapi import AddSampleLog
    from time import gmtime, strftime,mktime
    import numpy as np

    # Create a matrix workspace
    x = np.array([1.,2.,3.,4.])
    y = np.array([1.,2.,3.])
    e = np.sqrt(np.array([1.,2.,3.]))
    wksp = CreateWorkspace(DataX=x, DataY=y,DataE=e,NSpec=1,UnitX='TOF')

    # Add run_start
    tmptime = strftime("%Y-%m-%d %H:%M:%S", gmtime(mktime(gmtime())))
    AddSampleLog(Workspace=wksp,LogName='run_start',LogText=str(tmptime))

    # NOTE(review): `kernel` and `arange` are assumed to come from
    # module-level imports (mantid.kernel / numpy) outside this chunk.
    tsp_a=kernel.FloatTimeSeriesProperty("SensorA")
    tsp_b=kernel.FloatTimeSeriesProperty("SensorB")
    tsp_c=kernel.FloatTimeSeriesProperty("SensorC")
    for i in arange(25):
        # one log entry per second, starting now
        tmptime = strftime("%Y-%m-%d %H:%M:%S", gmtime(mktime(gmtime())+i))
        tsp_a.addValue(tmptime, 1.0*i*i)
        tsp_b.addValue(tmptime, 2.0*i*i)
        tsp_c.addValue(tmptime, 3.0*i*i)

    wksp.mutableRun()['SensorA']=tsp_a
    wksp.mutableRun()['SensorB']=tsp_b
    wksp.mutableRun()['SensorC']=tsp_c

    return wksp
def cmd_list(self, args):
    """List all known timers and the plugins that defined them.

    args['match'], when non-empty, keeps only timers whose lookup key
    contains that substring.  Returns (True, list-of-output-lines): a
    local-time header, a column header, then one row per timer.
    """
    row_fmt = '%-20s : %-13s %-9s %-8s %s'
    time_fmt = '%a %b %d %Y %H:%M:%S'
    match = args['match']

    lines = [
        'Local time is: %s' % time.strftime(time_fmt, time.localtime()),
        row_fmt % ('Name', 'Defined in', 'Enabled', 'Fired', 'Next Fire'),
    ]

    for key, timer in self.timerlookup.items():
        if match and match not in key:
            continue
        lines.append(row_fmt % (timer.name,
                                timer.plugin.sname,
                                timer.enabled,
                                timer.timesfired,
                                time.strftime(time_fmt, time.localtime(timer.nextcall))))

    return True, lines
def handle(self, data, fulltext, tokens, slackclient, channel, user):
    """Post the stored epoch as a UTC timestamp to *channel*; when an
    additional location is configured and the google_tz_handler module has
    been imported, also post the local time at that location.
    """
    slackclient.post_message(channel, 'UTC: `' + time.strftime('%Y/%m/%d-%H:%M:%S', time.gmtime(self._epoch)) + '`')
    if self._additional_location and 'modules.google_tz_handler' in sys.modules:
        # resolve the handler class dynamically from the already-imported module
        handler_module = sys.modules['modules.google_tz_handler']
        handler_class = getattr(handler_module, 'google_tz_handler')
        handler_instance = handler_class(self._config)
        # spaces are replaced with '+' — presumably for a URL query; confirm
        slackclient.post_message(channel, self._additional_location + ': `' +
                                 time.strftime('%Y/%m/%d-%H:%M:%S',
                                               time.gmtime(handler_instance.get_raw_local_time(
                                                   handler_instance.get_cities(self._additional_location.replace(' ', '+'))[0],
                                                   self._epoch))) + '`')
def do_export(_):
    """Export the currently trimmed range of the recording.

    Creates a fresh numbered sub-directory under <rec_dir>/exports, writes
    an export_info.csv describing the trim window, and publishes a
    'should_export' notification over IPC so plugins export their data.
    """
    left_idx = g_pool.seek_control.trim_left
    right_idx = g_pool.seek_control.trim_right
    export_range = left_idx, right_idx + 1  # exclusive range.stop
    export_ts_window = pm.exact_window(g_pool.timestamps, (left_idx, right_idx))

    export_dir = os.path.join(g_pool.rec_dir, "exports")
    export_dir = next_export_sub_dir(export_dir)

    os.makedirs(export_dir)
    logger.info('Created export dir at "{}"'.format(export_dir))

    export_info = {
        "Player Software Version": str(g_pool.version),
        "Data Format Version": meta_info["Data Format Version"],
        "Export Date": strftime("%d.%m.%Y", localtime()),
        "Export Time": strftime("%H:%M:%S", localtime()),
        "Frame Index Range:": g_pool.seek_control.get_frame_index_trim_range_string(),
        "Relative Time Range": g_pool.seek_control.get_rel_time_trim_range_string(),
        "Absolute Time Range": g_pool.seek_control.get_abs_time_trim_range_string(),
    }
    with open(os.path.join(export_dir, "export_info.csv"), "w") as csv:
        write_key_value_file(csv, export_info)

    notification = {
        "subject": "should_export",
        "range": export_range,
        "ts_window": export_ts_window,
        "export_dir": export_dir,
    }
    g_pool.ipc_pub.notify(notification)
def exec_cmd_servers(username): print '\nInput the \033[32mHost IP(s)\033[0m,Separated by Commas, q/Q to Quit.\n' while True: hosts = raw_input('\033[1;32mip(s)>: \033[0m') if hosts in ['q', 'Q']: break hosts = hosts.split(',') hosts.append('') hosts = list(set(hosts)) hosts.remove('') ip_all, ip_all_dict = ip_all_select(username) no_perm = set(hosts)-set(ip_all) if no_perm: print "You have NO PERMISSION on %s..." % list(no_perm) continue print '\nInput the \033[32mCommand\033[0m , The command will be Execute on servers, q/Q to quit.\n' while True: cmd = raw_input('\033[1;32mCmd(s): \033[0m') if cmd in ['q', 'Q']: break exec_log_dir = os.path.join(log_dir, 'exec_cmds') if not os.path.isdir(exec_log_dir): os.mkdir(exec_log_dir) os.chmod(exec_log_dir, 0777) filename = "%s/%s.log" % (exec_log_dir, time.strftime('%Y%m%d')) f = open(filename, 'a') f.write("DateTime: %s User: %s Host: %s Cmds: %s\n" % (time.strftime('%Y/%m/%d %H:%M:%S'), username, hosts, cmd)) for host in hosts: remote_exec_cmd(host, username, cmd)
def strftime(dt, fmt):
    """Format *dt* with *fmt*, supporting years before 1900.

    time.strftime() cannot handle pre-1900 years on many platforms, so the
    date is mapped into an equivalent year within the Gregorian calendar's
    28-year repeat cycle near 2000, formatted there, and the real year is
    substituted back into the resulting string.

    Raises TypeError for directives (matched by _illegal_formatting) that
    cannot be handled for pre-1900 dates.
    """
    if dt.year >= 1900:
        # modern dates: defer to the normal datetime strftime
        return super(type(dt), dt).strftime(fmt)
    illegal_formatting = _illegal_formatting.search(fmt)
    if illegal_formatting:
        msg = 'strftime of dates before 1900 does not handle {0}'
        raise TypeError(msg.format(illegal_formatting.group(0)))

    year = dt.year
    # for every non-leap year century, advance by
    # 6 years to get into the 28-year repeat cycle
    delta = 2000 - year
    off = 6 * (delta // 100 + delta // 400)
    year += off

    # move to around the year 2000
    year += ((2000 - year) // 28) * 28
    timetuple = dt.timetuple()
    # format twice, 28 years apart; only positions where the 4-digit year
    # appears in BOTH strings are genuine year sites to patch
    s1 = time.strftime(fmt, (year,) + timetuple[1:])
    sites1 = _findall(s1, str(year))

    s2 = time.strftime(fmt, (year + 28,) + timetuple[1:])
    sites2 = _findall(s2, str(year + 28))

    sites = []
    for site in sites1:
        if site in sites2:
            sites.append(site)

    # splice the real (pre-1900) year back in at each confirmed site
    s = s1
    syear = "%04d" % (dt.year,)
    for site in sites:
        s = s[:site] + syear + s[site + 4:]
    return s
def banIP(IP, dport, service, timer = BANTIMER):
    """Returns 1 if IP is already BANNED/UNBANNED
    Returns 0 if BANNED/UNBANNED successfully

    Inserts an iptables REJECT rule for *IP* on tcp destination port
    *dport*, records the ban in the module-level ``bannedIPs`` dict, and
    broadcasts the event to all websocket clients.
    NOTE(review): the success path actually falls off the end and returns
    None, not 0 as documented — confirm callers only test truthiness.
    """
    print 'banIP:'
    if (IP, service) in bannedIPs:
        print 'IP:' + IP + 'is already BANNED'
        logging.info('IP:' + IP + 'is already BANNED')
        return 1
    else:
        # build the firewall rule for this ban and insert it into INPUT
        ip = bannedIP(IP, time.time(), service, timer)
        chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), "INPUT")
        ip.rule = iptc.Rule(chain=chain)
        ip.rule.src = ip.IP + "/255.255.255.255"
        ip.rule.protocol = "tcp"
        ip.rule.target = iptc.Target(ip.rule, "REJECT")
        ip.rule.target.reject_with = "icmp-admin-prohibited"
        match = iptc.Match(ip.rule,"tcp")
        match.dport = dport
        ip.rule.add_match(match)
        bannedIPs[(ip.IP, service)] = ip
        chain.insert_rule(ip.rule)
        print 'IP:' + ip.IP + ' BANNED at ' + time.strftime("%b %d %H:%M:%S")
        logging.info('IP:' + ip.IP + ' BANNED at ' + time.strftime("%b %d %H:%M:%S"))
        # notify all connected websocket clients about the new ban
        resp = {"action": BANNEDIP, "data":{"IP":ip.IP, "time":time.strftime("%b %d %H:%M:%S", time.localtime(ip.time)), "timer":ip.timer, "service":ip.service}}
        server.send_message_to_all(json.dumps(resp))
def set_filter_date(self):
    """Prompt for a start and end date (numeric dialogs, DD/MM/YYYY) and
    store them as YYYY-MM-DD strings in self.start_date / self.end_date.

    Updates the date button label with the chosen range, or with a
    localized placeholder when both dates are cleared.
    """
    dialog = xbmcgui.Dialog()
    # default both bounds to today (ISO date prefix of datetime.now())
    if self.start_date == '':
        self.start_date = str(datetime.datetime.now())[:10]
    if self.end_date == '':
        self.end_date = str(datetime.datetime.now())[:10]
    try:
        d = dialog.numeric(1, common.getstring(30117) ,strftime("%d/%m/%Y",strptime(self.start_date,"%Y-%m-%d")) )
        if d != '':
            # replace(" ","0"): the numeric dialog pads day/month with spaces
            self.start_date = strftime("%Y-%m-%d",strptime(d.replace(" ","0"),"%d/%m/%Y"))
        else:
            self.start_date =''
        common.log('', str(self.start_date))
        d = dialog.numeric(1, common.getstring(30118) ,strftime("%d/%m/%Y",strptime(self.end_date,"%Y-%m-%d")) )
        if d != '':
            self.end_date = strftime("%Y-%m-%d",strptime(d.replace(" ","0"),"%d/%m/%Y"))
        else:
            self.end_date =''
        common.log('', str(self.end_date))
    except:
        # NOTE(review): bare except silently swallows invalid/cancelled
        # input — consider narrowing to ValueError.
        pass

    if self.start_date != '' or self.end_date != '':
        self.getControl( BUTTON_DATE ).setLabel( self.start_date + ' ... ' + self.end_date )
    else:
        self.getControl( BUTTON_DATE ).setLabel( common.getstring(30164) )
    # toggle visibility to force the button label to repaint
    self.getControl( BUTTON_DATE ).setVisible(False)
    self.getControl( BUTTON_DATE ).setVisible(True)
def lastlogExit(self):
    """Append a lastlog-style record for the honeypot session that just ended.

    Writes "root\\tpts/0\\t<client ip>\\t<start> - <end> (<duration>)" to
    lastlog.txt under the configured honeypot data path.
    """
    # sample the clock once so the end time and the duration agree
    now = time.time()
    starttime = time.strftime("%a %b %d %H:%M", time.localtime(self.logintime))
    endtime = time.strftime("%H:%M", time.localtime(now))
    duration = utils.durationHuman(now - self.logintime)
    # use open() instead of the deprecated file() builtin, and a with-block
    # so the handle is closed even if write() raises
    with open("%s/lastlog.txt" % self.env.cfg.get("honeypot", "data_path"), "a") as f:
        f.write("root\tpts/0\t%s\t%s - %s (%s)\n" %
                (self.clientIP, starttime, endtime, duration))
def _format_data(self, start_time, timestamp, name, units, values):
    """Build one comma-separated interval-data record for upload.

    Fills a copy of the module-level ``_fields`` template (indexed through
    ``_field_index``) with meter identity, timestamps, the reporting
    interval, and the value list, and returns the joined CSV line.
    """
    fields = _fields[:]
    file_timestamp = time.strftime('%Y%m%d%H%M', time.gmtime(start_time))
    value_timestamp = time.strftime('%Y%m%d%H%M', time.gmtime(timestamp))
    fields[_field_index['units']] = units
    fields[_field_index['commodity']] = self.commodity
    # meter id is "<name>|1", optionally suffixed with "/<units>"
    meter_id = name + '|1'
    if units:
        meter_id += '/%s' % units
    fields[_field_index['meter_id']] = meter_id
    fields[_field_index['receiver_id']] = ''
    fields[_field_index['receiver_customer_id']] = self.customer_name + '|' + self.account_name
    fields[_field_index['timestamp']] = file_timestamp
    # interval put into "MMDDHHMM" with MMDD = 0000; floor division keeps
    # the %02d formatting correct on Python 3 as well (was '/')
    fields[_field_index['interval']] = '0000%02d%02d' % (self.period // 3600, (self.period % 3600) // 60)
    fields[_field_index['count']] = str(len(values))
    value_sets = []
    for value in values:
        try:
            value = '%f' % value
            protocol_text = ''
        except ValueError:
            # non-numeric sample: empty value flagged with protocol text 'N'
            value = ''
            protocol_text = 'N'
        # str.join replaces the Python-2-only string.join() of the original;
        # only the first value set carries the timestamp
        value_sets.append(','.join((value_timestamp, protocol_text, value)))
        value_timestamp = ''
    fields[_field_index['interval_data']] = ','.join(value_sets)
    return ','.join(fields)
def watchRunFolder(run, sleep):
    """Poll *run* for an RTAComplete.txt marker and return once RTA is done.

    Args:
        run: folder that will eventually contain RTAComplete.txt.
        sleep: seconds to wait between polls.

    Polls forever: a missing marker file means RTA has not started, an
    empty first line means it is still running, and a non-empty first line
    means it has finished (the loop then exits so BCL analysis can begin).
    """
    # (removed an unused `iteration` counter from the original)
    RTAcomplete = run + "/RTAComplete.txt"
    while True:
        if not os.path.isfile(RTAcomplete):
            print("Real Time Analysis has not begun yet. Time: %s" % time.strftime("%m-%d-%y %H:%M:%S", time.localtime()))
        else:
            with open(RTAcomplete, "r") as input_file:
                first_line = input_file.readline().strip()
            if not first_line:
                print("Real Time Analysis in process. Time %s" % time.strftime("%m-%d-%y %H:%M:%S", time.localtime()))
            else:
                print("Checked file at %s and the RTAComplete.txt shows that RTA has finished" % time.strftime("%m-%d-%y %H:%M:%S", time.localtime()))
                print("Moving on to Bcl Analysis")
                break
        time.sleep(sleep)
def FullTextQuery(calendar_service):
    """Query Google Calendar for events matching the full-text query *q* and
    play a random MP3 alarm when an event's start matches the current minute.

    Relies on module-level globals: q, GServ, calendar, date, endDate,
    tf_from_timestamp and mp3_path.
    """
    print 'Full text query for events on Primary Calendar: \'%s\'' % (q)
    query = GServ.CalendarEventQuery(calendar, 'private', 'full', q)
    query.start_min = date  # calling date to set the beginning of query range for the present day
    query.start_max = endDate  # calling endDate to limit the query range to the next 14 days. change tmedelta(days) to set the range
    query.singleevents = 'true'  # enables creation of repeating events
    query.orderBy = 'startTime'  # sort by event start time
    query.sortorder = 'a'  # sort order: ascending
    feed = calendar_service.CalendarQuery(query)
    for i, an_event in enumerate(feed.entry):
        for a_when in an_event.when:
            print " "
            print an_event.title.text ,"Scheduled:",i,"For:",time.strftime('%d-%m-%Y %H:%M',time.localtime(tf_from_timestamp(a_when.start_time))),"Current Time:",time.strftime('%d-%m-%Y %H:%M')
            # fire the alarm when the event start equals the current minute
            if time.strftime('%d-%m-%Y %H:%M',time.localtime(tf_from_timestamp(a_when.start_time))) == time.strftime('%d-%m-%Y %H:%M'):
                print "Waking you up!"
                print "---"
                songfile = random.choice(os.listdir(mp3_path))  # choosing by random an .mp3 file from direcotry
                print "Now Playing:", songfile
                # plays the MP3 in it's entierty. As long as the file is longer
                # than a minute it will only be played once:
                command ="mpg321" + " " + mp3_path + "'"+songfile+"'"+ " -g 100"
                print command
                os.system(command)  # plays the song
            else:
                print "Wait for it..."  # the event's start time is not the system's current time
def assignmentsHTML():
    """Render the global *assignments* list as a DOS dir-style HTML listing.

    Each entry of ``assignments`` is (display name, file path).  Every row
    shows a link (via the module-level helper ``a``), the size with
    thousands separators, and the modification date/time in DOS format;
    the footer reports totals and remaining 'free' bytes out of 8 GiB.
    """
    html = ""
    total_bytes = 0  # renamed from `bytes`, which shadowed the builtin
    for entry in assignments:
        fdata = os.stat(entry[1])
        # Get last modified date, and format to DOS format
        mdate = time.strftime("%m-%d-%y", time.localtime(fdata.st_mtime))
        # Get last modified time, and format to DOS format (h:MMa / h:MMp)
        mtime = time.strftime("%I", time.localtime(fdata.st_mtime)).strip("0") + \
                time.strftime(":%M", time.localtime(fdata.st_mtime)) + \
                time.strftime("%p", time.localtime(fdata.st_mtime)).lower()[0]
        # Get file size, and format to DOS format
        fsize = '{:,}'.format(fdata.st_size)
        elem = '{}{:>13}{:>9}{:>8}'.format(a('{:<21}'.format(entry[0]), entry[1]), fsize, mdate, mtime)
        html = html + elem + "\n"
        total_bytes = total_bytes + os.path.getsize(entry[1])
    files = len(assignments)
    free = 8589869056 - total_bytes
    html = html + '{:>18} file(s){:>14,} bytes\n'.format(files, total_bytes)
    html = html + '{:>40} bytes free\n'.format('{:,}'.format(free))
    return html
def _fix_review_dates(self, item): ''' Convert dates so ES detect them ''' for date_field in ['timestamp','createdOn','lastUpdated']: if date_field in item.keys(): date_ts = item[date_field] item[date_field] = time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(date_ts)) if 'patchSets' in item.keys(): for patch in item['patchSets']: pdate_ts = patch['createdOn'] patch['createdOn'] = time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(pdate_ts)) if 'approvals' in patch: for approval in patch['approvals']: adate_ts = approval['grantedOn'] approval['grantedOn'] = \ time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(adate_ts)) if 'comments' in item.keys(): for comment in item['comments']: cdate_ts = comment['timestamp'] comment['timestamp'] = time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(cdate_ts))
def _createSearchRequest(self, search=None, tags=None, notebooks=None, date=None, exact_entry=None, content_search=None):
    """Build the Evernote search-grammar prefix (notebook:/tag:/created:)
    for the given comma-separated filters.

    A leading '-' on a notebook or tag negates that filter.  *date* is
    either "DD.MM.YYYY" or a "DD.MM.YYYY-DD.MM.YYYY" range; an invalid
    date prints a failure message and exits.
    NOTE(review): search/exact_entry/content_search are unused in this
    visible portion — the function may continue beyond this chunk.
    """
    request = ""
    if notebooks:
        for notebook in tools.strip(notebooks.split(',')):
            if notebook.startswith('-'):
                request += '-notebook:"%s" ' % tools.strip(notebook[1:])
            else:
                request += 'notebook:"%s" ' % tools.strip(notebook)
    if tags:
        for tag in tools.strip(tags.split(',')):
            if tag.startswith('-'):
                request += '-tag:"%s" ' % tag[1:]
            else:
                request += 'tag:"%s" ' % tag
    if date:
        date = tools.strip(date.split('-'))
        try:
            dateStruct = time.strptime(date[0] + " 00:00:00", "%d.%m.%Y %H:%M:%S")
            request += 'created:%s ' % time.strftime("%Y%m%d", time.localtime(time.mktime(dateStruct)))
            if len(date) == 2:
                # upper bound is exclusive: shift the end date one day forward
                dateStruct = time.strptime(date[1] + " 00:00:00", "%d.%m.%Y %H:%M:%S")
                request += '-created:%s ' % time.strftime("%Y%m%d", time.localtime(time.mktime(dateStruct) + 60 * 60 * 24))
        except ValueError, e:
            out.failureMessage('Incorrect date format in --date attribute. '
                               'Format: %s' % time.strftime("%d.%m.%Y", time.strptime('19991231', "%Y%m%d")))
            return tools.exitErr()
def add_automatic_comment(self):
    """Attach the standard auto-generated downtime comment to the monitored
    object (host or service) referenced by this downtime, and remember it
    in self.comment_id / self.extra_comment.
    """
    if self.fixed is True:
        # fixed downtime: exact start/end window is known
        text = (
            "This %s has been scheduled for fixed downtime from %s to %s. "
            "Notifications for the %s will not be sent out during that time period."
            % (
                self.ref.my_type,
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(self.start_time)),
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(self.end_time)),
                self.ref.my_type)
        )
    else:
        # flexible downtime: report the trigger window plus the duration
        hours, remainder = divmod(self.duration, 3600)
        minutes, seconds = divmod(remainder, 60)
        text = ("This %s has been scheduled for flexible downtime starting between %s and %s "
                "and lasting for a period of %d hours and %d minutes. "
                "Notifications for the %s will not be sent out during that time period."
                % (
                    self.ref.my_type,
                    time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(self.start_time)),
                    time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(self.end_time)),
                    hours, minutes, self.ref.my_type)
                )
    # comment_type: 1 = host comment, 2 = service comment
    if self.ref.my_type == 'host':
        comment_type = 1
    else:
        comment_type = 2
    c = Comment(self.ref, False, "(Nagios Process)", text, comment_type, 2, 0, False, 0)
    self.comment_id = c.id
    self.extra_comment = c
    self.ref.add_comment(c)
def createSatellitesXMLfile(self, tp_list, save_xml_dir):
    """Write the blind-scan transponder list as an enigma2 satellites.xml
    snippet into *save_xml_dir* and return the path of the created file.
    """
    # orbital positions are stored in tenths of a degree, 0..3600;
    # values above 180.0° are expressed as negative (western) positions
    pos = self.orb_position
    if pos > 1800:
        pos -= 3600
    if pos < 0:
        pos_name = '%dW' % (abs(int(pos))/10)
    else:
        pos_name = '%dE' % (abs(int(pos))/10)
    location = '%s/dmm_blindscan_%s_%s.xml' %(save_xml_dir, pos_name, strftime("%d-%m-%Y_%H-%M-%S"))
    tuner = nimmanager.nim_slots[self.feid].friendly_full_description
    xml = ['<?xml version="1.0" encoding="iso-8859-1"?>\n\n']
    xml.append('<!--\n')
    xml.append(' File created on %s\n' % (strftime("%A, %d of %B %Y, %H:%M:%S")))
    try:
        xml.append(' using %s receiver running Enigma2 image, version %s,\n' % (boxtype, about.getEnigmaVersionString()))
        xml.append(' image %s, with the Blind scan plugin\n\n' % (about.getImageTypeString()))
    except:
        # older images lack the about helpers — fall back to a generic header
        xml.append(' using %s receiver running Enigma2 image (%s), with the Dreambox blind scan plugin\n\n' % (boxtype, tuner))
    xml.append('-->\n\n')
    xml.append('<satellites>\n')
    # NOTE(review): replace('&', '&') is a no-op — it was presumably meant
    # to escape '&' as '&amp;'; confirm against the upstream plugin.
    xml.append(' <sat name="%s" flags="0" position="%s">\n' % (self.sat_name.replace('&', '&'), self.orb_position))
    for tp in tp_list:
        xml.append(' <transponder frequency="%d" symbol_rate="%d" polarization="%d" fec_inner="%d" system="%d" modulation="%d"/>\n' % (tp.frequency, tp.symbol_rate, tp.polarisation, tp.fec, tp.system, tp.modulation))
    xml.append(' </sat>\n')
    xml.append('</satellites>')
    f = open(location, "w")
    f.writelines(xml)
    f.close()
    return location
def generateur(presence):
    """Interactively append record entries for the current target person.

    Uses module globals: cible (target name, also the output file name),
    nom (author name), type ("a" for action-register mode, anything else
    for comment mode), plus the VERT/ROUGE/NORMAL colour codes and fdebug.

    :param presence: open mode for "<cible>.txt" — "a" if the file already
        exists, "w" otherwise.
    """
    global cible
    global nom
    global type
    pasdefichier = open(cible + ".txt", presence)  # open (or create) the record file
    fdebug("Ouvert / cree le fichier")
    continueraecrire = "c"
    # BUG FIX: the original compared strings with `is` / `is not`, which
    # tests object identity and only works by accident of CPython string
    # interning; use ==/!= for value equality.
    while continueraecrire != "q":
        if type == "a":
            # action-register mode: log an action line, "*** <action> (<nom>)"
            action = raw_input ( VERT + "Entre l'action a inscrire dans le registre >>>" + NORMAL )
            while action == "":
                print (ROUGE + "Entre quelque chose" + NORMAL )
                action = raw_input ( VERT + "Entre l'action a inscrire dans le registre >>>" + NORMAL )
            pasdefichier.write(time.strftime('%d/%m/%y %H:%M',time.localtime()) + " *** " + action + "(" + nom + ")"+ "\n")
        else :
            # comment mode: log a comment line, ">>> <comment> (<nom>)"
            commantaire = raw_input( VERT + "Entre le commentaire a inscrire dans le casier >>>" + NORMAL )
            while commantaire == "":
                print (ROUGE + "Entre quelque chose" + NORMAL )
                commantaire = raw_input( VERT + "Entre le commentaire a inscrire dans le casier >>>" + NORMAL )
            pasdefichier.write(time.strftime('%d/%m/%y %H:%M',time.localtime()) + " >>> " + commantaire + " (" + nom + ")"+ "\n")
        fdebug("Enregistrement dans le fichier")
        # record whether the target was warned about the offence
        warn = raw_input(VERT + "Avez vous prevenu " + cible + " pour la faute ? (o/n) " + NORMAL)
        if "o" in warn :
            pasdefichier.write(time.strftime('%d/%m/%y %H:%M',time.localtime()) + " *** " + "warn " + "(" + nom + ")"+ "\n")
        continueraecrire = raw_input(VERT + "Continuer a écrire sur la meme personne ?(q pour quitter , c pour continuer)" + NORMAL)
    pasdefichier.close()  # close the file behind us
def on_data(self, data):
    """Tweepy stream callback: classify each tweet's language and append
    the raw JSON to the above-threshold / below-threshold / excluded
    output files.

    Returns False (stopping the stream) once self.duration seconds have
    elapsed, after writing a '<started>-sample.stats' summary file;
    returns True to keep streaming.
    """
    if time.time() >= self.started + self.duration:
        # sampling window over: dump collection statistics and stop
        stats = open('{0}-sample.stats'.format(int(self.started)), 'w+')
        stats.write("================= STATISTICS =================" + "\n")
        stats.write("Start time: " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.started)) + "\n")
        stats.write("End time: " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())) + "\n")
        stats.write("First Tweet ID: " + self.first_tweet_id + "\n")
        stats.write("Last Tweet ID: " + self.last_tweet_id + "\n")
        stats.write("Language: " + self.lang + "\n")
        stats.write("Language classification threshold: " + str(self.lang_threshold) + "\n")
        stats.write("Above threshold: " + str(self.counter[self.lang + '-above']) + "\n")
        stats.write("Below threshold: " + str(self.counter[self.lang + '-below']) + "\n")
        stats.write("Exluded: " + str(self.counter['excluded']) + "\n")
        return False
    elif 'in_reply_to_status_id' in data:
        # only proper status payloads carry this key (skips deletes, limit
        # notices, etc.)
        status = Status.parse(self.api, json.loads(data))
        langclass = langid.classify(status.text)
        # all counters still zero means this is the first classified tweet
        if (self.counter == {self.lang + '-above':0, self.lang + '-below':0, 'excluded':0}):
            self.first_tweet_id = str(status.id)
        self.last_tweet_id = str(status.id)
        if (langclass[0] == self.lang):
            # target language: split on the classifier confidence threshold
            if langclass[1] >= self.lang_threshold:
                self.above_output.write(data)
                self.counter[self.lang + '-above'] += 1
            else:
                self.below_output.write(data)
                self.counter[self.lang + '-below'] += 1
        else:
            self.excl_output.write(data)
            self.counter['excluded'] += 1
    return True
def main(argv):
    """Create a new Jekyll/Octopress markdown post skeleton in the current
    directory.

    :param argv: script arguments — argv[1] is the post title, argv[2] its
        category; defaults are used when either is missing.
    """
    try:
        postTitle = argv[1]
        postCategory = argv[2]
    except IndexError:
        # narrowed from a bare except: only missing arguments are expected
        postTitle = "DEFAULT TITLE"
        postCategory = "DEFAULT CATEGORY"
    todayDate = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    currentTime = time.strftime('%H:%M', time.localtime(time.time()))
    # file name pattern: YYYY-MM-DD-title-in-kebab-case.markdown
    fileNameWithoutDate = postTitle.lower().replace(' ', '-')
    fileName = todayDate + "-" + fileNameWithoutDate + ".markdown"
    # fileFullName = os.path.join(POST_PATH, fileName)
    with open(fileName, 'w+') as fin:
        # YAML front matter followed by the excerpt marker
        fin.write("---\n")
        fin.write("layout: post\n")
        fin.write('title: "%s"\n' % postTitle)
        fin.write('date: %s %s\n' % (todayDate, currentTime))
        fin.write("comments: true\n")
        fin.write('categories: %s\n' % postCategory.capitalize())
        fin.write("---\n\n\n\n")
        fin.write("<!--more-->\n\n\n")
        # the explicit fin.close() of the original is redundant inside `with`
    print('"%s" was created successfully.' % fileName)
def make_payment(self, invoice_record, bank_journal, amount=0.0, amount_currency=0.0, currency_id=None):
    """Register a payment on *invoice_record* through a one-line bank
    statement and reconcile it against the invoice's receivable line.

    :param amount: payment amount in company currency
    :param amount_currency: payment amount in *currency_id*, used when a
        foreign currency is involved
    :return: the created bank statement record
    """
    bank_stmt = self.acc_bank_stmt_model.create({
        'journal_id': bank_journal.id,
        'date': time.strftime('%Y') + '-07-15',
    })
    bank_stmt_line = self.acc_bank_stmt_line_model.create({'name': 'payment',
        'statement_id': bank_stmt.id,
        'partner_id': self.partner_agrolait_id,
        'amount': amount,
        'amount_currency': amount_currency,
        'currency_id': currency_id,
        'date': time.strftime('%Y') + '-07-15',})

    #reconcile the payment with the invoice
    # find the invoice's receivable move line to use as counterpart
    for l in invoice_record.move_id.line_ids:
        if l.account_id.id == self.account_rcv.id:
            line_id = l
            break
    # when a foreign-currency amount is given, that is the widget amount
    amount_in_widget = currency_id and amount_currency or amount
    bank_stmt_line.process_reconciliation(counterpart_aml_dicts=[{
        'move_line': line_id,
        'debit': amount_in_widget < 0 and -amount_in_widget or 0.0,
        'credit': amount_in_widget > 0 and amount_in_widget or 0.0,
        'name': line_id.name,
    }])
    return bank_stmt
def wav_file_gen(encoding_type, ir_code, frequency, signal_strength, btn_name, brand):
    """Derive the hashed directory and file names for a generated IR wav file.

    NOTE(review): this block appears truncated — encoding_type, ir_code,
    frequency and signal_strength are never used here, several computed
    paths (pcm_file, wav_file, relative_wav_file) are unused, and the
    cleanup loop only computes date_src; confirm the remainder of the
    function elsewhere.
    """
    # Name: all path components are 10-char md5-derived tokens
    today = datetime.date.today()
    today_name = (str2md5(str2md5(str(today))))[0:10]
    wav_src = btn_name + time.strftime('%Y%m%d', time.localtime(time.time()))
    wav_name = (str2md5(wav_src))[0:10]
    brand_src = brand + time.strftime('%m%d', time.localtime(time.time()))
    brand_name = (str2md5(brand_src))[0:10]
    # Path
    path_brand = brand_name + "/"
    path_header = "/var/www/weixin/wechat/static/media/"
    path_today = path_header + today_name + "/"
    # File
    raw_data = path_today + path_brand + wav_name
    pcm_file = raw_data + ".pcm"
    wav_file = raw_data + ".wav"
    relative_wav_file = "media/" + today_name + "/" + path_brand + wav_name + ".wav"
    # Delete Older Path
    for day in range(1, 6):
        date_src = str(today - datetime.timedelta(days=day))
def main():
    """Brute-force a 5-component column vector of a Hill-style cipher.

    Tries every vector (a, b, c, d, e) in [0, 26)^5 and prints each
    candidate whose product with the key matrix matches the known residues
    (28, 9, 8, 4, 14) mod 33.  Progress dots and start/end timestamps are
    printed along the way.
    NOTE(review): `zprava` is defined but never used in this block.
    """
    # zprava (message)
    zprava = matrix([[20],[17],[2],[5],[6]])
    # klic (key)
    klic = matrix([[18, 0,19,12,23],
                   [22,30,32,19,10],
                   [19,17, 2,32,32],
                   [11,24,20,22, 5],
                   [30, 0,19,26,22]])
    print "Brutal force started in",strftime("%H:%M:%S", gmtime())
    for a in range(26):
        print ""
        print a,
        for b in range(26):
            print ".",
            for c in range(26):
                for d in range(26):
                    for e in range(26):
                        matice = matrix([[a],[b],[c],[d],[e]])
                        nasobek = klic * matice
                        # candidate matches all five ciphertext residues mod 33
                        if ( (nasobek[0]%33==28) & (nasobek[1]%33==9) & (nasobek[2]%33==8) & (nasobek[3]%33==4) & (nasobek[4]%33==14)):
                            print matice
    print ""
    print "Brutal force ended in",strftime("%H:%M:%S", gmtime())
def create_batch():
    """
    Collects all the data for the batch (whereafter the batch itself is to be
    created with create_batch()).
    :return: Returns gathered data from a trip in the correct batch format.
    """
    arduino = serial.Serial('/dev/serial/by-id/usb-Gravitech_ARDUINO_NANO_13BP1066-if00-port0', 115200)
    batch_data = []
    starttime = time.strftime(
        "%Y-%m-%dT%H:%M:%S")  # the gps already has a fix when the function is executed so this is the correct start time
    ard_read = arduino.readline().strip()  # added to prevent error first run of the while-loop
    while ard_read != '1995':  # Stop condition: arduino sending '1995' to the Pi
        # adds accelerometer data and most of the time data from one sensor
        # (GPS, humidity, temperature or heartbeat) to the batch_data list;
        # the numeric codes are sentinels sent by the arduino sketch
        ard_read = arduino.readline().strip()
        if ard_read == '1234':
            batch_data += temphum_pointdata()
        if ard_read == '1337':
            batch_data += gps_pointdata()
        if ard_read == '1996':
            batch_data += beat_pointdata()
        batch_data += accelerometer_pointdata()
    endtime = time.strftime("%Y-%m-%dT%H:%M:%S")
    batch = [
        {"startTime": starttime, "endTime": endtime, "groupID": "cwa2", "userID": ID, "sensorData": batch_data,
         "meta": {}}]
    return batch
def recomByBasewalker(graph, targetNode, newCoAuthorList, recom_count, \
    file_input, file_input_re, max_iterations, damping_factor):
    """Recommend co-authors for *targetNode* with a PageRank-style walker.

    Writes timing and recommendation traces to *file_input* and one
    evaluation line to *file_input_re*, then returns the tuple
    (precision, recall, average shortest-path length) of the top
    (recom_count - 1) ranked nodes against *newCoAuthorList*.
    """
    recom_list = []
    file_input.write('befor BaseWalker time:' + time.strftime('%Y-%m-%d-%H-%M-%S', \
        time.localtime(time.time())) + '\n')
    pagerank = PageRank(0, graph, targetNode, damping_factor, max_iterations)
    file_input.write('after BaseWalker time:' + time.strftime('%Y-%m-%d-%H-%M-%S', \
        time.localtime(time.time())) + '\n')
    index = 0
    # take the top (recom_count - 1) ranked nodes as the recommendation list
    for k, v in pagerank:
        # if not graph.has_edge((targetNode, k)):
        recom_list.append(k)
        file_input.write('recom:' + '(' + targetNode + ':' + k + ')' + str(v) + '\n')
        index += 1
        if index >= recom_count - 1:
            break
    pagerank = []
    file_input.write(str(newCoAuthorList) + '\n')
    # hits = recommended nodes that actually became new co-authors
    node_count_right = len(list(set(newCoAuthorList) & set(recom_list)))
    path_dis = find_shortest_path(graph, targetNode, recom_list)
    file_input_re.write('2'+str(len(newCoAuthorList)) + ' ' + str(node_count_right) + \
        ' ' + str(recom_count) + ' ' + str((1.0*path_dis)/recom_count) + '\n')
    recom_list = []
    '''return the percision,recall and average of shortest path leghth'''
    return (1.0*node_count_right)/recom_count, (1.0*node_count_right)/len(newCoAuthorList), (1.0*path_dis)/recom_count