def ADMIN_write_File(text_File):
    """Append the admin's input from the given Tk entry to users.txt, then clear the input box.

    text_File: Tk Entry-like widget, read via .get().
    """
    UpdateReadfile()
    user_input = text_File.get()
    # `with` guarantees the handle is closed even if the write raises
    with open("users.txt", "a") as f:
        f.write("ADMIN XXX : " + user_input + '\n')
    the_input1.delete(0, END)  # NOTE(review): clears the global widget, not text_File — confirm intended
def convert_to_json_and_store(mails_list, filename):
    """Bucket each mail in mails_list into its batch (per maildata.json) and dump
    the result to jsondata/<filename>.txt. Skips writing if batches 2015-2017 are all empty.
    """
    # Shorten year tokens in the output filename (e.g. '2018' -> '18').
    filename = filename.replace('2018', '18')
    filename = filename.replace('2019', '19')
    # one empty bucket per known batch
    data = {batch: [] for batch in values['batches']}
    with open('maildata.json', 'r') as fileobj:
        mails_data = json.load(fileobj)
    for mail in mails_list:
        in_batch = False
        for batch in values['batches']:
            if mail in mails_data[batch]:
                in_batch = True
                data[batch].append(mail)
                break
        if not in_batch:
            # ONLY the amritapurifoss mail is supposed to be printed. Any other
            # mail logged here is missing from maildata.json and must be added!
            app.logger.info("The following mail was not there in the database " + mail)
    if len(data['2015']) == 0 and len(data['2016']) == 0 and len(data['2017']) == 0:
        return
    with open("jsondata/" + filename + ".txt", 'w+') as f:
        json.dump(data, f)
def main():
    """Log the current info locally and to Google Drive.

    In MANUAL mode runs interactively (prompting whether to run again);
    otherwise only saves when the relevant info changed since the last run.
    """
    relevant1, numLines = getOldInfo(FILENAME)  # previously recorded info from file
    relevant2, myString = getNewInfo()          # current info

    def _save_and_upload():
        # Rotate when the log exceeds NUMLOGS lines: delete the Drive copy
        # and the local file, then append the fresh info and re-upload.
        if numLines > NUMLOGS:
            ident = getDriveInfo()
            os.remove(FILENAME)
            DRIVE.files().delete(fileId=ident).execute()
        with open(FILENAME, 'a+') as f:
            f.write(myString + "\n")
        print(myString)
        print("Saving above info...")
        driveManip()  # upload new file or update the existing one in Drive
        print("-----------------------------------------------------------------------------------------")

    if MANUAL == "TRUE":
        print("'MANUAL' has been set to TRUE so the program will run manually:")
        _save_and_upload()
        option = raw_input("Operation completed, run again? Type 'y' for yes or anything else to quit: ")
        if option != "y":
            quit()
    else:
        # default operation ('MANUAL' == "FALSE"): only update logs on a change of public ip
        if relevant1 != relevant2:
            _save_and_upload()
def update_subject_content(content_subject):
    """Persist the latest message subject to last_message.txt.

    The file is first truncated, then rewritten after a 2s pause — presumably
    so an external watcher observes the clear-then-write; confirm before
    removing the sleep.
    """
    with open("last_message.txt", "w"):
        pass  # truncate only
    time.sleep(2)
    with open("last_message.txt", "w") as f:
        f.write(content_subject)
def print_response(response, appname):
    """Write (PagePathLevel1, Date, Sessions) rows from an Analytics Reporting v4
    response to ./SaveExtract/<appname>.csv, following nextPageToken pagination.
    """
    appname = appname.replace('/', '_')

    def _write_rows(report, out):
        # One CSV row per report row; column 1 is reformatted from YYYYMMDD.
        columnHeader = report.get('columnHeader', {})
        dimensionHeaders = columnHeader.get('dimensions', [])
        metricHeaders = columnHeader.get('metricHeader', {}).get('metricHeaderEntries', [])
        for row in report.get('data', {}).get('rows', []):
            lst = []
            for header, dimension in zip(dimensionHeaders, row.get('dimensions', [])):
                lst.append(dimension)
            for values_entry in row.get('metrics', []):
                for metricHeader, value in zip(metricHeaders, values_entry.get('values')):
                    lst.append(value)
            lst[1] = datetime.datetime.strptime(lst[1], '%Y%m%d').strftime('%Y-%m-%d 00:00:00')
            # BUG FIX: the original wrote bytes (str.encode('utf-8') + '\n') into a
            # text-mode file — a TypeError on Python 3. Write text; the file's
            # encoding handles UTF-8.
            out.write(';'.join(lst) + '\n')

    with open('./SaveExtract/' + appname + '.csv', 'w', encoding='utf-8') as out:
        out.write('PagePathLevel1;Date;Sessions\n')
        print('processing ' + appname + ' . . . ')
        report = None
        for report in response.get('reports', []):
            print(report.get('nextPageToken'))
            _write_rows(report, out)
        # Follow pagination until the token runs out; back off and retry on errors.
        while report is not None and report.get('nextPageToken') is not None:
            try:
                analytics = initialize_analyticsreporting()
                response = get_report_iteration(analytics, report.get('nextPageToken'))
                for report in response.get('reports', []):
                    print('while', report.get('nextPageToken'))
                    _write_rows(report, out)
            except Exception:
                # transient API failure: wait and retry the same token
                time.sleep(15)
    print(appname + ' processed')
def save_refresh_token(oauth_tokens):
    '''
    Stores a refresh token locally. Be sure to save your refresh token securely.
    '''
    # `with` already closes the file; the original's explicit close() was redundant.
    with open("refresh.txt", "w+") as f:
        f.write(oauth_tokens.refresh_token)
    return None
def read_serial():
    """Read one line from the global serial port, strip bytes-repr artifacts,
    save it to temp.txt, and close the port."""
    line = str(ser.readline())
    # Remove the characters b ' \ n r left over from str(bytes) (e.g. "b'...\r\n'")
    # in one pass. NOTE(review): like the original, this also strips those letters
    # from the payload itself — confirm that is acceptable.
    line = line.translate(str.maketrans('', '', "b'\\nr"))
    with open("temp.txt", "w") as f:
        f.write(line)
    ser.close()
def update_hashes(txt):
    """Overwrite the hash file with the given iterable of lines."""
    try:
        with open(hashfile, "w") as f:
            f.writelines(txt)  # same as the original per-line write loop
    except Exception as e:
        if debug:
            log("|update_hashes() error: " + str(e))
def list_local(cwd):
    """Return the directory listing of cwd minus any entries named in its .driveignore file."""
    local_lis = os.listdir(cwd)
    drive_ignore_path = os.path.join(cwd, '.driveignore')
    if os.path.isfile(drive_ignore_path):
        with open(drive_ignore_path, 'r') as f:
            untracked_files = f.readlines()
        for entry in untracked_files:
            # rstrip handles a last line without a trailing newline
            # (the original f[:-1] would chop a real character there)
            name = entry.rstrip('\n')
            # guard: the ignored file may not be present in this listing
            if name in local_lis:
                local_lis.remove(name)
    return local_lis
def retrieve():
    '''Calling sheets api and writing to text file.'''
    request = service.spreadsheets().values().get(spreadsheetId=SPREADSHEET_ID,
                                                  range=RANGE_,
                                                  majorDimension='ROWS')
    response = request.execute()
    locationEntries = response["values"]
    # open once for all entries (the original reopened the file per entry)
    with open("test.txt", "a") as f:
        for entry in locationEntries[-5:-1]:
            # data cleanup for txt file writing
            f.write(entry[0] + " " + entry[1] + "\n")
def log_hashes(txt):
    """Append a timestamped entry for txt to the hash log file."""
    try:
        # renamed from `time` — the original shadowed the time module
        timestamp = datetime.datetime.now().strftime("%d-%m-%Y %H:%M:%S")
        # BUG FIX: the original concatenated str with bytes
        # (str(txt).encode("cp1254")), which raises TypeError on Python 3 and
        # silently skipped every log entry. Write text and let the file's
        # cp1254 encoding produce the intended bytes.
        with open(hashfile, "a", encoding="cp1254", errors="replace") as f:
            f.write(timestamp + "|" + str(txt) + "\n")
    except Exception as e:
        if debug:
            log("|log_hashes() error: " + str(e))
async def spreadsheet(self, ctx, spreadsheet_link=None):
    """Connects the bot to the spreadsheet and stores it into memory so you don't have to do it everytime."""
    # Just stores spreadsheet_id into txt file and connects
    if spreadsheet_link is None:  # wasn't given
        await ctx.send(
            "You never gave a spreadsheet to connect to. Current connected spreadsheet is: {}"
            .format(self.master_ss))
        return
    try:
        # BUG FIX: str.strip('https://') removes a *character set* from both
        # ends, not a prefix, and could eat leading/trailing id characters.
        # Extract the id as the path segment that follows '/d/'.
        if '/d/' in spreadsheet_link:
            sheet_id = spreadsheet_link.split('/d/', 1)[1].split('/', 1)[0]
        else:
            sheet_id = spreadsheet_link.split('/', 1)[0]
        if sheet_id != self.master_ss:  # different/new spreadsheet
            # write the new spreadsheet_id to file and remember it in memory
            with open("data/spreadsheet.txt", "w") as f:
                f.write(sheet_id)
            self.master_ss = sheet_id
            print("Saved new spreadsheet as master.")
            await ctx.send(
                "Entered a new spreadsheet. The given spreadsheet is now the saved spreadsheet."
            )
        else:  # same spreadsheet
            await ctx.send("Entered previous spreadsheet. Nothing changed.")
        # Check to see if the attendance sheet is created already
        if self.DoesSheetExist():
            await ctx.send("Sheet exists in spreadsheet. Not creating new sheet.")
            print("Sheet exists in spreadsheet. Not creating new sheet.")
            return  # don't create another sheet if one already exists
        self.CreateNewSheet("BOOM-Bot Attendance")  # sheet name "BOOM-Bot Attendance"
        await ctx.send("Created new sheet in spreadsheet.")
        print("Created new sheet in spreadsheet.")
        self.InitializeSheet("BOOM-Bot Attendance")  # set up the sheet for taking attendance
        await ctx.send("Initialized new sheet in spreadsheet.")
        print("Initialized new sheet in spreadsheet.")
    except Exception:
        # narrowed from the original bare except
        await ctx.send("Failed to create new sheet in spreadsheet.")
        print("Failed to created new sheet in spreadsheet.")
def get_refresh_token():
    '''
    Returns a refresh token if stored locally, else None.
    '''
    try:
        # `with` replaces the original's manual close-in-both-paths bookkeeping
        with open("refresh.txt") as f:
            line = f.readline()
        return line if line else None
    except IOError:
        # no stored token
        return None
def appendBinaryToFile(self, data, filename):
    """Append binary `data` to the named Drive file via download, local merge, re-upload."""
    if not self.ifExist(filename):
        self.createFile(filename, fileMimeExtractor(filename))
    history = self.basicDownloadToBinary(filename)
    temp_path = 'google_api/drive_api/appendTEMP.txt'
    # single write pass replaces the original's wb-then-ab double open
    with open(temp_path, 'wb') as f:
        f.write(history)
        f.write(data)
    self.getFileMimeType(filename)  # NOTE(review): result was unused in the original; call kept in case of side effects
    self.deleteFile(filename)
    self.basicUpload(filename, temp_path)
    os.remove(temp_path)
def check_status():
    """Check the next calendar event and classify the current slot as
    'livre' / 'timer' / 'ocupado', mirroring the result into temp.txt."""
    now = datetime.datetime.utcnow().isoformat() + 'Z'
    service = credentials()
    events_result = service.events().list(calendarId='primary', timeMin=now,
                                          maxResults=10, singleEvents=True,
                                          orderBy='startTime').execute()
    items = events_result.get('items', [])

    def _write_status(status):
        # persist the status for whatever polls temp.txt, and return it
        with open("temp.txt", "w") as f:
            f.write(status)
        return status

    if len(items) == 0:
        return _write_status('livre')
    event = items[0]
    now2 = datetime.datetime.now().isoformat()
    start = event['start'].get('dateTime', event['start'].get('date'))
    same_day = getmonth(now2) == getmonth(start) and getday(now2) == getday(start)
    delta = gettime(start) - gettime(now2)  # hours until the event starts
    if same_day and 0 < delta < 1:
        return _write_status("livre")
    if same_day and delta > 1:
        return _write_status("livre")
    if same_day and delta < 0:
        # event already started; within 15 min grace -> "timer", else busy
        if delta > -0.25:
            return _write_status("timer")
        return _write_status("ocupado")
    # NOTE(review): falls through returning None (e.g. event on another day) —
    # same as the original; confirm intended.
def write_log(module, level, message):
    """Append a timestamped CSV log row to today's log file under <logDirectory>/Logs/,
    creating the directory and the CSV header as needed, and echo it to stdout."""
    log_dir = logDirectory + "/Logs/"
    os.makedirs(log_dir, exist_ok=True)
    t = datetime.datetime.now()
    log_name = log_dir + t.strftime("Log_" + "%d_%m_%y_.csv")
    csv_row = (t.strftime("%d/%m/%y") + "," + t.strftime("%H:%M:%S") + "," +
               module + "," + str(level) + "," + message + "\n")
    # append mode creates the file if missing; write the header only on creation
    is_new = not os.path.exists(log_name)
    with open(log_name, "a") as f:
        if is_new:
            f.write("Date,Time,Module,Level,Message\n")
        f.write(csv_row)
    print("[" + module + "] " + str(level) + ": " + message)
def extract_data_from_email_and_save_to_csv():
    """Collect rate-bearing messages from the Gmail query and dump them to remittance-rate.json."""
    msg_list = list_messages(service, user_id, query)
    data = []
    for count, msg_ref in enumerate(msg_list, start=1):
        print(str(count) + " extracted " + str(msg_ref))
        msg = get_message_data(service, user_id, msg_ref['id'])
        if msg['rate'] is not None:  # keep only messages that carried a rate
            data.append(msg)
    with open('remittance-rate.json', "w") as f:
        json.dump(data, f)
    print("File saved")
def get_email():
    """Write the summary of the next calendar event to temp.txt; returns 0 when there are no events."""
    service = credentials()
    now = datetime.datetime.utcnow().isoformat() + 'Z'
    events_result = service.events().list(calendarId='primary', timeMin=now,
                                          maxResults=10, singleEvents=True,
                                          orderBy='startTime').execute()
    items = events_result.get('items', [])
    if len(items) == 0:
        print("Não há eventos")
        return 0
    email = items[0]['summary']  # dropped the original's no-op `email = email`
    with open("temp.txt", "w") as f:
        f.write(email)
def createMessage(self, subject, body, mime, attach_file):
    """Build a MIME message from self.from_addr to self.to_addr.

    mime: dict with 'type'/'subtype' for the attachment, or None.
    attach_file: dict with 'path'/'name' for the attachment, or None.
    Returns the assembled MIMEMultipart message.
    """
    msg = MIMEMultipart()
    msg["From"] = self.from_addr
    msg["To"] = self.to_addr
    msg["Date"] = formatdate()
    msg["Subject"] = subject
    msg.attach(MIMEText(body))
    # optional file attachment
    if mime is not None and attach_file is not None:
        from email import encoders  # modern name for the Py2-only `Encoders`
        attachment = MIMEBase(mime['type'], mime['subtype'])
        # BUG FIX: attachments must be read in binary mode; text mode corrupts
        # non-text payloads (and fails outright on Python 3 for binary files).
        with open(attach_file['path'], 'rb') as f:
            attachment.set_payload(f.read())
        encoders.encode_base64(attachment)
        msg.attach(attachment)
        attachment.add_header("Content-Disposition", "attachment",
                              filename=attach_file['name'])
    return msg
def drive_ignore(unttrack_file, l):
    """Add the given file names to cwd's .driveignore (skipping duplicates and
    nonexistent files); with `l`, list the currently untracked files."""
    cwd = os.getcwd()
    drive_ignore_path = os.path.join(cwd, '.driveignore')
    if len(unttrack_file) != 0:
        try:
            with open(drive_ignore_path, 'r') as f:
                existing = f.readlines()
        except OSError:
            # no .driveignore yet — nothing is currently ignored
            existing = []
        with open(drive_ignore_path, 'a+') as f:
            for name in unttrack_file:
                entry = name + "\n"
                file_path = os.path.join(cwd, name)
                if os.path.exists(file_path):
                    if entry not in existing:
                        f.write(entry)
                else:
                    click.secho(name + " doesn't exist in " + cwd, fg="red")
    if l:
        click.secho("listing untracked files....", fg="magenta")
        utils.save_history([{"-l": ["True"]}, " ", cwd])
        if os.path.isfile(drive_ignore_path):
            with open(drive_ignore_path, 'r') as f:
                click.secho(f.read())
        else:
            click.secho(".driveignore file doesn't exist in " + cwd, fg="red")
            sys.exit(0)
    else:
        utils.save_history([{"-l": [None]}, " ", cwd])
def get_time_next():
    """Write the day/month/time of the next calendar event to temp.txt;
    writes 'Livre' and returns 0 when there are no upcoming events."""
    now = datetime.datetime.utcnow().isoformat() + 'Z'
    service = credentials()
    events_result = service.events().list(calendarId='primary', timeMin=now,
                                          maxResults=10, singleEvents=True,
                                          orderBy='startTime').execute()
    items = events_result.get('items', [])
    if len(items) == 0:
        with open("temp.txt", "w") as f:
            f.write("Livre")
        return 0
    event = items[0]
    start = event['start'].get('dateTime', event['start'].get('date'))
    with open("temp.txt", "w") as f:
        f.write("{}/{}/->{}".format(getday(start), getmonth(start), gettime2(start)))
def loadfileasattach(filenm=None, toadd=None, fradd=None, subj=None):
    """Return a Gmail-sendable dict ({'raw': base64url}) wrapping filenm as an attachment.

    When filenm is None, makeattr() is called and 'test.xls' is attached.
    """
    if filenm is None:
        makeattr()
        filenm = 'test.xls'
    ament = mime.multipart.MIMEMultipart()
    ament['to'] = toadd
    ament['from'] = fradd
    ament['subject'] = subj
    msg = mime.base.MIMEBase('test', 'octet-base')
    # binary read with guaranteed close, even if set_payload raises
    with open(filenm, 'rb') as f:
        msg.set_payload(f.read())
    msg.add_header('Content-Disposition', 'attachment', filename=filenm)
    ament.attach(msg)
    return {'raw': base64.urlsafe_b64encode(ament.as_bytes()).decode()}
def main(argv):
    """Delete future gCal events created by unauthorized creators for each guarded user.

    Sets the module globals `service`, `authorized_creators` and `logger`.
    """
    global service
    global authorized_creators
    global logger
    parser = argparse.ArgumentParser(description='Removes unauthorized events from gCal',
                                     prog=argv[0])
    parser.add_argument('--loglevel', nargs='?', default='info',
                        help='Set log level (default: info)')
    args = parser.parse_args()
    CLIENT_SECRET = '../../client_secret_gappsadm_genepoc.json'  # downloaded JSON file
    # Check https://developers.google.com/drive/scopes for all available scopes
    OAUTH_SCOPE = 'https://www.googleapis.com/auth/calendar'
    storage = Storage('../../storage_gappsadm_genepoc.json')
    credentials = storage.get()
    if not credentials or credentials.invalid:
        flow = client.flow_from_clientsecrets(CLIENT_SECRET, OAUTH_SCOPE)
        credentials = tools.run(flow, storage)
    # Create an httplib2.Http object and authorize it with our credentials
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = build(serviceName='calendar', version='v3', http=http)
    numeric_level = getattr(logging, args.loglevel.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid log level: %s' % args.loglevel)
    logging.basicConfig(level=numeric_level)
    LOG_FILENAME = './guard.log'
    logger = logging.getLogger('GuardCalendar')
    # Add a rotating file handler to the logger
    handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, maxBytes=2048,
                                                   backupCount=5)
    fmt = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    handler.setFormatter(fmt)
    logger.addHandler(handler)
    logger.info("Log level set to %s" % args.loglevel)
    # SECURITY FIX: yaml.load without a Loader can execute arbitrary objects
    # (and is an error on modern PyYAML); safe_load is correct for plain config.
    with open("config/guard_calendar.yml") as f:
        config = yaml.safe_load(f)
    users = config.get('toguard').split(',')
    theDomain = config.get('domain')
    for user in users:
        email_address = user + '@' + config.get('domain')
        guard = GuardCalendar(theDomain)
        authorized_creators = guard.GetCreators(email_address)
        future_events = guard.QueryFutureEvents(email_address)
        with open("config/" + user + ".yml") as f:
            email = yaml.safe_load(f)
        for my_event in future_events:
            if my_event['creator'] not in authorized_creators:
                logger.info("%s is not allowed to create an event on calendar of: %s"
                            % (my_event['creator'], email_address))
                guard.DeleteEvent(email_address, my_event['id'])
                guard.SendEmail(my_event, email)
                logger.info('notified organizer ' + my_event['creator'] +
                            ' for this deletion')
def print_response(response, appname):
    """Parses the Analytics Reporting API V4 response and writes page;sessions
    rows (sessions >= 2) to a dated CSV, following nextPageToken pagination."""
    appname = appname.replace('/', '_')
    now = datetime.datetime.now()

    def _write_rows(report, out):
        # one candidate CSV row per report row
        columnHeader = report.get('columnHeader', {})
        dimensionHeaders = columnHeader.get('dimensions', [])
        metricHeaders = columnHeader.get('metricHeader', {}).get('metricHeaderEntries', [])
        for row in report.get('data', {}).get('rows', []):
            lst = []
            for header, dimension in zip(dimensionHeaders, row.get('dimensions', [])):
                lst.append(dimension)
            for values_entry in row.get('metrics', []):
                for metricHeader, value in zip(metricHeaders, values_entry.get('values')):
                    lst.append(value)
            # NOTE(review): these branches do nothing — they look like they were
            # meant to `continue` (skip pages containing query syntax); kept
            # as-is to preserve behavior, confirm intent.
            if '?' in lst[0]:
                pass
            elif '&' in lst[0]:
                pass
            elif '=' in lst[0]:
                pass
            elif ',' in lst[0]:
                pass
            if int(lst[1]) >= 2:  # only pages with at least 2 sessions
                # BUG FIX: the original wrote bytes (str.encode) into a
                # text-mode file — a TypeError on Python 3; write text instead.
                out.write(';'.join(lst) + '\n')

    out_path = ('/home/erowz/analytics_Script/SaveExtract/' +
                now.strftime("%Y%m%d") + "_" + appname + '.csv')
    with open(out_path, 'w', encoding='utf-8') as out:
        out.write('page;sessions\n')
        print('processing ' + appname + '. . .')
        report = None
        for report in response.get('reports', []):
            print(report.get('nextPageToken'))
            _write_rows(report, out)
        # follow pagination; back off and retry on transient API errors
        while report is not None and report.get('nextPageToken') is not None:
            try:
                analytics = initialize_analyticsreporting()
                response = get_report_iteration(analytics, report.get('nextPageToken'))
                for report in response.get('reports', []):
                    print('while', report.get('nextPageToken'))
                    _write_rows(report, out)
            except Exception:
                time.sleep(15)
    print(appname + ' processed')
def getMessageHTML():
    """Return the contents of the test e-mail HTML template."""
    # `with` guarantees the handle is closed even if read() raises
    with open("templates/test_email.html", "r") as f:
        return f.read()
# Check if new and old lists are the same sameList = len(unreadMessages) == len(lastKnownUnread) if sameList: sameList = checkIfSameList(unreadMessages, lastKnownUnread) # If new and old lists are the same, cycle the ones in the file if sameList: if len(unreadMessages) > 0: lastKnownUnread.append(lastKnownUnread.pop(0)) # Overwrite the file with cycled messages file = open('/home/rjslater/.config/i3blocks/scripts/.gmailMessages', 'w') for message in lastKnownUnread: file.write(message[0] + '*.*.*.*' + message[1] + '\n') file.close() # Display top old message from cycled list outputString = '\uf0e0 {} {}: {}'.format(len(lastKnownUnread), lastKnownUnread[0][1], lastKnownUnread[0][0]) print(outputString[:60]) # Else overwrite the file with the updated list else: file = open('/home/rjslater/.config/i3blocks/scripts/.gmailMessages', 'w') for message in unreadMessages: file.write(message[0] + '*.*.*.*' + message[1] + '\n') file.close() # Display top new message if len(unreadMessages) > 0: # Send a notification
def file_write(self):
    """Read the balance entry fields, compute totals, update the total labels,
    and write a dated bank-summary text file.

    Returns (file_name, file_path).
    """

    def _entry_value(entry):
        # Read a rounded float from a Tk entry; an empty field is filled
        # with "0" in the UI and counts as 0 (same as the original).
        if entry.get() != "":
            return round(float(entry.get()), 2)
        entry.insert(0, "0")
        return 0

    valWFCheck = _entry_value(self.entWFCheck)
    valWFSave = _entry_value(self.entWFSave)
    valFBCheck = _entry_value(self.entFBCheck)
    valFBSave = _entry_value(self.entFBSave)
    valCash = _entry_value(self.entCash)
    valDiscover = _entry_value(self.entDiscover)
    valAmex = _entry_value(self.entAmex)

    banktotal = valWFCheck + valWFSave + valFBCheck + valFBSave + valCash
    self.lblBankTotalValue['text'] = str(round(banktotal, 2))
    finalamount = banktotal - valDiscover - valAmex
    self.lblFinalValue['text'] = str(round(finalamount, 2))

    # Write the summary file (named by date; into the Drive folder if present)
    now = datetime.datetime.now()
    file_name = "Bank_Summary-" + str(now.strftime("%Y%m%d")) + ".txt"
    # properly escaped backslashes; same value as the original's deprecated "\G"/"\J" escapes
    file_dir = "D:\\Google Drive\\Jobs\\Balance\\"
    file_path = file_dir + file_name if os.path.isdir(file_dir) else file_name
    divider = '|==================================\n'
    lines = [
        divider,
        '|Bank Summary ' + str(now.strftime("%Y%m%d") + '\n'),
        divider,
        '|Wells Fargo Checking:---' + str(valWFCheck) + '\n',
        '|Wells Fargo Saving:-----' + str(valWFSave) + '\n',
        '|First Bank Checking:----' + str(valFBCheck) + '\n',
        '|First Bank Saving:------' + str(valFBSave) + '\n',
        divider,
        '|Cash:-------------------' + str(valCash) + '\n',
        divider,
        '|Total Amount:-----------' + str(round(banktotal, 2)) + '\n',
        divider,
        '|Discover:---------------' + str(valDiscover) + '\n',
        '|American Express:-------' + str(valAmex) + '\n',
        divider,
        '|Final Amount:-----------' + str(round(finalamount, 2)) + '\n',
        divider,
    ]
    with open(file_path, 'w') as f:
        f.writelines(lines)
    return file_name, file_path