def getURLFromEmail(user, password, id_password):
    ''' Log into the user's Gmail inbox over IMAP and return the newest
    verification URL found there.  If no valid email is found returns ''.

    Relies on module-level `criteria` (IMAP search query), `bcolors`,
    `getEmailDate`, `parseVerificationURL` and `resendVerificationEmail`.
    '''
    print('INFO:\tLocating email for: ' + user)
    url = ''
    M = imaplib.IMAP4_SSL('imap.gmail.com')
    try:
        M.login(user, password)
    except:
        # Bad credentials (or any login failure) -> give up with empty URL.
        print(bcolors.FAIL + "Login failed, please verify " + user + "'s email credentials" + bcolors.ENDC)
        return url
    rv, data = M.select('INBOX')
    if rv == 'OK':
        # select the first email found if one exist
        typ, data = M.search(None, criteria)
        try:
            verification_email = data[0].split()[0]
            rv, data = M.fetch(verification_email, '(RFC822)')
            content = email.message_from_bytes(data[0][1])
            latest_sent_date = getEmailDate(content)
            url = parseVerificationURL(content)
        except:
            # Empty search result -> IndexError above; ask for a resend.
            print("Error:\tNo email found")
            resendVerificationEmail(user, id_password)
            return url
        # select the latest email for parsing
        typ, data = M.search(None, criteria)
        for item in data[0].split():
            rv, data = M.fetch(item, '(RFC822)')
            content = email.message_from_bytes(data[0][1])
            if getEmailDate(content) > latest_sent_date:
                # print('INFO:\tFound a newer email the the first')
                latest_sent_date = getEmailDate(content)
                url = parseVerificationURL(content)
        # Verification links expire; anything older than 3 days is rejected.
        if (latest_sent_date < datetime.datetime.now()-datetime.timedelta(days=3)):
            print(bcolors.WARNING + 'ERROR:\t\tThis email is older than 3 ' +
                  'days and is no longer valid' + bcolors.ENDC)
            resendVerificationEmail(user, id_password)
            return ''
    else:
        print('INBOX not found')
    M.logout()
    if url != '':
        print('INFO:\tValid email found for ' + user)
    return url
def get_mailbox_content(self):
    """Parse every file in ``self.tmp_dir`` into email Message objects.

    Each file may hold several messages separated by a line of 79 dashes;
    empty segments are skipped.
    """
    # Divider used by the file-based email backend between messages.
    divider = force_bytes('\n' + ('-' * 79) + '\n', encoding='ascii')
    parsed = []
    for name in os.listdir(self.tmp_dir):
        path = os.path.join(self.tmp_dir, name)
        with open(path, 'rb') as handle:
            raw = handle.read()
        parsed.extend(message_from_bytes(chunk) for chunk in raw.split(divider) if chunk)
    return parsed
def run(pelican):
    """Import emails from the configured IMAP folders into Pelican.

    Reads HOST/USER/FOLDERS from pelican.settings['IMAP_IMPORTER'],
    caches the password in the system keyring, and for every message that
    process_email() accepts: marks the original deleted and re-appends a
    tagged copy to the same folder.
    """
    update_settings(pelican)
    host = pelican.settings['IMAP_IMPORTER']['HOST']
    user = pelican.settings['IMAP_IMPORTER']['USER']
    folders = pelican.settings['IMAP_IMPORTER']['FOLDERS']
    new_password = False
    password = keyring.get_password('PELICAN_IMAP_IMPORTER', user)
    if password is None:
        # Not cached yet -- prompt and remember to store it after login.
        password = get_password(user)
        new_password = True
    host = pelican.settings['IMAP_IMPORTER']['HOST']
    # Connect
    imap = None
    try:
        imap = imaplib.IMAP4(host)
        context = ssl.create_default_context()
        # Upgrade the plaintext connection to TLS before sending credentials.
        imap.starttls(context)
    except Exception as e:
        logger.critical(_log + "Couldn't connect to '" + host + "'")
        logger.critical(_log + str(e))
        exit()
    # Login: retry up to 3 failed attempts, re-prompting for the password.
    try_again = True
    fail_counter = 0
    while try_again:
        try:
            imap.login(user, password)
            try_again = False
        except imaplib.IMAP4_SSL.error as e:
            fail_counter += 1
            logger.warning(_log + "Authentication failed!")
            logger.warning(_log + "Make sure that the user name and password are correct")
            if fail_counter > 3:
                exit()
            password = get_password(user)
            new_password = True
    # successful login
    if new_password:
        keyring.set_password('PELICAN_IMAP_IMPORTER', user, password)
    for folder in folders:
        imap.select(folder)
        typ, data = imap.search(None, 'ALL')
        for num in data[0].split():
            typ, data = imap.fetch(num, '(RFC822)')
            #imap.append('INBOX.Server.Comments', None, None, message=data[0][1])
            msg = email.message_from_bytes(data[0][1])
            if process_email(pelican.settings, msg):
                # Tag the message so a re-run can recognise it as processed.
                del msg['X-PELICAN-IMAP-IMPORTER']
                msg['X-PELICAN-IMAP-IMPORTER'] = 'processed-debug4'
                imap.store(num, '+FLAGS', '\\Deleted')  # delete email
                imap.append(folder, '\\Seen', None, message=msg.as_bytes())
        imap.expunge()
    imap.logout()
def fetch_zip_file(gsm):
    """Download the newest e-mail for transmitter *gsm* and save its zip attachment.

    Uses a pooled IMAP connection from the module-level ``connection_queue``
    and writes the attachment into ``dest_path``.

    :param gsm: transmitter id, formatted as an 8-digit subject tag (ID#%08d)
    :return: full path of the saved file, or None when no mail/attachment exists
    """
    full_filename = None
    client = None  # guard: 'finally' must not reference an unbound name
    try:
        # Acquire a connection
        client = connection_queue.get()
        # The sort command does not work with Outlook, use search instead and sort by uid.
        result, data = client.uid(
            "search",
            "(FROM {sender} SUBJECT {subject})".format(
                sender="*****@*****.**", subject="ID#" + format(gsm, "08d")
            ),
        )
        uid_list = data[0].split()
        if uid_list:
            # UIDs come back in ascending order, so the last one is the newest.
            uid = uid_list[-1]
            print("Downloading last e-mail for transmitter : {0}".format(gsm))
            result, data = client.uid("fetch", uid, "RFC822")
            msg = email.message_from_bytes(data[0][1])
            for part in msg.walk():
                if part.get_content_type() == "application/zip":
                    filename = part.get_filename()
                    payload = part.get_payload(decode=True)
                    full_filename = os.path.join(dest_path, filename)
                    # 'with' closes the handle even if the write fails.
                    with open(full_filename, "wb") as f:
                        f.write(payload)
                    break
    except Exception as e:
        # Report the actual error instead of silently discarding it.
        print("Error occurs when processing transmitter : {0} ({1})".format(gsm, e))
    finally:
        if client is not None:
            connection_queue.put(client)
    return full_filename
def fetch_all(self):
    """Fetch all mail from the inbox.

    Yields (mail_id, email.message.Message) pairs for every message id
    returned by an IMAP 'ALL' search; blank ids are skipped.
    """
    _, search_data = self.imap.search(None, 'ALL')
    candidate_ids = search_data[0].decode().split(' ')
    for mail_id in filter(None, candidate_ids):
        _, fetch_data = self.imap.fetch(mail_id, '(RFC822)')
        parsed = email.message_from_bytes(fetch_data[0][1])
        yield mail_id, parsed
def read_response(self, sock, expected_id):
    """Read a single UDP response packet and parse it as a pyzor Response.

    :param sock: UDP socket to read from (closed on timeout/socket error)
    :param expected_id: thread id this response should carry
    :raises pyzor.TimeoutError: when no packet arrives within self.timeout
    :raises pyzor.CommError: on any other socket error
    :raises pyzor.ProtocolError: on an unexpected thread id in the OK range
    :return: the parsed pyzor.message.Response
    """
    sock.settimeout(self.timeout)
    try:
        packet, address = sock.recvfrom(self.max_packet_size)
    except socket.timeout as ex:
        sock.close()
        raise pyzor.TimeoutError("Reading response timed-out.")
    except socket.error as ex:
        sock.close()
        raise pyzor.CommError("Socket error while reading response: %s" % ex)
    self.log.debug("received: %r/%r", packet, address)
    # The wire format is RFC 822-ish; parse straight into a Response object.
    msg = email.message_from_bytes(packet, _class=pyzor.message.Response)
    msg.ensure_complete()
    try:
        thread_id = msg.get_thread()
        if thread_id != expected_id:
            if thread_id.in_ok_range():
                raise pyzor.ProtocolError(
                    "received unexpected thread id %d (expected %d)"
                    % (thread_id, expected_id))
            # Out-of-range ids are only logged, not fatal.
            self.log.warn("received error thread id %d (expected %d)",
                          thread_id, expected_id)
    except KeyError:
        # Response carried no thread header at all.
        self.log.warn("no thread id received")
    return msg
def get_email_object(self):
    """Returns an `email.message.Message` instance representing the
    contents of this message and all attachments.

    See [email.Message.Message]_ for more information as to what methods
    and properties are available on `email.message.Message` instances.

    .. note::

       Depending upon the storage methods in use (specifically --
       whether ``DJANGO_MAILBOX_STORE_ORIGINAL_MESSAGE`` is set
       to ``True``, this may either create a "rehydrated" message
       using stored attachments, or read the message contents stored
       on-disk.

    .. [email.Message.Message]: Python's `email.message.Message` docs
       (https://docs.python.org/2/library/email.message.html)
    """
    if self.eml:
        # Original message stored on disk, possibly gzip-compressed.
        if self.eml.name.endswith('.gz'):
            body = gzip.GzipFile(fileobj=self.eml).read()
        else:
            self.eml.open()
            body = self.eml.file.read()
            self.eml.close()
    else:
        # No on-disk original: rebuild the body from the database record.
        body = self.get_body()
    if six.PY3:
        flat = email.message_from_bytes(body)
    else:
        flat = email.message_from_string(body)
    # Re-attach any stored attachments before returning.
    return self._rehydrate(flat)
def moveToTrash(M):
    """Scan EMAIL_FOLDER and, for each message whose subject starts with
    'Delete', copy it to FOLDER_DONE and expunge it from the folder.

    :param M: an authenticated imaplib connection
    """
    M.select(EMAIL_FOLDER)
    rv,data=M.search(None,'ALL')
    if rv != 'OK':
        print ("No messages in", EMAIL_FOLDER)
        return
    for num in data[0].split():
        rv, data = M.fetch(num, '(RFC822)')
        if rv != 'OK':
            print ("ERROR getting message", num)
            return
        msg = email.message_from_bytes(data[0][1])
        decode = email.header.decode_header(msg['Subject'])[0]
        # First word of the decoded subject acts as the command keyword.
        deleteNum = decode[0].split()
        if deleteNum[0] == 'Delete':
            print ("---Starting to delete message---")
            print ("Should delete msg", num, "with subject",decode[0])
            typ, response = M.fetch(num, '(FLAGS)')
            print ('Flags before:', response)
            # Archive a copy before flagging the original as deleted.
            M.copy(num,FOLDER_DONE)
            print ('Change Flag')
            M.store(num, '+FLAGS', r'(\Deleted)')
            typ, response = M.fetch(num, '(FLAGS)')
            print ('Flags after:', response)
            typ, response = M.expunge()
            print ('Expunged:', response)
            print ("---Delete finished---")
def call(self):
    """Poll every configured mailbox for messages newer than the last check
    and forward a summary to the matching VK chat.

    Expected per-chat config shape:
    1: {
        "server": "imap.mail.ru",
        "credentials": ("login", "password")
    }
    """
    for (chat_id, server) in self.config["mailboxes"].items():
        logging.warning("Mailcheck %d %d" % (chat_id, self.lastcheck[chat_id]))
        try:
            imap = imaplib.IMAP4_SSL(server["server"])
            imap.login(*server['credentials'])
            # enter the inbox
            imap.select()
            status_s, response_s = imap.search(None, 'ALL')
            checktime = time.time()
            if status_s != 'OK':
                raise Exception("email_check on 'search all'")
            new_msgs = []
            # Walk newest-first; stop at the first message older than lastcheck.
            for num in reversed(response_s[0].split()):
                status_f, response_f = imap.fetch(num, '(BODY[HEADER])')
                if status_f != 'OK':
                    raise Exception("email_check on 'fetch %s'" % num.decode())
                header = email.message_from_bytes(response_f[0][1])
                header_date = email.utils.mktime_tz(email.utils.parsedate_tz(header["Date"]))
                if header_date < self.lastcheck[chat_id]:
                    break
                header_from = really_decode_header(header["From"])
                # Skip messages from the blocked/ignored sender.
                if header_from.find("<*****@*****.**>") != -1:
                    continue
                try:
                    header_subj = really_decode_header(header["Subject"])
                except:
                    header_subj = "<Без темы>"
                new_msgs.append((header_from, header_subj))
            if new_msgs:
                response_v = []
                response_v.append("На почте %d новых сообщений" % len(new_msgs))
                for msg in new_msgs:
                    response_v.append("От: %s\nТема: %s" % msg)
                response_v.append(server["page"])
                self.bot.vkapi.messages.send(message='\n'.join(response_v),
                                             chat_id=chat_id,
                                             random_id=random.randint(1, 12345678))
            # Only advance the watermark after a fully successful pass.
            self.lastcheck[chat_id] = checktime
        except:
            logging.exception('email_check')
def test_cte_type_7bit_transforms_8bit_cte(self):
    # A latin-1 message declared as 8bit must be re-encoded to
    # quoted-printable (and relabelled iso-8859-1) when flattened with
    # a policy clamped to cte_type='7bit'.
    source = textwrap.dedent("""\
        From: [email protected]
        To: Dinsdale
        Subject: Nudge nudge, wink, wink
        Mime-Version: 1.0
        Content-Type: text/plain; charset="latin-1"
        Content-Transfer-Encoding: 8bit

        oh là là, know what I mean, know what I mean?
        """).encode('latin1')
    msg = message_from_bytes(source)
    expected = textwrap.dedent("""\
        From: [email protected]
        To: Dinsdale
        Subject: Nudge nudge, wink, wink
        Mime-Version: 1.0
        Content-Type: text/plain; charset="iso-8859-1"
        Content-Transfer-Encoding: quoted-printable

        oh l=E0 l=E0, know what I mean, know what I mean?
        """).encode('ascii')
    s = io.BytesIO()
    g = BytesGenerator(s, policy=self.policy.clone(cte_type='7bit', linesep='\n'))
    g.flatten(msg)
    self.assertEqual(s.getvalue(), expected)
def parsingMail(self, data):
    """Parse raw message bytes into (from, subject, body, date, suffix).

    The suffix ('.txt'/'.htm') reflects the last non-multipart part's
    content type; HTML bodies are stripped of tags via self.remove_tags.

    :param data: raw RFC 822 message bytes
    :return: tuple (strFrom, strSubject, mailContent, Date header, suffix)
    """
    message = email.message_from_bytes(data, _class=email.message.Message)
    temp = message['From'].split(' ')
    if len(temp) == 2:
        # "Display Name <addr>" form: decode the (possibly encoded) name part.
        fromname = email.header.decode_header(temp[0].strip('\\'))
        strFrom = self.unicode(fromname[0][0], fromname[0][1]) +' '+ temp[1]
    else:
        strFrom = message['From']
    subject = email.header.decode_header(message['Subject'])
    strSubject = self.unicode(subject[0][0], subject[0][1])
    mailContent = ''
    contenttype = None
    suffix = ''
    for part in message.walk():
        if not part.is_multipart():
            contenttype = part.get_content_type()
            filename = part.get_filename()
            charset = part.get_content_charset()
            if filename:
                # is attachment? only logged, not extracted
                print(filename)
            else:
                if charset == None:
                    mailContent = part.get_payload()
                else:
                    mailContent = part.get_payload(decode=True).decode(charset)
    # NOTE(review): suffix is chosen from the LAST part seen in the walk --
    # presumably single-body messages; verify for multipart alternatives.
    if contenttype in ['text/plain']:
        suffix = '.txt'
    elif contenttype in ['text/html']:
        suffix = '.htm'
        mailContent = self.remove_tags(mailContent)
    return (strFrom, strSubject, mailContent, message['Date'], suffix)
def process_mailbox(M, fromdate):
    """
    Do something with emails messages in the folder.
    For the sake of this example, print some headers.

    Forwards (via send_message) the subject+body of every message whose
    local date is missing or newer than *fromdate*.
    """
    rv, data = M.search(None, "ALL")
    if rv != "OK":
        print("No messages found!")
        return
    for num in data[0].split():
        rv, data = M.fetch(num, "(RFC822)")
        if rv != "OK":
            print("ERROR getting message", num)
            return
        msg = email.message_from_bytes(data[0][1])
        hdr = email.header.make_header(email.header.decode_header(msg["Subject"]))
        subject = str(hdr)
        # print('Message %s: %s' % (num, subject))
        # print('Raw Date:', msg['Date'])
        # Now convert to local date-time
        date_tuple = email.utils.parsedate_tz(msg["Date"])
        local_date = None
        if date_tuple:
            local_date = datetime.datetime.fromtimestamp(email.utils.mktime_tz(date_tuple))
            # print("Local Date:", local_date.strftime("%a, %d %b %Y %H:%M:%S"))
        # Undated messages are treated as new and forwarded too.
        if not local_date or local_date > fromdate:
            body = decode_body(msg)
            print(body)
            send_message(subject + "\n" + str(body))
def edit(self, j, newTitle):
    """Rename the subject of the j-th Gmail draft to *newTitle*.

    Decodes the draft's raw RFC 822 payload, rewrites the Subject header,
    re-encodes it and pushes the update through the Gmail API.

    :param j: index of the post/draft in self.getPosts()
    :param newTitle: replacement subject line
    :return: human-readable summary string "Changed <old> with <new>"
    """
    logging.info("New title %s", newTitle)
    thePost = self.obtainPostData(j)
    oldTitle = thePost[0]
    logging.info("servicename %s" %self.service)
    import base64
    import email
    from email.parser import BytesParser
    api = self.getClient()
    idPost = self.getPosts()[j]['list']['id'] #thePost[-1]
    title = self.getHeader(self.getPosts()[j]['meta'], 'Subject')
    message = self.getMessageRaw(idPost)
    # Gmail transports the message as URL-safe base64; decode to real bytes.
    theMsg = email.message_from_bytes(base64.urlsafe_b64decode(message['raw']))
    self.setHeaderEmail(theMsg, 'subject', newTitle)
    message['raw'] = theMsg.as_bytes()
    message['raw'] = base64.urlsafe_b64encode(message['raw']).decode()
    update = api.users().drafts().update(userId='me', body={'message':message},id=idPost).execute()
    logging.info("Update %s" % update)
    update = "Changed "+title+" with "+newTitle
    return(update)
def can_parse(self, email_message):
    """Return True when the fetch response contains a message whose From
    header yields a non-empty value from self.parse_header."""
    # IMAP fetch responses mix (header, payload) tuples with plain byte
    # strings; only the first tuple carries the raw message.
    tuple_parts = (part for part in email_message if isinstance(part, tuple))
    first = next(tuple_parts, None)
    if first is None:
        return False
    parsed = email.message_from_bytes(first[1])
    return self.parse_header(parsed['from']) != ''
def fetch_mails(self, uids, mailbox, return_fields=None):
    """ Retrieve mails from a mailbox

    :param uids: iterable of message UIDs to fetch
    :param mailbox: mailbox name (currently unused here; selection happens elsewhere)
    :param return_fields: optional fetch fields; when given, raw fetch results
                          are returned instead of wrapped Mail objects
    :return: self.Retval(True, {uid: Mail-or-raw}) or process_error() result
    """
    self.logger.debug('Fetching mails with uids %s', uids)
    return_raw = True
    if return_fields is None:
        # Default: fetch the full message and wrap it in a Mail object.
        return_raw = False
        return_fields = [b'RFC822']
    mails = {}
    try:
        for uid in uids:
            result = self.conn.fetch(uid, return_fields)
            if not result:
                # UID vanished (expunged) -- just skip it.
                continue
            if return_raw:
                mails[uid] = result[uid]
            else:
                #mails[uid] = Mail(logger=self.logger, uid=uid, mail_native=email.message_from_bytes(result[uid][b'RFC822']))
                mails[uid] = Mail(logger=self.logger,
                                  mail_native=email.message_from_bytes(result[uid][b'RFC822']))
        return self.Retval(True, mails)
    except IMAPClient.Error as e:
        return self.process_error(e)
def get_mails(server, login, passwd):
    """Download every message from the default folder of an IMAP account.

    :param server: IMAP host (plain connection upgraded via STARTTLS)
    :param login: account user name
    :param passwd: account password
    :return: list of dicts {'headers': {name: decoded-value}, 'texts': [str]}
    """
    M = imaplib.IMAP4(server)
    M.starttls()
    M.login(login, passwd)
    try:
        M.select()
        _, data = M.uid('search', None, 'ALL')
        mails = []
        for num in data[0].split():
            _, tmp = M.uid('fetch', num, '(RFC822)')
            msg = email.message_from_bytes(tmp[0][1])
            texts = []
            # Collect all plain-text parts, tolerating broken charsets.
            for part in msg.walk():
                if part.get_content_type() == 'text/plain':
                    try:
                        texts.append(part.get_payload(decode=True)
                                     .decode(part.get_content_charset(), errors='ignore'))
                    except UnicodeDecodeError:
                        pass
            mails.append({'headers': {i: email.header.decode_header(j)[0][0]
                                      for i,j in msg.items()},
                          'texts': texts})
    finally:
        # Always release the connection, even on fetch/parse failure.
        M.close()
        M.logout()
    return mails
def get_message_content(self, message):
    """Extract subject/from/to/date and the decoded text body of a message.

    :param message: IMAP fetch response dict holding raw bytes under b'BODY[]'
    :return: dict with keys 'subject', 'from', 'to', 'date' and (when a text
             part exists) 'body'; None when parsing fails.
    """
    # Bug fix: the previous 'while True:' wrapper was dead weight -- the body
    # always returned on the first pass -- so it has been removed.
    try:
        res = {}
        msg = email.message_from_bytes(message[b'BODY[]'])
        res['subject'] = self.parse_email(msg['Subject'])
        res['from'] = self.parse_email(msg['From'], flag='from')
        res['to'] = self.parse_email(msg['To'], flag='to')
        res['date'] = self.parse_email(msg['Date'])
        for par in msg.walk():
            if par.is_multipart():
                # Container parts carry no payload of their own.
                continue
            name = par.get_param("name")
            if name:
                # Attachment part -- intentionally skipped; only text wanted.
                pass
            else:
                body = par.get_payload(decode=True)
                if not body:
                    continue
                try:
                    code = par.get_content_charset()
                    res['body'] = body.decode(code, 'ignore')
                except TypeError:
                    # No declared charset -> keep the raw bytes.
                    res['body'] = body
        return res
    except Exception:
        traceback.print_exc()
        return
def download_files(imap_label, pdf_directory, file_regex, settings):
    """Generator: save matching PDF attachments from an IMAP label.

    Yields (regex group 1, From header) for each newly saved PDF.
    Already-downloaded files (present in pdf_directory) are skipped.

    :param imap_label: mailbox/label to scan
    :param pdf_directory: destination directory for attachments
    :param file_regex: compiled pattern or pattern string for valid filenames
    :param settings: dict with 'serverimap', 'user', 'password'
    """
    if type(file_regex) == str:
        file_regex = re.compile(file_regex)
    mail_conn = IMAP4_SSL(settings['serverimap'])
    mail_conn.login(settings['user'], settings['password'])
    mail_conn.select(imap_label)
    typ, data = mail_conn.search(None, 'ALL')
    for num in data[0].split():
        typ, data = mail_conn.fetch(num, '(RFC822)')
        message = message_from_bytes(data[0][1])
        if message.is_multipart():
            for part in message.get_payload():
                if part.get_content_type() == 'application/pdf':
                    fn = part.get_filename().lower()
                    m = file_regex.match(fn)
                    if not m:
                        print("Invalid filename for pdf in message from {}".format(message['From']))
                        break
                    if exists(join(pdf_directory, fn)):
                        # Already downloaded earlier -- skip this message.
                        break
                    print("Saving {}".format(fn))
                    with open(join(pdf_directory, fn), 'wb') as pdf_file:
                        pdf_file.write(part.get_payload(decode=True))
                    yield m.group(1), message['From']
                    break
            else:
                # for/else: no PDF part triggered a break above.
                print("Message from {} had no valid pdf attachment".format(message['From']))
    mail_conn.logout()
def process_mailbox(M):
    """
    Do something with emails messages in the folder.
    For the sake of this example, print some headers.
    """
    status, listing = M.search(None, "ALL")
    if status != 'OK':
        print("No messages found!")
        return
    for msg_id in listing[0].split():
        status, payload = M.fetch(msg_id, '(RFC822)')
        if status != 'OK':
            print("ERROR getting message", msg_id)
            return
        parsed = email.message_from_bytes(payload[0][1])
        decoded_subject = email.header.decode_header(parsed['Subject'])
        subject = str(email.header.make_header(decoded_subject))
        print('Message %s: %s' % (msg_id, subject))
        print('Raw Date:', parsed['Date'])
        # Now convert to local date-time
        stamp = email.utils.parsedate_tz(parsed['Date'])
        if stamp:
            local_date = datetime.datetime.fromtimestamp(
                email.utils.mktime_tz(stamp))
            print("Local Date:", local_date.strftime("%a, %d %b %Y %H:%M:%S"))
def get_headers(self):
    """Populate the listbox widget with To/From/Subject of every message
    on the configured POP3 server (headers only, via TOP)."""
    self.list_message.delete(0, END)
    connect = poplib.POP3(self.server_addres.get())
    connect.getwelcome()
    connect.user(self.user.get())
    connect.pass_(self.password.get())
    # response, lst, octets = connect.list()
    numMessages = len(connect.list()[1])
    for i in range(numMessages):
        #print(i)
        # TOP n 0 retrieves only the headers of message n (1-based).
        response = connect.top(i+1,0)
        # return in format: (response, ['line', ...], octets)
        raw_message = response[1]
        message = email.message_from_bytes(b'\n'.join(raw_message))
        #print(str_message['to'])
        mes_to = message['to']
        mes_from = message['from']
        mes_subj = message['subject']
        headers,charset = decode_header(mes_subj)[0]
        # subject,charset = email.Header.decode_header(message["Subject"])[0] # Header()
        buf_str = "To: " + mes_to + " From: " + mes_from + " Subject: " + headers.decode(charset)
        self.list_message.insert(END, buf_str)
        #print(str_message)
    connect.quit()
def parse(self):
    """Fetch and process every INBOX message matching the configured
    subject, optionally deleting them afterwards.

    Config keys used (section EMAIL): useSSL, servername, username,
    password, subject, deleteAfterProcessing, expungeMailbox.
    """
    use_ssl = True if self.config["EMAIL"]["useSSL"] else False
    server = IMAPClient(self.config["EMAIL"]["servername"], ssl=use_ssl)
    server.login(self.config["EMAIL"]["username"], self.config["EMAIL"]["password"])
    logging.getLogger().debug("connected to IMAP server")
    select_info = server.select_folder("INBOX")
    # get list of fitting messages
    messages = server.search(["NOT", "DELETED", "SUBJECT", self.config["EMAIL"]["subject"]])
    logging.getLogger().info("%d email message(s) found" % len(messages))
    # loop through all messages
    for msgid in messages:
        # download it
        response = server.fetch(msgid, ["RFC822"])
        msg = email.message_from_bytes(response[msgid][b"RFC822"])
        self.__process_message(msg)
    # delete messages?
    if len(messages) > 0 and int(self.config["EMAIL"]["deleteAfterProcessing"]):
        # A setting > 1 keeps the most recent message on the server.
        if int(self.config["EMAIL"]["deleteAfterProcessing"]) > 1:
            messages = messages[:-1]
        server.delete_messages(messages)
        if self.config["EMAIL"]["expungeMailbox"]:
            server.expunge()
        logging.getLogger().info("Deleted email message(s) from server")
    server.logout()
def load_mail_from_mailbox(self):
    """Load every message from the selected mailbox into a list of dicts
    with keys: subject, source, sender, receiver, identifier."""
    mails = []
    typ, data = self.server.uid('search', None, "ALL")
    h = hashlib.sha256()
    for num in data[0].split():
        mail = {}
        typ, data = self.server.uid('fetch', num, '(RFC822)')
        raw_email = data[0][1]
        msg = email.message_from_bytes(raw_email)
        print(msg)
        subject, encoding = decode_header(msg['subject'])[0]
        if encoding is None:
            mail['subject'] = subject
        else:
            mail['subject'] = subject.decode(encoding)
        mail['source'] = msg
        mail['sender'] = msg['from']
        mail['receiver'] = msg['to']
        # NOTE(review): `h` is shared across iterations, so each identifier is
        # the digest of ALL raw messages seen so far, not of this message
        # alone (and `update()` returns None, so `ident` is always None).
        # Presumably a per-message hashlib.sha256(raw_email) was intended --
        # verify against whatever consumes 'identifier' before changing it.
        ident = h.update(raw_email)
        mail['identifier'] = h.hexdigest()
        mails.append(mail)
    return mails
def parse_ole_msg(ole):
    """Extract the transport header stream from an OLE .msg file and parse
    it into an email Message; returns None when no header stream exists."""
    email_h = None
    stream_dirs = ole.listdir()
    for stream in stream_dirs:
        # get stream that contains the email header
        if stream[0].startswith('__substg1.0_007D'):
            email_h = ole.openstream(stream).read()
            if stream[0].endswith('001F'):
                # Unicode probably needs something better than just stripping \x00
                email_h = email_h.replace(b'\x00', b'')
    # If it came from outlook we may need to trim some lines
    try:
        email_h = email_h.split(b'Version 2.0\x0d\x0a', 1)[1]
    except:
        # Also swallows AttributeError when no header stream was found.
        pass
    if not email_h:
        self.log('warning', 'This OLE file is not an email.')
        return None
    # Leaving us an RFC compliant email to parse
    if isinstance(email_h, str):
        # Python2 madness
        msg = email.message_from_string(email_h)
    else:
        msg = email.message_from_bytes(email_h)
    return msg
def get_newest_mail(self):
    """Fetch the single newest INBOX message and summarise it.

    :raises mailException: when the IMAP search fails
    :return: [] when the inbox is empty, else a one-element list of
             [time-string, from, subject, text, attachment-count]
    """
    self.mail.select('INBOX')
    typ, id_byte_raw = self.mail.search(None, 'ALL')
    if typ != "OK":
        raise mailException("Cannot get email from inbox. Please check your connection.")
    # No email
    if id_byte_raw == [b'']:
        return []
    mail_list = []
    id_byte = id_byte_raw[0]
    id_list = id_byte.split()
    # range(last-1, last-2, -1) yields exactly one id: the newest minus one.
    # NOTE(review): presumably int(id_list[-1]) itself (the newest) was
    # intended -- verify against the caller.
    for i in range(int(id_list[-1]) - 1, int(id_list[-1]) - 2, -1):
        typ, mail_data = self.mail.fetch(str(i), '(RFC822)')
        try:
            mail_data = mail_data[0][1]
        except Exception:
            pass
        msg = email.message_from_bytes(mail_data)
        # Pull a "day month year time" slice out of the Received header.
        allTime = str(msg['received'])
        varTime = allTime.split()[-6] + " " + allTime.split()[-5] + " " + allTime.split()[-4] + " " + allTime.split()[-3]
        bSubject = email.header.decode_header(msg['subject'])
        try:
            varSubject = bSubject[0][0].decode(bSubject[0][1])
        except Exception:
            # Already a str, or no charset -- use as-is.
            varSubject = bSubject[0][0]
        varFrom = str(msg['from']).split()[-1]
        # Get text and number of attachment(s) from email
        allUseful = msg.get_payload(decode = False)
        varText = allUseful[0].get_payload(decode = allUseful[0]["Content-Transfer-Encoding"]).decode(allUseful[0]['Content-Type'].split('charset=')[1])
        varAtt = len(allUseful) - 1
        mail_list.append([varTime, varFrom, varSubject, varText, varAtt])
    return mail_list
def _retrieve_next_file(self):
    """Advance through self.message_ids until a message with at least one
    attachment is found; extract its attachments via self._add_file.

    Messages without attachments are moved to the configured
    "Downloaded Folder" and skipped. Errors are logged, never raised.
    """
    try:
        file_found = False
        self.current_index = self.current_index + 1
        while not file_found and self.current_index < len(self.message_ids):
            self.log(logging.DEBUG, "Processing email ID " + \
                str(self.message_ids[self.current_index]))
            message_content = self.imapconn.fetch( \
                self.message_ids[self.current_index], "RFC822") \
                [self.message_ids[self.current_index]][b"RFC822"]
            message = email.message_from_bytes(message_content)
            for part in message.walk():
                content_disposition = part.get("Content-Disposition")
                if content_disposition is not None and \
                        content_disposition.startswith("attachment"):
                    filename = self._extract_filename(part.get_filename())
                    self.log(logging.DEBUG, "Extracting attachment " + filename)
                    contents = part.get_payload(decode=True)
                    self._add_file(filename, contents)
                    file_found = True
            if not file_found:
                # Attachment-less message: archive it and try the next one.
                self.imapconn.move(self.message_ids[self.current_index],
                                   self.configuration["Downloaded Folder"])
                self.current_index = self.current_index + 1
    except:
        self.log(logging.ERROR, "Failed to retrieve next file: " + traceback.format_exc())
def parse_message(raw_email):
    """Decode the payload of a raw RFC 822 message into text.

    :param raw_email: raw message bytes
    :return: the decoded body as a str (undecodable bytes are dropped)
    """
    message = email.message_from_bytes(raw_email)
    # Fall back to UTF-8 when the message declares no charset (the old code
    # passed None to str() and crashed).
    charset = message.get_content_charset() or 'utf8'
    # Bug fix: the old code re-encoded the text to bytes and then called
    # str() on them, returning the "b'...'" repr instead of the actual text.
    return str(message.get_payload(decode=True), charset, 'ignore')
def parse(message):
    """Parse *message* into an ``email.message.Message``.

    :param message: raw message as bytes or str
    :raises TypeError: for any other type (TypeError is a subclass of
        Exception, so existing ``except Exception`` callers still work)
    """
    # isinstance (rather than type() ==) also accepts bytes/str subclasses.
    if isinstance(message, bytes):
        return message_from_bytes(message)
    if isinstance(message, str):
        return message_from_string(message)
    raise TypeError('Parse failed!')
def get_unread(self):
    """Fetch all unseen Gmail messages, mark them seen, and return them
    as a list of email.message.Message objects (empty on any failure)."""
    self.imap = imaplib.IMAP4_SSL("imap.gmail.com")
    self.login_imap()
    retcode, messages = "", ""
    try:
        self.imap.list()
        self.imap.select('inbox')
        (retcode, messages) = self.imap.search(None, '(UNSEEN)')
    except self.imap.abort:
        logger.error("imaplib abort, waiting until next turn")
        print("")
    except TimeoutError:
        logger.error("Inbox timed out")
        print("")
    unread = []
    if retcode == "OK":
        for num in messages[0].split():
            typ, data = self.imap.fetch(num,'(RFC822)')
            for response_part in data:
                # Fetch responses interleave tuples (header, body) with
                # closing byte strings; only the tuples hold the message.
                if isinstance(response_part, tuple):
                    unread.append(email.message_from_bytes(response_part[1]))
                    self.imap.store(num, '+FLAGS', '\\Seen')
    else:
        logger.warning("Inbox retrieval failed")
        print("")
    self.imap.logout()
    return unread
def main(args):
    """Move an .eml file into a journal directory named after its headers.

    The directory name is "<date> [<sender name> <sender address>] <subject>";
    the .eml file itself is moved inside and the new path is printed.

    :param args: a single-element list holding the .eml path
    :raises TypeError: when args does not contain exactly one item
    """
    if len(args) != 1:
        raise TypeError("need 1 and only 1 argument")
    source = pathlib.Path(args[0])
    msg = email.message_from_bytes(source.read_bytes())
    stamp = "{:%Y%m%dT%H%M%S%z}".format(email.utils.parsedate_to_datetime(msg["date"]))
    sender_name, sender_address = email.utils.parseaddr(msg["from"])
    entry_name = "{date} [{sender_name} {sender_address}] {subject}".format(
        date=stamp,
        sender_name=sender_name,
        sender_address=sender_address,
        subject=msg["subject"],
    )
    entry_dir = source.parent / entry_name
    entry_dir.mkdir()
    destination = entry_dir / source.name
    source.rename(destination)
    print(destination)
def countFolder(folder):
    """Count the messages in *folder* and collect their Date timestamps.

    Uses the module-level `imap` connection (read-only select).
    Returns (message_count, [unix timestamps]) or None when a fetch fails.
    """
    global imap
    imap.select(folder, True);
    rv, search = imap.search(None, 'ALL');
    folderCount = len(search[0].split());
    timestamps = []
    # based on https://gist.github.com/robulouski/7441883
    for num in search[0].split():
        rv, data = imap.fetch(num, '(RFC822)')
        if rv != 'OK':
            print("ERROR getting message", num)
            return
        msg = email.message_from_bytes(data[0][1])
        # Now convert to local date-time
        date_tuple = email.utils.parsedate_tz(msg['Date'])
        if date_tuple:
            timestamp = email.utils.mktime_tz(date_tuple)
            #print ("Timestamp:", timestamp)
            timestamps.append(timestamp)
    return (folderCount, timestamps);
# Script: download the first attachment of each unseen Gmail message into
# detach_dir, then mark the message as seen.  Relies on `password` being
# defined earlier in the file (not visible here -- verify).
detach_dir='/Users/deekshachandwani/Desktop/lab_project'
mail = imaplib.IMAP4_SSL('imap.gmail.com')
(retcode, capabilities) = mail.login('*****@*****.**',password)
mail.list()
mail.select('inbox')
n=0  # count of processed messages
(retcode, messages) = mail.search(None, '(UNSEEN)')
if retcode == 'OK':
    # Newest unseen messages first.
    for num in reversed(messages[0].split()) :
        print ('Processing ')
        n=n+1
        typ, data = mail.fetch(num,'(RFC822)')
        emailbody=data[0][1]
        original = email.message_from_bytes(emailbody)
        #print(original['From'])
        for part in original.walk():
            # Skip container parts and inline (non-attachment) content.
            if part.get_content_maintype() == 'multipart':
                continue
            if part.get('Content-Disposition') is None:
                continue
            fileName = part.get_filename()
            if bool(fileName):
                filePath = os.path.join(detach_dir, fileName)
                fp = open(filePath, 'wb')
                fp.write(part.get_payload(decode=True))
                fp.close()
                # Only the first attachment per message is saved.
                break
        typ, data = mail.store(num,'+FLAGS','\\Seen')
def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
    """Handle one LMTP delivery: parse the message, validate it, and enqueue
    it per-recipient to the right Mailman queue.

    Returns a CRLF-joined status string with one RFC 2033 status code per
    recipient (or a single code for whole-message failures).
    """
    try:
        # Refresh the list of list names every time we process a message
        # since the set of mailing lists could have changed.
        listnames = set(getUtility(IListManager).names)
        # Parse the message data.  If there are any defects in the
        # message, reject it right away; it's probably spam.
        msg = email.message_from_bytes(data, Message)
    except Exception:
        elog.exception('LMTP message parsing')
        config.db.abort()
        return CRLF.join(ERR_451 for to in rcpttos)
    # Do basic post-processing of the message, checking it for defects or
    # other missing information.
    message_id = msg.get('message-id')
    if message_id is None:
        return ERR_550_MID
    if msg.defects:
        return ERR_501
    msg.original_size = len(data)
    add_message_hash(msg)
    msg['X-MailFrom'] = mailfrom
    # RFC 2033 requires us to return a status code for every recipient.
    status = []
    # Now for each address in the recipients, parse the address to first
    # see if it's destined for a valid mailing list.  If so, then queue
    # the message to the appropriate place and record a 250 status for
    # that recipient.  If not, record a failure status for that recipient.
    received_time = now()
    for to in rcpttos:
        try:
            to = parseaddr(to)[1].lower()
            local, subaddress, domain = split_recipient(to)
            if subaddress is not None:
                # Check that local-subaddress is not an actual list name.
                listname = '{}-{}@{}'.format(local, subaddress, domain)
                if listname in listnames:
                    local = '{}-{}'.format(local, subaddress)
                    subaddress = None
            slog.debug('%s to: %s, list: %s, sub: %s, dom: %s',
                       message_id, to, local, subaddress, domain)
            listname = '{}@{}'.format(local, domain)
            if listname not in listnames:
                status.append(ERR_550)
                continue
            listid = '{}.{}'.format(local, domain)
            # The recipient is a valid mailing list.  Find the subaddress
            # if there is one, and set things up to enqueue to the proper
            # queue.
            queue = None
            msgdata = dict(listid=listid,
                           original_size=msg.original_size,
                           received_time=received_time)
            canonical_subaddress = SUBADDRESS_NAMES.get(subaddress)
            queue = SUBADDRESS_QUEUES.get(canonical_subaddress)
            if subaddress is None:
                # The message is destined for the mailing list.
                msgdata['to_list'] = True
                queue = 'in'
            elif canonical_subaddress is None:
                # The subaddress was bogus.
                slog.error('%s unknown sub-address: %s',
                           message_id, subaddress)
                status.append(ERR_550)
                continue
            else:
                # A valid subaddress.
                msgdata['subaddress'] = canonical_subaddress
                if canonical_subaddress == 'owner':
                    msgdata.update(dict(
                        to_owner=True,
                        envsender=config.mailman.site_owner,
                        ))
                    queue = 'in'
            # If we found a valid destination, enqueue the message and add
            # a success status for this recipient.
            if queue is not None:
                config.switchboards[queue].enqueue(msg, msgdata)
                slog.debug('%s subaddress: %s, queue: %s',
                           message_id, canonical_subaddress, queue)
                status.append('250 Ok')
        except Exception:
            slog.exception('Queue detection: %s', msg['message-id'])
            config.db.abort()
            status.append(ERR_550)
    # All done; returning this big status string should give the expected
    # response to the LMTP client.
    return CRLF.join(status)
def copy(msg: Message) -> Message:
    """return a copy of message

    The copy is produced by a serialize/parse round-trip, so it shares no
    state with the original.
    """
    serialized = msg.as_bytes()
    return email.message_from_bytes(serialized)
def __init__(self, msg):
    """Wrap raw message bytes.

    :param msg: raw RFC 822 message bytes, parsed once and kept as
                an email.message.Message in self._msg.
    """
    self._msg = email.message_from_bytes(msg)
def startListening():
    """Scan the Gmail inbox for Publix receipt emails from registered users,
    parse their text part via parsePublix, and delete processed messages.

    Credentials are read from username.log/password.log; users are looked
    up in the local sqlite database pvi.db.
    """
    mail = imaplib.IMAP4_SSL('imap.gmail.com')
    password = open("password.log").read()
    mail.login(open("username.log").read(), password)
    mail.select('inbox')
    result, data = mail.search(None,'ALL')
    result, data = mail.uid('search', None, "ALL")
    latest_email_uids = data[0].split()
    total_items = []
    # Only look at the first 100 UIDs per run.
    for uid in list(latest_email_uids)[:100]:
        result, data = mail.uid('fetch', uid, '(RFC822)')
        raw_email = data[0][1]
        # print(str(raw_email))
        email_message = email.message_from_bytes(raw_email)
        connection = sqlite3.connect('pvi.db')
        c = connection.cursor()
        try:
            if "Your Publix receipt." in email_message['Subject']:
                # print(email_message["To"])
                # print(email_message['From'])
                print(email_message['Subject'])
                maintype = email_message.get_content_maintype()
                # Pull the bare address out of "Name <addr>".
                senderEmail = re.search(r'\<(.*)\>', email_message['From']).group(0)[1:-1]
                print(senderEmail, "test")
                # check if there is a correlated user
                ret = [email for email in c.execute('SELECT * FROM user WHERE email=?', (senderEmail,))]
                if not ret:
                    # Unknown sender: drop the message and move on.
                    mail.uid('STORE', uid , '+FLAGS', '(\Deleted)')
                    mail.expunge()
                    continue
                if maintype == 'multipart':
                    for part in email_message.get_payload():
                        if part.get_content_maintype() == 'text':
                            content = part.get_payload()
                            items = parsePublix(content)
                            break
                # DELETE READ EMAIL AFTER READ
                mail.uid('STORE', uid , '+FLAGS', '(\Deleted)')
                mail.expunge()
                # ADD ITEMS TO DATABASE
                # Can't always execute many. Select to see if already in inventory. If already in inventory, update with amount rather than insert
                """ c.executemany('INSERT INTO inventory VALUES (?,?,?,?,?)', items) #total_items is a list of tuples of (owner, foodName, amount, unit, store) connection.commit()"""
        except:
            # NOTE(review): silently swallows all errors per message.
            pass
# Handles an AVS (Alexa Voice Service) HTTP response. A 200 reply carries
# a multipart MIME body, reconstructed here by prefixing a Content-Type
# header and parsing with the email package (bytes path, with a str
# fallback on AttributeError). JSON parts carry directives; audio/mpeg
# parts are written under tmp_path, named by the md5 of their Content-ID.
# Directives then drive speech playback (SpeechSynthesizer), follow-up
# listening with a timeout (SpeechRecognizer), playlists (AudioPlayer),
# and volume changes clamped to MIN_VOLUME..MAX_VOLUME (Speaker).
# A 204 is an expected empty reply; any other status logs a failure and
# lights the failure indicator.
# NOTE(review): `j` is only bound when a JSON part was present — a 200
# response without one would raise NameError at the 'directives' check.
def process_response(response): logger.debug("Processing Request Response...") if response.status_code == 200: try: data = bytes("Content-Type: ", 'utf-8') + bytes( response.headers['content-type'], 'utf-8') + bytes( '\r\n\r\n', 'utf-8') + response.content msg = email.message_from_bytes(data) # pylint: disable=no-member except AttributeError: data = "Content-Type: " + response.headers[ 'content-type'] + '\r\n\r\n' + response.content msg = email.message_from_string(data) for payload in msg.get_payload(): if payload.get_content_type() == "application/json": j = json.loads(payload.get_payload()) logger.debug("JSON String Returned: %s", json.dumps(j, indent=2)) elif payload.get_content_type() == "audio/mpeg": filename = tmp_path + hashlib.md5( payload.get('Content-ID').strip( "<>").encode()).hexdigest() + ".mp3" with open(filename, 'wb') as f: f.write(payload.get_payload(decode=True)) else: logger.debug("NEW CONTENT TYPE RETURNED: %s", payload.get_content_type()) # Now process the response if 'directives' in j['messageBody']: if not j['messageBody']['directives']: logger.debug("0 Directives received") for directive in j['messageBody']['directives']: if directive['namespace'] == 'SpeechSynthesizer': if directive['name'] == 'speak': player.play_speech("file://" + tmp_path + hashlib.md5( directive['payload']['audioContent'].replace( "cid:", "", 1).encode()).hexdigest() + ".mp3") elif directive['namespace'] == 'SpeechRecognizer': if directive['name'] == 'listen': logger.debug( "Further Input Expected, timeout in: %sms", directive['payload']['timeoutIntervalInMillis']) player.play_speech(resources_path + 'beep.wav') timeout = directive['payload'][ 'timeoutIntervalInMillis'] / 116 audio_stream = capture.silence_listener(timeout) # now process the response alexa_speech_recognizer(audio_stream) elif directive['namespace'] == 'AudioPlayer': if directive['name'] == 'play': player.play_playlist(directive['payload']) elif directive['namespace'] == "Speaker": # speaker control such 
as volume if directive['name'] == 'SetVolume': vol_token = directive['payload']['volume'] type_token = directive['payload']['adjustmentType'] if (type_token == 'relative'): volume = player.get_volume() + int(vol_token) else: volume = int(vol_token) if (volume > MAX_VOLUME): volume = MAX_VOLUME elif (volume < MIN_VOLUME): volume = MIN_VOLUME player.set_volume(volume) logger.debug("new volume = %s", volume) # Additional Audio Iten elif 'audioItem' in j['messageBody']: player.play_playlist(j['messageBody']) return elif response.status_code == 204: logger.debug("Request Response is null (This is OKAY!)") else: logger.info("(process_response Error) Status Code: %s", response.status_code) response.connection.close() platform.indicate_failure()
# Odoo fetchmail override for Italian PEC servers. For each server with
# l10n_it_is_pec set it searches mail from @pec.fatturapa.it with UID >=
# the stored l10n_it_last_uid, restores each message's \Seen flag to its
# pre-fetch state, hands the parsed message to _attachment_invoice(),
# commits after every email (cron-only, per the docstring warning),
# tracks the highest successfully processed UID, and always closes and
# logs out in the finally block. Non-PEC servers are delegated to the
# standard super().fetch_mail() at the end.
# NOTE(review): the server may echo back the newest already-handled
# message even when nothing is new — hence the `int(uid) <=
# server.l10n_it_last_uid` skip.
def fetch_mail(self): """ WARNING: meant for cron usage only - will commit() after each email! """ MailThread = self.env['mail.thread'] for server in self.filtered(lambda s: s.l10n_it_is_pec): _logger.info('start checking for new emails on %s PEC server %s', server.type, server.name) count, failed = 0, 0 imap_server = None try: imap_server = server.connect() imap_server.select() result, data = imap_server.uid( 'search', None, '(FROM "@pec.fatturapa.it")', '(UID %s:*)' % (server.l10n_it_last_uid)) new_max_uid = server.l10n_it_last_uid for uid in data[0].split(): if int(uid) <= server.l10n_it_last_uid: # We get always minimum 1 message. If no new message, we receive the newest already managed. continue result, data = imap_server.uid('fetch', uid, '(RFC822)') if not data[0]: continue message = data[0][1] # To leave the mail in the state in which they were. if "Seen" not in data[1].decode("utf-8"): imap_server.uid('STORE', uid, '+FLAGS', '\\Seen') else: imap_server.uid('STORE', uid, '-FLAGS', '\\Seen') # See details in message_process() in mail_thread.py if isinstance(message, xmlrpclib.Binary): message = bytes(message.data) if isinstance(message, str): message = message.encode('utf-8') msg_txt = email.message_from_bytes(message) try: self._attachment_invoice(msg_txt) new_max_uid = max(new_max_uid, int(uid)) except Exception: _logger.info( 'Failed to process mail from %s server %s.', server.type, server.name, exc_info=True) failed += 1 self._cr.commit() count += 1 server.write({'l10n_it_last_uid': new_max_uid}) _logger.info( "Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", count, server.type, server.name, (count - failed), failed) except Exception: _logger.info( "General failure when trying to fetch mail from %s server %s.", server.type, server.name, exc_info=True) finally: if imap_server: imap_server.close() imap_server.logout() server.write({'date': fields.Datetime.now()}) return super( FetchmailServer, self.filtered(lambda s: not 
s.l10n_it_is_pec)).fetch_mail()
# Script fragment: logs into IMAP, selects INBOX, then walks N messages
# (offset 4 back from the newest) from newest to oldest. Each RFC822
# payload is parsed, the possibly MIME-encoded Subject is decoded, and
# Subject/From are printed before walking multipart bodies.
# NOTE(review): the `- 4` offsets look ad hoc, and `N` must be defined
# upstream — confirm both against the full script.
imap.login(username, password) # select a mailbox (in this case, the inbox mailbox) # use imap.list() to get the list of mailboxes status, messages = imap.select("INBOX") # total number of emails messages = int(messages[0]) for i in range(messages - 4, messages - N - 4, -1): # fetch the email message by ID res, msg = imap.fetch(str(i), "(RFC822)") for response in msg: if isinstance(response, tuple): # parse a bytes email into a message object msg = email.message_from_bytes(response[1]) # decode the email subject subject = decode_header(msg["Subject"])[0][0] if isinstance(subject, bytes): # if it's a bytes, decode to str subject = subject.decode() # email sender from_ = msg.get("From") print("Subject:", subject) print("From:", from_) # if the email message is multipart if msg.is_multipart(): # iterate over email parts for part in msg.walk(): # extract content type of email content_type = part.get_content_type()
# Processes every message matching `search` in the given IMAP connection:
# assigns each mail a uuid1-named folder under conf['NAS_FOLDER'], decodes
# Subject/To/From/Date, extracts a text body (preferring text/plain, else
# HTML converted via self.text_from_html), collects attachments — skipping
# embedded message/rfc822 parts and filenames without an extension — and
# finally persists everything through save_file / save_attachments /
# save_database on a MailInfos record.
# NOTE(review): paths are built with a Windows-style '\\' separator, and
# the `for x in sufixo.split(' '): os.path.join(sufixo, x)` loop discards
# its result — presumably dead code; confirm before removing.
def process_mailbox(self, conn, search): root_folder = conf['NAS_FOLDER'] rv, data = conn.search(None, search) # 'SEEN', 'UNSEEN', etc... if rv != 'OK': print("No messages found!") return for num in data[0].split(): subject = "" to = "" sender = "" date = "" body = "" message_ID = "" in_reply_to = "" references = "" path_to_dir = "" has_att = False attach = [] rfq_id = uuid.uuid1( ) # identificador do email. Usado para compor o path rv, data = conn.fetch(num, '(RFC822)') if rv != 'OK': print("ERROR getting message") return msg = email.message_from_bytes(data[0][1]) #Dados do email ------- message_ID = msg['Message-ID'] in_reply_to = msg['In-Reply-To'] references = msg['References'] path_to_dir = root_folder + '\\' + str(rfq_id) if not os.path.exists(path_to_dir): os.makedirs(path_to_dir) # se não existe: cria if (msg['Subject']): subject = msg['Subject'] subject, encoding = email.header.decode_header(subject)[0] if not isinstance(subject, str): subject = subject.decode(encoding) if (msg['to']): to = msg['to'] if (msg['from']): sender = msg['from'] date_tuple = email.utils.parsedate_tz(msg['Date']) if date_tuple: local_date = datetime.datetime.fromtimestamp( email.utils.mktime_tz(date_tuple)) date = datetime.datetime.strptime(str(local_date), "%Y-%m-%d %H:%M:%S") #----------------------------- #Anexos do email ------- if msg.is_multipart(): for part in msg.walk(): ctype = part.get_content_type() cdispo = str(part.get('Content-Disposition')) dispo_type = str(part.get_content_disposition()) charset = part.get_content_charset() if ctype == 'text/plain': #text = str(part.get_payload(decode=True), str(charset), "ignore").encode('ANSI', 'replace') if charset != None: text = str(part.get_payload(decode=True), str(charset), "ignore") else: text = str(part.get_payload(decode=True)) body = text if ctype == 'text/html': #html = str(part.get_payload(decode=True), str(charset), "ignore").encode('ANSI', 'ignore') if charset != None: html = str(part.get_payload(decode=True), 
str(charset), "ignore") else: html = str(part.get_payload(decode=True)) body = self.text_from_html(html) if cdispo is not None: if (dispo_type == 'attachment' ) and ctype.split('/')[0] != 'message': #é um anexo e não é uma mensagem (rfc822); Para incluir partes não textuais do corpo do email: dispo_type == 'inline' attachment = Attachment() attachment.data = part.get_payload(decode=True) attachment.content_type = part.get_content_type() attachment.size = len(attachment.data) attachment.name = part.get_filename() attachment.name = re.sub('[\n\r]', '', attachment.name) attachment.name = os.path.join( path_to_dir, attachment.name) if re.search( '(\w+[.]\w+)', attachment.name ) != None: #confere se é um arquivo com extenção if attachment.name[-2:] != "?=": attach.append(attachment) has_att = True else: charset = msg.get_content_charset() if charset != None: html = str(msg.get_payload(decode=True), str(charset), "ignore") else: html = str(msg.get_payload(decode=True)) body = self.text_from_html(html) i = '[^0-9a-zA-Z]+' sufixo = re.sub(i, '', subject) sufixo = unicodedata.normalize('NFKD', sufixo).encode('ascii', 'ignore') sufixo = sufixo.decode() for x in sufixo.split(' '): os.path.join(sufixo, x) name = sufixo + '_body.txt' abspath = os.path.join(path_to_dir, name) # compõe nome do body # Agora compõe os dados em uma classe mail_infos = MailInfos() mail_infos.to = to mail_infos.sender = sender mail_infos.subject = subject mail_infos.date = date mail_infos.text = body mail_infos.name = abspath mail_infos.ID = message_ID mail_infos.in_reply_to = in_reply_to mail_infos.references = references if (has_att): mail_infos.atts = attach has_att = False #mail_inf.append(mail_infos); self.save_file(mail_infos) # salva o body self.save_attachments(mail_infos) # salva anexos self.save_database(mail_infos) # salva no banco
def fetch_unsub_links(self):
    """Fetch unsubscribe links from every message in INBOX.

    Walks all messages, extracts the sender address, pulls an
    unsubscribe link out of each multipart payload part via
    self.extract_unsub_link(), and stores it with self.add_link().
    Senders already recorded by self.get_links() are skipped.
    Ctrl-C pauses and asks whether to stop; any other error skips the
    message (printing a traceback when self.debug is set).
    """
    imap = self.get_imap()
    imap.select('INBOX')
    _, messageNumbersRaw = imap.search(None, 'ALL')
    messageNumbers = messageNumbersRaw[0].split()
    progressBar = ProgressBar(len(messageNumbers))
    startTime = datetime.now()
    print(f"({startTime.strftime('%H:%M:%S')}) Parsing emails... Start")
    continueProcess = True
    for messageNumber in messageNumbers:
        # Check if the process should continue.
        if not continueProcess:
            break
        try:
            _, msg = imap.fetch(messageNumber, '(RFC822)')
            # Parse the email.
            if msg and msg[0]:
                message = email.message_from_bytes(msg[0][1])
            else:
                continue
            emailFrom = self.extract_email(message['from'])
            # If sender's email could not be found or the sender email is
            # already stored, then move on.
            # BUG FIX: the original tested `email in self.get_links()`,
            # i.e. membership of the *email module*, which is never a
            # stored link — so known senders were re-parsed every run.
            if not emailFrom or emailFrom in self.get_links():
                continue
            if message.is_multipart():
                # BUG FIX: the original looped message.walk() and, for
                # every walked part, re-iterated message.get_payload(),
                # processing each top-level part once per walk step.
                # A single pass over the payload is sufficient.
                for subMessage in message.get_payload():
                    try:
                        link = self.extract_unsub_link(
                            subMessage.get_payload(decode=True).decode()
                        )
                        if link:
                            self.add_link(emailFrom, link)
                    except (UnicodeDecodeError, AttributeError):
                        pass
            else:
                # Non-multipart bodies are intentionally ignored.
                # message.get_payload(decode=True).decode()
                pass
        except KeyboardInterrupt:
            # On keyboard interrupt, check if the user wants to continue
            # parsing emails. If they do not, then stop the process and
            # move on.
            stopProcess = None
            while stopProcess is None:
                print()
                stopProcess = input('Stop process? [y/N] ').lower()
                if stopProcess not in ('y', 'n'):
                    stopProcess = None
                    print('Invalid entry, enter y (yes) or n (no).')
                    continue
            if stopProcess == 'y':
                print('exiting email parsing.')
                continueProcess = False
                break
        except Exception:
            if self.debug:
                print(traceback.format_exc())
            else:
                print('Something went wrong, skipping.')
        progressBar.numerator = int(messageNumber)
        print(progressBar, end='\r')
    endTime = datetime.now()
    duration = str(endTime - startTime).split('.')[0]
    print(f"\n({endTime.strftime('%H:%M:%S')}) Parsing emails... Done")
    print(f"({datetime.now().strftime('%H:%M:%S')}) Duration: {duration}")
def _fetch(fetch, M, nums):
    """Yield (sequence-number, parsed Message) pairs.

    Iterates the message ids listed in nums[0], fetches each from IMAP
    connection M with the given fetch spec, unwraps the response via
    r(), and parses the raw bytes into an email Message.
    """
    for seq in nums[0].split():
        raw = r(M.fetch(seq, fetch))[0][1]
        yield seq, message_from_bytes(raw)
# AWS Lambda entry point that deletes a classroom. It base64-decodes the
# multipart/form-data request body, reconstructs it as a MIME message
# (prefixing the Content-Type header) to extract the "Account" JSON part
# (uid/username/password), removes the classroom uid from the user's
# `classes` list in the accounts table, deletes the classroom row from
# the classrooms table and its attendance CSV from S3, and returns a
# JSON status in every branch.
# NOTE(review): `list_delete` is not a documented DynamoDB update
# function — confirm this UpdateExpression actually works against the
# deployed table. `rek_client` and `MailThread`-style unused locals
# suggest copy-paste origin.
def lambda_handler(event, context): ''' First, load in all of the AWS services and their tables/containers/features ''' dynamodb = boto3.resource('dynamodb') rek_client = boto3.client('rekognition') accountsTable = dynamodb.Table('accounts') classroomsTable = dynamodb.Table('classrooms') ''' Now, we format the response into the proper format. The response should be multipart, meaning that it allows for regular json data as well as files. ''' # decoding form-data into bytes post_data = base64.b64decode(event['body']) # fetching content-type content_type = "" if 'Content-Type' in event["headers"]: content_type = event["headers"]['Content-Type'] elif 'content-type' in event["headers"]: content_type = event["headers"]['content-type'] # concate Content-Type: with content_type from event ct = "Content-Type: " + content_type + "\n" # parsing message from bytes msg = email.message_from_bytes(ct.encode() + post_data) # if message is multipart if msg.is_multipart(): multipart_content = {} # retrieving form-data for part in msg.get_payload(): multipart_content[part.get_param( 'name', header='content-disposition')] = part.get_payload(decode=True) ''' Get Data from the Request. Since this is multipart, there is a Metadata JSON value passsed in as well as a file. ''' try: uid = json.loads(multipart_content["Account"])["uid"] username = json.loads(multipart_content["Account"])["username"] password = json.loads(multipart_content["Account"])["password"] except: return { 'statusCode': 500, 'body': json.dumps({ "error": True, "message": "Incorrect Data in the Request, Please Try Again!" }) } ''' Now, we delete the classroom. 
First, we must remove it from the list of uids associated with the user account ''' result = accountsTable.update_item( Key={ 'username': username, 'password': password }, UpdateExpression="SET classes = list_delete(classes, :i)", ExpressionAttributeValues={ ':i': [uid], }, ReturnValues="UPDATED_NEW") # bad response if result['ResponseMetadata']['HTTPStatusCode'] != 200: return { 'statusCode': result['ResponseMetadata']['HTTPStatusCode'], 'body': json.dumps({ "error": True, "message": "There has been an error deleting the classroom" }) } ''' Now, we delete the classroom from the table ''' result = classroomsTable.delete_item(Key={ 'uid': uid, }, ) if result['ResponseMetadata']['HTTPStatusCode'] != 200: return { 'statusCode': result['ResponseMetadata']['HTTPStatusCode'], 'body': json.dumps({ "error": True, "message": "There has been an error deleting the classroom" }) } ''' Finally, delete the attendance csv from s3 ''' s3 = boto3.resource('s3') s3.Object('classattendance', uid + '.csv').delete() return { 'statusCode': 200, 'body': json.dumps({ "error": False, "message": "The classroom has been deleted!" }) } else: # on upload failure return { 'statusCode': 500, 'body': json.dumps({ "error": True, "message": "Data is not multipart!" }) }
if typ != 'OK': print ('Error searching Inbox.') raise # Iterating over all emails for msgId in data[0].split(): typ, messageParts = imapSession.fetch(msgId, "(RFC822)") if typ != 'OK': print ('Error fetching mail.') raise emailBody = messageParts[0][1] mail = email.message_from_bytes(emailBody) for part in mail.walk(): if part.get_content_maintype() == 'multipart': def get_text(msg): if msg.is_multipart(): return get_text(msg.get_payload(0)) else: return msg.get_payload(None, True) string=get_text(mail) # string = E-Mail Text - maybe for later use... print (string) if part.get('Content-Disposition') is None:
def message_from_headers(hdr):
    """Build an email Message from an iterable of (name, value) header pairs."""
    lines = []
    for name, value in hdr:
        lines.append("{}: {}".format(name, value))
    return message_from_bytes("\r\n".join(lines).encode())
# Incrementally mirrors an ietf.org IMAP mailing-list folder into the
# local cache and returns the ids of newly fetched messages. First pass
# compares each cached file's size with the server's RFC822.SIZE and
# re-queues mismatches (corrupt/partial files are unlinked). Second pass
# downloads missing messages to a ".download" temp file and renames it
# into place, rebuilds the Archived-At URL index (aa-cache.json, written
# via a .tmp file then renamed), runs every registered helper's
# scan_message() to collect per-message metadata, and sends a NOOP
# keepalive at most every 10 s on a reused connection. Opens/logs out of
# its own anonymous connection only when _reuse_imap is None.
# NOTE(review): aa_cache.unlink() is called unconditionally when there is
# anything to fetch — it would raise if the cache file does not exist yet.
def update(self, _reuse_imap=None) -> List[int]: new_msgs = [] if _reuse_imap is None: imap = IMAPClient(host='imap.ietf.org', ssl=False, use_uid=True) imap.login("anonymous", "anonymous") else: imap = _reuse_imap imap.select_folder("Shared Folders/" + self._list_name, readonly=True) msg_list = imap.search() msg_fetch = [] for msg_id, msg in imap.fetch(msg_list, "RFC822.SIZE").items(): cache_file = Path(self._cache_folder, F"{msg_id:06d}.msg") if not cache_file.exists(): msg_fetch.append(msg_id) else: file_size = cache_file.stat().st_size imap_size = msg[b"RFC822.SIZE"] if file_size != imap_size: self.log.warn(F"message size mismatch: {self._list_name}/{msg_id:06d}.msg ({file_size} != {imap_size})") cache_file.unlink() msg_fetch.append(msg_id) if len(msg_fetch) > 0: aa_cache = Path(self._cache_folder, "aa-cache.json") aa_cache_tmp = Path(self._cache_folder, "aa-cache.json.tmp") aa_cache.unlink() last_keepalive = datetime.now() for msg_id, msg in imap.fetch(msg_fetch, "RFC822").items(): curr_keepalive = datetime.now() if (curr_keepalive - last_keepalive) > timedelta(seconds=10): if _reuse_imap is not None: self.log.info("imap keepalive") _reuse_imap.noop() last_keepalive = curr_keepalive cache_file = Path(self._cache_folder, F"{msg_id:06d}.msg") fetch_file = Path(self._cache_folder, F"{msg_id:06d}.msg.download") if not cache_file.exists(): with open(fetch_file, "wb") as outf: outf.write(msg[b"RFC822"]) fetch_file.rename(cache_file) e = email.message_from_bytes(msg[b"RFC822"], policy=policy.default) if e["Archived-At"] is not None: list_name, msg_hash = _parse_archive_url(e["Archived-At"]) self._archive_urls[msg_hash] = msg_id self._num_messages += 1 new_msgs.append(msg_id) self._msg_metadata[msg_id] = {} for helper in self._helpers: self.log.info(F"{helper.name}: scan message {self._list_name}/{msg_id:06} for metadata") self._msg_metadata[msg_id][helper.name] = helper.scan_message(e) with open(aa_cache_tmp, "w") as aa_cache_file: json.dump(self._archive_urls, 
aa_cache_file) aa_cache_tmp.rename(aa_cache) self.serialise_metadata() imap.unselect_folder() if _reuse_imap is None: imap.logout() self._last_updated = datetime.now() return new_msgs
# Standalone script (USTC mail): logs in over plain IMAP4 (port 143),
# selects INBOX, searches for mail from a specific (redacted) sender,
# fetches only the BODY[TEXT] section of each hit and prints the parsed
# message. Original inline comments are Chinese ("login", "select inbox",
# "filter mail from a given sender", "loop over the messages", "get the
# TEXT part of the mail"). NOTE(review): the local `type` shadows the
# builtin — rename to `typ` in a behavior change pass.
#!/usr/env/python # -*- coding: utf-8 -*- # # 筛选邮箱中指定的邮件并获取邮件正文 # # Author: Dongdong Tian @ USTC # Date: 2016-07-28 # import email from imaplib import IMAP4 host = 'mail.ustc.edu.cn' port = '143' username = '******' password = '******' with IMAP4(host, port) as M: # 登录 M.login(username, password) # 选择收件箱 M.select('INBOX') # 筛选特定发件人的邮件 type, data = M.search(None, '(FROM "*****@*****.**")') # 对邮件做循环 for num in data[0].split(): # 获取邮件中的TEXT部分 type, data = M.fetch(num, "(BODY[TEXT])") msg = email.message_from_bytes(data[0][1]) print(msg)
# Dominions turn-watcher: scans unseen mail from TURN_EMAIL, parses each
# decoded Subject via extract_from_subject() to detect new game turns,
# records the latest turn number per game in the last-turn file, saves
# every attachment into the game's save directory (plus a turn-numbered
# backup copy when BACKUP_TURNS is set), pops a messagebox per new turn,
# launches the game, and optionally uploads the finished turn.
# NOTE(review): the stale-turn comparison is commented out, so older turn
# emails would be re-processed; attachment files are written with plain
# open/close rather than `with`.
def read_mail(): lastTurns = load_last_turn_file() mail = imaplib.IMAP4_SSL(SMTP_SERVER) mail.login(FROM_EMAIL, FROM_PWD) mail.select('inbox') responseType, returnData = mail.search(None, 'FROM', TURN_EMAIL, '(UNSEEN)') mailIDs = returnData[0].split() # print(len(mailIDs), "new emails received") attPath = None gameName = None newTurnFoundGamenames = [] turnNumber = -1 if len(mailIDs) > 0: for mailID in mailIDs: responseType, mailParts = mail.fetch(mailID, FETCH_PROTOCOL) msg = email.message_from_bytes(mailParts[0][1]) subject = email.header.decode_header(msg["Subject"])[0][0] if isinstance(subject, bytes): subject = subject.decode() needTurn, gameName, turnNumber = extract_from_subject(subject) if needTurn: if gameName in lastTurns: # check if we look at old turnfiles # if int(lastTurns[gameName]) >= int(turnNumber): # continue # else: lastTurns[gameName] = turnNumber else: # if int(turnNumber) == 1: # this just started lastTurns[gameName] = turnNumber for part in msg.walk(): if part.get_content_maintype() == 'multipart': continue if part.get('Content-Disposition') is None: continue filename = part.get_filename() attPath = os.path.join(get_or_create_save_game_path(gameName), filename) fp = open(attPath, 'wb') fp.write(part.get_payload(decode=True)) fp.close() if BACKUP_TURNS: attPath = os.path.join(get_or_create_save_game_path(gameName), "{}_{}".format(turnNumber, filename)) fp = open(attPath, 'wb') fp.write(part.get_payload(decode=True)) fp.close() newTurnFoundGamenames.append((gameName, turnNumber)) mail.logout() if len(newTurnFoundGamenames) > 0: for gameName, turnNumber in newTurnFoundGamenames: messagebox.showinfo(title="New turn", message="New turn {} received for {}".format(turnNumber, gameName)) start_dominions(gameName) if ask_for_upload(): upload_turn(gameName, lastTurns[gameName]) else: messagebox.showinfo(title="No new turns", message="No new turn received") # print("No new turn received") save_last_turn_file(lastTurns)
FROM_EMAIL = login_info[0] FROM_PWD = login_info[1] SMTP_SERVER = "imap.gmail.com" SMTP_PORT = 993 mail = imaplib.IMAP4_SSL(SMTP_SERVER) mail.login(FROM_EMAIL, FROM_PWD) mail.select('inbox') type, data = mail.search(None, 'ALL') mail_ids = data[0] id_list = mail_ids.split() first_email_id = int(id_list[0]) latest_email_id = int(id_list[-1]) print("done") type, data = mail.fetch(str(latest_email_id), '(RFC822)') email_content = data[0][1] msg = email.message_from_bytes(email_content) # this needs to be corrected in your case maintype = msg.get_content_maintype() print(msg.get_payload()) # print(msg) emailDate = msg["Date"] emailSubject = msg["Subject"] emailBody = msg["Message"] # print(emailDate) # print(emailSubject) # print(emailBody) except Exception as ex: print(ex) print("Unexpected error:", sys.exc_info()[0])
# Polls an IMAP folder (up to 20 attempts, 10 s apart) for a recent Mint
# verification email. Each attempt inspects at most the 3 newest
# messages, requires a matching (redacted) From address and a subject
# containing 'Your Mint Account', rejects mail older than 180 s, and
# extracts the trailing 6-digit code with a regex over str(msg). When
# `delete` is set, the matched message is flagged \Deleted and expunged
# before logout. Returns the code, or '' on login/select/search failure.
# NOTE(review): the redacted From pattern '*****@*****.**' would not
# compile as a regex as written; sys.exit(1) on a fetch error is harsh
# for a helper; the stray semicolons are non-idiomatic.
def get_email_code(imap_account, imap_password, imap_server, imap_folder, debug=0, delete=1): code = None M = imaplib.IMAP4_SSL(imap_server) try: rv, data = M.login(imap_account, imap_password) except imaplib.IMAP4.error: print ("ERROR: email login failed") return ''; code = '' for c in range(20): time.sleep(10) rv, data = M.select(imap_folder) if rv != 'OK': print("ERROR: Unable to open mailbox ", rv) return ''; rv, data = M.search(None, "ALL") if rv != 'OK': print("ERROR: Email search failed") return ''; count = 0; for num in data[0].split()[::-1]: count = count + 1; if count > 3: break rv, data = M.fetch(num, '(RFC822)') if rv != 'OK': print("ERROR: ERROR getting message", num) sys.exit(1) msg = email.message_from_bytes(data[0][1]) x = email.header.make_header(email.header.decode_header(msg['Subject'])) subject = str(x) if debug: print("DEBUG: SUBJECT:", subject) x = email.header.make_header(email.header.decode_header(msg['From'])) frm = str(x) if debug: print("DEBUG: FROM:", frm) if not re.search('*****@*****.**', frm, re.IGNORECASE): continue if not re.search('Your Mint Account', subject, re.IGNORECASE): continue date_tuple = email.utils.parsedate_tz(msg['Date']) if date_tuple: local_date = datetime.fromtimestamp(email.utils.mktime_tz(date_tuple)) else: print("ERROR: FAIL0") diff = datetime.now() - local_date if debug: print("DEBUG: AGE:", diff.seconds) if diff.seconds > 180: continue if debug: print("DEBUG: EMAIL HEADER OK") body = str(msg) p = re.search('Verification code:<.*?(\d\d\d\d\d\d)$', body, re.S|re.M) if p: code = p.group(1) else: print("FAIL1") if debug: print("DEBUG: CODE FROM EMAIL:", code) if code != '': break if debug: print("DEBUG: CODE FROM EMAIL 2:", code) sys.stdout.flush() if code != '': if debug: print("DEBUG: CODE FROM EMAIL 3:", code) sys.stdout.flush() if delete > 0 and count > 0: M.store(num, '+FLAGS', '\\Deleted') if delete > 0: M.expunge() break M.logout() return code
r"Tipo de contrato: ([a-zA-Z0-9]+.+)") bodyexp = re.findall(exp2, body, re.MULTILINE) bodypos0 = (bodyexp[0]) #print(bodypos0) return bodyexp for i in range(mensaje, mensaje - N, -1): #print(f"vamos por el mensaje {i}") try: res, mensaje = imap.fetch(str(i), "(RFC822)") except: break for repuesta in mensaje: if isinstance(repuesta, tuple): mensaje = email.message_from_bytes(repuesta[1]) subject = decode_header(mensaje["Subject"])[0][0] if isinstance(subject, bytes): subject = subject.decode() from_ = mensaje.get("From") #print("Subject:", subject) #print("From: ", from_) if mensaje.is_multipart(): for part in mensaje.walk(): content_type = part.get_content_type() content_disposition = str(part.get("Content-Disposition")) try: body = part.get_payload(decode=True).decode() except: pass
def get_mailbox_content(self):
    """Parse the messages buffered in self.stream.

    Messages are separated by a 79-dash divider line; empty fragments
    are skipped.
    """
    divider = str('\n' + ('-' * 79) + '\n')
    raw_messages = self.stream.getvalue().split(divider)
    parsed = []
    for raw in raw_messages:
        if raw:
            parsed.append(message_from_bytes(force_bytes(raw)))
    return parsed
def parser(self, s, *args, **kw):
    """Parse the text *s* into an email Message.

    Extra positional/keyword arguments are forwarded to
    email.message_from_bytes.
    """
    raw = s.encode()
    return email.message_from_bytes(raw, *args, **kw)
def extract_last_email(self, messages: list):
    """Parse the newest raw IMAP fetch result in *messages* and return its body.

    Thin convenience wrapper around get_email_body() used by
    sel_crawler.py; kept so the caller stays a one-liner.
    """
    raw_bytes = messages[-1][0][1]
    parsed = email.message_from_bytes(raw_bytes)
    return get_email_body(parsed)
# Builds a {index: parsed Message} map of the whole POP3 mailbox:
# verified_result() unwraps each server reply; retr() returns the message
# as a list of lines, re-joined with b"\n" before parsing. POP3 message
# numbers are 1-based, hence `i + 1`.
# NOTE(review): closes over `self` despite taking no parameters —
# presumably a nested helper defined inside a method.
def _fetch_mails_pop3() -> MailMessages: return { i: email.message_from_bytes(b"\n".join( verified_result(self._connection.retr(i + 1)))) for i in range(len(verified_result(self._connection.list()))) }
f'{row[0].strftime("%d.%m")} в {row[1]}: ' f'{subject_types.get(row[2]) if subject_types.get(row[2]) else row[2]}' # сокращает тип пары f' по "{subject_name}" {row[4]}') if __name__ == '__main__': try: with IMAP4_SSL(f'imap.{SERVER}') as imap: imap.login(EMAIL, PASSWORD) print('Авторизация успешна, поиск...\n') imap.select("inbox") id_list = imap.search(None, 'ALL')[1][0].split()[::-1][:count] for email_id in id_list: # берем по письму, начиная с самого последнего data = imap.fetch(email_id, "(RFC822)")[1][0][1] msg = email.message_from_bytes(data) if msg['Return-path'] == '<*****@*****.**>' and \ decode_header(msg['Subject'])[0][0].decode('utf-8') == 'ИСУ ИТМО - Дистанционное обучение': payload = msg.get_payload()[1].get_payload( ) # письмо в сыром виде body_no_breaks = re.sub(r'^\s+|\n|\r|\s+$', '', payload) # удаляем переносы строк letter_details = decompose_letter(body_no_breaks) if letter_details['Дата'] in dates: # если дата не сегодняшняя, либо сегодняшняя, но # между текущим временем и временем пары менее трех часов try: lesson_time = datetime.strptime( letter_details['Время'], '%H:%M') except ValueError:
# GUI-driven gift-card extractor. For every configured "Email*" section
# with imap_active it connects over IMAP or IMAP4_SSL, searches the last
# `days` days for mail from any enabled extractor's sender addresses
# (building a nested OR FROM query), fetches all matches in one bulk
# fetch — falling back to one-at-a-time on IMAP4.error — parses each
# message, and asks the matching extractor for the gift-card URL(s).
# It then drives a Chrome webdriver through each URL, retrying timed-out
# page loads, completing per-card challenges, and collecting card codes
# (with optional screenshots) before cleanup.
# NOTE(review): the commented line uses `extractor.subject() is not ""`,
# an identity comparison where equality was meant; the bulk-fetch path
# drops every other element of `data` because IMAP interleaves payload
# tuples with closing parens.
def extract_real(self): config = self._settings extractor = None self.update_progress("Initializing...") cards = {} urls = [] e = [extractors_list[i] for i, c in enumerate(self.checkboxes) if c.get()] if len(e) == 0: self.update_progress('No sources selected!') self.extraction_cleanup() emails = [i for e_list in [x.email() for x in e] for i in e_list] days = int(config.get('Settings', 'days')) self.browser = None for section in (e for e in self._settings.sections() if e.startswith('Email')): if config.get(section, 'imap_active') == "True": imap_ssl = config.get(section, 'imap_ssl') == "True" imap_host = config.get(section, 'imap_host') imap_port = int(config.get(section, 'imap_port')) imap_username = config.get(section, 'imap_username') imap_password = config.get(section, 'imap_password') phonenum = config.get(section, 'phonenum') self.update_progress("Connecting to {}...".format(imap_username)) # Connect to the server if imap_ssl: mailbox = IMAP4_SSL(host=imap_host, port=imap_port) else: mailbox = IMAP4(host=imap_host, port=imap_port) # Log in and select the configured folder try: mailbox.login(imap_username, imap_password) except IMAP4.error as e: self.update_progress('Failed to login to {}: {}'.format(imap_username, e)) continue mailbox.select("INBOX") since = (date.today() - timedelta(days - 1)).strftime("%d-%b-%Y") from_list = '(OR ' * (len(emails) - 1) + '(FROM "'+emails[0]+'") ' +\
 ''.join(['(FROM "'+em+'")) ' for em in emails[1:]])[0:-1] # subject = ' HEADER Subject "'+extractor.subject()+'" ' if extractor.subject() is not "" else " " space = ' ' if len(emails) > 1 else '' search = '({}{}SINCE "{}")'.format(from_list, space, since) # status, messages = mailbox.search(None, '(OR (OR (FROM "*****@*****.**") (FROM "*****@*****.**")) (FROM "*****@*****.**"))') # search = '(FROM "*****@*****.**") SINCE "23-Nov-2018"' status, messages = mailbox.search(None, search) if status == "OK": # Convert the result list to an array of message IDs messages = 
[m.decode('ascii') for m in messages[0].split()] if len(messages) == 0: continue self.update_progress("Fetching messages from {}...".format(imap_username)) data = [] try: status, data = mailbox.fetch(','.join(messages), '(RFC822)') # remove every other element of list, extract messages data = [email.message_from_bytes(i[1]) for index, i in enumerate(data) if (index + 1) % 2 != 0] except IMAP4.error: # Can't fetch all messages at once, do them one at a time for msg_id in messages: self.update_progress("{}: Fetching message id {}...".format(imap_username, msg_id)) # Fetch it from the server status, m = mailbox.fetch(msg_id, '(RFC822)') if status == "OK": data.append(email.message_from_bytes(m[0][1])) if status == "OK": for idx, msg in enumerate(data): # Get To: and From: addresses to_address = email.utils.parseaddr(msg.get("To", imap_username))[1] from_address = email.utils.parseaddr(msg.get("From"))[1] # Get extractor extractor = [ext for ext in extractors_list if from_address in ext.email()][0] # Get the HTML body payload msg_html = extractor.fetch_payload(msg) if msg_html is None: continue # Save the email timestamp datetime_received = datetime.fromtimestamp( email.utils.mktime_tz(email.utils.parsedate_tz(msg.get('date')))) # Parse the message msg_parsed = BeautifulSoup(msg_html, 'html.parser') # Find the "View My Code" link url = extractor.fetch_url(msg_parsed, self.browser, imap_username) if url is not None: if isinstance(url, list): for u in url: urls.append([messages[idx], extractor, datetime_received, u, imap_username, to_address, phonenum]) else: urls.append([messages[idx], extractor, datetime_received, url, imap_username, to_address, phonenum]) if len(urls) < 1: self.update_progress('No cards to extract!') self.extraction_cleanup() if self.browser is None: self.update_progress("Launching ChromeDriver...") chrome_options = webdriver.ChromeOptions() if config.get('Settings', 'hide_chrome_window') == 'True': 
chrome_options.add_argument("--window-position=-10000,0") try: profile = config.get('Settings', 'profile') chrome_options.add_argument('--user-data-dir={}'.format(profile)) except NoOptionError: pass self.browser = custom_webdriver.CustomWebDriver(config.get('Settings', 'chromedriver_path'), chrome_options=chrome_options) for msg_id, extractor, datetime_received, url, imap_username, to_address, phonenum in urls: self.update_progress("{}: Getting gift card from message id: {}".format(imap_username, msg_id)) while True: # keep retrying to load the page if it's timing out. # TODO add cancel option while True: try: self.browser.get('about:blank') self.browser.get(url) except TimeoutException: self.update_progress('Page load timed out. Retrying...') continue # if page load times out, retry... if 'ERR_TIMED_OUT' in self.browser.page_source or 'com.bhn.general.service.error' in self.browser.page_source: self.update_progress('Page load timed out. Retrying...') time.sleep(1) continue break # challenege for various cards extractor.complete_challenge(self.browser, to_address, phonenum) card = extractor.fetch_codes(self.browser) if card is None: break if card['card_store'] not in cards: cards[card['card_store']] = [] if card['card_code'] != '': break if card is not None: card['datetime_received'] = str(datetime_received) card['url'] = url cards[card['card_store']].append(card) if config.get('Settings', 'screenshots'): self.save_screenshot(self.browser, card['card_code']) # if self.ids.prints.active: # browser.execute_script('window.print()') extractor.delay() # update output window for each new card self.output_cards(cards) self.browser.close() self.browser = None self.extraction_cleanup()
def get_email_by_id(email_id):
    """Fetch one message from the module-level `mail` IMAP connection and parse it.

    The IMAP fetch response nests the raw RFC822 payload at data[0][1].
    """
    _, data = mail.fetch(email_id, "(RFC822)")
    raw_message = data[0][1]
    return email.message_from_bytes(raw_message)
def get_full(M, num):
    """Download the complete RFC822 source of message *num* from IMAP connection *M* and return it parsed."""
    _, response = M.fetch(num, '(RFC822)')
    return email.message_from_bytes(response[0][1])
def getEmailData():
    """Load the newest TV-Planer e-mail via IMAP, parse its HTML schedule and
    create series markers in the local database.

    Returns a dict mapping fsID -> list of transmission lists
    ([seriesname, channel, start_unix, end_unix, season, episode, title, '0', url]),
    or None on any configuration, connection, fetch or parse error.
    """

    # extract the first 'text/html' part of a multipart message (None otherwise)
    def get_html(email_message_instance):
        maintype = email_message_instance.get_content_maintype()
        if maintype == 'multipart':
            for part in email_message_instance.get_payload():
                if part.get_content_type() == 'text/html':
                    return part.get_payload()

    print("[SerienRecorder] Loading TV-Planer e-mail")
    SRLogger.writeLog("\n---------' Lade TV-Planer E-Mail '---------\n", True)

    # --- validate IMAP configuration; bail out early on any missing value ---
    if len(config.plugins.serienRec.imap_server.value) == 0:
        SRLogger.writeLog("TV-Planer: imap_server nicht gesetzt", True)
        return None
    if len(config.plugins.serienRec.imap_login_hidden.value) == 0:
        SRLogger.writeLog("TV-Planer: imap_login nicht gesetzt", True)
        return None
    if len(config.plugins.serienRec.imap_password_hidden.value) == 0:
        SRLogger.writeLog("TV-Planer: imap_password nicht gesetzt", True)
        return None
    if len(config.plugins.serienRec.imap_mailbox.value) == 0:
        SRLogger.writeLog("TV-Planer: imap_mailbox nicht gesetzt", True)
        return None
    if len(config.plugins.serienRec.imap_mail_subject.value) == 0:
        SRLogger.writeLog("TV-Planer: imap_mail_subject nicht gesetzt", True)
        return None
    # NOTE(review): this chained comparison can never be True (a value cannot be
    # both < 1 and > 100); it was probably meant to clamp out-of-range values,
    # i.e. "value < 1 or value > 100" — confirm and fix separately.
    if 1 > config.plugins.serienRec.imap_mail_age.value > 100:
        config.plugins.serienRec.imap_mail_age.value = 1

    # --- connect to the IMAP server (optionally via SSL) ---
    try:
        if config.plugins.serienRec.imap_server_ssl.value:
            mail = imaplib.IMAP4_SSL(
                config.plugins.serienRec.imap_server.value,
                config.plugins.serienRec.imap_server_port.value)
        else:
            mail = imaplib.IMAP4(
                config.plugins.serienRec.imap_server.value,
                config.plugins.serienRec.imap_server_port.value)
    except (imaplib.IMAP4.abort, imaplib.IMAP4.error, imaplib.IMAP4.readonly) as e:
        SRLogger.writeLog(
            "TV-Planer: Verbindung zum E-Mail Server fehlgeschlagen [%s]" % str(e), True)
        return None
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit.
        SRLogger.writeLog(
            "TV-Planer: Verbindung zum E-Mail Server fehlgeschlagen [unbekannter Fehler]",
            True)
        return None

    # --- login; credentials are stored encrypted, presumably keyed on the
    # box's eth0 MAC address (see decrypt/STBHelpers.getmac) — confirm ---
    try:
        mail.login(
            decrypt(STBHelpers.getmac("eth0"),
                    config.plugins.serienRec.imap_login_hidden.value),
            decrypt(STBHelpers.getmac("eth0"),
                    config.plugins.serienRec.imap_password_hidden.value))
        print("[SerienRecorder] IMAP login ok")
    except imaplib.IMAP4.error as e:
        SRLogger.writeLog(
            "TV-Planer: Anmeldung am Server fehlgeschlagen [%s]" % str(e), True)
        print("[SerienRecorder] IMAP login failed")
        return None

    # --- select the configured mailbox (read/write) ---
    try:
        result, data = mail.select(config.plugins.serienRec.imap_mailbox.value, False)
        if result != 'OK':
            # NOTE(review): this writeLog call is missing the trailing True
            # argument used everywhere else in this function — confirm intent.
            SRLogger.writeLog(
                "TV-Planer: Mailbox ' %s ' nicht gefunden [%s]" %
                (config.plugins.serienRec.imap_mailbox.value, str(result)))
            mail.logout()
            return None
    except imaplib.IMAP4.error as e:
        SRLogger.writeLog(
            "TV-Planer: Mailbox ' %s ' nicht gefunden [%s]" %
            (config.plugins.serienRec.imap_mailbox.value, str(e)), True)
        mail.logout()
        return None

    # --- search by age + subject (search string built by helper) ---
    searchstr = getMailSearchString(
        config.plugins.serienRec.imap_mail_age.value,
        config.plugins.serienRec.imap_mail_subject.value)
    try:
        result, data = mail.uid('search', None, searchstr)
        if result != 'OK':
            SRLogger.writeLog(
                "TV-Planer: Fehler bei der Suche nach TV-Planer E-Mails", True)
            SRLogger.writeLog("TV-Planer: %s" % data, True)
            mail.logout()
            return None
    except imaplib.IMAP4.error as e:
        SRLogger.writeLog(
            "TV-Planer: Keine TV-Planer Nachricht in den letzten %s Tagen [%s]" %
            (str(config.plugins.serienRec.imap_mail_age.value), str(e)), True)
        SRLogger.writeLog("TV-Planer: %s" % searchstr, True)
        mail.logout()
        return None
    if len(data[0]) == 0:
        SRLogger.writeLog(
            "TV-Planer: Keine TV-Planer Nachricht in den letzten %s Tagen" %
            str(config.plugins.serienRec.imap_mail_age.value), True)
        SRLogger.writeLog("TV-Planer: %s" % searchstr, True)
        mail.logout()
        return None

    # get the latest email (search result UIDs are in ascending order)
    latest_email_uid = data[0].split()[-1]
    # fetch the email body (RFC822) for the given UID
    try:
        result, data = mail.uid('fetch', latest_email_uid, '(RFC822)')
    except Exception as e:
        # NOTE(review): this error path returns without mail.logout() —
        # the connection is left open; confirm whether that is intended.
        SRLogger.writeLog(
            "TV-Planer: Laden der E-Mail fehlgeschlagen [%s]" % str(e), True)
        return None
    mail.logout()

    # extract email message including headers and alternate payloads
    if PY3:
        email_message = email.message_from_bytes(data[0][1])
    else:
        email_message = email.message_from_string(data[0][1])
    # len() of a Message counts its headers; an empty message has none
    if len(email_message) == 0:
        SRLogger.writeLog("TV-Planer: Leere E-Mail", True)
        return None

    # get html of wunschliste
    SRLogger.writeLog("Extrahiere HTML Part der TV-Planer E-Mail.", True)
    html = get_html(email_message)
    if html is None or len(html) == 0:
        SRLogger.writeLog("TV-Planer: Leeres HTML", True)
        return None

    # optionally keep a copy of the raw HTML for troubleshooting
    if config.plugins.serienRec.tvplaner.value and config.plugins.serienRec.tvplaner_backupHTML.value:
        try:
            SRLogger.writeLog("Erstelle Backup der TV-Planer E-Mail.\n")
            htmlFilePath = os.path.join(
                config.plugins.serienRec.LogFilePath.value,
                SERIENRECORDER_TVPLANER_HTML_FILENAME)
            writeTVPlanerHTML = open(htmlFilePath, "w")
            writeTVPlanerHTML.write(html)
            writeTVPlanerHTML.close()
        except:
            SRLogger.writeLog(
                "TV-Planer: HTML konnte nicht für die Fehlersuche gespeichert werden.",
                True)

    # make one line and convert characters:
    # undo quoted-printable soft line breaks and '=3D' escapes before parsing
    html = html.replace('=\r\n', '').replace('=\n', '').replace('=\r', '').replace(
        '\n', '').replace('\r', '')
    html = html.replace('=3D', '=')

    try:
        # read a child node's text content selected by its 'title' attribute
        def getTextContentByTitle(node, titleValue, default):
            titleNodes = node.childNodes.getElementsByAttr('title', titleValue)
            if titleNodes:
                return toStr(titleNodes[0].textContent)
            else:
                return default

        # episode title is usually an <a>; movies have no episode link,
        # only a <span> with country/year
        def getEpisodeTitle(node):
            childNodes = node.childNodes.getElementsByTagName('a')
            if childNodes:
                return toStr(childNodes[0].textContent)
            else:
                # Movies do not have a link to the episode => only country, year
                childNodes = node.childNodes.getElementsByTagName('span')
                if childNodes:
                    return toStr(childNodes[0].textContent)
                else:
                    return ''

        from . import AdvancedHTMLParser
        SRLogger.writeLog('Starte HTML Parsing der TV-Planer E-Mail.', True)
        print("[SerienRecorder] TV-Planer: Start HTML parsing")
        parser = AdvancedHTMLParser.AdvancedHTMLParser()
        if PY3:
            html = toStr(html)
        html = parser.unescape(html)
        parser.parseStr(html)

        # Get tables from HTML
        tables = parser.getElementsByTagName('table')

        # Initialize regular expressions
        date_regexp = re.compile(
            '.*TV-Planer.*?den ([0-3][0-9]\.[0-1][0-9]\.20[0-9][0-9])\s.(?:\(ab (.*?) Uhr\))?'
        )
        url_title_regexp = re.compile(
            '.*<a href="([^\?]+)(?:\?.*)?".*><strong.*>(.*)</strong>')
        endtime_regexp = re.compile('.*bis:\s(.*)\sUhr.*')

        # Get date and time of TV-Planer from the header line of table[1]
        header = toStr(tables[1].getAllChildNodes().getElementsByTagName('div')
                       [0].textContent)
        planerDateTime = date_regexp.findall(header)[0]
        print("[SerienRecorder] TV-Planer date/time: %s" % str(planerDateTime))

        # Get transmissions
        transmissions = []
        transmissionTable = tables[1].getAllChildNodes().getElementsByTagName(
            'table')[0]
        transmissionRows = transmissionTable.childNodes
        for transmissionRow in transmissionRows:
            transmission = []
            # rows with a 'style' attribute are layout/separator rows — skip them
            if not transmissionRow.hasAttribute('style'):
                transmissionColumns = transmissionRow.childNodes
                # Each transmission row has three columns
                # [0]: Start time
                starttime = toStr(transmissionColumns[0].textContent)
                if starttime != 'Anzeige':
                    transmission.append(starttime.replace(' Uhr', ''))
                    # [1]: URL, Title, Season, Episode, Info
                    transmissionColumn = transmissionColumns[1]
                    # Season, Episode, Title, Episode info, End time
                    episodeInfo = ['0', '00', '', '', '0.00']
                    if transmissionColumn.firstChild:
                        # First child is always URL + Title
                        url_title = url_title_regexp.findall(
                            toStr(transmissionColumn.firstChild.toHTML()))[0]
                        transmission.extend(url_title)
                    if transmissionColumn.lastChild:
                        # Last element => End time (it has to be filled with a time because later on the time will be splitted)
                        endtime = endtime_regexp.findall(
                            toStr(transmissionColumn.lastChild.toHTML()))
                        if endtime:
                            episodeInfo[4] = endtime[0]
                    divPartIndex = 0
                    for transmissionPart in transmissionColumn.childNodes:
                        if transmissionPart is transmissionColumn.lastChild:
                            # Skip part if it the "last" part
                            continue
                        if transmissionPart.tagName == 'div' and divPartIndex == 0:
                            # First div element => Season / Episode / Title / e.g. NEU
                            episodeInfo[0] = getTextContentByTitle(
                                transmissionPart, 'Staffel', '0')
                            episodeInfo[1] = getTextContentByTitle(
                                transmissionPart, 'Episode', '00')
                            episodeInfo[2] = getEpisodeTitle(transmissionPart)
                            divPartIndex += 1
                        elif transmissionPart.tagName == 'div' and divPartIndex == 1:
                            # Second div element => Episode info
                            episodeInfo[3] = toStr(
                                transmissionPart.textContent)
                    transmission.extend(episodeInfo)
                    # [2] Channel
                    transmission.append(
                        toStr(transmissionColumns[2].textContent))
                    #print("[SerienRecorder] " + transmission)
                    transmissions.append(transmission)
    except Exception as e:
        print("[SerienRecorder] TV-Planer: Break HTML parsing [%s]" % str(e))
        SRLogger.writeLog("TV-Planer: HTML Parsing abgebrochen [%s]" % str(e),
                          True)
        return None

    # prepare transmissions
    # [ ( seriesName, channel, start, end, season, episode, title, '0' ) ]
    # calculate start time and end time of list in E-Mail
    missingTime = False
    if len(planerDateTime) != 2:
        SRLogger.writeLog("TV-Planer: Falsches Datumsformat", True)
        return None
    (day, month, year) = planerDateTime[0].split('.')
    if len(planerDateTime[1]) == 0:
        # header carried no time — fall back to the first transmission's start
        if transmissions:
            # Get time of first transmission
            (hour, minute) = transmissions[0][0].split(':')
        else:
            missingTime = True
            (hour, minute) = ('00', '00')
    else:
        (hour, minute) = planerDateTime[1].split(':')
    liststarttime_unix = TimeHelpers.getRealUnixTime(minute, hour, day, month,
                                                     year)

    # generate dictionary with final transmissions
    SRLogger.writeLog(
        "Ab dem %s %s Uhr wurden die folgenden %d Sendungen gefunden:\n" %
        (planerDateTime[0], planerDateTime[1], len(transmissions)))
    print(
        "[SerienRecorder] TV-Planer: Found %d number of transmissions from %s %s o'clock:"
        % (len(transmissions), planerDateTime[0], planerDateTime[1]))
    if missingTime:
        SRLogger.writeLog(
            "In der Kopfzeile der TV-Planer E-Mail konnte keine Uhrzeit gefunden werden, bitte die angelegten Timer kontrollieren!\n"
        )

    transmissiondict = dict()
    import quopri
    # each parsed row: start, url, series, season, episode, title, info, end, channel
    for starttime, url, seriesname, season, episode, title, description, endtime, channel in transmissions:
        try:
            if url.startswith('https://www.wunschliste.de/spielfilm'):
                if not config.plugins.serienRec.tvplaner_movies.value:
                    SRLogger.writeLog(
                        "' %s ' - Filmaufzeichnung ist deaktiviert" %
                        seriesname, True)
                    print(
                        "[SerienRecorder] TV-Planer: ' %s ' - Movie recording is disabled"
                        % seriesname)
                    continue
                transmissiontype = '[ Film ]'
            elif url.startswith('https://www.wunschliste.de/serie'):
                if not config.plugins.serienRec.tvplaner_series.value:
                    SRLogger.writeLog(
                        "' %s ' - Serienaufzeichnung ist deaktiviert" %
                        seriesname, True)
                    print(
                        "[SerienRecorder] TV-Planer: ' %s ' - Series recording is disabled"
                        % seriesname)
                    continue
                transmissiontype = '[ Serie ]'
            else:
                SRLogger.writeLog(
                    "' %s ' - Ungültige URL [%s]" % (seriesname, str(url)),
                    True)
                # NOTE(review): '%' is applied to seriesname only while the format
                # string has two placeholders, and str(url) becomes a second print
                # argument — this raises TypeError (swallowed by the except below).
                # Likely meant: % (seriesname, str(url)).
                print(
                    "[SerienRecorder] TV-Planer: ' %s ' - Invalid URL [%s]" %
                    seriesname, str(url))
                continue

            # get fernsehserie ID from URL
            fsID = url[str.rindex(url, '/') + 1:]
            # series
            transmission = [seriesname]
            # channel — strip country/pay-TV suffixes
            channel = channel.replace(' (Pay-TV)', '').replace(
                ' (Schweiz)', '').replace(' (GB)', '').replace(' (Österreich)', '').replace(
                    ' (USA)', '').replace(' (RP)', '').replace(' (F)',
                                                               '').strip()
            transmission += [channel]
            # start time; a start before the list start is assumed to be next day
            (hour, minute) = starttime.split(':')
            transmissionstart_unix = TimeHelpers.getRealUnixTime(
                minute, hour, day, month, year)
            if transmissionstart_unix < liststarttime_unix:
                transmissionstart_unix = TimeHelpers.getRealUnixTimeWithDayOffset(
                    minute, hour, day, month, year, 1)
            transmission += [transmissionstart_unix]
            # end time (formatted with '.' separator); same day-rollover handling
            (hour, minute) = endtime.split('.')
            transmissionend_unix = TimeHelpers.getRealUnixTime(
                minute, hour, day, month, year)
            if transmissionend_unix < transmissionstart_unix:
                transmissionend_unix = TimeHelpers.getRealUnixTimeWithDayOffset(
                    minute, hour, day, month, year, 1)
            transmission += [transmissionend_unix]
            # season
            if season == '':
                season = '0'
            transmission += [season]
            # episode
            if episode == '':
                episode = '00'
            transmission += [episode]
            # title — decode quoted-printable escapes
            transmission += [toStr(quopri.decodestring(toBinary(title)))]
            # last
            transmission += ['0']
            # url
            transmission += [url]
            # store in dictionary: transmissiondict[fsID] = [ [transmission 0], [transmission 1], ... ]
            if fsID in transmissiondict:
                transmissiondict[fsID] += [transmission]
            else:
                transmissiondict[fsID] = [transmission]
            log = "' %s - S%sE%s - %s ' - %s - %s - %s - %s" % (
                transmission[0], str(
                    transmission[4]).zfill(2), str(transmission[5]).zfill(2),
                transmission[6], transmission[1],
                time.strftime("%d.%m.%Y %H:%M",
                              time.localtime(int(transmissionstart_unix))),
                time.strftime(
                    "%d.%m.%Y %H:%M",
                    time.localtime(
                        int(transmissionend_unix))), transmissiontype)
            SRLogger.writeLog(log, True)
            print("[SerienRecorder] TV-Planer: %s" % log)
        except Exception as e:
            print(
                "[SerienRecorder] TV-Planer: Processing TV-Planer e-mail failed: [%s]"
                % str(e))
            SRLogger.writeLog(
                "TV-Planer Verarbeitung fehlgeschlagen! [%s]" % str(e), True)

    # Create marker
    SRLogger.writeLog("\n", True)
    from .SerienRecorder import serienRecDataBaseFilePath
    print("[SerienRecorder] TV-Planer: Create markers...")
    database = SRDatabase(serienRecDataBaseFilePath)
    for fsID in list(transmissiondict.keys()):
        print(
            "[SerienRecorder] TV-Planer: Check whether or not a marker exists for fsid: [%s]"
            % str(fsID))
        # marker isn't in database, create new marker
        # url stored in marker isn't the final one, it is corrected later
        url = transmissiondict[fsID][0][-1]
        seriesname = transmissiondict[fsID][0][0]
        marker_type = "Serien-Marker"
        try:
            boxID = None
            seriesInfo = ""
            if url.startswith('https://www.wunschliste.de/serie'):
                # map fsID to the series server's numeric id; use it as the URL
                seriesID = SeriesServer().getIDByFSID(fsID)
                if seriesID > 0:
                    url = str(seriesID)
                    data = SeriesServer().getSeriesNamesAndInfoByWLID(
                        [seriesID])
                    if data:
                        seriesInfo = data[0]['info']
                else:
                    url = None
                if config.plugins.serienRec.tvplaner_series_activeSTB.value:
                    boxID = config.plugins.serienRec.BoxID.value
            elif url.startswith('https://www.wunschliste.de/spielfilm'):
                marker_type = "Temporärer Serien-Marker"
                if config.plugins.serienRec.tvplaner_movies_activeSTB.value:
                    boxID = config.plugins.serienRec.BoxID.value
            else:
                url = None
            if url:
                if database.addMarker(
                        url, seriesname, seriesInfo, fsID, boxID, 1
                        if url.startswith('https://www.wunschliste.de/spielfilm')
                        else 0):
                    if len(seriesInfo) == 0:
                        SRLogger.writeLog(
                            "%s für ' %s ' wurde angelegt" %
                            (marker_type, seriesname), True)
                    else:
                        SRLogger.writeLog(
                            "%s für ' %s (%s) ' wurde angelegt" %
                            (marker_type, seriesname, seriesInfo), True)
                    print(
                        "[SerienRecorder] TV-Planer: %s created ' %s ' (%s)" %
                        (marker_type, seriesname, seriesInfo))
        except Exception as e:
            SRLogger.writeLog(
                "%s für ' %s ' konnte wegen eines Fehlers nicht angelegt werden [%s]"
                % (marker_type, seriesname, str(e)), True)
            print(
                "[SerienRecorder] TV-Planer: %s - %s could not been created [%s]"
                % (seriesname, marker_type, str(e)))
    return transmissiondict
zip.extract(finfo, path=extract_dir) except Exception: return [False, None] else: return [True, "{}{}".format(extract_dir, name)] if __name__ == "__main__": m = imaplib.IMAP4_SSL(server) m.login(user, pwd) items = get_messages() if len(items) != 0: for item in items: resp, data = m.fetch(item, '(RFC822)') mail_full = email.message_from_bytes(data[0][1]) if mail_full.is_multipart(): for part in mail_full.get_payload(): mail = mail_full.get_payload()[0].get_payload( decode=True).decode() else: mail = mail_full.get_payload(decode=True).decode() parse = parse_html(mail) mail_from = parseaddr(mail_full['From'])[1] mail_from = mail_full['X-Envelope-From'] mail_subj = str(make_header(decode_header(mail_full['Subject']))) if mail_from == sender: if parse[0]: download_res, name = download_file(parse[1]) if download_res: extract_res, txt_name = extract(name)