Example #1
0
def test_clear_attachment():
    """Strip every attachment from a three-attachment message, round-trip
    it through the generator, and verify no attachment survives."""
    expected_names = ('1' * 124 + '.png', 'green-_ball.png', 'redball.png')
    with open('test_messages/text_three_attachments.txt', 'r') as fh:
        html_parts, text_parts = [], []
        msg = message_from_file(fh)
        for part, name, cid in mime.get_attachment_parts(msg, text_parts, html_parts):
            assert name in expected_names
            assert cid is None
            mime.clear_attachment(part)
        assert not html_parts
        assert len(text_parts) == 1
    # Serialize the stripped message back to disk.
    with open('test_messages/message_write.txt', 'w+') as fh:
        Generator(fh).flatten(msg)
    # Re-parse: the generator must not report any attachment any more.
    with open('test_messages/message_write.txt', 'r') as fh:
        html_parts, text_parts = [], []
        msg = message_from_file(fh)
        for _ in mime.get_attachment_parts(msg, text_parts, html_parts):
            assert False
        assert len(html_parts) == 3
        assert len(text_parts) == 1
Example #2
0
File: test.py Project: jlebar/blog
 def test_date(self):
     """command_date() must stamp the file with the given date, replacing
     any pre-existing one, and accept the symbolic date aliases.

     Fix: the original wrote/read the file via bare open(...) calls and
     never closed the handles; context managers guarantee the writes are
     flushed before command_date() re-reads the file.
     """
     filename = os.path.join(self.tempdir, 'set_date.html')

     def write_file(content):
         with open(filename, 'w') as fh:
             fh.write(content)

     def read_message():
         with open(filename) as fh:
             return email.message_from_file(fh)

     # First test a message without any date defined
     def file_without_date():
         write_file('title: Some title\n\nSome content')
     file_without_date()
     command_date([filename, '2008-1-1'], None)
     message = read_message()
     self.assert_('date' in message)
     self.assertEqual(message['date'], str(datetime.date(2008, 1, 1)))

     # Then test a file which has already a date
     def file_with_date():
         write_file('title: Some title\ndate: 2008-12-31\n'
                    '\nSome content')
     file_with_date()
     command_date([filename, '2008-1-1'], None)
     message = read_message()
     self.assert_('date' in message)
     self.assertEqual(message['date'], str(datetime.date(2008, 1, 1)))

     # Test aliases
     for alias in ('now', 'today', 'tomorrow', 'next_day'):
         file_without_date()
         command_date([filename, alias], None)
         message = read_message()
         self.assert_('date' in message)
Example #3
0
def get_direct_emails(emails, addresses):
    """Return emails directly sent to addresses in given emails.

    Splits *emails* (paths to mail files) into a (direct, cced) pair of
    path lists: "direct" when one of *addresses* is the sole To
    recipient, "cced" when an address merely appears among the
    recipients.  Messages whose headers cannot be read are skipped.

    Fixes over the original: file handles are now closed via context
    managers, and the bare ``except:`` (which also swallowed
    KeyboardInterrupt/SystemExit) is narrowed to ``Exception``.
    """
    direct = []
    cced = []
    for msg_path in emails:
        # utf8 first; fall back to latin1 when decoding fails mid-parse.
        try:
            with open(msg_path, 'r', encoding='utf8') as msg_f:
                msg = email.message_from_file(msg_f)
        except UnicodeDecodeError:
            with open(msg_path, 'r', encoding='latin1') as msg_f:
                msg = email.message_from_file(msg_f)

        try:
            msg_to = get_header_field(msg, 'To')
            msg_cc = get_header_field(msg, 'Cc')
            recipients = msg_to + '\n' + msg_cc

            if any(e in recipients for e in addresses):
                # "Direct" only when To holds exactly one recipient.
                if any(e in msg_to for e in addresses) and len(re.split(r'[\n,]', msg_to)) == 1:
                    direct.append(msg_path)
                else:
                    cced.append(msg_path)
        except Exception:
            # Best-effort: unreadable/odd headers just skip this message.
            continue

    return (direct, cced)
Example #4
0
    def handle(self, *args, **options):
        """Management-command entry point: read one email (from the file
        named by the first positional argument or the ``infile`` option,
        else from stdin) and feed it to parse_mail().

        Exits 0 when parse_mail() returns a truthy result, 1 when it
        returns falsy; unexpected parser errors are logged with the raw
        mail attached instead of crashing the command.
        """
        # Positional argument wins over the --infile option.
        infile = args[0] if args else options['infile']

        if infile:
            logger.info('Parsing mail loaded by filename')
            if six.PY3:
                # Python 3: parse from bytes so the email package handles
                # transfer-encoding/charset decoding itself.
                with open(infile, 'rb') as file_:
                    mail = email.message_from_binary_file(file_)
            else:
                with open(infile) as file_:
                    mail = email.message_from_file(file_)
        else:
            logger.info('Parsing mail loaded from stdin')
            if six.PY3:
                mail = email.message_from_binary_file(sys.stdin.buffer)
            else:
                mail = email.message_from_file(sys.stdin)
        try:
            result = parse_mail(mail, options['list_id'])
            if result:
                sys.exit(0)
            logger.warning('Failed to parse mail')
            sys.exit(1)
        except Exception:
            # NOTE: sys.exit() raises SystemExit, which is not an
            # Exception subclass, so the exit paths above are not
            # swallowed here.  After logging we fall through, leaving the
            # command's exit status at 0.
            logger.exception('Error when parsing incoming email',
                             extra={'mail': mail.as_string()})
Example #5
0
    def testEmailParsing(self):
        """End-to-end threading test: create the initial mail, then a reply
        threaded via In-Reply-To/References, then a mail from an unknown
        sender, and finally a reply threaded by subject line only."""
        # create an initial "from" email
        message = email.message_from_file(
            open("./testdata/mail3.txt", "r"))
        name, mto = parseaddr(message['to'])
        name, mfrom = parseaddr(message['from'])
        # Pull the first text/plain leaf part out as the body.
        for part in message.walk():
            if part.get_content_maintype() == "multipart":
                continue # it's just a container
            if part.get_content_type() == "text/plain":
                body = part.get_payload(decode=True)
                break
        # Sender account is created on demand and can never log in.
        try:
            user = CustomUser.objects.get(
                email=mfrom)
        except CustomUser.DoesNotExist:
            user = CustomUser.objects.create(email=mfrom,
                                             username=mfrom)
            user.set_unusable_password()
            user.save()
        recipient, _ = CustomUser.objects.get_or_create(
            email=mto,
            username=mto)
        mail = Mail.objects.create(
            subject=message['subject'],
            mfrom=user,
            mto=recipient,
            message=body)

        # now we simulate a reply, and override a couple of relevant
        # headers so it threads onto the mail created above
        raw_email = email.message_from_file(
            open("./testdata/mail4.txt", "r"))
        new_mto = user.proxy_email
        raw_email.replace_header('in-reply-to', "<%s>" % mail.message_id)
        raw_email.replace_header('references', "<%s>" % mail.message_id)
        raw_email.replace_header('to', "%s <%s>" % (mail.message_id,
                                      new_mto))
        fp = StringIO(raw_email.as_string())
        response = make_response_from_file(fp)
        self.assertTrue(response)

        # now parse in an email that isn't a response to anything
        fp = open("./testdata/mail2.txt", "r")
        response = make_response_from_file(fp)
        self.assertEqual(response, EX_NOUSER)

        # and now try threading based on subject line as a fallback...
        raw_email = email.message_from_file(
            open("./testdata/mail2.txt", "r"))
        new_mto = user.proxy_email
        # Drop the threading headers so only the subject can match.
        del(raw_email['in-reply-to'])
        del(raw_email['references'])
        raw_email.replace_header('subject', "Re:RE: re:%s" % mail.subject)
        raw_email.replace_header('to', "%s <%s>" % (mail.message_id,
                                      new_mto))
        fp = StringIO(raw_email.as_string())
        response = make_response_from_file(fp)
        self.assertTrue(response)
Example #6
0
 def __init__(self, path):
     """Parse *path* — a filename or an already-open file-like object —
     and start with empty part/header/attachment collections."""
     if not hasattr(path, 'readlines'):
         with open(path, 'r') as source:
             self.msg = email.message_from_file(source)
     else:
         self.msg = email.message_from_file(path)
     self.parts = []
     self.headers = {}
     self.attachments = []
 def setUp(self):
     """Load the two sample messages and wrap them in UMessage fixtures."""
     if sys.version_info >= (3, 2):
         # Python 3: read the samples as utf8 text via io.open.
         import io

         def load(basename):
             return email.message_from_file(io.open(join(DATA, basename), encoding='utf8'))
     else:
         def load(basename):
             return email.message_from_file(open(join(DATA, basename)))
     self.umessage1 = UMessage(load('test1.msg'))
     self.umessage2 = UMessage(load('test2.msg'))
Example #8
0
File: test.py Project: dougt/blog
    def test_date_empty(self):
        """command_date() without an explicit date must stamp today's date,
        both when the file has no date yet and when it already has one.

        Fix: the original used bare open(...).write(...)/open(...) calls
        and leaked the handles; context managers guarantee writes are
        flushed before command_date() re-reads the file.
        """
        filename = os.path.join(self.tempdir, 'empty_date.html')

        def run_and_read(content):
            with open(filename, 'w') as fh:
                fh.write(content)
            command_date([filename])
            with open(filename) as fh:
                return email.message_from_file(fh)

        message = run_and_read('title: title\n\ncontent')
        self.assert_('date' in message)
        self.assert_(message['date'] >= str(datetime.date.today()))

        message = run_and_read('title: title\ndate: 2010-2-4\n\ncontent')
        self.assert_('date' in message)
        self.assert_(message['date'] >= str(datetime.date.today()))
Example #9
0
def main(args):
    """Entry point: parse one mail from stdin under a parse lock and hand
    it to parse_mail(); unexpected failures are logged and re-raised.

    NOTE(review): the *args* parameter is immediately shadowed by
    parser.parse_args() below, so the caller's value is ignored —
    options always come from sys.argv.
    """
    django.setup()
    logger = setup_error_handler()
    parser = argparse.ArgumentParser()
    parse_lock = None

    def list_logging_levels():
        """Give a summary of all available logging levels."""
        return sorted(list(VERBOSITY_LEVELS.keys()),
                      key=lambda x: VERBOSITY_LEVELS[x])

    parser.add_argument('--verbosity', choices=list_logging_levels(),
                        help='logging level', default='info')

    args = vars(parser.parse_args())

    logging.basicConfig(level=VERBOSITY_LEVELS[args['verbosity']])

    mail = message_from_file(sys.stdin)
    try:
        parse_lock = lock()
        return parse_mail(mail)
    except:
        # Bare except on purpose: log *any* failure (including
        # SystemExit/KeyboardInterrupt) with the raw mail attached, then
        # re-raise so the caller still sees the error.
        if logger:
            logger.exception('Error when parsing incoming email', extra={
                'mail': mail.as_string(),
            })
        raise
    finally:
        # parse_lock stays None when lock() itself failed; release() is
        # presumably None-safe — TODO confirm.
        release(parse_lock)
    def build_attachments(self):
        """Build email's attachment messages.

        Returns a list of MIME parts, one per attachment of the
        newsletter; the part type is guessed from the file name and each
        part carries a Content-Disposition header with the attachment's
        title.

        Fix: the file handle is now managed by a ``with`` block so it is
        closed even when a MIME constructor raises (the original only
        closed it on the success path).
        """
        attachments = []

        for attachment in self.newsletter.attachment_set.all():
            ctype, encoding = mimetypes.guess_type(attachment.file_attachment.path)

            if ctype is None or encoding is not None:
                # Unknown or compressed content: fall back to raw bytes.
                ctype = 'application/octet-stream'

            maintype, subtype = ctype.split('/', 1)

            with open(attachment.file_attachment.path, 'rb') as fd:
                if maintype == 'text':
                    message_attachment = MIMEText(fd.read(), _subtype=subtype)
                elif maintype == 'message':
                    message_attachment = message_from_file(fd)
                elif maintype == 'image':
                    message_attachment = MIMEImage(fd.read(), _subtype=subtype)
                elif maintype == 'audio':
                    message_attachment = MIMEAudio(fd.read(), _subtype=subtype)
                else:
                    message_attachment = MIMEBase(maintype, subtype)
                    message_attachment.set_payload(fd.read())
                    encode_base64(message_attachment)
            message_attachment.add_header('Content-Disposition', 'attachment',
                                          filename=attachment.title)
            attachments.append(message_attachment)

        return attachments
Example #11
0
    def test_file_sessions(self):
        """Make sure opening a connection creates a new file"""
        msg = EmailMessage('Subject', 'Content', '*****@*****.**', ['*****@*****.**'], headers={'From': '*****@*****.**'})
        connection = mail.get_connection()
        connection.send_messages([msg])

        # Exactly one file was written; re-read it as a MIME message and
        # check the headers round-tripped.
        self.assertEqual(len(os.listdir(self.tmp_dir)), 1)
        with open(os.path.join(self.tmp_dir, os.listdir(self.tmp_dir)[0])) as fp:
            message = email.message_from_file(fp)
        self.assertEqual(message.get_content_type(), 'text/plain')
        self.assertEqual(message.get('subject'), 'Subject')
        self.assertEqual(message.get('from'), '*****@*****.**')
        self.assertEqual(message.get('to'), '*****@*****.**')

        # A second connection gets a file of its own...
        connection2 = mail.get_connection()
        connection2.send_messages([msg])
        self.assertEqual(len(os.listdir(self.tmp_dir)), 2)

        # ...while reusing the first connection does not create a new file.
        connection.send_messages([msg])
        self.assertEqual(len(os.listdir(self.tmp_dir)), 2)

        # An explicitly opened connection keeps using one file across
        # repeated sends of the same message.
        msg.connection = mail.get_connection()
        self.assertTrue(connection.open())
        msg.send()
        self.assertEqual(len(os.listdir(self.tmp_dir)), 3)
        msg.send()
        self.assertEqual(len(os.listdir(self.tmp_dir)), 3)
Example #12
0
def page_for_message(mlist, year, month, day, message):
    """Render one archived mail (file path *message*) through the Message
    template; returns (rendered_page, template_data)."""
    with open(message, "r") as fh:
        mail = email.message_from_file(fh)

    # Seed the template data with the raw headers, e.g. 'MIME-Version',
    # 'Date', 'From', 'Message-Id', 'Subject', 'To', 'Content-Type', ...
    data = dict(mail)

    if mail.is_multipart():
        # First sub-part only; nested multiparts are not descended.
        body = mail.get_payload(0).get_payload()
    else:
        body = mail.get_payload(decode=True)

    data["Body"] = body
    data["List"] = mlist
    data["Year"] = year
    data["Month"] = month
    data["Day"] = day
    ## Some older messages lack a `Date`
    data["Date"] = data.get("Date", "Unknown")

    tmpl = Message.Message(searchList=[data])
    return (unicode(tmpl), data)
Example #13
0
    def test_remove_next_part_from_content(self):
        """Scrubbed output must not retain pipermail's 'next part' marker."""
        with open(get_test_file("pipermail_nextpart.txt")) as email_file:
            msg = message_from_file(email_file)
        contents = Scrubber("*****@*****.**", msg).scrub()[0]
        self.failIf("-------------- next part --------------" in contents)
Example #14
0
    def read(self, length=None):
        """Read this inode's attachment payload.

        Delegates to File.read() first; when that raises IOError, the
        message is re-parsed from self.fd and the payload of the
        attachment selected by the inode suffix is returned instead.
        The whole payload is handed back in one shot; readptr records
        that it was consumed, so later calls return ''.
        """
        try:
            return File.read(self,length)
        except IOError:
           pass

        # Already consumed on a previous fallback read.
        if self.readptr > 0:
            return ''

        self.fd.seek(0)
        a=email.message_from_file(self.fd)
        # The inode looks like '...|aNN' — the digits after the first
        # character of the last segment pick which leaf part to return.
        my_part = self.inode.split('|')[-1]
        attachment_number = int(my_part[1:])
        count = 0

        # Count only leaf (non-multipart) parts, matching the numbering
        # used when the inode was created.
        for part in a.walk():
            if part.get_content_maintype() == 'multipart':
                continue

            if count==attachment_number:
                self.message = part.get_payload(decode=1)
                self.readptr += len(self.message)
                return self.message

            count+=1

        return ''
Example #15
0
def parseEmail(emailFile):
    """Parse one mail file plus its '.envelope' sidecar, convert it and
    index both into Elasticsearch; returns the elapsed wall time.

    Fix: the open/close pairs are replaced by context managers so the
    handles are released even when parsing raises.
    """
    escon = es.ESload()

    start = time.time()

    with open(emailFile, 'r') as f:
        msg = email.message_from_file(f)

    # The envelope sidecar holds a single summary line.
    with open(emailFile + '.envelope', 'r') as env:
        envelope = env.readline()

    metaData = getMetaData(msg)

    try:
        if msg.is_multipart():
            data = mimeEmail(msg, envelope, metaData, )
        else:
            data = rawEmail(msg, envelope, metaData)

    except NoBoundaryInMultipartDefect:
        # Malformed multipart: fall back to treating it as a flat mail.
        data = rawEmail(msg, envelope, metaData, )

    key = createKey(metaData[0], envelope)
    envData = elasticEnvelope(envelope)

    # Write data into ES.
    escon.indexEmailData(data, key)
    escon.indexEnvelopeData(envData, key)

    duration = time.time() - start

    return duration
Example #16
0
def get_bulk_email(site, filename):
    """ Show a specific bulk email saved on the disk.

    Returns a dict with subject/content/recipients/sender/date for the
    archived message, or None when the file cannot be opened (implicitly
    None as well when the site has no log dir or no 'sent-bulk' folder).

    Fixes: the file is opened read-only (the original's 'r+' needlessly
    demanded write permission) and inside a context manager so the
    handle is closed even if parsing raises.
    """
    save_path = get_log_dir(site)
    join = os.path.join

    if save_path:
        save_path = join(save_path, 'sent-bulk')
        if os.path.isdir(save_path):
            message_path = join(save_path, filename)

            try:
                with open(message_path, 'r') as message_file:
                    mail = message_from_file(message_file)
            except IOError:
                return None

            # Prepare the date to be formatted with utShowFullDateTime
            # NOTE(review): assumes the Date header is present and
            # parseable — parsedate_tz() returns None otherwise and
            # mktime_tz() would raise; confirm against the writer side.
            date = email_utils.parsedate_tz(mail.get('Date', ''))
            date = email_utils.mktime_tz(date)
            date = datetime.fromtimestamp(date)

            return {
                'subject': mail.get('Subject', '(no-subject)'),
                'content': mail.get_payload(decode=True).replace(
                    '\n\n', '</p><p>').replace('\n', '<br/>'),
                'recipients': mail.get_all('To'),
                'sender': mail.get('From'),
                'date': date,
            }
    def test_write_mime_extra_headers(self):
        """write_mime() with extra headers.

        Modernized: uses the next() builtin instead of the Python-2-only
        .next() method, and assertTrue instead of the deprecated
        assert_ alias, so the test also runs under Python 3.
        """
        pr = ProblemReport(date="now!")
        pr["Simple"] = "bar"
        pr["TwoLine"] = "first\nsecond\n"
        io = StringIO()
        pr.write_mime(io, extra_headers={"Greeting": "hello world", "Foo": "Bar"})
        io.seek(0)

        msg = email.message_from_file(io)
        self.assertEqual(msg["Greeting"], "hello world")
        self.assertEqual(msg["Foo"], "Bar")
        msg_iter = msg.walk()

        # first part is the multipart container
        part = next(msg_iter)
        self.assertTrue(part.is_multipart())

        # second part should be an inline text/plain attachment with all
        # short fields
        part = next(msg_iter)
        self.assertTrue(not part.is_multipart())
        self.assertEqual(part.get_content_type(), "text/plain")
        # NOTE(review): get_payload(decode=True) returns bytes on
        # Python 3; this membership test assumes a str payload (Py2).
        self.assertTrue("Simple: bar" in part.get_payload(decode=True))

        # no more parts
        self.assertRaises(StopIteration, lambda: next(msg_iter))
Example #18
0
 def metadata(self):
     """Lazily parse the metadata file named by self.data['metadata'],
     caching the resulting message on the instance."""
     if self._metadata is not None:
         return self._metadata
     import email
     with open(self.data['metadata']) as source:
         self._metadata = email.message_from_file(source)
     return self._metadata
    def __init__(self, contents=None, filename="debian/control"):
        """
        Parse an existing control file.

        @param contents: content of a control file (parsed instead of the
            file on disk when given)
        @type contents: C{str}
        @param filename: name of the control file
        @type filename: C{str}
        @return: Control object
        @rtype: C{gbp.deb.conrol.Control} object
        """
        if not contents:
            # No literal contents: read the control file from disk.
            if not os.access(filename, os.F_OK):
                raise NoControlError("Control file %s does not exist" % filename)
            with open(filename) as f:
                control = email.message_from_file(f)
        else:
            control = email.message_from_string(contents)

        if not control.items():
            raise ParseControlError("Empty or invalid control file or contents")

        self._control = control
        self.filename = filename
def debug_hash_algorithm(opts):
    #mail_text = ''.join(sys.stdin.readlines())
    #message = email.message_from_string(mail_text)
    message = email.message_from_file(sys.stdin)
    mail_hash, header_text = compute_hash_key(None, message, opts.message_id)
    print header_text
    print 'Hash:', mail_hash
def import_mail_list(corpus_path, file_list_path, db_config):
    """Import every mail listed in *file_list_path* (paths relative to
    *corpus_path*) into the DB, skipping messages already stored by
    Message-ID.  Insertion failures are collected and dumped to
    insert_error.log.  Python 2 (print statements).

    Fixes: the list file and each mail file are now closed via context
    managers, and the redundant close() inside the final ``with`` block
    was removed.
    """
    db = build_db(db_config)
    if db is None:
        print 'ERROR: ImportEnronCorpus::import_corpus could not build db object'
        sys.exit(-1)

    error_list = []
    with open(file_list_path, 'r') as list_file:
        mail_list = list_file.readlines()
    total_mail_files = len(mail_list)
    mail_count = 0
    for mail_path in mail_list:
        mail_count += 1
        if mail_count % 1000 == 0:
            print 'Stored {} / {}'.format(mail_count, total_mail_files)
        mail_full_path = os.path.join(corpus_path, mail_path.strip())
        with open(mail_full_path) as mail_file:
            cur_email = email.message_from_file(mail_file)
        if not exists_email_db(db, cur_email['Message-ID']):
            success, err_msg = insert_email_db(db, 'enron_corpus', cur_email)
            if not success:
                error_list.append(( mail_path, err_msg))

    with open('insert_error.log', 'wb') as err_file:
        for email_path, error_msg in error_list:
            err_file.write('{}\t{}'.format(email_path, error_msg))
def parse_from_file(email_file):
    '''Debug-print the Date header and body of one mail file.

    NOTE(review): despite the original "return_type: message.Message"
    annotation, nothing is returned — the function only prints.
    Python 2 (mix of print() calls and print statements).
    '''
    with open(email_file) as f:
        e = email.message_from_file(f)
        # NOTE(review): assumes the Date header exists and parses —
        # parsedate() returns None otherwise and time.mktime would raise.
        date = datetime.datetime.fromtimestamp(time.mktime(parsedate(e["Date"])))
        print(type(e["Date"]))
        print(e["Date"])
        print(type(date))
        print(date)
        if e.is_multipart():
            # Top-level parts only; nested multiparts are not descended.
            for payload in e.get_payload():
                # if payload.is_multipart(): ...
                print payload.get_payload()
        else:
            print e.get_payload()
            print len(e.get_payload())
Example #23
0
    def __init__(self, name, dir_name):
        """Load one dataset directory: an optional METADATA.txt (RFC-822
        style headers plus a body used as the description) and every
        .csv file of links.

        Python 2 style: both the metadata parse and csv.DictReader read
        from files opened in 'rb'.
        """
        self.name = name
        self.dir = dir_name
        metadata_name = os.path.join(dir_name, 'METADATA.txt')
        if os.path.exists(metadata_name):
            with open(metadata_name, 'rb') as stream:
                metadata = email.message_from_file(stream)
            self.description = metadata.get_payload()
            # Each header becomes an attribute (name normalized via
            # tagify, with '-' mapped to '_').
            for key, value in metadata.items():
                setattr(self, tagify(key).replace('-', '_'), value)
        else:
            self.title = name.title()
            self.description = ''

        # Scan the directory for data files.
        self.all_links = []
        for file_name in os.listdir(dir_name):
            if file_name.endswith('.csv'):
                with open(os.path.join(dir_name, file_name), 'rb') as csv_file:
                    reader = csv.DictReader(csv_file)
                    for d in reader:
                        # Skip rows that are entirely empty.
                        if any(k and v for (k, v) in d.items()):
                            self.all_links.append(Link(self, d))

        # Prepare for indexing by keyword: union each link's keywords,
        # grouped per facet name.
        self.facet_keywords = {'main': set()}
        for link in self.all_links:
            for facet_name, keywords in link.facet_keywords.items():
                self.facet_keywords.setdefault(facet_name, set()).update(keywords)
        # NOTE(review): assumes MAIN == 'main' (the key seeded above) —
        # confirm against the module constant.
        self.main_keywords = self.facet_keywords[MAIN]
        self.facet_keywords_by_tag = dict((tagify(f, k), (f, k)) for (f, keywords) in self.facet_keywords.items() for k in keywords)
Example #24
0
 def _msgobj(self, filename):
     """Open *filename* via openfile() and return the parsed Message,
     closing the handle even when parsing fails."""
     fp = openfile(filename)
     try:
         return email.message_from_file(fp)
     finally:
         fp.close()
Example #25
0
def main():
    """Extract every named, non-text attachment of the mail file given on
    the command line into the directory passed via -d/--directory.

    Fixes: file handles are managed by ``with`` blocks, and the unused
    'counter' variable from the original was removed.
    """
    parser = OptionParser(usage="""Hilfetext""")
    parser.add_option('-d', '--directory',
                      type='string', action='store',
                      help="""Hilfe""")
    opts, args = parser.parse_args()
    if not opts.directory:
        parser.print_help()
        sys.exit(1)

    try:
        msgfile = args[0]
    except IndexError:
        parser.print_help()
        sys.exit(1)

    with open(msgfile) as fp:
        msg = email.message_from_file(fp)

    for part in msg.walk():
        # Containers and the text body itself are not attachments.
        if part.get_content_maintype() == 'multipart' or part.get_content_maintype() == 'text':
            continue
        filename = part.get_filename()
        if not filename:
            continue
        with open(os.path.join(opts.directory, filename), 'wb') as out:
            out.write(part.get_payload(decode=True))
 def test_custom_message_gets_policy_if_possible_from_file(self):
     """message_from_file() must honour both the custom message factory
     and the explicit policy argument."""
     stream = io.StringIO("Subject: bogus\n\nmsg\n")
     parsed = email.message_from_file(stream, self.MyMessage,
                                      policy=self.MyPolicy)
     self.assertIsInstance(parsed, self.MyMessage)
     self.assertIs(parsed.check_policy, self.MyPolicy)
def decodeheader(file):
	"""Print the decoded Subject/From/To/Date headers of mail file *file*.

	Python 2 only (print statements, the old email.Header API).
	NOTE(review): MIME encoded-words in the headers are what actually
	gets decoded here; the body is handled the same way elsewhere.
	"""
	fp = open(file,"r")
	msg = email.message_from_file(fp)

	subject = msg.get("subject")   # raw (possibly MIME-encoded) Subject

	# Decode via the email module; passing "gbk" (or "utf-8") lets the
	# codec cope with Chinese text when the default charset fails.
	try:
		head = email.Header.Header(subject)
	except:
		head = email.Header.Header(subject,"gbk")
	dh = email.Header.decode_header(head)
	subject = dh[0][0]

	# Adjust the prints below as needed.
	print "subject:" , subject
	print "from:", email.utils.parseaddr(msg.get("from"))[1]
	try:
		# Multiple recipients: print each address on one line.
		to = msg.get("to").split(',')
		print "to:",
		for i in to:
			print email.utils.parseaddr(i)[1],
		print ""
	except:
		print "to:", email.utils.parseaddr(msg.get("to"))[1]
	print "date:",msg.get("date")

	fp.close()
Example #28
0
def print_message_score(msg_name, msg_fp):
    """Print the spam probability of one message (read from *msg_fp*)
    plus the per-token evidence behind it.  Python 2 (print statements)."""
    msg = email.message_from_file(msg_fp)
    # NOTE(review): the DB handle is deliberately not closed here — the
    # cdb classifier may read from it lazily during spamprob(); confirm
    # before adding a close().
    bayes = CdbClassifier(open(DB_FILE, 'rb'))
    prob, evidence = bayes.spamprob(tokenize(msg), evidence=True)
    print msg_name, prob
    for word, prob in evidence:
        print '  ', repr(word), prob
Example #29
0
 def __init__(self, msg):
     """Wrap *msg*: accept a Message instance, an open file-like object,
     or anything whose str() parses as an RFC-2822 message."""
     if isinstance(msg, Message):
         self.message = msg
         return
     if hasattr(msg, 'read'):
         self.message = email.message_from_file(msg)
         return
     self.message = email.message_from_string(str(msg))
def decodebody(file):
	"""Walk mail file *file*: save every named attachment to disk and
	print the remaining text parts.  Python 2 only.

	NOTE(review): fp is never closed here — the handle leaks until GC.
	"""
	fp = open(file,"r")
	msg = email.message_from_file(fp)

	for par in msg.walk():               # visit every MIME part of the mail
		if not par.is_multipart():     # containers carry no data — only leaf parts do
			name = par.get_param("name")    # an attachment advertises its file name here
			if name:                    # it is an attachment
				# Decode the (possibly MIME-encoded) file name, trying
				# gbk when the default charset fails.
				try:
					h = email.Header.Header(name)
				except:
					h = email.Header.Header(name,"gbk")
				dh = email.Header.decode_header(h)
				fname = dh[0][0]
				# Undo the QP/base64 transfer encoding.
				data = par.get_payload(decode=True)  # decoded attachment bytes

				# Fall back to the literal name 'aaa' when the decoded
				# file name is not usable as a path.
				try :
					f = open(fname, 'wb')
				except:
					f = open('aaa', 'wb')
				f.write(data)
				f.close()
			else:
				print par.get_payload(decode=True)   # not an attachment: decode and print the text
Example #31
0
def email_recv(request):
    """Inbound-mail webhook: turn an emailed reply into an issue comment.

    Expects a POST carrying the shared 'key' and the raw message in
    FILES['email'].  The To address encodes project/issue/(event)/user
    plus a token that is re-derived and checked before the comment is
    recorded and subscribers are notified.

    Bug fixes vs. the original:
    - the 400 response was *raised* instead of returned (surfacing as a
      500 server error);
    - ``m.get_content_type == 'text/plain'`` compared the bound method
      object to a string — always False — so the text/plain part was
      never found; the method is now actually called.
    """
    if not hasattr(settings, 'REPLY_EMAIL') \
            or not hasattr(settings, 'EMAIL_KEY'):
        return HttpResponse(status=501)  # Not Implemented

    key = request.POST.get('key')
    if key != settings.EMAIL_KEY:
        raise PermissionDenied

    if 'email' not in request.FILES:
        return HttpResponse(status=400)  # Bad Request

    msg = request.FILES['email']
    msg = email.message_from_file(msg)

    mfrom = msg.get('From')
    mto = msg.get('To')
    subject = msg.get('Subject')

    # Prefer the first text/plain part; otherwise fall back to the
    # first part of the multipart (or the flat payload).
    if msg.is_multipart():
        msgs = msg.get_payload()
        for m in msgs:
            if m.get_content_type() == 'text/plain':
                content = m.get_payload(decode=True)
                break
        else:
            content = msgs[0].get_payload(decode=True)
    else:
        content = msg.get_payload(decode=True)

    if python_version < (3, ):
        content = content.decode('utf-8')

    addr = settings.REPLY_EMAIL
    pos = addr.find('@')
    name = addr[:pos]
    domain = addr[pos:]

    # Raw string so the regex escapes are not interpreted by Python first.
    p = re.compile(
        r'^%s\+(?P<project>[-\w]+)\.issue-(?P<issue>[0-9]+)(\.(?P<event>[0-9]+))?\.(?P<user>[0-9]+)\.(?P<token>[a-z0-9]+)%s$'
        % (name, domain))
    m = None
    for _mto in map(lambda x: x.strip(), mto.split(',')):
        m = p.match(_mto)
        if m:
            break
    if not m:  # no one matches
        raise Http404

    project = get_object_or_404(Project, name=m.group('project'))
    issue = get_object_or_404(Issue, project=project, id=m.group('issue'))
    event = m.group('event')
    if event:
        event = get_object_or_404(Event, issue=issue, pk=event)
    user = get_object_or_404(User, pk=m.group('user'))
    token = m.group('token')

    # Re-derive the expected token for this project/issue/(event)/user.
    mid = '%s.issue-%d' % (project.name, issue.id)
    if event:
        mid += '.%s' % event.pk
    reference_token = hexdigest_sha256(settings.SECRET_KEY, mid, user.pk)

    if token != reference_token:
        raise PermissionDenied

    event = Event(issue=issue,
                  author=user,
                  code=Event.COMMENT,
                  additionnal_section=content)
    event.save()
    issue.subscribers.add(user)
    notify_new_comment(event)
    issue.save()

    return HttpResponse()
Example #32
0
    "ingv": "INGV",
    "ipgp": "IPGP",
    "knmi": "ODC",
    "resif": "RESIF",
    "uni-muenchen": "LMU"
}

for myfile in filelist:
    print 70 * '-'
    print "Processing", myfile,
    if not os.path.exists(myfile):
        scores[2] += 1
        continue

    fid = open(myfile, "r")
    msg = email.message_from_file(fid)
    frm = msg['From']

    # Need to decide if a file is one of:
    #   (i) the HTML report from reqlogstats, as e-mail: use From to set 'who'.
    #  (ii) the text report from fdsnws operators, as e-mail: use payload to set 'who'
    # (iii) the text report from fdsnws operators: payload still determines 'who'.
    #  (iv) the HTML report from reqlogstats; 'who' must be given separately.

    contentType = "html"
    if os.path.basename(myfile).startswith("fdsnws_"):
        contentType = "text"

    try:
        subj = msg['Subject'].lower()
    except AttributeError:
Example #33
0
def main():
    for a in sys.argv[1:]:
        print "**********************************"
        parseOne(email.message_from_file(open(a)), a, "")
Example #34
0
#  прочитати файл, в унікоді погратися, зберегти в коремому файлі зміст.

from email.parser import BytesParser
from email.policy import default, HTTP, SMTP
from email import message_from_file
import locale
import sys

#  using convenience functions
# Parse "mail1" with the modern 'default' policy, echo the parsed message
# to stdout, and re-serialize it into targ.txt with headers wrapped at
# 90 columns.
with open("mail1", "r", encoding="utf-8") as f:
    cont = message_from_file(f, policy=default)
    print('start:----')
    print(cont)
    print('stop:----')
    with open("targ.txt", 'w', encoding="utf-8") as fp:
        fp.write(cont.as_string(maxheaderlen=90))
"""
#  reading in binary mode
with open("mail1", "rb") as f:
    cont = BytesParser(policy=default).parse(f)

for i in cont.walk():
    if i.get_content_type() == "text/plain":
        with open("targ.txt", 'wb') as fp:  
            fp.write(i.get_payload(decode=True))
"""
'''
# reading message's body withou headers
body = cont.get_body(preferencelist=('plain'))
mes = body.get_content()
with open("targ.txt", 'w', encoding="utf-8") as fp:
if not os.path.isdir(kde_dir):
    os.mkdir(kde_dir)
arfile = os.path.join(kde_dir, 'duplicate_mails')
duplicates = {}
try:
    archive = open(arfile)
except IOError:
    pass
else:
    for line in archive:
        when, msgid = line[:-1].split(' ', 1)
        duplicates[msgid] = float(when)
    archive.close()
redo_archive = False
# suck message in from stdin and study it
msg = email.message_from_file(sys.stdin)
msgid = msg['Message-ID']
if msgid:
    if msgid in duplicates:
        # duplicate message: alter its subject
        subject = msg['Subject']
        if subject is None:
            msg['Subject'] = '**** DUP **** ' + msgid
        else:
            del msg['Subject']
            msg['Subject'] = '**** DUP **** ' + subject
    else:
        # non-duplicate message: redo the archive file
        redo_archive = True
        duplicates[msgid] = now
else:
Example #36
0
def randomly_pickup(dataset):
    """Pick one path at random from *dataset* and return it parsed as an
    email.message.Message."""
    chosen = random.choice(dataset)
    with open(chosen) as fh:
        return email.message_from_file(fh)
Example #37
0
def send_mail(message, filename="", whitelists=None):
    """sendmail wrapper, takes _either_ a message string or a file as arguments

    @type  whitelists: list of (str or None)
    @param whitelists: path to whitelists. C{None} or an empty list whitelists
                       everything, otherwise an address is whitelisted if it is
                       included in any of the lists.
                       In addition a global whitelist can be specified in
                       Dinstall::MailWhiteList.
    """

    # Archive a copy of the outgoing mail when a mail directory is
    # configured.  `Cnf` appears to be the global project configuration
    # mapping — TODO confirm.
    maildir = Cnf.get('Dir::Mail')
    if maildir:
        path = os.path.join(maildir, datetime.datetime.now().isoformat())
        path = find_next_free(path)
        with open(path, 'w') as fh:
            print(message, end=' ', file=fh)

    # Check whether we're supposed to be sending mail
    if "Dinstall::Options::No-Mail" in Cnf and Cnf[
            "Dinstall::Options::No-Mail"]:
        return

    # If we've been passed a string dump it into a temporary file
    # NOTE(review): os.write() requires bytes on Python 3 — this assumes
    # `message` is bytes-compatible; confirm against callers.
    if message:
        (fd, filename) = tempfile.mkstemp()
        os.write(fd, message)
        os.close(fd)

    # Build the effective whitelist: caller-supplied lists plus the global
    # Dinstall::MailWhiteList.  None (or a list containing None) disables
    # whitelisting entirely.
    if whitelists is None or None in whitelists:
        whitelists = []
    if Cnf.get('Dinstall::MailWhiteList', ''):
        whitelists.append(Cnf['Dinstall::MailWhiteList'])
    if len(whitelists) != 0:
        with open_file(filename) as message_in:
            message_raw = modemail.message_from_file(message_in)

        # Each whitelist line is either a regex (marked by re_re_mark) or a
        # literal address; both are compiled to regex objects.
        whitelist = []
        for path in whitelists:
            with open_file(path, 'r') as whitelist_in:
                for line in whitelist_in:
                    if not re_whitespace_comment.match(line):
                        if re_re_mark.match(line):
                            whitelist.append(
                                re.compile(re_re_mark.sub("", line.strip(),
                                                          1)))
                        else:
                            whitelist.append(
                                re.compile(re.escape(line.strip())))

        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
        for field in fields:
            # Check each field
            value = message_raw.get(field, None)
            if value is not None:
                match = []
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name,
                     email) = fix_maintainer(item.strip())
                    mail_whitelisted = 0
                    for wr in whitelist:
                        if wr.match(email):
                            mail_whitelisted = 1
                            break
                    if not mail_whitelisted:
                        print("Skipping {0} since it's not whitelisted".format(
                            item))
                        continue
                    match.append(item)

                # Doesn't have any mail in whitelist so remove the header
                if len(match) == 0:
                    del message_raw[field]
                else:
                    message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if "To" not in message_raw:
            fields.reverse()
            for field in fields:
                if field in message_raw:
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                    break
            else:
                # Clean up any temporary files
                # and return, as we removed all recipients.
                if message:
                    os.unlink(filename)
                return

        # Rewrite the (temporary) file with the filtered headers.
        # NOTE(review): as_string() returns str — os.write() on Python 3
        # would need bytes here too; confirm the intended runtime.
        fd = os.open(filename, os.O_RDWR | os.O_EXCL, 0o700)
        os.write(fd, message_raw.as_string(True))
        os.close(fd)

    # Invoke sendmail
    # `commands` is the Python 2 module (subprocess.getstatusoutput on 3).
    (result, output) = commands.getstatusoutput(
        "%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    if (result != 0):
        raise SendmailFailedError(output)

    # Clean up any temporary files
    if message:
        os.unlink(filename)
Example #38
0
    def install(self, paths, maker, **kwargs):
        """
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings to raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written.

        The return value is a :class:`InstalledDistribution` instance unless
        ``options.lib_only`` is True, in which case the return value is ``None``.
        """

        dry_run = maker.dry_run
        warner = kwargs.get('warner')
        lib_only = kwargs.get('lib_only', False)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        # Zip members are byte streams; wrap them to read UTF-8 text.
        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            # Parse the WHEEL metadata (RFC 822 / email format) and check the
            # declared Wheel-Version against what this code supports.
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            if (file_version != self.wheel_version) and warner:
                warner(self.wheel_version, file_version)

            if message['Root-Is-Purelib'] == 'true':
                libdir = paths['purelib']
            else:
                libdir = paths['platlib']

            # RECORD maps archive member names to rows (path, hash, size)
            # used to verify each file before and after installation.
            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            data_pfx = posixpath.join(data_dir, '')
            info_pfx = posixpath.join(info_dir, '')
            script_pfx = posixpath.join(data_dir, 'scripts', '')

            # make a new instance rather than a copy of maker's,
            # as we mutate it
            fileop = FileOperator(dry_run=dry_run)
            fileop.record = True    # so we can rollback if needed

            bc = not sys.dont_write_bytecode    # Double negatives. Lovely!

            outfiles = []   # for RECORD writing

            # for script copying/shebang processing
            workdir = tempfile.mkdtemp()
            # set target dir later
            # we default add_launchers to False, as the
            # Python Launcher should be used instead
            maker.source_dir = workdir
            maker.target_dir = None
            try:
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    # The signature file won't be in RECORD,
                    # and we don't currently do anything with it
                    if u_arcname.endswith('/RECORD.jws'):
                        continue
                    row = records[u_arcname]
                    if row[2] and str(zinfo.file_size) != row[2]:
                        raise DistlibException('size mismatch for '
                                               '%s' % u_arcname)
                    if row[1]:
                        kind, value = row[1].split('=', 1)
                        with zf.open(arcname) as bf:
                            data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException('digest mismatch for '
                                                   '%s' % arcname)

                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                        logger.debug('lib_only: skipping %s', u_arcname)
                        continue
                    is_script = (u_arcname.startswith(script_pfx)
                                 and not u_arcname.endswith('.exe'))

                    if u_arcname.startswith(data_pfx):
                        _, where, rp = u_arcname.split('/', 2)
                        outfile = os.path.join(paths[where], convert_path(rp))
                    else:
                        # meant for site-packages.
                        if u_arcname in (wheel_metadata_name, record_name):
                            continue
                        outfile = os.path.join(libdir, convert_path(u_arcname))
                    if not is_script:
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, outfile)
                        outfiles.append(outfile)
                        # Double check the digest of the written file
                        # (`kind`/`digest` were set in this iteration's
                        # row[1] block above — guarded by the same `row[1]`).
                        if not dry_run and row[1]:
                            with open(outfile, 'rb') as bf:
                                data = bf.read()
                                _, newdigest = self.get_hash(data, kind)
                                if newdigest != digest:
                                    raise DistlibException('digest mismatch '
                                                           'on write for '
                                                           '%s' % outfile)
                        if bc and outfile.endswith('.py'):
                            try:
                                pyc = fileop.byte_compile(outfile)
                                outfiles.append(pyc)
                            except Exception:
                                # Don't give up if byte-compilation fails,
                                # but log it and perhaps warn the user
                                logger.warning('Byte-compilation failed',
                                               exc_info=True)
                    else:
                        # Scripts are staged into workdir so that `maker`
                        # can rewrite shebangs / add launchers on the copy.
                        fn = os.path.basename(convert_path(arcname))
                        workname = os.path.join(workdir, fn)
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, workname)

                        dn, fn = os.path.split(outfile)
                        maker.target_dir = dn
                        filenames = maker.make(fn)
                        fileop.set_executable_mode(filenames)
                        outfiles.extend(filenames)

                if lib_only:
                    logger.debug('lib_only: returning None')
                    dist = None
                else:
                    # Generate scripts

                    # Try to get pydist.json so we can see if there are
                    # any commands to generate. If this fails (e.g. because
                    # of a legacy wheel), log a warning but don't give up.
                    commands = None
                    file_version = self.info['Wheel-Version']
                    if file_version == '1.0':
                        # Use legacy info
                        ep = posixpath.join(info_dir, 'entry_points.txt')
                        try:
                            with zf.open(ep) as bwf:
                                epdata = read_exports(bwf)
                            commands = {}
                            for key in ('console', 'gui'):
                                k = '%s_scripts' % key
                                if k in epdata:
                                    commands['wrap_%s' % key] = d = {}
                                    for v in epdata[k].values():
                                        s = '%s:%s' % (v.prefix, v.suffix)
                                        if v.flags:
                                            s += ' %s' % v.flags
                                        d[v.name] = s
                        except Exception:
                            logger.warning('Unable to read legacy script '
                                           'metadata, so cannot generate '
                                           'scripts')
                    else:
                        try:
                            with zf.open(metadata_name) as bwf:
                                wf = wrapper(bwf)
                                commands = json.load(wf).get('extensions')
                                if commands:
                                    commands = commands.get('python.commands')
                        except Exception:
                            logger.warning('Unable to read JSON metadata, so '
                                           'cannot generate scripts')
                    if commands:
                        console_scripts = commands.get('wrap_console', {})
                        gui_scripts = commands.get('wrap_gui', {})
                        if console_scripts or gui_scripts:
                            script_dir = paths.get('scripts', '')
                            if not os.path.isdir(script_dir):
                                raise ValueError('Valid script path not '
                                                 'specified')
                            maker.target_dir = script_dir
                            for k, v in console_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script)
                                fileop.set_executable_mode(filenames)

                            if gui_scripts:
                                options = {'gui': True }
                                for k, v in gui_scripts.items():
                                    script = '%s = %s' % (k, v)
                                    filenames = maker.make(script, options)
                                    fileop.set_executable_mode(filenames)

                    p = os.path.join(libdir, info_dir)
                    dist = InstalledDistribution(p)

                    # Write SHARED
                    paths = dict(paths)     # don't change passed in dict
                    del paths['purelib']
                    del paths['platlib']
                    paths['lib'] = libdir
                    p = dist.write_shared_locations(paths, dry_run)
                    if p:
                        outfiles.append(p)

                    # Write RECORD
                    dist.write_installed_files(outfiles, paths['prefix'],
                                               dry_run)
                return dist
            except Exception:  # pragma: no cover
                logger.exception('installation failed.')
                fileop.rollback()
                raise
            finally:
                shutil.rmtree(workdir)
Example #39
0
def process_mailfile(filename, stats):
    """Parse the mail stored at *filename* and hand it to ``process_mail``."""
    with open(filename, 'r') as handle:
        parsed = email.message_from_file(handle)
        process_mail(parsed, filename, stats)
Example #40
0
 def _msgobj(self, filename):
     """Open fixture *filename* via ``openfile`` and parse it with this
     test case's policy."""
     with openfile(filename) as handle:
         parsed = email.message_from_file(handle, policy=self.policy)
         return parsed
Example #41
0
                parts['content'] += payload

    if msg["Date"]:
        e_date = email.utils.parsedate_tz(msg['Date'])
        if e_date:
            date = datetime.datetime.fromtimestamp(
                email.utils.mktime_tz(e_date))
            parts['content'] += ' weekday_' + str(
                date.weekday()) + ' hour_of_day_' + str(date.hour)
    if msg['Subject']:
        # parts['subject'] = msg['Subject']
        parts['content'] += ' ' + msg['Subject']
    return parts


# Walk every file under Dataset/Ham, parse each one as an email message and
# collect the extracted text features.
ham_folder = os.path.join(os.getcwd(), 'Dataset', 'Ham')

ham_list = []

for subdir, dirs, files in os.walk(ham_folder):
    for name in files:
        path = os.path.join(subdir, name)
        with open(path, encoding='latin-1') as handle:
            parsed = email.message_from_file(handle)
            ham_list.append(get_text_from_email(parsed))

print(len(ham_list))

# Persist the features twice: default encoding and latin-1.
df = pd.DataFrame(ham_list)
ham_file = os.path.join(os.getcwd(), 'Dataset', 'csv_files', 'ham.csv')
df.to_csv(ham_file)
ham_file = os.path.join(os.getcwd(), 'Dataset', 'csv_files', 'ham_latin.csv')
df.to_csv(ham_file, encoding='latin-1')
Example #42
0
            if e_date:
                date = datetime.datetime.fromtimestamp(
                    email.utils.mktime_tz(e_date))
                parts['content'] += ' weekday_' + str(
                    date.weekday()) + ' hour_of_day_' + str(date.hour)
        except Exception:
            pass
    if msg['Subject']:
        parts['content'] += ' ' + msg['Subject']
    return parts


# input spam folder
spam_folder = os.path.join(os.getcwd(), 'Dataset', sys.argv[1])

spam_list = []

# Walk the folder tree; files that cannot be opened are skipped silently.
for subdir, dirs, files in os.walk(spam_folder):
    for filename in files:
        try:
            path = os.path.join(subdir, filename)
            with open(path, encoding='latin-1') as handle:
                parsed = email.message_from_file(handle)
                spam_list.append(get_text_from_email(parsed))
        except OSError:
            pass

df = pd.DataFrame(spam_list)
# Output file path. Saves as .csv file
spam_file = os.path.join(os.getcwd(), 'Dataset', 'csv_files', sys.argv[2])
df.to_csv(spam_file)
Example #43
0
def message_from_file(handle):
    '''Reads a mail from the given file-like object and returns an email
    object, very much like email.message_from_file. In addition to
    that OpenPGP encrypted data is detected and decrypted. If this
    succeeds, any mime messages found in the recovered plaintext
    message are added to the returned message object.

    :param handle: a file-like object
    :returns: :class:`email.message.Message` possibly augmented with
              decrypted data
    '''
    # NOTE: this is Python 2 code — it uses the `unicode` builtin below.
    m = email.message_from_file(handle)

    # make sure noone smuggles a token in (data from m is untrusted)
    del m[X_SIGNATURE_VALID_HEADER]
    del m[X_SIGNATURE_MESSAGE_HEADER]

    p = get_params(m)
    app_pgp_sig = 'application/pgp-signature'
    app_pgp_enc = 'application/pgp-encrypted'

    # handle OpenPGP signed data
    if (m.is_multipart() and
            m.get_content_subtype() == 'signed' and
            p.get('protocol') == app_pgp_sig):
        # RFC 3156 is quite strict:
        # * exactly two messages
        # * the second is of type 'application/pgp-signature'
        # * the second contains the detached signature

        malformed = False
        if len(m.get_payload()) != 2:
            malformed = u'expected exactly two messages, got {0}'.format(
                len(m.get_payload()))
        else:
            ct = m.get_payload(1).get_content_type()
            if ct != app_pgp_sig:
                malformed = u'expected Content-Type: {0}, got: {1}'.format(
                    app_pgp_sig, ct)

        # TODO: RFC 3156 says the alg has to be lower case, but I've
        # seen a message with 'PGP-'. maybe we should be more
        # permissive here, or maybe not, this is crypto stuff...
        if not p.get('micalg', 'nothing').startswith('pgp-'):
            malformed = u'expected micalg=pgp-..., got: {0}'.format(
                p.get('micalg', 'nothing'))

        sigs = []
        if not malformed:
            try:
                sigs = crypto.verify_detached(
                    helper.email_as_string(m.get_payload(0)),
                    m.get_payload(1).get_payload())
            except GPGProblem as e:
                malformed = unicode(e)

        add_signature_headers(m, sigs, malformed)

    # handle OpenPGP encrypted data
    elif (m.is_multipart() and
          m.get_content_subtype() == 'encrypted' and
          p.get('protocol') == app_pgp_enc and
          'Version: 1' in m.get_payload(0).get_payload()):
        # RFC 3156 is quite strict:
        # * exactly two messages
        # * the first is of type 'application/pgp-encrypted'
        # * the first contains 'Version: 1'
        # * the second is of type 'application/octet-stream'
        # * the second contains the encrypted and possibly signed data
        malformed = False

        ct = m.get_payload(0).get_content_type()
        if ct != app_pgp_enc:
            malformed = u'expected Content-Type: {0}, got: {1}'.format(
                app_pgp_enc, ct)

        want = 'application/octet-stream'
        ct = m.get_payload(1).get_content_type()
        if ct != want:
            malformed = u'expected Content-Type: {0}, got: {1}'.format(want,
                                                                       ct)

        if not malformed:
            try:
                sigs, d = crypto.decrypt_verify(m.get_payload(1).get_payload())
            except GPGProblem as e:
                # signature verification failures end up here too if
                # the combined method is used, currently this prevents
                # the interpretation of the recovered plain text
                # mail. maybe that's a feature.
                malformed = unicode(e)
            else:
                # parse decrypted message
                n = message_from_string(d)

                # add the decrypted message to m. note that n contains
                # all the attachments, no need to walk over n here.
                m.attach(n)

                # add any defects found
                m.defects.extend(n.defects)

                # there are two methods for both signed and encrypted
                # data, one is called 'RFC 1847 Encapsulation' by
                # RFC 3156, and one is the 'Combined method'.
                if len(sigs) == 0:
                    # 'RFC 1847 Encapsulation', the signature is a
                    # detached signature found in the recovered mime
                    # message of type multipart/signed.
                    if X_SIGNATURE_VALID_HEADER in n:
                        for k in (X_SIGNATURE_VALID_HEADER,
                                  X_SIGNATURE_MESSAGE_HEADER):
                            m[k] = n[k]
                    else:
                        # an encrypted message without signatures
                        # should arouse some suspicion, better warn
                        # the user
                        add_signature_headers(m, [], 'no signature found')
                else:
                    # 'Combined method', the signatures are returned
                    # by the decrypt_verify function.

                    # note that if we reached this point, we know the
                    # signatures are valid. if they were not valid,
                    # the else block of the current try would not have
                    # been executed
                    add_signature_headers(m, sigs, '')

        # Attach a diagnostic text part describing why the OpenPGP layer
        # was rejected, so the user sees the reason in the mail body.
        if malformed:
            msg = u'Malformed OpenPGP message: {0}'.format(malformed)
            content = email.message_from_string(msg.encode('utf-8'))
            content.set_charset('utf-8')
            m.attach(content)

    return m
Example #44
0
def msgfactory(fp):
    """Message factory for ``mailbox`` iteration.

    Parses *fp* into an :class:`email.message.Message`.  On a parse error
    an empty string is returned instead of ``None``, because ``None``
    would terminate the mailbox iterator.

    Fix: the original caught ``email.Errors.MessageParseError`` — the
    capitalized ``email.Errors`` alias only exists on Python 2, so on
    Python 3 the except clause itself raised ``AttributeError``.  The
    lowercase ``email.errors`` module works on both.
    """
    import email.errors  # local import so the except name always resolves
    try:
        return email.message_from_file(fp)
    except email.errors.MessageParseError:
        # Don't return None since that will stop the mailbox iterator
        return ''
Example #45
0
    def __init__(self, script_path):
        """Parse the configuration file and command-line options into the
        attribute state used by the rest of the scanner.

        Any invalid or unreadable input (URL lists, wordlists, proxy /
        header lists, status codes, extensions) terminates the process
        via ``exit``.
        """
        self.script_path = script_path
        self.parseConfig()

        options = self.parseArguments()

        self.quiet = options.quiet
        self.full_url = options.full_url
        self.urlList = None
        self.raw_file = None

        # Target selection: explicit -u URL, a URL list file, a CIDR
        # expansion, stdin, or a raw request file.
        if not options.url:

            if options.urlList:

                with File(options.urlList) as urlList:

                    if not urlList.exists():
                        print("The file with URLs does not exist")
                        exit(1)

                    if not urlList.is_valid():
                        print("The file with URLs is invalid")
                        exit(1)

                    if not urlList.can_read():
                        print("The file with URLs cannot be read")
                        exit(1)

                    self.urlList = list(urlList.get_lines())

            elif options.cidr:
                self.urlList = [str(ip) for ip in IPv4Network(options.cidr)]

            elif options.stdin_urls:
                self.urlList = sys.stdin.read().splitlines()

            elif options.raw_file:
                with File(options.raw_file) as raw_content:
                    if not raw_content.exists():
                        print("The file with the raw request does not exist")
                        exit(1)

                    if not raw_content.is_valid():
                        print("The file with the raw request is invalid")
                        exit(1)

                    if not raw_content.can_read():
                        print("The file with the raw request cannot be read")
                        exit(1)

                self.raw_file = options.raw_file

            else:
                print("URL target is missing, try using -u <url>")
                exit(1)

        else:
            self.urlList = [options.url]

        if not options.extensions and not options.noExtension:
            print("WARNING: No extension was specified!")

        if options.noExtension:
            options.extensions = str()

        # Enable to use multiple dictionaries at once
        for dictFile in options.wordlist.split(","):
            with File(dictFile) as wordlist:
                if not wordlist.exists():
                    print("The wordlist file does not exist")
                    exit(1)

                if not wordlist.is_valid():
                    print("The wordlist is invalid")
                    exit(1)

                if not wordlist.can_read():
                    print("The wordlist cannot be read")
                    exit(1)

        # Proxy configuration.
        # NOTE(review): self.proxylist / self.proxy are only assigned on
        # their respective branches — confirm downstream code guards for
        # the missing attribute.
        if options.proxyList:
            with File(options.proxyList) as plist:
                if not plist.exists():
                    print("The proxylist file does not exist")
                    exit(1)

                if not plist.is_valid():
                    print("The proxylist is invalid")
                    exit(1)

                if not plist.can_read():
                    print("The proxylist cannot be read")
                    exit(1)

            self.proxylist = open(options.proxyList).read().splitlines()

            options.requestByHostname = True

        elif options.proxy:
            self.proxy = options.proxy
            options.requestByHostname = True

        else:
            self.proxy = None

        if options.replay_proxy:
            self.replay_proxy = options.replay_proxy
            options.requestByHostname = True

        else:
            self.replay_proxy = None

        # Extra HTTP headers (inline and from a file) are parsed with the
        # stdlib email header parser.
        if options.headers:
            try:
                self.headers = dict(
                    email.message_from_file(
                        StringIO("\r\n".join(options.headers))))
            except Exception:
                print("Invalid headers")
                exit(1)

        else:
            self.headers = {}

        if options.headerList:
            try:
                with File(options.headerList) as hlist:
                    if not hlist.exists():
                        print("The header list file does not exist")
                        exit(1)

                    if not hlist.is_valid():
                        print("The header list file is invalid")
                        exit(1)

                    if not hlist.can_read():
                        print("The header list cannot be read")
                        exit(1)

                    headers = dict(
                        email.message_from_file(StringIO(hlist.read())))

                    for key, value in headers.items():
                        self.headers[key] = value
            except Exception as e:
                print("Error in headers file: " + str(e))
                exit(1)

        # Extension handling.  "CHANGELOG.md" is what an unquoted `*` often
        # expands to in a shell, hence the dedicated error message.
        if options.extensions == "*":
            self.extensions = [
                "php", "inc.php", "jsp", "jsf", "asp", "aspx", "do", "action",
                "cgi", "pl", "html", "htm", "js", "css", "json", "txt",
                "tar.gz", "tgz"
            ]
        elif options.extensions == "CHANGELOG.md":
            print(
                "A weird extension was provided: CHANGELOG.md. Please do not use * as the extension or enclose it in double quotes"
            )
            exit(0)
        else:
            self.extensions = list(
                oset([
                    extension.lstrip(' .')
                    for extension in options.extensions.split(",")
                ]))

        if options.excludeExtensions:
            self.excludeExtensions = list(
                oset([
                    excludeExtension.lstrip(' .') for excludeExtension in
                    options.excludeExtensions.split(",")
                ]))
        else:
            self.excludeExtensions = []

        self.useragent = options.useragent
        self.useRandomAgents = options.useRandomAgents
        self.cookie = options.cookie

        if options.threadsCount < 1:
            print("Threads number must be greater than zero")
            exit(1)

        self.threadsCount = options.threadsCount

        # Status-code filters: comma-separated single codes or inclusive
        # "low-high" ranges.
        self.includeStatusCodes = []

        if options.includeStatusCodes:
            for statusCode in options.includeStatusCodes.split(","):
                try:
                    if "-" in statusCode:
                        statusCodes = [
                            i for i in range(
                                int(statusCode.split("-")[0].strip()),
                                int(statusCode.split("-")[1].strip()) + 1)
                        ]
                        self.includeStatusCodes.extend(statusCodes)

                    else:
                        self.includeStatusCodes.append(int(statusCode.strip()))

                except ValueError:
                    print(
                        "Invalid status code or status code range: {0}".format(
                            statusCode))
                    exit(1)

        self.excludeStatusCodes = []

        if options.excludeStatusCodes:
            for statusCode in options.excludeStatusCodes.split(","):
                try:
                    if "-" in statusCode:
                        statusCodes = [
                            i for i in range(
                                int(statusCode.split("-")[0].strip()),
                                int(statusCode.split("-")[1].strip()) + 1)
                        ]
                        self.excludeStatusCodes.extend(statusCodes)

                    else:
                        self.excludeStatusCodes.append(int(statusCode.strip()))

                except ValueError:
                    print(
                        "Invalid status code or status code range: {0}".format(
                            statusCode))
                    exit(1)

        # Response filters: sizes, texts, regexps, redirects.
        if options.excludeSizes:
            try:
                self.excludeSizes = list(
                    oset([
                        excludeSize.strip().upper() if excludeSize else None
                        for excludeSize in options.excludeSizes.split(",")
                    ]))

            except ValueError:
                self.excludeSizes = []
        else:
            self.excludeSizes = []

        if options.excludeTexts:
            try:
                self.excludeTexts = list(
                    oset([
                        excludeText.strip() if excludeText else None
                        for excludeText in options.excludeTexts.split(",")
                    ]))

            except ValueError:
                self.excludeTexts = []
        else:
            self.excludeTexts = []

        if options.excludeRegexps:
            try:
                self.excludeRegexps = list(
                    oset([
                        excludeRegexp.strip() if excludeRegexp else None
                        for excludeRegexp in options.excludeRegexps.split(",")
                    ]))

            except ValueError:
                self.excludeRegexps = []
        else:
            self.excludeRegexps = []

        if options.excludeRedirects:
            try:
                self.excludeRedirects = list(
                    oset([
                        excludeRedirect.strip() if excludeRedirect else None
                        for excludeRedirect in options.excludeRedirects.split(
                            ",")
                    ]))

            except ValueError:
                self.excludeRedirects = []
        else:
            self.excludeRedirects = []

        self.prefixes = [] if not options.prefixes else list(
            oset([prefix.strip() for prefix in options.prefixes.split(",")]))
        self.suffixes = [] if not options.suffixes else list(
            oset([suffix.strip() for suffix in options.suffixes.split(",")]))
        if options.wordlist:
            self.wordlist = list(
                oset([
                    wordlist.strip()
                    for wordlist in options.wordlist.split(",")
                ]))
        else:
            print("No wordlist was provided, try using -w <wordlist>")
            exit(1)

        # Plain passthrough options.
        self.lowercase = options.lowercase
        self.uppercase = options.uppercase
        self.capitalization = options.capitalization
        self.forceExtensions = options.forceExtensions
        self.data = options.data
        self.testFailPath = options.testFailPath
        self.color = options.color
        self.delay = options.delay
        self.timeout = options.timeout
        self.ip = options.ip
        self.maxRetries = options.maxRetries
        self.recursive = options.recursive
        self.minimumResponseSize = options.minimumResponseSize
        self.maximumResponseSize = options.maximumResponseSize
        self.noExtension = options.noExtension
        self.onlySelected = options.onlySelected
        self.simpleOutputFile = options.simpleOutputFile
        self.plainTextOutputFile = options.plainTextOutputFile
        self.jsonOutputFile = options.jsonOutputFile
        self.xmlOutputFile = options.xmlOutputFile
        self.markdownOutputFile = options.markdownOutputFile
        self.csvOutputFile = options.csvOutputFile

        # Subdirectory scoping; entries are normalized to "name/" form.
        if options.scanSubdirs:
            self.scanSubdirs = list(
                oset([
                    subdir.strip(" /") + "/"
                    for subdir in options.scanSubdirs.split(",")
                ]))

        else:
            self.scanSubdirs = []

        if options.excludeSubdirs:
            self.excludeSubdirs = list(
                oset([
                    subdir.strip(" /") + "/"
                    for subdir in options.excludeSubdirs.split(",")
                ]))

        else:
            self.excludeSubdirs = None

        if len(set(self.extensions).intersection(self.excludeExtensions)):
            print(
                "Exclude extension list can not contain any extension that has already in the extension list"
            )
            exit(1)

        self.redirect = options.followRedirects
        self.httpmethod = options.httpmethod
        self.scheme = options.scheme
        self.requestByHostname = options.requestByHostname
        self.exit_on_error = options.exit_on_error
        self.skip_on_429 = options.skip_on_429
        self.debug = options.debug

        self.recursion_depth = options.recursion_depth

        if self.scheme not in ["http", "https"]:
            print("Invalid URI scheme: {0}".format(self.scheme))
            exit(1)
Example #46
0
def load_message(filepath):
    """Read the file at *filepath* and parse it into an email Message object."""
    handle = open(filepath, 'r', encoding='utf-8')
    try:
        return email.message_from_file(handle)
    finally:
        # Guarantee the handle is released even if parsing raises.
        handle.close()
Example #47
0
def extractPDM(filename):
    """Parse the RFC 822 message stored in *filename*, print it, and return it.

    Fixes a double-parse bug in the original: the file was first fully
    consumed by ``f.read()`` for ``message_from_string``, and then the
    already-exhausted file object was handed to ``email.message_from_file``,
    which therefore always produced an empty message.  The message is now
    parsed exactly once, using the modern ``policy.default`` API.

    Returns the parsed message (the original returned ``None``; returning
    the message is backward-compatible and makes the function testable).
    """
    with open(filename, "r") as f:
        msg = email.message_from_file(f, policy=policy.default)

    print(msg)
    return msg
Example #48
0
 def parse_file(self, fd, prefix=None):
     """Parse the message contained in file object *fd* and hand it to self.parse."""
     parsed = message_from_file(fd)
     return self.parse(parsed, prefix)
Example #49
0
 def __init__(self, file, category=None):
     """Wrap the e-mail message read from *file*, optionally tagged with a category."""
     self.category = category
     self.mail = email.message_from_file(file)
Example #50
0
    def eom(self):
        """ End of Message """
        # Rewind the spooled copy of the message collected during the
        # earlier milter callbacks so it can be parsed from the start.
        self.fp.seek(0)
        msg = email.message_from_file(self.fp)

        # Only replies to our own notifications are interesting; those
        # carry the original notification id in In-Reply-To.
        msg_id = msg.get("In-Reply-To", None)
        if msg_id is None:
            self.log("No In-Reply-To, keep going")
            return Milter.CONTINUE

        # Ensure we don't get extra lines in the message-id
        msg_id = msg_id.split("\n")[0].strip()

        self.log("msg-ig %s" % msg_id)
        self.log("To %s" % msg["to"])
        self.log("Cc %s" % msg.get("cc"))
        self.log("From %s" % msg["From"])

        # Check the email was sent to the right address
        # (the reply+<hash> address may appear in either To or Cc).
        email_address = msg["to"]
        if "reply+" in msg.get("cc", ""):
            email_address = msg["cc"]
        if "reply+" not in email_address:
            self.log("No valid recipient email found in To/Cc: %s" %
                     email_address)
            return Milter.CONTINUE

        # Ensure the user replied to his/her own notification, not that
        # they are trying to forge their ID into someone else's
        salt = _config.get("SALT_EMAIL")
        from_email = clean_item(msg["From"])
        session = pagure.lib.model_base.create_session(_config["DB_URL"])
        try:
            user = pagure.lib.query.get_user(session, from_email)
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit; narrowing to the lookup's exception would be safer.
            self.log("Could not find an user in the DB associated with %s" %
                     from_email)
            session.remove()
            return Milter.CONTINUE

        # Recompute the salted hash for every address the user owns; the
        # reply+<hash> address must embed one of them.
        hashes = []
        for email_obj in user.emails:
            # NOTE(review): hashlib.sha512() requires bytes on Python 3,
            # so this line only works as-is on Python 2 -- confirm target.
            m = hashlib.sha512("%s%s%s" % (msg_id, salt, email_obj.email))
            hashes.append(m.hexdigest())

        # reply+<hash>@host -> the candidate hash is after the last '+'.
        tohash = email_address.split("@")[0].split("+")[-1]
        if tohash not in hashes:
            self.log("hash list: %s" % hashes)
            self.log("tohash:    %s" % tohash)
            self.log("Hash does not correspond to the destination")
            session.remove()
            return Milter.CONTINUE

        # Avoid processing mail this service sent itself.
        if msg["From"] and msg["From"] == _config.get("FROM_EMAIL"):
            self.log("Let's not process the email we send")
            session.remove()
            return Milter.CONTINUE

        msg_id = clean_item(msg_id)

        # Route to the ticket or pull-request handler based on the marker
        # embedded in the notification's message-id.
        if msg_id and "-ticket-" in msg_id:
            self.log("Processing issue")
            session.remove()
            return self.handle_ticket_email(msg, msg_id)
        elif msg_id and "-pull-request-" in msg_id:
            self.log("Processing pull-request")
            session.remove()
            return self.handle_request_email(msg, msg_id)
        else:
            self.log("Not a pagure ticket or pull-request email, let it go")
            session.remove()
            return Milter.CONTINUE
Example #51
0
    def install(self, paths, maker, **kwargs):
        """
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings to raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
        bytecode will try to use file-hash based invalidation (PEP-552) on
        supported interpreter versions (CPython 2.7+).

        The return value is a :class:`InstalledDistribution` instance unless
        ``options.lib_only`` is True, in which case the return value is ``None``.
        """

        dry_run = maker.dry_run
        warner = kwargs.get("warner")
        lib_only = kwargs.get("lib_only", False)
        bc_hashed_invalidation = kwargs.get("bytecode_hashed_invalidation", False)

        # Canonical wheel layout: <name>-<ver>.data/ and <name>-<ver>.dist-info/
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = "%s-%s" % (self.name, self.version)
        data_dir = "%s.data" % name_ver
        info_dir = "%s.dist-info" % name_ver

        metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, "WHEEL")
        record_name = posixpath.join(info_dir, "RECORD")

        # Zip members open in binary mode; wrap them to decode UTF-8 text.
        wrapper = codecs.getreader("utf-8")

        with ZipFile(pathname, "r") as zf:
            # Check the WHEEL metadata version against what we support.
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message["Wheel-Version"].split(".", 1)
            file_version = tuple([int(i) for i in wv])
            if (file_version != self.wheel_version) and warner:
                warner(self.wheel_version, file_version)

            if message["Root-Is-Purelib"] == "true":
                libdir = paths["purelib"]
            else:
                libdir = paths["platlib"]

            # RECORD maps every archive path to its (path, digest, size)
            # row, used below to verify each extracted file.
            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            data_pfx = posixpath.join(data_dir, "")
            info_pfx = posixpath.join(info_dir, "")
            script_pfx = posixpath.join(data_dir, "scripts", "")

            # make a new instance rather than a copy of maker's,
            # as we mutate it
            fileop = FileOperator(dry_run=dry_run)
            fileop.record = True  # so we can rollback if needed

            bc = not sys.dont_write_bytecode  # Double negatives. Lovely!

            outfiles = []  # for RECORD writing

            # for script copying/shebang processing
            workdir = tempfile.mkdtemp()
            # set target dir later
            # we default add_launchers to False, as the
            # Python Launcher should be used instead
            maker.source_dir = workdir
            maker.target_dir = None
            try:
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode("utf-8")
                    if self.skip_entry(u_arcname):
                        continue
                    # Verify size and digest against the RECORD entry
                    # before installing anything.
                    row = records[u_arcname]
                    if row[2] and str(zinfo.file_size) != row[2]:
                        raise DistlibException("size mismatch for " "%s" % u_arcname)
                    if row[1]:
                        kind, value = row[1].split("=", 1)
                        with zf.open(arcname) as bf:
                            data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException(
                                "digest mismatch for " "%s" % arcname
                            )

                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                        logger.debug("lib_only: skipping %s", u_arcname)
                        continue
                    # Scripts (other than pre-built .exe launchers) get
                    # shebang processing via `maker` below.
                    is_script = u_arcname.startswith(
                        script_pfx
                    ) and not u_arcname.endswith(".exe")

                    if u_arcname.startswith(data_pfx):
                        # <name>.data/<where>/<path> installs under paths[where].
                        _, where, rp = u_arcname.split("/", 2)
                        outfile = os.path.join(paths[where], convert_path(rp))
                    else:
                        # meant for site-packages.
                        if u_arcname in (wheel_metadata_name, record_name):
                            continue
                        outfile = os.path.join(libdir, convert_path(u_arcname))
                    if not is_script:
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, outfile)
                        outfiles.append(outfile)
                        # Double check the digest of the written file
                        if not dry_run and row[1]:
                            with open(outfile, "rb") as bf:
                                data = bf.read()
                                _, newdigest = self.get_hash(data, kind)
                                if newdigest != digest:
                                    raise DistlibException(
                                        "digest mismatch "
                                        "on write for "
                                        "%s" % outfile
                                    )
                        if bc and outfile.endswith(".py"):
                            try:
                                pyc = fileop.byte_compile(
                                    outfile, hashed_invalidation=bc_hashed_invalidation
                                )
                                outfiles.append(pyc)
                            except Exception:
                                # Don't give up if byte-compilation fails,
                                # but log it and perhaps warn the user
                                logger.warning("Byte-compilation failed", exc_info=True)
                    else:
                        # Copy the script into the temp workdir and let
                        # `maker` rewrite its shebang into the target dir.
                        fn = os.path.basename(convert_path(arcname))
                        workname = os.path.join(workdir, fn)
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, workname)

                        dn, fn = os.path.split(outfile)
                        maker.target_dir = dn
                        filenames = maker.make(fn)
                        fileop.set_executable_mode(filenames)
                        outfiles.extend(filenames)

                if lib_only:
                    logger.debug("lib_only: returning None")
                    dist = None
                else:
                    # Generate scripts

                    # Try to get pydist.json so we can see if there are
                    # any commands to generate. If this fails (e.g. because
                    # of a legacy wheel), log a warning but don't give up.
                    commands = None
                    file_version = self.info["Wheel-Version"]
                    if file_version == "1.0":
                        # Use legacy info
                        ep = posixpath.join(info_dir, "entry_points.txt")
                        try:
                            with zf.open(ep) as bwf:
                                epdata = read_exports(bwf)
                            commands = {}
                            for key in ("console", "gui"):
                                k = "%s_scripts" % key
                                if k in epdata:
                                    commands["wrap_%s" % key] = d = {}
                                    for v in epdata[k].values():
                                        s = "%s:%s" % (v.prefix, v.suffix)
                                        if v.flags:
                                            s += " [%s]" % ",".join(v.flags)
                                        d[v.name] = s
                        except Exception:
                            logger.warning(
                                "Unable to read legacy script "
                                "metadata, so cannot generate "
                                "scripts"
                            )
                    else:
                        try:
                            with zf.open(metadata_name) as bwf:
                                wf = wrapper(bwf)
                                commands = json.load(wf).get("extensions")
                                if commands:
                                    commands = commands.get("python.commands")
                        except Exception:
                            logger.warning(
                                "Unable to read JSON metadata, so "
                                "cannot generate scripts"
                            )
                    if commands:
                        console_scripts = commands.get("wrap_console", {})
                        gui_scripts = commands.get("wrap_gui", {})
                        if console_scripts or gui_scripts:
                            script_dir = paths.get("scripts", "")
                            if not os.path.isdir(script_dir):
                                raise ValueError("Valid script path not " "specified")
                            maker.target_dir = script_dir
                            for k, v in console_scripts.items():
                                script = "%s = %s" % (k, v)
                                filenames = maker.make(script)
                                fileop.set_executable_mode(filenames)

                            if gui_scripts:
                                options = {"gui": True}
                                for k, v in gui_scripts.items():
                                    script = "%s = %s" % (k, v)
                                    filenames = maker.make(script, options)
                                    fileop.set_executable_mode(filenames)

                    p = os.path.join(libdir, info_dir)
                    dist = InstalledDistribution(p)

                    # Write SHARED
                    paths = dict(paths)  # don't change passed in dict
                    del paths["purelib"]
                    del paths["platlib"]
                    paths["lib"] = libdir
                    p = dist.write_shared_locations(paths, dry_run)
                    if p:
                        outfiles.append(p)

                    # Write RECORD
                    dist.write_installed_files(outfiles, paths["prefix"], dry_run)
                return dist
            except Exception:  # pragma: no cover
                logger.exception("installation failed.")
                fileop.rollback()
                raise
            finally:
                shutil.rmtree(workdir)
Example #52
0
def lambda_handler(event, context):
    """AWS Lambda entry point for forwarding SES-received e-mail.

    Loads a forwarding configuration from S3 (falling back to a baked-in
    backup), fetches the raw message stored by the SES receipt rule,
    rewrites its headers so it can be re-sent from a verified address,
    and forwards it according to the sender/recipient mapping tables.

    NOTE(review): Python 2 code (print statements, dict.iteritems) that
    relies on module-level globals bucketConfig, bucketKeyConfig,
    defaultConfigBackUp and the helper sendEmail.
    """
    s3 = boto3.client('s3')
    ses = boto3.client('ses')

    # Load the forwarding configuration from S3, falling back to the
    # baked-in backup when it cannot be fetched.
    try:
        o2 = s3.get_object(Bucket=bucketConfig, Key=bucketKeyConfig)
        defaultConfig = json.loads(o2['Body'].read())
    except:
        # NOTE(review): bare except hides the real failure reason;
        # catching the boto client errors explicitly would be safer.
        print "There was an error accessing the default Config in s3"
        defaultConfig = defaultConfigBackUp

    # SES stores the raw message in S3 under prefix + messageId.
    record = event['Records'][0]
    assert record['eventSource'] == 'aws:ses'

    o = s3.get_object(Bucket=defaultConfig['Ses_Incoming_Bucket'],
                      Key=defaultConfig['prefix'] +
                      record['ses']['mail']['messageId'])

    raw_mail = o['Body']
    recipients = []
    msg = message_from_file(raw_mail)

    # Strip headers that would invalidate the re-sent message or leak
    # the original envelope.
    del msg['DKIM-Signature']
    del msg['Sender']
    sender = msg['Return-Path'][1:-1]  # strip the surrounding angle brackets
    del msg['Return-Path']

    defaultConfig['original_from'] = msg['From']
    defaultConfig['previousSubj'] = msg['Subject']

    # Re-send from the SES-verified address while keeping the original
    # sender's display name.
    del msg['Subject']
    del msg['From']
    msg['From'] = (
        re.sub(r'\<.+?\>', '', defaultConfig['original_from']).strip() +
        ' <{}>'.format(defaultConfig['Verified_From_Email']))

    msg['Return-Path'] = defaultConfig['Verified_From_Email']
    recipient = record['ses']['receipt']['recipients'][0]

    # Prefix the subject with the mailbox name the mail arrived at.
    defaultConfig['recipientName'] = recipient.split('@')[0]
    defaultConfig['subjectPrefix'] = ('[AWS ' +
                                      defaultConfig['recipientName'] + '] ' +
                                      defaultConfig['previousSubj'])
    msg['Subject'] = defaultConfig['subjectPrefix']
    msg_string = msg.as_string()

    # Sender-based mapping takes precedence over recipient-based mapping.
    for key, value in defaultConfig['Forward_bySender_Mapping'].iteritems():
        print sender
        if key == sender:
            recipients = value
            sendEmail(recipients, msg_string, ses)
            return

    for key, value in defaultConfig['Forward_byRecipient_Mapping'].iteritems():
        if key == recipient:
            recipients = value
            sendEmail(recipients, msg_string, ses)
            return
        # A suffix match (e.g. an entire domain) also forwards.
        if recipient.endswith(key):
            recipients = defaultConfig['Forward_byRecipient_Mapping'].get(
                key, [])
            sendEmail(recipients, msg_string, ses)
            return
    return
                elif part.get_content_type() == 'message/rfc822':
                    continue
                if not os.path.isdir(newdir):
                    os.mkdir(newdir)

                counter += 1
                fd = file(os.path.join(newdir, filename), 'w')
                fd.write(part.get_payload(decode=1))
                fd.close()
        else:
            description = msg.get_payload(decode=1).decode(msg.get_charsets()[0])

        project = self.project_re.search(subject)
        if project:
            project = project.groups()[0]
        else:
            project = ''

        for partner in partners:
            self.rpc(('res.partner.event', 'create', {'name' : subject, 'partner_id' : partner, 'description' : description, 'project' : project}))


if __name__ == '__main__':
    # Build the dispatch chain: RPC backend -> event receiver -> header parser,
    # then feed it the message arriving on standard input.
    backend = CommandDispatcher(RPCProxy(4, 'admin'))
    receiver = ReceiverEmail2Event(backend)
    dispatcher = CommandDispatcher(receiver)
    parser = EmailParser(['To', 'Cc', 'From'], dispatcher)
    parser.parse(email.message_from_file(sys.stdin))

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

Example #54
0
def from_file(fileobj):
    """Parse *fileobj* as an email and clean it up into a MailBase."""
    parsed = email.message_from_file(fileobj)
    return from_message(parsed)
    def eom(self):
        ''' End of Message '''
        # Rewind the spooled copy of the message collected during the
        # earlier milter callbacks so it can be parsed from the start.
        self.fp.seek(0)
        msg = email.message_from_file(self.fp)

        # Only replies to our own notifications are interesting; those
        # carry the original notification id in In-Reply-To.
        msg_id = msg.get('In-Reply-To', None)
        if msg_id is None:
            self.log('No In-Reply-To, keep going')
            return Milter.CONTINUE

        # Ensure we don't get extra lines in the message-id
        msg_id = msg_id.split('\n')[0].strip()

        self.log('msg-ig %s' % msg_id)
        self.log('To %s' % msg['to'])
        self.log('Cc %s' % msg.get('cc'))
        self.log('From %s' % msg['From'])

        # Check the email was sent to the right address
        # (the reply+<hash> address may appear in either To or Cc).
        email_address = msg['to']
        if 'reply+' in msg.get('cc', ''):
            email_address = msg['cc']
        if 'reply+' not in email_address:
            self.log(
                'No valid recipient email found in To/Cc: %s'
                % email_address)
            return Milter.CONTINUE

        # Ensure the user replied to his/her own notification, not that
        # they are trying to forge their ID into someone else's
        salt = _config.get('SALT_EMAIL')
        from_email = clean_item(msg['From'])
        session = pagure.lib.query.create_session(_config['DB_URL'])
        try:
            user = pagure.lib.query.get_user(session, from_email)
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit; narrowing to the lookup's exception would be safer.
            self.log(
                "Could not find an user in the DB associated with %s" %
                from_email)
            session.remove()
            return Milter.CONTINUE

        # Recompute the salted hash for every address the user owns; the
        # reply+<hash> address must embed one of them.
        hashes = []
        for email_obj in user.emails:
            # NOTE(review): hashlib.sha512() requires bytes on Python 3,
            # so this line only works as-is on Python 2 -- confirm target.
            m = hashlib.sha512('%s%s%s' % (msg_id, salt, email_obj.email))
            hashes.append(m.hexdigest())

        # reply+<hash>@host -> the candidate hash is after the last '+'.
        tohash = email_address.split('@')[0].split('+')[-1]
        if tohash not in hashes:
            self.log('hash list: %s' % hashes)
            self.log('tohash:    %s' % tohash)
            self.log('Hash does not correspond to the destination')
            session.remove()
            return Milter.CONTINUE

        # Avoid processing mail this service sent itself.
        if msg['From'] and msg['From'] == _config.get('FROM_EMAIL'):
            self.log("Let's not process the email we send")
            session.remove()
            return Milter.CONTINUE

        msg_id = clean_item(msg_id)

        # Route to the ticket or pull-request handler based on the marker
        # embedded in the notification's message-id.
        if msg_id and '-ticket-' in msg_id:
            self.log('Processing issue')
            session.remove()
            return self.handle_ticket_email(msg, msg_id)
        elif msg_id and '-pull-request-' in msg_id:
            self.log('Processing pull-request')
            session.remove()
            return self.handle_request_email(msg, msg_id)
        else:
            self.log('Not a pagure ticket or pull-request email, let it go')
            session.remove()
            return Milter.CONTINUE
Example #56
0
        return urlopen(req)
    except URLError, e:
        raise ShortyError(e)

def githubshrink(bigurl):
    """Shrink a GitHub URL via the http://git.io service.

    Raises ShortyError if *bigurl* is not a github.com URL or if the
    service response contains no Location header.
    """
    # Raw string: '\.' inside a non-raw literal is an invalid escape
    # sequence (DeprecationWarning on Python 3); r'...' makes the intent
    # explicit and future-proof.
    gitio_pattern = r'http(s)?://((gist|raw|develop(er)?)\.)?github\.com'
    gitio_re = re.compile(gitio_pattern)
    if not gitio_re.search(bigurl):
        raise ShortyError('URL must match %s' % gitio_pattern)
    resp = request('http://git.io', post_data="url=%s" % bigurl)
    # The service answers with a redirect; the short URL is carried in
    # the Location header ("Location: <url>" -> skip the 10-char prefix).
    for header in resp.info().headers:
        if header.startswith("Location:"):
            return header[10:].strip('\n\r')
    raise ShortyError('Failed to shrink url')

# Read the raw notification e-mail from standard input.
incoming = email.message_from_file(sys.stdin)

# Extract the body: first MIME part for multipart mail, otherwise the
# whole (decoded) payload.
if incoming.is_multipart():
    payload = incoming.get_payload(0).get_payload(decode=True)
else:
    payload = incoming.get_payload(decode=True)

# NOTE(review): with decode=True the payload is bytes on Python 3, so
# split("\n") would fail there -- this script appears to target Python 2.
bodylines = payload.split("\n")
subj = incoming.get("Subject")

# "User Name <user@host>" -> keep the part before '@', then split off '<'.
userparts = incoming.get("From").split("@", 1)
theuser = userparts[0].split("<", 1)

# Accumulators consumed by the parsing loop that follows further down.
msgs = []
foundcommit = 0
foundmsg = 0
Example #57
0
def extract(msgfile):
    """Return a dict with the stripped text and HTML bodies of the message in *msgfile*."""
    parsed = message_from_file(msgfile)
    plain, markup = get_text(parsed)
    return {"text": plain.strip(), "html": markup.strip()}
Example #58
0
def message_from_filepath(filepath):
    """Decompress *filepath* and parse the resulting stream as an email message."""
    import email
    stream = decompress(filepath)
    return email.message_from_file(stream)
Example #59
0
# Authenticate against the POP3 server (M is created earlier in the file).
M.user("18829210056")
M.pass_("wyh123")

# Print how many messages are in the mailbox.
numMessages = len(M.list()[1])
print 'num   of   messages', numMessages

for i in range(numMessages):
    # RETR returns the message as a list of lines; rebuild it into a
    # file-like buffer so the email parser can consume it.
    m = M.retr(i + 1)
    buf = cStringIO.StringIO()
    for j in m[1]:
        print >> buf, j
    buf.seek(0)

    # Parse the message content.
    msg = email.message_from_file(buf)
    for part in msg.walk():
        contenttype = part.get_content_type()
        filename = part.get_filename()

        if filename and contenttype == 'application/octet-stream':
            # Save the attachment to mail<N>.<filename>.attach.
            f = open("mail%d.%s.attach" % (i + 1, filename), 'wb')
            f.write(base64.decodestring(part.get_payload()))
            f.close()
        elif contenttype == 'text/plain':
            # Save the message body to mail<N>.txt.
            # NOTE(review): assumes the text part is base64-encoded; a
            # plain (unencoded) part would make decodestring fail.
            f = open("mail%d.txt" % (i + 1), 'wb')
            f.write(base64.decodestring(part.get_payload()))
            f.close()
	def _load_msg_from_file(self, name):
		"""Open *name* via self._open_file, parse it as an email message and return it."""
		handle = self._open_file(name)
		try:
			return message_from_file(handle)
		finally:
			# Always release the handle, even if parsing raises.
			handle.close()