Example #1
0
    def test_start_subshell(self, call_mock, tempfile_mock):
        """start_subshell writes a bash rcfile and launches bash with it.

        The tempfile is mocked with an in-memory buffer so the generated
        rcfile can be read back and compared to the expected script.
        """
        memfile = StringIO()
        memfile.name = 'FILENAME'
        tempfile_mock.return_value = memfile
        credentials = {'AWS_VALID_SECONDS': 600}
        start_subshell(credentials, 'ACCOUNT', 'ROLE')
        # The subshell must be wired to the parent's standard streams.
        call_mock.assert_called_once_with(
            ["bash", "--rcfile", 'FILENAME'],
            stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin)
        expected = dedent("""
            # Pretend to be an interactive, non-login shell
            for file in /etc/bash.bashrc ~/.bashrc; do
                [ -f "$file" ] && . "$file"
            done

            function afp_minutes_left {
                if ((SECONDS >= 600)) ; then
                    echo EXPIRED
                else
                    echo $(((600-SECONDS)/60)) Min
                fi
            }

            PS1="(AWS ACCOUNT/ROLE \$(afp_minutes_left)) $PS1"
            export AWS_VALID_SECONDS='600'""")
        # Rewind the mocked tempfile to read what start_subshell wrote.
        memfile.seek(0)
        received = memfile.read()
        self.assertEqual(received, expected)
Example #2
0
    def test_alternative(self):
        """encode() accepts an OrderedDict and renders fields in order."""
        mockfile = StringIO("I'm a file! *cough, cough*")
        mockfile.name = '/etc/mockfile.txt'

        m = encode(OrderedDict([
            ('file', mockfile),
            ('full_name', 'Joe Average'),
        ]))

        # Build the expected body up front, substituting the generated
        # boundary, then compare against the rendered multipart message.
        expected = (
            '--{boundary}\r\n'
            'Content-Disposition: form-data; name="file"; '
            'filename="mockfile.txt"\r\n'
            'Content-Type: text/plain\r\n'
            '\r\n'
            "I'm a file! *cough, cough*\r\n"
            '--{boundary}\r\n'
            'Content-Disposition: form-data; name="full_name"\r\n'
            'Content-Type: text/plain; charset=utf-8\r\n'
            '\r\n'
            'Joe Average\r\n'
            '--{boundary}--\r\n'
        ).format(boundary=m.get_boundary())

        self.assertEqual(str(m), expected)
Example #3
0
    def test_condition(self):
        """Conditional directives are evaluated against the defines mapping.

        Exercises #ifdef, #if, #else and negation (!) with numeric,
        string and "0L"-style values.
        """
        pp = CompilerPreprocessor({"A": 1, "B": "2", "C": "0L"})
        pp.out = StringIO()
        # 'source' instead of 'input' to avoid shadowing the builtin.
        source = StringIO(
            dedent("""\
            #ifdef A
            IFDEF_A
            #endif
            #if A
            IF_A
            #endif
            #  if B
            IF_B
            #  else
            IF_NOT_B
            #  endif
            #if !C
            IF_NOT_C
            #else
            IF_C
            #endif
        """))
        source.name = "foo"
        pp.do_include(source)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual("IFDEF_A\nIF_A\nIF_NOT_B\nIF_NOT_C\n",
                         pp.out.getvalue())
    def test_condition(self):
        """Conditional directives are evaluated against the defines mapping."""
        pp = CompilerPreprocessor({
            'A': 1,
            'B': '2',
            'C': '0L',
        })
        pp.out = StringIO()
        # 'source' instead of 'input' to avoid shadowing the builtin.
        source = StringIO(dedent('''\
            #ifdef A
            IFDEF_A
            #endif
            #if A
            IF_A
            #endif
            #  if B
            IF_B
            #  else
            IF_NOT_B
            #  endif
            #if !C
            IF_NOT_C
            #else
            IF_C
            #endif
        '''))
        source.name = 'foo'
        pp.do_include(source)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual('IFDEF_A\nIF_A\nIF_NOT_B\nIF_NOT_C\n', pp.out.getvalue())
Example #5
0
    def test_normalization(self):
        """__has_warning()/__has_attribute() calls match as whole tokens.

        Defines whose names include parenthesized/quoted arguments must
        only match the exact same expression in #if conditions.
        """
        pp = CompilerPreprocessor({
            '__has_attribute(bar)': '1',
            '__has_warning("-Wc++98-foo")': '1',
        })
        pp.out = StringIO()
        # 'source' instead of 'input' to avoid shadowing the builtin.
        source = StringIO(
            dedent('''\
        #if __has_warning("-Wbar")
        WBAR
        #endif
        #if __has_warning("-Wc++98-foo")
        WFOO
        #endif
        #if !__has_warning("-Wc++98-foo")
        NO_WFOO
        #endif
        #if __has_attribute(bar)
        BAR
        #else
        NO_BAR
        #endif
        #if !__has_attribute(foo)
        NO_FOO
        #endif
        '''))

        source.name = 'foo'
        pp.do_include(source)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(pp.out.getvalue(), 'WFOO\nBAR\nNO_FOO\n')
Example #6
0
    def test_normalization(self):
        """__has_warning()/__has_attribute() calls match as whole tokens."""
        pp = CompilerPreprocessor({
            "__has_attribute(bar)": 1,
            '__has_warning("-Wc++98-foo")': 1
        })
        pp.out = StringIO()
        # 'source' instead of 'input' to avoid shadowing the builtin.
        source = StringIO(
            dedent("""\
        #if __has_warning("-Wbar")
        WBAR
        #endif
        #if __has_warning("-Wc++98-foo")
        WFOO
        #endif
        #if !__has_warning("-Wc++98-foo")
        NO_WFOO
        #endif
        #if __has_attribute(bar)
        BAR
        #else
        NO_BAR
        #endif
        #if !__has_attribute(foo)
        NO_FOO
        #endif
        """))

        source.name = "foo"
        pp.do_include(source)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(pp.out.getvalue(), "WFOO\nBAR\nNO_FOO\n")
Example #7
0
    def test_start_subshell(self, call_mock, tempfile_mock):
        """start_subshell writes a bash rcfile and launches bash with it.

        tempfile is mocked to an in-memory buffer so the generated rcfile
        can be read back and compared against the expected script.
        """
        memfile = StringIO()
        memfile.name = 'FILENAME'
        tempfile_mock.return_value = memfile
        credentials = {'AWS_VALID_SECONDS': 600}
        start_subshell(credentials, 'ACCOUNT', 'ROLE')
        # The subshell inherits the parent's standard streams.
        call_mock.assert_called_once_with(["bash", "--rcfile", 'FILENAME'],
                                          stdout=sys.stdout,
                                          stderr=sys.stderr,
                                          stdin=sys.stdin)
        expected = dedent("""
            # Pretend to be an interactive, non-login shell
            for file in /etc/bash.bashrc ~/.bashrc; do
                [ -f "$file" ] && . "$file"
            done

            function afp_minutes_left {
                if ((SECONDS >= 600)) ; then
                    echo EXPIRED
                else
                    echo $(((600-SECONDS)/60)) Min
                fi
            }

            PS1="(AWS ACCOUNT/ROLE \$(afp_minutes_left)) $PS1"
            export AWS_VALID_SECONDS='600'""")
        # Rewind the mocked tempfile to read what start_subshell wrote.
        memfile.seek(0)
        received = memfile.read()
        self.assertEqual(received, expected)
Example #8
0
    def test_expansion(self):
        """Macro expansion rewrites bare identifiers but not quoted text."""
        pp = CompilerPreprocessor({"A": 1, "B": "2", "C": "c", "D": "d"})
        pp.out = StringIO()
        # "D" sits inside string quotes and must not be expanded.
        # 'source' instead of 'input' to avoid shadowing the builtin.
        source = StringIO('A.B.C "D"')
        source.name = "foo"
        pp.do_include(source)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(pp.out.getvalue(), '1 . 2 . c "D"')
Example #9
0
    def test_expansion(self):
        """Macro expansion rewrites bare identifiers but not quoted text."""
        pp = CompilerPreprocessor({'A': 1, 'B': '2', 'C': 'c', 'D': 'd'})
        pp.out = StringIO()
        # 'source' instead of 'input' to avoid shadowing the builtin.
        source = StringIO('A.B.C "D"')
        source.name = 'foo'
        pp.do_include(source)

        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(pp.out.getvalue(), '1 . 2 . c "D"')
Example #10
0
    def test_figure(self):
        """Saving image bytes into the figure field yields an ImageFieldFile."""
        # Minimal valid GIF payload, per http://stackoverflow.com/a/2473445/1694979
        gif_data = (
            'GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,'
            '\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;')
        imgfile = StringIO(gif_data)
        imgfile.name = 'test_img_file.gif'

        self.q.figure.save('image', ContentFile(imgfile.read()))
        self.assertIsInstance(self.q.figure, ImageFieldFile)
Example #11
0
    def test_override_l10n(self):
        jm = self.jm
        jm.l10nbase = "/L10N_BASE"
        jm.outputFormat = "flat"  # doesn't touch chrome dir without files
        jarcontents = StringIO("""en-US.jar:
relativesrcdir dom/locales:
""")
        jarcontents.name = "override.mn"
        jm.makeJar(jarcontents, "/NO_OUTPUT_REQUIRED")
        self.assertEquals(jm.localedirs, [os.path.join("/L10N_BASE", "dom")])
Example #12
0
    def test_override_l10n(self):
        jm = self.jm
        jm.l10nbase = '/L10N_BASE'
        jm.outputFormat = 'flat'  # doesn't touch chrome dir without files
        jarcontents = StringIO('''en-US.jar:
relativesrcdir dom/locales:
''')
        jarcontents.name = 'override.mn'
        jm.makeJar(jarcontents, '/NO_OUTPUT_REQUIRED')
        self.assertEquals(jm.localedirs, [os.path.join('/L10N_BASE', 'dom')])
Example #13
0
    def test_chunking(self):
        """Every chunk yielded by Multipart respects the chunksize limit."""
        mockfile = StringIO("I'm a file! *cough, cough*")
        mockfile.name = 'mockfile.txt'

        ms = Multipart([
            Field('full_name', fileobj=mockfile),
            Field('full_name', 'Joe Average')
        ], chunksize=10)

        # all() states the invariant directly and, unlike the previous
        # reduce()/lambda fold, needs no functools import on Python 3.
        self.assertTrue(all(len(chunk) <= 10 for chunk in ms))
Example #14
0
    def test_image_in_question(self):
        """A question with a saved figure renders an <img> tag with alt text."""
        # Minimal valid GIF payload.
        gif_data = (
            'GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,'
            '\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;')
        imgfile = StringIO(gif_data)
        imgfile.name = 'test_img_file.gif'

        self.question1.figure.save('image', ContentFile(imgfile.read()))
        response = self.client.get('/tq1/take/')

        self.assertContains(response, '<img src=')
        self.assertContains(response, 'alt="' + str(self.question1.content))
Example #15
0
def extract_templetor(fileobj, keywords, comment_tags, options):
    """Extract i18n messages from web.py templates.

    Compiles the template to Python source with web.py's template engine,
    then delegates to Babel's ``extract_python`` on the generated code.
    Returns an empty list when the template fails to compile.
    """
    try:
        # Replace/remove inline js '\$' which interferes with the Babel
        # python parser.  Use a raw string: a bare '\$' is an invalid
        # escape sequence (SyntaxWarning on modern Python) with the same
        # runtime value.
        code = web.template.Template.generate_code(
            fileobj.read().replace(r'\$', ''), fileobj.name)
        f = StringIO(code)
        # Babel reports extraction locations under the original name.
        f.name = fileobj.name
    except Exception as e:
        print(fileobj.name + ':', str(e), file=web.debug)
        return []
    return extract_python(f, keywords, comment_tags, options)
Example #16
0
    def test_override(self):
        jm = self.jm
        jm.outputFormat = 'flat'  # doesn't touch chrome dir without files
        jarcontents = StringIO('''en-US.jar:
relativesrcdir dom/locales:
''')
        jarcontents.name = 'override.mn'
        jm.makeJar(jarcontents, '/NO_OUTPUT_REQUIRED')
        self.assertEquals(jm.localedirs, [
            os.path.join(os.path.abspath('/TOPSOURCEDIR'), 'dom/locales',
                         'en-US')
        ])
Example #17
0
 def test_load_fasta(self):
     """A FASTA stream loaded through the mock client is searchable by oid."""
     client = kbase_data.MockDataClient()
     client.authenticate()
     fasta = StringIO("> metadata\nGATTACAGATTACAGATTACA\n")
     fasta.name = "StringIO"
     src = kbase_data.FastaSource(fasta)
     oid = client.load(src)
     objects = client.search({'oid': oid})
     self.assertEqual(len(objects), 1)
     # The loader is expected to record a hash of the raw FASTA bytes.
     file_hash = objects[0].measurements.get('Bytes.FASTA', None)
     self.assertNotEqual(file_hash, None)
Example #18
0
 def test_load_fasta(self):
     """Loading a FASTA stream via the mock client makes it searchable.

     The loaded object is expected to carry a 'Bytes.FASTA' measurement
     (a hash of the raw FASTA content).
     """
     client = kbase_data.MockDataClient()
     client.authenticate()
     s = StringIO("> metadata\nGATTACAGATTACAGATTACA\n")
     s.name = "StringIO"
     src = kbase_data.FastaSource(s)
     oid = client.load(src)
     objects = client.search({'oid': oid})
     #print("Got back objects:")
     #for o in objects:
     #    print('--')
     #    print(o)
     self.assertEqual(len(objects), 1)
     file_hash = objects[0].measurements.get('Bytes.FASTA', None)
     self.assertNotEqual(file_hash, None)
Example #19
0
    def test_override(self):
        jm = self.jm
        jm.outputFormat = "flat"  # doesn't touch chrome dir without files
        jarcontents = StringIO("""en-US.jar:
relativesrcdir dom/locales:
""")
        jarcontents.name = "override.mn"
        jm.makeJar(jarcontents, "/NO_OUTPUT_REQUIRED")
        self.assertEquals(
            jm.localedirs,
            [
                os.path.join(os.path.abspath("/TOPSOURCEDIR"), "dom/locales",
                             "en-US")
            ],
        )
Example #20
0
 def test_start_subcmd(self, unlink_mock, call_mock, tempfile_mock):
     """start_subcmd writes a cmd.exe init script and runs ``cmd /K`` on it."""
     memfile = StringIO()
     memfile.name = 'FILENAME'
     # Need to mock away the 'close', so we can read it out later
     # despite 'close' being called inside the function.
     memfile.close = Mock()
     tempfile_mock.return_value = memfile
     credentials = {'AWS_VALID_SECONDS': 600}
     start_subcmd(credentials, 'ACCOUNT', 'ROLE')
     call_mock.assert_called_once_with(["cmd", "/K", 'FILENAME'])
     expected = dedent("""
         @echo off
         set PROMPT=$C AWS ACCOUNT/ROLE $F
         set AWS_VALID_SECONDS='600'""")
     # Rewind the mocked tempfile to read what start_subcmd wrote.
     memfile.seek(0)
     received = memfile.read()
     self.assertEqual(received, expected)
Example #21
0
def _expand_include_statements(file_obj, parsed_files=None):
    parsed_lines = []

    for line in file_obj:
        line = line.strip()
        if not line or line.startswith('#'):
            continue

        match = re.match(SETTINGS_REGEX, line)
        if not match:
            parsed_lines.append(line)
            continue

        key = match.group(1).lower()
        value = match.group(2)

        if key != 'include':
            parsed_lines.append(line)
            continue

        if parsed_files is None:
            parsed_files = []

        # The path can be relative to its parent configuration file
        if path.isabs(value) is False and value[0] != '~':
            folder = path.dirname(file_obj.name)
            value = path.join(folder, value)

        value = path.expanduser(value)

        for filename in glob.iglob(value):
            if path.isfile(filename):
                if filename in parsed_files:
                    raise Exception(
                        'Include loop detected in ssh config file: %s' %
                        filename, )
                with open(filename) as fd:
                    parsed_files.append(filename)
                    parsed_lines.extend(
                        _expand_include_statements(fd, parsed_files))

    return parsed_lines
    output = StringIO('\n'.join(parsed_lines))
    output.name = file_obj.name
    return output
Example #22
0
 def test_start_subcmd(self, unlink_mock, call_mock, tempfile_mock):
     """start_subcmd writes a cmd.exe init script and runs ``cmd /K`` on it."""
     memfile = StringIO()
     memfile.name = 'FILENAME'
     # Need to mock away the 'close', so we can read it out later
     # despite 'close' being called inside the function.
     memfile.close = Mock()
     tempfile_mock.return_value = memfile
     credentials = {'AWS_VALID_SECONDS': 600}
     start_subcmd(credentials, 'ACCOUNT', 'ROLE')
     call_mock.assert_called_once_with(
         ["cmd", "/K", 'FILENAME'])
     expected = dedent("""
         @echo off
         set PROMPT=$C AWS ACCOUNT/ROLE $F
         set AWS_VALID_SECONDS='600'""")
     # Rewind the mocked tempfile to read what start_subcmd wrote.
     memfile.seek(0)
     received = memfile.read()
     self.assertEqual(received, expected)
Example #23
0
    def attempt_upload_file_and_verify_result(self, test_case, event_name, content=None):
        """
        Running on a test case, creating a temp file, uploading it by
        calling the corresponding ajax event, and verifying that upload
        happens or is rejected as expected.
        """
        # Build the in-memory upload payload: raw magic bytes, serialized
        # JSON content, or an empty file — in that order of preference.
        if 'magic_number' in test_case:
            payload = test_case['magic_number'].decode('hex')
        elif content is not None:
            payload = json.dumps(content, sort_keys=True)
        else:
            payload = ''
        f_handler = StringIO(payload)

        f_handler.content_type = test_case['mimetypes']
        f_handler.name = 'file' + test_case['suffixes']
        url = self.get_handler_url(event_name)
        resp = self.client.post(url, {'file': f_handler})
        self.assertEqual(resp.status_code, test_case['status'])
Example #24
0
def test_dive_export(mocked_urlopen, mock_fetch_response):
    """dive_export merges fragment, UniProt and pdb-tag inputs into DiVE props.

    Network access is mocked; all tabular inputs are in-memory streams.
    """
    mocked_urlopen.return_value = mock_fetch_response

    fragmentsdb = 'data/fragments.sqlite'
    uniprot_annot = StringIO(
        'Entry\tGene names  (primary )\tProtein families\tCross-reference (PDB)' + '\n' +
        'P0CG48\tUBC\tUbiquitin family\t1C3T;2N2K' + '\n'
    )
    pdbtag = StringIO('2n2k' + '\n')
    pdbtag.name = 'mytag'

    propnames = StringIO()
    props = StringIO()

    dive.dive_export(fragmentsdb, uniprot_annot, [pdbtag], propnames, props)

    assert '["pdb", "het", "fragment", "title", "smiles", "weight", "uniprot", "protein", "organism", "gene", "pdbtag", "family0", "family1", "family2", "family3", "family4"]' == propnames.getvalue()
    props_lines = props.getvalue().split('\n')
    result = list(filter(lambda d: d.startswith('2n2k_MTN_frag1'), props_lines))[0]
    # NOTE(review): 'H**o sapiens' below looks like a censoring artifact of
    # 'Homo sapiens' introduced by scraping — confirm against the real fixture.
    expected = '2n2k_MTN_frag1 pdb:2n2k het:MTN fragment:1 "title:Ensemble structure of the closed state of Lys63-linked diubiquitin in the absence of a ligand" smiles:CC1(C)C=C(C[S-])C(C)(C)[NH+]1O 170.17 uniprot:P0CG48 "protein:Polyubiquitin-C" "organism:H**o sapiens" "gene:UBC" pdbtag:mytag "family0:Ubiquitin family"'
    assert result == expected
Example #25
0
    def test_alternative_explicit_filename(self):
        """An explicit (fileobj, filename) tuple overrides the file's own name."""
        mockfile = StringIO("I'm a file! *cough, cough*")
        mockfile.name = '/etc/mockfile.txt'

        m = encode({
            'file': (mockfile, 'explicit-name.txt')
        })

        # Build the expected body with the generated boundary substituted,
        # then compare against the rendered multipart message.
        expected = (
            '--{boundary}\r\n'
            'Content-Disposition: form-data; name="file"; '
            'filename="explicit-name.txt"\r\n'
            'Content-Type: text/plain\r\n'
            '\r\n'
            "I'm a file! *cough, cough*\r\n"
            '--{boundary}--\r\n'
        ).format(boundary=m.get_boundary())

        self.assertEqual(str(m), expected)
Example #26
0
    def test_with_fileobj(self):
        """A file field derives its filename and headers from the file object.

        With a fixed boundary the body — and therefore Content-Length —
        is fully deterministic.
        """
        mockfile = StringIO("I'm a file! *cough, cough*")
        mockfile.name = 'mockfile.txt'

        m = Multipart([
            Field('file', fileobj=mockfile)
        ], boundary='testing')

        self.assertEqual(m.get_headers(), {
            'Content-Length': '150',
            'Content-Type': 'multipart/form-data; boundary=testing'
        })

        self.assertEqual(str(m),
            '--testing\r\n'
            'Content-Disposition: form-data; name="file"; '
                'filename="mockfile.txt"\r\n'
            'Content-Type: text/plain\r\n'
            '\r\n'
            "I'm a file! *cough, cough*\r\n"
            '--testing--\r\n'
        )
Example #27
0
def test_dive_export(mocked_urlopen, mock_fetch_response):
    """dive_export merges fragment, UniProt and pdb-tag inputs into DiVE props.

    Network access is mocked; all tabular inputs are in-memory streams.
    """
    mocked_urlopen.return_value = mock_fetch_response

    fragmentsdb = 'data/fragments.sqlite'
    uniprot_annot = StringIO(
        'Entry\tGene names  (primary )\tProtein families\tCross-reference (PDB)'
        + '\n' + 'P0CG48\tUBC\tUbiquitin family\t1C3T;2N2K' + '\n')
    pdbtag = StringIO('2n2k' + '\n')
    pdbtag.name = 'mytag'

    propnames = StringIO()
    props = StringIO()

    dive.dive_export(fragmentsdb, uniprot_annot, [pdbtag], propnames, props)

    assert '["pdb", "het", "fragment", "title", "smiles", "weight", "uniprot", "protein", "organism", "gene", "pdbtag", "family0", "family1", "family2", "family3", "family4"]' == propnames.getvalue(
    )
    props_lines = props.getvalue().split('\n')
    result = list(filter(lambda d: d.startswith('2n2k_MTN_frag1'),
                         props_lines))[0]
    # NOTE(review): 'H**o sapiens' below looks like a censoring artifact of
    # 'Homo sapiens' introduced by scraping — confirm against the real fixture.
    expected = '2n2k_MTN_frag1 pdb:2n2k het:MTN fragment:1 "title:Ensemble structure of the closed state of Lys63-linked diubiquitin in the absence of a ligand" smiles:CC1(C)C=C(C[S-])C(C)(C)[NH+]1O 170.17 uniprot:P0CG48 "protein:Polyubiquitin-C" "organism:H**o sapiens" "gene:UBC" pdbtag:mytag "family0:Ubiquitin family"'
    assert result == expected
 def fetch(self, url, inject_ids=True):  # type: (Text, bool) -> Any
     """Fetch and YAML-parse the document at *url*, caching in ``self.idx``.

     Returns the cached value when *url* was already fetched.  When the
     parsed result is a mapping and *inject_ids* is set, any declared
     identifier fields missing from it are filled with *url*, and the
     result is indexed under each (expanded) identifier value.
     Raises ``validate.ValidationException`` on YAML syntax errors.
     """
     if url in self.idx:
         return self.idx[url]
     try:
         text = self.fetch_text(url)
         # Normalize to a text stream; bytes are assumed to be UTF-8.
         if isinstance(text, bytes):
             textIO = StringIO(text.decode('utf-8'))
         else:
             textIO = StringIO(text)
         textIO.name = url  # type: ignore
         result = yaml.round_trip_load(textIO)
         # Attach line/column provenance keyed by the source URL.
         add_lc_filename(result, url)
     except yaml.parser.ParserError as e:
         raise validate.ValidationException("Syntax error %s" % (e))
     if (isinstance(result, CommentedMap) and inject_ids
             and bool(self.identifiers)):
         for identifier in self.identifiers:
             if identifier not in result:
                 result[identifier] = url
             self.idx[self.expand_url(result[identifier], url)] = result
     else:
         self.idx[url] = result
     return result
Example #29
0
 def fetch(self, url, inject_ids=True):  # type: (Text, bool) -> Any
     """Fetch and YAML-parse the document at *url*, caching in ``self.idx``.

     Returns the cached value when *url* was already fetched.  When the
     parsed result is a mapping and *inject_ids* is set, any declared
     identifier fields missing from it are filled with *url*, and the
     result is indexed under each (expanded) identifier value.
     Raises ``validate.ValidationException`` on YAML syntax errors.
     """
     if url in self.idx:
         return self.idx[url]
     try:
         text = self.fetch_text(url)
         # Normalize to a text stream; bytes are assumed to be UTF-8.
         if isinstance(text, bytes):
             textIO = StringIO(text.decode('utf-8'))
         else:
             textIO = StringIO(text)
         textIO.name = url    # type: ignore
         result = yaml.round_trip_load(textIO)
         # Attach line/column provenance keyed by the source URL.
         add_lc_filename(result, url)
     except yaml.parser.ParserError as e:
         raise validate.ValidationException("Syntax error %s" % (e))
     if (isinstance(result, CommentedMap) and inject_ids
             and bool(self.identifiers)):
         for identifier in self.identifiers:
             if identifier not in result:
                 result[identifier] = url
             self.idx[self.expand_url(result[identifier], url)] = result
     else:
         self.idx[url] = result
     return result
Example #30
0
    def send_details_email(self,
                           save=True,
                           template_name=TemplateModel.MEETING,
                           mail_to=None,
                           exclude_user=None,
                           extra_ctx=None,
                           save_timestamp=True):
        """Send the meeting-details HTML email (with ICS attachment) to members.

        Args:
            save: persist the model after sending.
            template_name: TemplateModel name used to render subject/body.
            mail_to: iterable of memberships to notify; defaults to
                ``self.members``.
            exclude_user: user to skip (e.g. the user who triggered the send).
            extra_ctx: extra template context merged over the defaults.
            save_timestamp: record ``last_email_sent`` when done.
        """

        ctx = {
            'meeting':
            self.closest_repetition.to_meeting_with_repetition_date(),
            'site': Site.objects.get_current(),
            'protocol': settings.SSL_ON and 'https' or 'http',
            'link': self.get_absolute_url(),
        }

        if extra_ctx:
            ctx.update(extra_ctx)

        if mail_to is None:
            mail_to = self.members

        for member in mail_to:

            # Only active members of active users who accepted the
            # invitation receive mail.
            if not member.user.is_active or not member.is_active \
                    or member.invitation_status != member.INV_INVITED:
                continue

            if exclude_user and member.user.id == exclude_user.id:
                continue

            ctx['recipient'] = member

            tmpl = TemplateModel.objects.get(name=template_name)
            subject = tmpl.generate_title(
                ctx) or self.account.name  # fixme: which one?
            message = tmpl.generate(ctx)

            mail = EmailMessage(subject, message, settings.DEFAULT_FROM_EMAIL,
                                [member.user.email])

            mail.extra_headers['From'] = settings.DONOTREPLY_FROM_EMAIL

            # Other account admins, so replies can reach a human.
            admin_emails = [
                m.user.email for m in self.account.get_admin_memberships()
                if m.id != member.id
            ]

            if self.created_by and self.created_by.user.email:
                mail.extra_headers['Reply-To'] = self.created_by.user.email
            elif admin_emails:
                mail.extra_headers['Reply-To'] = ', '.join(admin_emails)

            mail.content_subtype = "html"

            # try:
            #     for doc in self.folder.documents.all():
            #         mail.attach(doc.name, doc.file.read())
            # except Folder.DoesNotExist:
            #     pass

            # Per-recipient calendar invite attached as text/calendar.
            ics = self.generate_ics(self.creator.user, member)
            f = StringIO()
            f.name = 'calendar.ics'
            f.write(ics)
            mail.attach(f.name, f.getvalue(), 'text/calendar')

            mail.send()
            f.close()

        if save_timestamp:
            self.last_email_sent = timezone.now()
        if save:
            self.save()
Example #31
0
def clipboard_stream(name=None):
    """Wrap the current clipboard text in a named, file-like stream."""
    text = get_clipboard_data()
    stream = StringIO(text)
    # Fall back to a placeholder name when none (or an empty one) is given.
    if not name:
        name = '<clipboard>'
    stream.name = name
    return stream
Example #32
0
def main():
    """Command-line entry point: package a built application tree.

    Parses packager options, selects a chrome formatter (omni/jar/flat),
    preprocesses the package manifest, copies the selected files to the
    destination, and generates the precomplete file for the updater.
    """
    parser = ArgumentParser()
    parser.add_argument(
        "-D",
        dest="defines",
        action="append",
        metavar="VAR[=VAL]",
        help="Define a variable",
    )
    parser.add_argument(
        "--format",
        default="omni",
        help="Choose the chrome format for packaging " +
        "(omni, jar or flat ; default: %(default)s)",
    )
    parser.add_argument("--removals",
                        default=None,
                        help="removed-files source file")
    parser.add_argument(
        "--ignore-errors",
        action="store_true",
        default=False,
        help="Transform errors into warnings.",
    )
    parser.add_argument(
        "--ignore-broken-symlinks",
        action="store_true",
        default=False,
        help="Do not fail when processing broken symlinks.",
    )
    parser.add_argument(
        "--minify",
        action="store_true",
        default=False,
        help="Make some files more compact while packaging",
    )
    parser.add_argument(
        "--minify-js",
        action="store_true",
        help="Minify JavaScript files while packaging.",
    )
    parser.add_argument(
        "--js-binary",
        help="Path to js binary. This is used to verify "
        "minified JavaScript. If this is not defined, "
        "minification verification will not be performed.",
    )
    parser.add_argument("--jarlog",
                        default="",
                        help="File containing jar " + "access logs")
    parser.add_argument(
        "--compress",
        choices=("none", "deflate"),
        default="deflate",
        help="Use given jar compression (default: deflate)",
    )
    parser.add_argument("manifest",
                        default=None,
                        nargs="?",
                        help="Manifest file name")
    parser.add_argument("source", help="Source directory")
    parser.add_argument("destination", help="Destination directory")
    parser.add_argument(
        "--non-resource",
        nargs="+",
        metavar="PATTERN",
        default=[],
        help="Extra files not to be considered as resources",
    )
    args = parser.parse_args()

    # Start from the build-config defines, then layer on -D overrides.
    defines = dict(buildconfig.defines["ALLDEFINES"])
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    # Map the --compress choice onto the boolean the formatters expect.
    compress = {
        "none": False,
        "deflate": True,
    }[args.compress]

    copier = FileCopier()
    if args.format == "flat":
        formatter = FlatFormatter(copier)
    elif args.format == "jar":
        formatter = JarFormatter(copier, compress=compress)
    elif args.format == "omni":
        formatter = OmniJarFormatter(
            copier,
            buildconfig.substs["OMNIJAR_NAME"],
            compress=compress,
            non_resources=args.non_resource,
        )
    else:
        errors.fatal("Unknown format: %s" % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines["MOZ_OMNIJAR"] = 1
    elif "MOZ_OMNIJAR" in defines:
        del defines["MOZ_OMNIJAR"]

    respath = ""
    if "RESPATH" in defines:
        respath = SimpleManifestSink.normalize_path(defines["RESPATH"])
    # RESPATH must be relative: strip any leading slashes.
    while respath.startswith("/"):
        respath = respath[1:]

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
            ignore_broken_symlinks=args.ignore_broken_symlinks,
        )
        if args.js_binary:
            finder_args["minify_js_verify_command"] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             "js-compare-ast.js"),
            ]
        finder = PackagerFileFinder(args.source,
                                    find_executables=True,
                                    **finder_args)
        if "NO_PKG_FILES" in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter, args.manifest
                                              is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(""), "bin/*")
        sink.close(args.manifest is not None)

        if args.removals:
            # Preprocess the removed-files source; its stream needs a name
            # for preprocessor error reporting.
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, "removed-files"), removals)

    # If a pdb file is present and we were instructed to copy it, include it.
    # Run on all OSes to capture MinGW builds
    if buildconfig.substs.get("MOZ_COPY_PDBS"):
        # We want to mutate the copier while we're iterating through it, so copy
        # the items to a list first.
        copier_items = [(p, f) for p, f in copier]
        for p, f in copier_items:
            if isinstance(f, ExecutableFile):
                pdbname = os.path.splitext(f.inputs()[0])[0] + ".pdb"
                if os.path.exists(pdbname):
                    copier.add(os.path.basename(pdbname), File(pdbname))

    # Setup preloading
    if args.jarlog:
        if not os.path.exists(args.jarlog):
            raise Exception("Cannot find jar log: %s" % args.jarlog)
        omnijars = []
        if isinstance(formatter, OmniJarFormatter):
            omnijars = [
                mozpath.join(base, buildconfig.substs["OMNIJAR_NAME"])
                for base in sink.packager.get_bases(addons=False)
            ]

        from mozpack.mozjar import JarLog

        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            if respath:
                p = mozpath.relpath(p, respath)
            if p in log:
                f.preload(log[p])
            elif p in omnijars:
                raise Exception("No jar log data for %s" % p)

    copier.copy(args.destination)
    generate_precomplete(
        os.path.normpath(os.path.join(args.destination, respath)))
Example #33
0
def main():
    """Package a build: copy/pack the source directory into the requested
    chrome format, then generate the precomplete file for the updater.

    Steps: parse CLI arguments, apply build defines, pick a formatter
    (flat/jar/omni), preprocess the packaging manifest into a copier,
    optionally process removed-files, shlibsign NSS libraries, copy PDBs,
    preload omnijar entries from a jar access log, and finally copy
    everything to the destination directory.
    """
    parser = ArgumentParser()
    parser.add_argument('-D',
                        dest='defines',
                        action='append',
                        metavar="VAR[=VAL]",
                        help='Define a variable')
    parser.add_argument('--format',
                        default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals',
                        default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors',
                        action='store_true',
                        default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--ignore-broken-symlinks',
                        action='store_true',
                        default=False,
                        help='Do not fail when processing broken symlinks.')
    parser.add_argument('--minify',
                        action='store_true',
                        default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--minify-js',
                        action='store_true',
                        help='Minify JavaScript files while packaging.')
    parser.add_argument('--js-binary',
                        help='Path to js binary. This is used to verify '
                        'minified JavaScript. If this is not defined, '
                        'minification verification will not be performed.')
    parser.add_argument('--jarlog',
                        default='',
                        help='File containing jar ' + 'access logs')
    parser.add_argument('--compress',
                        choices=('none', 'deflate', 'brotli'),
                        default='deflate',
                        help='Use given jar compression (default: deflate)')
    parser.add_argument('manifest',
                        default=None,
                        nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource',
                        nargs='+',
                        metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    # Start from the build-wide defines, then layer -D overrides on top.
    defines = dict(buildconfig.defines['ALLDEFINES'])
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    # Map the CLI compression choice onto the values the jar writer expects.
    compress = {
        'none': False,
        'deflate': True,
        'brotli': JAR_BROTLI,
    }[args.compress]

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, compress=compress)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     compress=compress,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    # RESPATH is the resources directory relative to the destination; strip
    # any leading slashes so it joins cleanly with other paths below.
    respath = ''
    if 'RESPATH' in defines:
        respath = SimpleManifestSink.normalize_path(defines['RESPATH'])
    while respath.startswith('/'):
        respath = respath[1:]

    if not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = mozpath.join(buildconfig.topobjdir, 'dist')

    # Accumulate (rather than immediately raise) errors from manifest
    # processing so several problems can be reported in one run.
    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
            ignore_broken_symlinks=args.ignore_broken_symlinks,
        )
        if args.js_binary:
            finder_args['minify_js_verify_command'] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             'js-compare-ast.js')
            ]
        finder = FileFinder(args.source, find_executables=True, **finder_args)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter, args.manifest
                                              is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            # No manifest given: package everything under bin/.
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            # Preprocess the removed-files source and ship the result
            # alongside the packaged resources.
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        if not mozinfo.isMac and buildconfig.substs.get('COMPILE_ENVIRONMENT'):
            for lib in SIGN_LIBS:
                libbase = mozpath.join(respath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase, buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    # Add a .chk signature file next to each signed library.
                    copier.add(
                        libbase + '.chk',
                        LibSignFile(os.path.join(args.destination, libname)))

    # If a pdb file is present and we were instructed to copy it, include it.
    # Run on all OSes to capture MinGW builds
    if buildconfig.substs.get('MOZ_COPY_PDBS'):
        # We want to mutate the copier while we're iterating through it, so copy
        # the items to a list first.
        copier_items = [(p, f) for p, f in copier]
        for p, f in copier_items:
            if isinstance(f, ExecutableFile):
                pdbname = os.path.splitext(f.inputs()[0])[0] + '.pdb'
                if os.path.exists(pdbname):
                    copier.add(os.path.basename(pdbname), File(pdbname))

    # Setup preloading
    if args.jarlog:
        if not os.path.exists(args.jarlog):
            raise Exception('Cannot find jar log: %s' % args.jarlog)
        omnijars = []
        if isinstance(formatter, OmniJarFormatter):
            omnijars = [
                mozpath.join(base, buildconfig.substs['OMNIJAR_NAME'])
                for base in sink.packager.get_bases(addons=False)
            ]

        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            # Log paths are relative to the resources directory.
            if respath:
                p = mozpath.relpath(p, respath)
            if p in log:
                f.preload(log[p])
            elif p in omnijars:
                # Every omnijar is expected to appear in the access log.
                raise Exception('No jar log data for %s' % p)

    copier.copy(args.destination)
    generate_precomplete(
        os.path.normpath(os.path.join(args.destination, respath)))
Пример #34
0
def parse_attachment(message_part, state, attachments=None):
    """ Extract the attachment and metadata about it from the message.

    Returns a StringIO holding the attachment content, annotated with
    ``content_type``, ``size``, ``create_date``, ``mod_date``,
    ``read_date`` and ``name`` attributes, or ``None`` when the part is
    not an attachment or parsing failed (in which case
    ``state.error_count`` is incremented).
    """
    params = message_part.get_params(None, "Content-Disposition")
    if params:
        # If a 'part' has a Content-Disposition, we assume it is an attachment
        try:
            params = dict(params)
            print("\tContent-Disposition (for following email)", params)
            if "attachment" in params:
                # Not sure what's going on here
                # Why get payload with decode, then try again and reparse?
                # See details at
                # http://docs.python.org/2/library/email.message.html#email.message.Message.get_payload
                file_data = message_part.get_payload(decode=True)
                if not file_data:
                    payload = message_part.get_payload()
                    if isinstance(payload, list):
                        for msgobj in payload:
                            # TODO not sure this actually does anything
                            parse2(msgobj, state, attachments)
                        return None
                    print(message_part.get_payload(), file=sys.stderr)
                    print(message_part.get_content_charset(), file=sys.stderr)

                # BUGFIX: get_payload(decode=True) returns bytes on Python 3,
                # and StringIO only accepts text, so every attachment used to
                # raise TypeError here and fall into the error handler.
                # Decode with the declared charset; fall back to latin-1,
                # which maps every byte value and so never fails.
                if isinstance(file_data, bytes):
                    charset = message_part.get_content_charset() or "utf-8"
                    try:
                        attachment = StringIO(file_data.decode(charset))
                    except (LookupError, UnicodeDecodeError):
                        attachment = StringIO(file_data.decode("latin-1"))
                else:
                    attachment = StringIO(file_data)
                attachment.content_type = message_part.get_content_type()
                # Prefer the size declared in Content-Disposition, if any.
                attachment.size = params.get("size", len(file_data))
                attachment.create_date = params.get("create-date")
                attachment.mod_date = params.get("modification-date")
                attachment.read_date = params.get("read-date")
                # TODO convert dates to datetime

                filename = message_part.get_filename(None)
                if filename:
                    # Filenames may be encoded with =?encoding?...
                    # If so, convert to unicode
                    name, encoding = email.header.decode_header(filename)[0]
                    if encoding:
                        # BUGFIX: the format string was missing the
                        # {filename} placeholder even though the kwarg was
                        # already being passed to .format().
                        print(
                            "\t{filename} encoded with {encoding}, converting to unicode"
                            .format(filename=filename, encoding=encoding))
                        filename = name.decode(encoding)
                else:  # filename not in Content-Disposition
                    print(
                        """Warning, no filename found in: [{%s}%s] Content-Disposition: %s or Content-Type"""
                        % (state.sourceFileUUID, state.sourceFilePath, params),
                        file=sys.stderr,
                    )
                    filename = six.text_type(uuid.uuid4())
                    print(
                        "Attempting extraction with random filename: %s" %
                        (filename),
                        file=sys.stderr,
                    )
                # Remove newlines from filename because that breaks everything
                filename = filename.replace("\r", "").replace("\n", "")

                attachment.name = filename
                return attachment

        except Exception as inst:
            # Best-effort parsing: report the failure and keep going so one
            # bad part does not abort processing of the whole message.
            print(type(inst), file=sys.stderr)
            print(inst.args, file=sys.stderr)
            print(
                "Error parsing: file: {%s}%s" %
                (state.sourceFileUUID, state.sourceFilePath),
                file=sys.stderr,
            )
            print("Error parsing: Content-Disposition: ",
                  params,
                  file=sys.stderr)
            print(file=sys.stderr)
            state.error_count += 1
    return None