Example #1
def apt_supports_trusted_option():
    """
    Since apt version 0.8.16~exp3 the option ``[trusted=yes]`` can be used in a
    ``sources.list`` file to disable GPG key checking (see `Debian bug
    #596498`_). This version of apt is included with Ubuntu 12.04 and later,
    but deb-pkg-tools also has to support older versions of apt. The
    :py:func:`apt_supports_trusted_option()` function checks if the installed
    version of apt supports the ``[trusted=yes]`` option, so that deb-pkg-tools
    can use it when possible.

    :returns: ``True`` if the option is supported, ``False`` if it is not.

    .. _Debian bug #596498: http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=596498
    """
    global trusted_option_supported
    if trusted_option_supported is None:
        try:
            # Find the installed version of the `apt' package.
            version = execute('dpkg-query', '--show', '--showformat=${Version}', 'apt', capture=True)
            # Check if the version is >= 0.8.16~exp3 (which introduced [trusted=yes] support).
            execute('dpkg', '--compare-versions', version, 'ge', '0.8.16~exp3')
            # If ExternalCommandFailed is not raised,
            # `dpkg --compare-versions' reported success.
            trusted_option_supported = True
        except ExternalCommandFailed:
            trusted_option_supported = False
    return trusted_option_supported
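
A minimal sketch of the mechanism this function relies on: with the default check=True, execute() raises ExternalCommandFailed when a command exits nonzero, so `dpkg --compare-versions' doubles as a boolean test (hedged, assuming the executor API used above):

from executor import execute, ExternalCommandFailed

def dpkg_version_at_least(installed, required):
    # dpkg signals the comparison result purely through its exit status.
    try:
        execute('dpkg', '--compare-versions', installed, 'ge', required)
        return True
    except ExternalCommandFailed:
        return False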
Example #2
def runInteractive():
    """
    Main function for interactive mode
    """
    print "------------- Welcome to the Miner %s ----------------" % miner_version.version
    print "You can run HELP command anytime to get more information."
    print "Press TAB key for context base completion"
    print "    - F1  key to get context base HELP"
    print "    - Ctrl-H to get list of keyboard bindings"
    print "    - Ctrl-D to exit"
    while True:
        s = ""
        try:
            s = raw_input(">>> ")
        except KeyboardInterrupt:
            print
            continue
        except EOFError:
            break
        if not s: continue
        executor.execute(s)
        if statements.Import.checkIfWasModified():
            global theSymbolCompleter
            theSymbolCompleter = CompleterWrap()

    print "\nGoodbye"
Example #3
def runInteractive():
    """
    Main function for interactive mode
    """
    print "------------- Welcome to the Miner %s ----------------" % miner_version.version
    print "You can run HELP command anytime to get more information."
    print "Press TAB key for context base completion"
    print "    - F1  key to get miner command help"
    print "    - F2  key to get python documentation"
    print "    - Ctrl-K to get list of keyboard bindings"
    print "    - Ctrl-D to exit"
    miner_globals.setIsInteractive(True)
    while True:
        s = ""
        try:
            s = raw_input(">>> ")
        except KeyboardInterrupt:
            print
            continue
        except EOFError:
            break
        if not s: continue
        executor.execute(s)

    print "\nGoodbye"
Example #4
    def wkhtml_to_pdf(cls, data, options=None):
        """
        Call wkhtmltopdf to convert the html to pdf
        """
        with tempfile.NamedTemporaryFile(
                suffix='.html', prefix='trytond_', delete=False
        ) as source_file:
            file_name = source_file.name
            source_file.write(data)
            source_file.close()

            # Build the wkhtmltopdf command line as a single shell string.
            args = 'wkhtmltopdf'
            # Add Global Options
            if options:
                for option, value in options.items():
                    args += ' --%s' % option
                    if value:
                        args += ' "%s"' % value

            # Add source file name and output file name
            args += ' %s %s.pdf' % (file_name, file_name)
            # Execute the command using executor
            execute(args)
            return open(file_name + '.pdf').read()
Example #5
    def test_subprocess_output(self):
        self.assertEqual(execute('echo', 'this is a test', capture=True), 'this is a test')
        self.assertEqual(execute('echo', '-e', r'line 1\nline 2', capture=True), 'line 1\nline 2\n')
        # I don't know how to test for the effect of silent=True in a practical
        # way without creating the largest test in this test suite :-). The
        # least I can do is make sure the keyword argument is accepted and the
        # code runs without exceptions in supported environments.
        self.assertTrue(execute('echo', 'this is a test', silent=True))
Example #6
def copy_package_files(from_directory, to_directory, hard_links=True):
    """
    Copy package files to a temporary directory, using hard links when possible.

    :param from_directory: The pathname of a directory tree suitable for
                           packaging with ``dpkg-deb --build``.
    :param to_directory: The pathname of a temporary build directory.
    :param hard_links: Use hard links to speed up copying when possible.

    This function copies a directory tree suitable for packaging with
    ``dpkg-deb --build`` to a temporary build directory so that individual
    files can be replaced without changing the original directory tree. If the
    build directory is on the same file system as the source directory, hard
    links are used to speed up the copy. This function is used by
    :func:`build_package()`.
    """
    logger.info("Copying files (%s) to temporary directory (%s) ..",
                format_path(from_directory), format_path(to_directory))
    command = ['cp', '-a']
    makedirs(to_directory)
    if hard_links and ALLOW_HARD_LINKS:
        # Check whether we can use hard links to speed up the copy. In the past
        # this used the following simple and obvious check:
        #
        #   os.stat(source_directory).st_dev == os.stat(build_directory).st_dev
        #
        # However this expression holds true inside schroot, yet `cp -al' fails
        # when trying to create the hard links! This is why the following code now
        # tries to create an actual hard link to verify that `cp -al' can be used.
        test_file_from = None
        test_file_to = None
        try:
            # Find a unique filename that we can create and destroy without
            # touching any of the caller's files.
            while True:
                test_name = 'deb-pkg-tools-hard-link-test-%d' % random.randint(1, 1000)
                test_file_from = os.path.join(from_directory, test_name)
                test_file_to = os.path.join(to_directory, test_name)
                if not os.path.isfile(test_file_from):
                    break
            # Create the test file.
            with open(test_file_from, 'w') as handle:
                handle.write('test')
            os.link(test_file_from, test_file_to)
            logger.debug("Speeding up file copy using hard links ..")
            command.append('-l')
        except (IOError, OSError):
            pass
        finally:
            for test_file in [test_file_from, test_file_to]:
                if test_file and os.path.isfile(test_file):
                    os.unlink(test_file)
    # I know this looks really funky, but this is a valid use of shell escaping
    # and globbing (obviously I tested it ;-).
    command.append('%s/*' % pipes.quote(from_directory))
    command.append(pipes.quote(to_directory))
    execute(' '.join(command), logger=logger)
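
The "funky" final lines above work because executor runs a single-string command through a shell, so pipes.quote and the * glob both apply, while separate arguments bypass the shell entirely. A hedged contrast (pathnames illustrative):

from executor import execute
import pipes  # pipes.quote on Python 2; use shlex.quote on Python 3

# Single string -> executed through the shell: quoting and '*' globbing apply.
execute('cp -al %s/* %s' % (pipes.quote('/tmp/src'), pipes.quote('/tmp/dst')))
# Separate arguments -> no shell: '/tmp/src/*' would reach cp as a literal
# (nonexistent) filename, so glob patterns must use the shell form above.
# execute('cp', '-al', '/tmp/src/*', '/tmp/dst')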
Example #7
def execute(stockId=None, date=None):
    now = datetime.datetime.now()
    if date is None or len(date) == 0:
        # Convert the western calendar date to the R.O.C. calendar.
        date = str(now.year - 1911) + "{:02d}".format(now.month) + "{:02d}".format(now.day)
    elif len(date) == 4:
        date = str(now.year - 1911) + date[0:2] + date[2:4]
    executor.execute(stockId, date)
Example #8
    def rotate_backups(self, directory):
        """
        Rotate the backups in a directory according to a flexible rotation scheme.

        :param directory: The pathname of a directory that contains backups to
                          rotate (a string).

        .. note:: This function binds the main methods of the
                  :class:`RotateBackups` class together to implement backup
                  rotation with an easy to use Python API. If you're using
                  `rotate-backups` as a Python API and the default behavior is
                  not satisfactory, consider writing your own
                  :func:`rotate_backups()` function based on the underlying
                  :func:`collect_backups()`, :func:`group_backups()`,
                  :func:`apply_rotation_scheme()` and
                  :func:`find_preservation_criteria()` methods.
        """
        # Load configuration overrides by user?

        # Collect the backups in the given directory (local or on Google
        # Drive, depending on the rotation type).
        sorted_backups = self.collect_backups(directory, self.rotate_type)
        if not sorted_backups:
            logger.info("No backups found in %s.", self.custom_format_path(directory))
            return
        most_recent_backup = sorted_backups[-1]
        # Group the backups by the rotation frequencies.
        backups_by_frequency = self.group_backups(sorted_backups)
        # Apply the user defined rotation scheme.
        self.apply_rotation_scheme(backups_by_frequency, most_recent_backup.datetime)
        # Find which backups to preserve and why.
        backups_to_preserve = self.find_preservation_criteria(backups_by_frequency)
        # Apply the calculated rotation scheme.
        for backup in sorted_backups:
            if backup in backups_to_preserve:
                matching_periods = backups_to_preserve[backup]
                logger.info("Preserving %s (matches %s retention %s) ..",
                            self.custom_format_path(backup.pathname),
                            concatenate(map(repr, matching_periods)),
                            "period" if len(matching_periods) == 1 else "periods")
            else:
                logger.info("Deleting %s %s ..", backup.type, self.custom_format_path(backup.pathname))
                if not self.dry_run:
                    timer = Timer()
                    if self.rotate_type == 'local':  # local backups are deleted directly
                        command = ['rm', '-Rf', backup.pathname]
                        if self.io_scheduling_class:
                            command = ['ionice', '--class', self.io_scheduling_class] + command

                        execute(*command, logger=logger)
                    else:
                        self.gdrivecm.delete_file(backup.pathname.split('_')[0])
                    logger.debug("Deleted %s in %s.", self.custom_format_path(backup.pathname), timer)
        if len(backups_to_preserve) == len(sorted_backups):
            logger.info("Nothing to do! (all backups preserved)")
Example #9
    def generate_key_file(self, filename):
        """
        Generate a temporary host or client key for the OpenSSH server.

        The :func:`start()` method automatically calls :func:`generate_key_file()`
        to generate :data:`host_key_file` and :attr:`client_key_file`. This
        method uses the ``ssh-keygen`` program to generate the keys.
        """
        if not os.path.isfile(filename):
            timer = Timer()
            self.logger.debug("Generating SSH key file (%s) ..", filename)
            execute('ssh-keygen', '-f', filename, '-N', '', '-t', 'rsa', silent=True, logger=self.logger)
            self.logger.debug("Generated key file %s in %s.", filename, timer)
Example #10
    def test_status_code_checking(self):
        self.assertTrue(execute('true'))
        self.assertFalse(execute('false', check=False))
        self.assertRaises(ExternalCommandFailed, execute, 'false')
        try:
            execute('bash', '-c', 'exit 42')
            # Make sure the previous line raised an exception.
            self.assertTrue(False)
        except Exception as e:
            # Make sure the expected type of exception was raised.
            self.assertTrue(isinstance(e, ExternalCommandFailed))
            # Make sure the exception has the expected properties.
            self.assertEqual(e.command, "bash -c 'exit 42'")
            self.assertEqual(e.returncode, 42)
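
The same exception properties can be used in application code; a small usage sketch based on the assertions above:

from executor import execute, ExternalCommandFailed

try:
    execute('bash', '-c', 'exit 42')
except ExternalCommandFailed as error:
    # The exception records the command line and its exit status.
    print(error.command, error.returncode)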
Example #11
def run(args):
    if (len(args) == 0) or (len(args) % 2 == 1):
        print(messages.HELP_STRING)
        return
    iterator = iter(args)
    jobs = izip(iterator, iterator)
    for job_path, params_json in jobs:
        try:
            job = simplejson.loads(file_get_contents(job_path))
            validictory.validate(job, job_schema)
            params = simplejson.loads(params_json)
            execute(job, params)
        except Exception as error:
            print(error)
Example #12
def application(request):
    """
    To use this application, the user must send a POST request with
    base64- or form-encoded HTML content and the wkhtmltopdf options in the
    request data, under the keys 'base64_html' and 'options'.
    The application will return a response with the PDF file.
    """
    if request.method != 'POST':
        return

    request_is_json = request.content_type.endswith('json')

    with tempfile.NamedTemporaryFile(suffix='.html') as source_file:

        # Guard against requests that are neither JSON nor file uploads.
        options = {}

        if request_is_json:
            # If a JSON payload is there, all data is in the payload
            payload = json.loads(request.data)
            source_file.write(payload['contents'].decode('base64'))
            options = payload.get('options', {})
        elif request.files:
            # First check if any files were uploaded
            source_file.write(request.files['file'].read())
            # Load any options that may have been provided in options
            options = json.loads(request.form.get('options', '{}'))

        source_file.flush()

        # Build the wkhtmltopdf argument list.
        args = ['wkhtmltopdf']

        # Add Global Options
        if options:
            for option, value in options.items():
                args.append('--%s' % option)
                if value:
                    args.append('"%s"' % value)

        # Add source file name and output file name
        file_name = source_file.name
        args += [file_name, file_name + ".pdf"]

        # Execute the command using executor
        execute(' '.join(args))

        return Response(
            wrap_file(request.environ, open(file_name + '.pdf')),
            mimetype='application/pdf',
        )
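
Joining the arguments into a single shell string and quoting values with '"%s"' breaks as soon as a value itself contains a double quote. Because execute() also accepts the arguments separately (bypassing the shell), a hedged alternative needs no manual quoting at all; the option values below are illustrative:

from executor import execute

options = {'title': 'Quarterly "Q3" report', 'margin-top': '10mm'}
args = ['wkhtmltopdf']
for option, value in options.items():
    args.append('--%s' % option)
    if value:
        args.append(str(value))  # passed verbatim to wkhtmltopdf, no shell quoting
args += ['input.html', 'input.html.pdf']
execute(*args)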
Example #13
def find_channels_of_guest(guest_name):
    """
    Find the pathnames of the channels associated with a guest.

    :param guest_name: The name of the guest (a string).
    :returns: A dictionary with channel names (strings) as keys and pathnames
              of UNIX socket files (strings) as values. If no channels are
              detected an empty dictionary will be returned.

    This function uses ``virsh dumpxml`` and parses the XML output to
    determine the pathnames of the channels associated with the guest.
    """
    logger.debug("Discovering '%s' channels using 'virsh dumpxml' command ..", guest_name)
    domain_xml = execute('virsh', 'dumpxml', guest_name, capture=True)
    parsed_xml = xml.etree.ElementTree.fromstring(domain_xml)
    channels = {}
    for channel in parsed_xml.findall('devices/channel'):
        if channel.attrib.get('type') == 'unix':
            source = channel.find('source')
            target = channel.find('target')
            if source is not None and target is not None and target.attrib.get('type') == 'virtio':
                name = target.attrib.get('name')
                path = source.attrib.get('path')
                if name in SUPPORTED_CHANNEL_NAMES:
                    channels[name] = path
    if channels:
        logger.debug("Discovered '%s' channels: %s", guest_name, channels)
    else:
        logger.debug("No channels found for guest '%s'.", guest_name)
    return channels
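
A hedged, self-contained illustration of the XML shape this function matches (the channel name and socket path are illustrative):

import xml.etree.ElementTree

domain_xml = """
<domain>
  <devices>
    <channel type='unix'>
      <source mode='bind' path='/var/lib/libvirt/qemu/channel/guest.agent'/>
      <target type='virtio' name='negotiator-host-to-guest.0'/>
    </channel>
  </devices>
</domain>
"""
parsed = xml.etree.ElementTree.fromstring(domain_xml)
for channel in parsed.findall('devices/channel'):
    print(channel.attrib['type'], channel.find('target').attrib.get('name'))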
Example #14
    def mktx(self, recip, amount):
        if self.iscoinset():
            command = self.coinbinary + " payto -f " + self.fee + " " + recip + " " + str(amount)
            output = execute(command, capture=True)
            return output
        else:
            return False
Example #15
def find_running_guests():
    """
    Find the names of the guests running on the current host.

    This function parses the output of the ``virsh list`` command instead of
    using the libvirt API because of two reasons:

    1. I'm under the impression that the libvirt API is still very much in flux
       and large changes are still being made, so it's not the most stable
       foundation for Negotiator to find running guests.

    2. The Python libvirt API needs to match the version of the libvirt API on
       the host system and there is AFAIK no obvious way to express this in the
       ``setup.py`` script of Negotiator.

    :returns: A generator of strings.
    :raises: :exc:`GuestDiscoveryError` when ``virsh list`` fails.
    """
    try:
        logger.debug("Discovering running guests using 'virsh list' command ..")
        output = execute('virsh', '--quiet', 'list', '--all', capture=True, logger=logger)
    except ExternalCommandFailed:
        raise GuestDiscoveryError("The 'virsh list' command failed! Are you sure libvirtd is running?")
    else:
        for line in output.splitlines():
            logger.debug("Parsing 'virsh list' output: %r", line)
            try:
                vm_id, vm_name, vm_status = line.split(None, 2)
                if vm_status == 'running':
                    yield vm_name
            except Exception:
                logger.warning("Failed to parse 'virsh list' output! (%r)", line)
Example #16
    def listaddresses(self):
        if self.iscoinset():
            command = self.coinbinary + " listaddresses"
            output = execute(command, capture=True)
            return output
        else:
            return False
Example #17
    def broadcast(self, tx):
        if self.iscoinset():
            command = self.coinbinary + " broadcast " + tx
            execute(command, capture=True)
            return True
        else:
            return False
Example #18
    def daemon(self, control):
        if self.iscoinset() and control in ("start", "stop"):
            command = self.coinbinary + " daemon " + control
            execute(command, capture=True)
            return True
        else:
            return False
Example #19
    def getaddresshistory(self, address):
        if self.iscoinset():
            command = self.coinbinary + " getaddresshistory " + address
            output = execute(command, capture=True)
            return output
        else:
            return False
Example #20
    def find_system_dependencies(self, shared_object_files):
        """
        (Ab)use dpkg-shlibdeps_ to find dependencies on system libraries.

        :param shared_object_files: The pathnames of the ``*.so`` file(s) contained
                                    in the package (a list of strings).
        :returns: A list of strings in the format of the entries on the
                  ``Depends:`` line of a binary package control file.

        .. _dpkg-shlibdeps: https://www.debian.org/doc/debian-policy/ch-sharedlibs.html#s-dpkg-shlibdeps
        """
        logger.debug("Abusing `dpkg-shlibdeps' to find dependencies on shared libraries ..")
        # Create a fake source package, because `dpkg-shlibdeps' expects this...
        with TemporaryDirectory(prefix='py2deb-dpkg-shlibdeps-') as fake_source_directory:
            # Create the debian/ directory expected in the source package directory.
            os.mkdir(os.path.join(fake_source_directory, 'debian'))
            # Create an empty debian/control file because `dpkg-shlibdeps' requires
            # this (even though it is apparently fine for the file to be empty ;-).
            open(os.path.join(fake_source_directory, 'debian', 'control'), 'w').close()
            # Run `dpkg-shlibdeps' inside the fake source package directory, but
            # let it analyze the *.so files from the actual build directory.
            command = ['dpkg-shlibdeps', '-O', '--warnings=0'] + shared_object_files
            output = execute(*command, directory=fake_source_directory, capture=True, logger=logger)
            expected_prefix = 'shlibs:Depends='
            if not output.startswith(expected_prefix):
                msg = ("The output of dpkg-shlibdeps doesn't match the"
                       " expected format! (expected prefix: %r, output: %r)")
                logger.warning(msg, expected_prefix, output)
                return []
            output = output[len(expected_prefix):]
            dependencies = sorted(dependency.strip() for dependency in output.split(','))
            logger.debug("Dependencies reported by dpkg-shlibdeps: %s", dependencies)
            return dependencies
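
For reference, a hedged illustration of the parsing step at the end of find_system_dependencies(), with made-up but representative dpkg-shlibdeps output:

output = 'shlibs:Depends=libgcc1 (>= 1:4.1.1), libc6 (>= 2.14)'
prefix = 'shlibs:Depends='
dependencies = sorted(d.strip() for d in output[len(prefix):].split(','))
print(dependencies)  # ['libc6 (>= 2.14)', 'libgcc1 (>= 1:4.1.1)']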
Example #21
def inspect_package_contents(archive, cache=None):
    """
    Get the contents from a ``*.deb`` archive.

    :param archive: The pathname of an existing ``*.deb`` archive.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :returns: A dictionary with the directories and files contained in the
              package. The dictionary keys are the absolute pathnames and the
              dictionary values are :class:`ArchiveEntry` objects (see the
              example below).

    An example:

    >>> from deb_pkg_tools.package import inspect_package_contents
    >>> print(repr(inspect_package_contents('python3.4-minimal_3.4.0-1+precise1_amd64.deb')))
    {u'/': ArchiveEntry(permissions=u'drwxr-xr-x', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:54', target=u''),
     u'/usr/': ArchiveEntry(permissions=u'drwxr-xr-x', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:52', target=u''),
     u'/usr/bin/': ArchiveEntry(permissions=u'drwxr-xr-x', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:54', target=u''),
     u'/usr/bin/python3.4': ArchiveEntry(permissions=u'-rwxr-xr-x', owner=u'root', group=u'root', size=3536680, modified=u'2014-03-20 23:54', target=u''),
     u'/usr/bin/python3.4m': ArchiveEntry(permissions=u'hrwxr-xr-x', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:54', target=u'/usr/bin/python3.4'),
     u'/usr/share/': ArchiveEntry(permissions=u'drwxr-xr-x', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:53', target=u''),
     u'/usr/share/binfmts/': ArchiveEntry(permissions=u'drwxr-xr-x', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:53', target=u''),
     u'/usr/share/binfmts/python3.4': ArchiveEntry(permissions=u'-rw-r--r--', owner=u'root', group=u'root', size=72, modified=u'2014-03-20 23:53', target=u''),
     u'/usr/share/doc/': ArchiveEntry(permissions=u'drwxr-xr-x', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:53', target=u''),
     u'/usr/share/doc/python3.4-minimal/': ArchiveEntry(permissions=u'drwxr-xr-x', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:54', target=u''),
     u'/usr/share/doc/python3.4-minimal/README.Debian': ArchiveEntry(permissions=u'-rw-r--r--', owner=u'root', group=u'root', size=3779, modified=u'2014-03-20 23:52', target=u''),
     u'/usr/share/doc/python3.4-minimal/changelog.Debian.gz': ArchiveEntry(permissions=u'-rw-r--r--', owner=u'root', group=u'root', size=28528, modified=u'2014-03-20 22:32', target=u''),
     u'/usr/share/doc/python3.4-minimal/copyright': ArchiveEntry(permissions=u'-rw-r--r--', owner=u'root', group=u'root', size=51835, modified=u'2014-03-20 20:37', target=u''),
     u'/usr/share/man/': ArchiveEntry(permissions=u'drwxr-xr-x', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:52', target=u''),
     u'/usr/share/man/man1/': ArchiveEntry(permissions=u'drwxr-xr-x', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:54', target=u''),
     u'/usr/share/man/man1/python3.4.1.gz': ArchiveEntry(permissions=u'-rw-r--r--', owner=u'root', group=u'root', size=5340, modified=u'2014-03-20 23:30', target=u''),
     u'/usr/share/man/man1/python3.4m.1.gz': ArchiveEntry(permissions=u'lrwxrwxrwx', owner=u'root', group=u'root', size=0, modified=u'2014-03-20 23:54', target=u'python3.4.1.gz')}

    """
    if cache:
        entry = cache.get_entry('contents', archive)
        value = entry.get_value()
        if value is not None:
            return value
    contents = {}
    for line in execute('dpkg-deb', '-c', archive, logger=logger, capture=True).splitlines():
        # Example output of dpkg-deb -c archive.deb:
        # drwxr-xr-x root/root 0 2013-07-08 17:49 ./usr/share/doc/
        # lrwxrwxrwx root/root 0 2013-09-26 22:29 ./usr/bin/pdb2.7 -> ../lib/python2.7/pdb.py
        fields = line.split(None, 5)
        permissions = fields[0]
        owner, group = fields[1].split('/')
        size = int(fields[2])
        modified = fields[3] + ' ' + fields[4]
        pathname = re.sub('^./', '/', fields[5])
        pathname, _, target = pathname.partition(' -> ')
        if not target:
            pathname, _, target = pathname.partition(' link to ')
            target = re.sub('^./', '/', target)
        contents[pathname] = ArchiveEntry(permissions, owner, group, size, modified, target)
    if cache:
        entry.set_value(contents)
    return contents
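
A hedged walk-through of how one `dpkg-deb -c' line (taken from the comment in the loop above) is split:

line = 'lrwxrwxrwx root/root 0 2013-09-26 22:29 ./usr/bin/pdb2.7 -> ../lib/python2.7/pdb.py'
fields = line.split(None, 5)
# fields[0] -> 'lrwxrwxrwx'             (permissions)
# fields[1] -> 'root/root'              (owner/group)
# fields[2] -> '0'                      (size in bytes)
# fields[3], fields[4] -> date and time of last modification
# fields[5] -> './usr/bin/pdb2.7 -> ../lib/python2.7/pdb.py'
pathname, _, target = fields[5].partition(' -> ')
print(pathname, target)  # ./usr/bin/pdb2.7 ../lib/python2.7/pdb.py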
Example #22
def dns(adapter, networks):
    """Set dns entries for an adapter"""

    if networks:
        networks = " ".join(networks)

    dns_output = execute('networksetup -setdnsservers {} {}'.format(adapter, networks), capture=True, sudo=True)
    if dns_output:
        click.echo('Added dns servers {} to {}'.format(networks, adapter))
Example #23
    def scrape_song(self, title, artist, maximum_age=60):
        scrape_f = lambda p: self.get_muscic_url(p, title, artist, self.cache_location, maximum_age, debrid=self.debrid)
        if len(self.__scrapers) > 0:
            pool_size = 10
            stop_flag = Event()
            populator = lambda: execute(scrape_f, self.__scrapers, stop_flag, pool_size, self.timeout)
            return populator
        else:
            return False
Example #24
def launch_program(command, is_running=None):
    """
    Start a program if it's not already running.

    This function makes it easy to turn any program into a single instance
    program. If the default "Is the program already running?" check fails to
    work you can redefine the way this check is done.

    :param command: The shell command used to launch the application (a string).
    :param is_running: The shell command used to check whether the application
                       is already running (a string, optional).
    :returns: One of the values from the :class:`LaunchStatus` enumeration.

    Examples of custom "is running" checks:

    .. code-block:: python

       # Chromium uses a wrapper script, so we need to match the absolute
       # pathname of the executable.
       launch_program('chromium-browser', is_running='pidof /usr/lib/chromium-browser/chromium-browser')

       # Dropbox does the same thing as Chromium, but the absolute pathname of
       # the executable contains a version number that I don't want to hard
       # code in my ~/.dwimrc profile :-)
       launch_program('dropbox start', is_running='pgrep -f "$HOME/.dropbox-dist/*/dropbox"')
    """
    try:
        pathname = resolve_program(extract_program(command))
        if not is_running:
            is_running = 'pidof %s' % quote(pathname)
        logger.verbose("Checking if program is running (%s) ..", pathname)
        if execute(is_running, silent=True, check=False):
            logger.info("Command already running: %s", command)
            return LaunchStatus.already_running
        else:
            logger.info("Starting command: %s", command)
            execute('sh', '-c', '(%s >/dev/null 2>&1) &' % command)
            return LaunchStatus.started
    except MissingProgramError:
        logger.warning("Program not installed! (%s)", command)
        return LaunchStatus.not_installed
    except Exception as e:
        logger.warning("Failed to start program! (%s)", e)
        return LaunchStatus.unspecified_error
Example #25
    def find_shared_object_files(self, directory):
        """
        Search directory tree of converted package for shared object files.

        Runs ``strip --strip-unneeded`` on all ``*.so`` files found.

        :param directory: The directory to search (a string).
        :returns: A list with pathnames of ``*.so`` files.
        """
        shared_object_files = []
        for root, dirs, files in os.walk(directory):
            for filename in files:
                if filename.endswith('.so'):
                    pathname = os.path.join(root, filename)
                    shared_object_files.append(pathname)
                    execute('strip', '--strip-unneeded', pathname, logger=logger)
        if shared_object_files:
            logger.debug("Found one or more shared object files: %s", shared_object_files)
        return shared_object_files
Example #26
    def scrape_movie(self, maximum_age=60):
        scrape_f = lambda p: self.get_url(p, self.title, '', self.year, '', '', self.imdb, self.tvdb, "movie",
                                          self.cache_location, maximum_age, debrid=self.debrid)
        if len(self.__scrapers) > 0:
            pool_size = 10
            stop_flag = Event()
            populator = lambda: execute(scrape_f, self.__scrapers, stop_flag, pool_size, self.timeout)
            return populator
        else:
            return False
Example #27
def clone(years, public, languages, exclude, username):
    """Clones most repositories for a user in the current directory"""
    try:
        """
        Repos for <username> which you have member access
        """
        member_repos = Github(GITHUB.token).get_user().get_repos('member')
        for repo in member_repos:
            if username in repo.full_name and repo.language in languages and exclude not in repo.name:
                click.echo('git clone https://{}:{}@{}'.format(GITHUB.name, GITHUB.token, repo.clone_url[8:]))
                try:
                    c = execute(
                        'git clone https://{}:{}@{}'.format(GITHUB.name, GITHUB.token, repo.clone_url[8:]),
                        capture=True,
                        check=False
                    )
                    click.echo(c)
                except Exception as e:
                    click.echo(e)
        """
        Public repos for <username>
        """
        if public:
            public_repos = Github(GITHUB.token).get_user(username).get_repos()
            for repo in public_repos:
                years_old = datetime.datetime.now() - datetime.timedelta(days=years * 365)
                if repo.updated_at > years_old and repo.language in languages:
                    click.echo('git clone https://{}:{}@{}'.format(GITHUB.name, GITHUB.token, repo.clone_url[8:]))
                    try:
                        c = execute(
                            'git clone https://{}:{}@{}'.format(GITHUB.name, GITHUB.token, repo.clone_url[8:]),
                            capture=True,
                            check=False
                        )
                        click.echo(c)
                    except Exception as e:
                        click.echo(e)

    except TypeError:
        return
    except Exception as e:
        click.echo(e)
        raise e
Example #28
def compare_versions_with_dpkg(version1, operator, version2):
    """
    Compare Debian package versions using the external command ``dpkg --compare-versions ...``.

    :param version1: The version on the left side of the comparison (a string).
    :param operator: The operator to use in the comparison (a string).
    :param version2: The version on the right side of the comparison (a string).
    :returns: :data:`True` if the comparison succeeds, :data:`False` if it fails.
    """
    return execute('dpkg', '--compare-versions', version1, operator, version2, check=False, logger=logger)
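
Usage sketch: dpkg accepts the operators lt, le, eq, ne, ge and gt, and because check=False makes execute() report success as a boolean instead of raising an exception, the function composes naturally:

print(compare_versions_with_dpkg('1.2-1', 'gt', '1.1-1'))         # True
print(compare_versions_with_dpkg('0.8.16~exp3', 'lt', '0.8.16'))  # True (tilde sorts before everything)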
Example #29
    def scrape_episode(self, show_year, season, episode, maximum_age=60):
        scrape_f = lambda p: self.get_url(p, self.title, show_year, self.year, season, episode, self.imdb, self.tvdb,
                                          "episode", self.cache_location, maximum_age)
        if len(self.__scrapers) > 0:
            pool_size = 10
            stop_flag = Event()
            populator = lambda: execute(scrape_f, self.__scrapers, stop_flag, pool_size, self.timeout)
            return populator
        else:
            return False
Example #30
def with_repository(directory, *command, **kw):
    """
    Create/update a trivial package repository, activate the repository, run an
    external command (usually ``apt-get install``) and finally deactivate the
    repository again. Also deactivates the repository when the external command
    fails and :py:exc:`executor.ExternalCommandFailed` is raised.

    :param directory: The pathname of a directory containing ``*.deb`` archives
                      (a string).
    :param command: The command to execute (a tuple of strings, passed verbatim
                    to :py:func:`executor.execute()`).
    :param cache: The :py:class:`.PackageCache` to use (defaults to ``None``).
    """
    update_repository(directory=directory,
                      cache=kw.get('cache'))
    activate_repository(directory)
    try:
        execute(*command, logger=logger)
    finally:
        deactivate_repository(directory)
Example #31
def htmltopdf():

    args = ['wkhtmltopdf']

    source_file = tempfile.NamedTemporaryFile(suffix='.html')
    header_file = tempfile.NamedTemporaryFile(suffix='.html')
    footer_file = tempfile.NamedTemporaryFile(suffix='.html')
    cover_file = tempfile.NamedTemporaryFile(suffix='.html')

    request_is_json = request.content_type.endswith('json')

    if request_is_json:
        payload = json.loads(request.data)
        if 'header' in payload:
            header_file.write(payload['header'].decode('base64'))
            header_file.flush()
            args += ["--header-html", header_file.name]

        if 'footer' in payload:
            footer_file.write(payload['footer'].decode('base64'))
            footer_file.flush()
            args += ["--footer-html", footer_file.name]

        if 'cover' in payload:
            cover_file.write(payload['cover'].decode('base64'))
            cover_file.flush()
            args += ["cover", cover_file.name]

        if 'file' in payload:
            source_file.write(payload['file'].decode('base64'))
            source_file.flush()
            args += [source_file.name, source_file.name + ".pdf"]
        else:
            app.logger.warning('no file in payload: %s', request.data)
            abort(400)
    else:
        if 'header' in request.files:
            header_file.write(request.files['header'].read())
            header_file.flush()
            args += ["--header-html", header_file.name]

        if 'footer' in request.files:
            footer_file.write(request.files['footer'].read())
            footer_file.flush()
            args += ["--footer-html", footer_file.name]

        if 'cover' in request.files:
            cover_file.write(request.files['cover'].read())
            cover_file.flush()
            args += ["cover", cover_file.name]

        if 'file' in request.files:
            source_file.write(request.files['file'].read())
            source_file.flush()
            args += [source_file.name, source_file.name + ".pdf"]
        else:
            app.logger.warning('no file in request.files: %s', request.files)
            abort(400)

    # Execute the command using executor
    execute(' '.join(args))

    with open(source_file.name + '.pdf', 'rb') as myfile:
        data = myfile.read()
    os.unlink(source_file.name + ".pdf")

    resp = make_response(data)
    resp.headers['Content-Type'] = 'application/pdf'

    return resp
Example #32
def publish():
    execute("rm -rf dist/")
    execute("python3 setup.py sdist bdist_wheel")
    execute("twine upload dist/*")
Example #33
#!/usr/bin/env python
import sys
import lexer
import parser
import names
import executor
import create_adventurer

if __name__ == '__main__':
    if len(sys.argv) != 2:
        print("FOOL OF A TOOK\npsst, it's <%s> <file.ll>" % sys.argv[0])
        exit()
    tokens = lexer.lex(sys.argv[1])
    parser.parse(tokens)
    names.declaration_check(tokens)
    characters, tokens = create_adventurer.create_adventurer(tokens)
    executor.execute(characters, tokens)
Example #34
def coverage_report():
    execute("coverage html")
    execute("open htmlcov/index.html")
Example #35
def test():
    execute("pytest")
Example #36
    def convert(self):
        """
        Convert current package from Python package to Debian package.

        :returns: The pathname of the generated ``*.deb`` archive.
        """
        with TemporaryDirectory(prefix='py2deb-build-') as build_directory:

            # Prepare the absolute pathname of the Python interpreter on the
            # target system. This pathname will be embedded in the first line
            # of executable scripts (including the post-installation and
            # pre-removal scripts).
            python_executable = '/usr/bin/%s' % python_version()

            # Unpack the binary distribution archive provided by pip-accel inside our build directory.
            build_install_prefix = os.path.join(
                build_directory, self.converter.install_prefix.lstrip('/'))
            self.converter.pip_accel.bdists.install_binary_dist(
                members=self.transform_binary_dist(python_executable),
                prefix=build_install_prefix,
                python=python_executable,
                virtualenv_compatible=False,
            )

            # Determine the directory (at build time) where the *.py files for
            # Python modules are located (the site-packages equivalent).
            if self.has_custom_install_prefix:
                build_modules_directory = os.path.join(build_install_prefix,
                                                       'lib')
            else:
                # The /py*/ pattern below is intended to match both /pythonX.Y/ and /pypyX.Y/.
                dist_packages_directories = glob.glob(
                    os.path.join(build_install_prefix,
                                 'lib/py*/dist-packages'))
                if len(dist_packages_directories) != 1:
                    msg = "Expected to find a single 'dist-packages' directory inside converted package!"
                    raise Exception(msg)
                build_modules_directory = dist_packages_directories[0]

            # Determine the directory (at installation time) where the *.py
            # files for Python modules are located.
            install_modules_directory = os.path.join(
                '/', os.path.relpath(build_modules_directory, build_directory))

            # Execute a user defined command inside the directory where the Python modules are installed.
            command = self.converter.scripts.get(self.python_name.lower())
            if command:
                execute(command,
                        directory=build_modules_directory,
                        logger=logger)

            # Determine the package's dependencies, starting with the currently
            # running version of Python and the Python requirements converted
            # to Debian packages.
            dependencies = [python_version()] + self.debian_dependencies

            # Check if the converted package contains any compiled *.so files.
            object_files = find_object_files(build_directory)
            if object_files:
                # Strip debugging symbols from the object files.
                strip_object_files(object_files)
                # Determine system dependencies by analyzing the linkage of the
                # *.so file(s) found in the converted package.
                dependencies += find_system_dependencies(object_files)

            # Make up some control file fields ... :-)
            architecture = self.determine_package_architecture(object_files)
            control_fields = unparse_control_fields(
                dict(package=self.debian_name,
                     version=self.debian_version,
                     maintainer=self.debian_maintainer,
                     description=self.debian_description,
                     architecture=architecture,
                     depends=dependencies,
                     priority='optional',
                     section='python'))

            # Automatically add the Mercurial global revision id when available.
            if self.vcs_revision:
                control_fields['Vcs-Hg'] = self.vcs_revision

            # Apply user defined control field overrides from `stdeb.cfg'.
            control_fields = self.load_control_field_overrides(control_fields)

            # Create the DEBIAN directory.
            debian_directory = os.path.join(build_directory, 'DEBIAN')
            os.mkdir(debian_directory)

            # Generate the DEBIAN/control file.
            control_file = os.path.join(debian_directory, 'control')
            logger.debug("Saving control file fields to %s: %s", control_file,
                         control_fields)
            with open(control_file, 'wb') as handle:
                control_fields.dump(handle)

            # Lintian is a useful tool to find mistakes in Debian binary
            # packages. However, Lintian checks from the perspective of a
            # package included in the official Debian repositories; because
            # py2deb doesn't (and probably never will) generate such packages,
            # some messages emitted by Lintian are useless (they merely point
            # out how the internals of py2deb work). Because of this we silence
            # `known to be irrelevant' messages from Lintian using overrides.
            if self.converter.lintian_ignore:
                overrides_directory = os.path.join(
                    build_directory,
                    'usr',
                    'share',
                    'lintian',
                    'overrides',
                )
                overrides_file = os.path.join(overrides_directory,
                                              self.debian_name)
                os.makedirs(overrides_directory)
                with open(overrides_file, 'w') as handle:
                    for tag in self.converter.lintian_ignore:
                        handle.write('%s: %s\n' % (self.debian_name, tag))

            # Find the alternatives relevant to the package we're building.
            alternatives = set(
                (link, path) for link, path in self.converter.alternatives
                if os.path.isfile(
                    os.path.join(build_directory, path.lstrip('/'))))

            # Generate post-installation and pre-removal maintainer scripts.
            self.generate_maintainer_script(
                filename=os.path.join(debian_directory, 'postinst'),
                python_executable=python_executable,
                function='post_installation_hook',
                package_name=self.debian_name,
                alternatives=alternatives,
                modules_directory=install_modules_directory,
                namespaces=self.namespaces)
            self.generate_maintainer_script(
                filename=os.path.join(debian_directory, 'prerm'),
                python_executable=python_executable,
                function='pre_removal_hook',
                package_name=self.debian_name,
                alternatives=alternatives,
                modules_directory=install_modules_directory,
                namespaces=self.namespaces)

            # Enable a user defined Python callback to manipulate the resulting
            # binary package before it's turned into a *.deb archive (e.g.
            # manipulate the contents or change the package metadata).
            if self.converter.python_callback:
                logger.debug("Invoking user defined Python callback ..")
                self.converter.python_callback(self.converter, self,
                                               build_directory)
                logger.debug("User defined Python callback finished!")

            return build_package(directory=build_directory,
                                 check_package=self.converter.lintian_enabled,
                                 copy_files=False)
Example #37
def application(request):
    """
	To use this application, the user must send a POST request with
	base64 or form encoded encoded HTML content and the wkhtmltopdf Options in
	request data, with keys 'base64_html' and 'options'.
	The application will return a response with the PDF file.
	"""

    if request.method != 'POST':
        return Response('UP')

    request_is_json = request.content_type.endswith('json')

    with tempfile.NamedTemporaryFile(suffix='.html') as source_file:

        # Guard against requests that are neither JSON nor file uploads.
        options = {}

        if request_is_json:
            # If a JSON payload is there, all data is in the payload
            payload = json.loads(request.data)
            source_file.write(payload['contents'].decode('base64'))
            options = payload.get('options', {})

        elif request.files:
            # First check if any files were uploaded
            source_file.write(request.files['file'].read())
            # Load any options that may have been provided in options
            options = json.loads(request.form.get('options', '{}'))

        source_file.flush()

        # Build the wkhtmltopdf argument list.
        args = ['wkhtmltopdf']

        # Add Global Options
        if options:

            for option, value in options.items():

                # Uppercase options take a single hyphen.
                if option.isupper():
                    args.append('-%s' % option)
                else:
                    args.append('--%s' % option)

                if value:

                    if value.isdigit():
                        args.append('%s' % value)
                    else:
                        args.append('"%s"' % value)

        # Add source file name and output file name
        file_name = source_file.name
        args += [file_name, file_name + ".pdf"]

        cmd = ' '.join(args)

        print "Executing > " + cmd

        # Execute the command using executor
        execute(cmd)

        # send response
        return Response(wrap_file(request.environ, open(file_name + '.pdf')),
                        mimetype='application/pdf')
Example #38
def lint():
    execute("flake8", *python_source_files())
Example #39
def black_check():
    execute("black", "--check", *python_source_files())
Example #40
def init():
    execute("pip install --upgrade -r requirements_dev.txt")
Example #41
def update_repository(directory, release_fields={}, gpg_key=None, cache=None):
    """
    Create or update a `trivial repository`_.

    :param directory: The pathname of a directory with ``*.deb`` packages.
    :param release_fields: An optional dictionary with fields to set inside the
                           ``Release`` file.
    :param gpg_key: The :class:`.GPGKey` object used to sign the repository.
                    Defaults to the result of :func:`select_gpg_key()`.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`.ResourceLockedException` when the given repository
             directory is being updated by another process.

    This function is based on the Debian programs dpkg-scanpackages_ and
    apt-ftparchive_ and also uses gpg_ and gzip_. The following files are
    generated:

    ===============  ==========================================================
    Filename         Description
    ===============  ==========================================================
    ``Packages``     Provides the metadata of all ``*.deb`` packages in the
                     `trivial repository`_ as a single text file. Generated
                     using :func:`scan_packages()` (as a faster alternative
                     to dpkg-scanpackages_).
    ``Packages.gz``  A compressed version of the package metadata generated
                     using gzip_.
    ``Release``      Metadata about the release and hashes of the ``Packages``
                     and ``Packages.gz`` files. Generated using
                     apt-ftparchive_.
    ``Release.gpg``  An ASCII-armored detached GPG signature of the ``Release``
                     file. Generated using ``gpg --armor --sign
                     --detach-sign``.
    ``InRelease``    The contents of the ``Release`` file and its GPG signature
                     combined into a single human readable file. Generated
                     using ``gpg --armor --sign --clearsign``.
    ===============  ==========================================================

    For more details about the ``Release.gpg`` and ``InRelease`` files please
    refer to the Debian wiki's section on secure-apt_.

    .. _apt-ftparchive: https://manpages.debian.org/apt-ftparchive
    .. _dpkg-scanpackages: https://manpages.debian.org/dpkg-scanpackages
    .. _gpg: https://manpages.debian.org/gpg
    .. _gzip: https://manpages.debian.org/gzip
    .. _secure-apt: https://wiki.debian.org/SecureApt
    """
    with atomic_lock(directory):
        timer = Timer()
        gpg_key = gpg_key or select_gpg_key(directory)
        # Figure out when the repository contents were last updated.
        contents_last_updated = os.path.getmtime(directory)
        for archive in find_package_archives(directory, cache=cache):
            contents_last_updated = max(contents_last_updated,
                                        os.path.getmtime(archive.filename))
        # Figure out when the repository metadata was last updated.
        try:
            metadata_files = ['Packages', 'Packages.gz', 'Release']
            # XXX If 1) no GPG key was provided, 2) apt doesn't require the
            # repository to be signed and 3) `Release.gpg' doesn't exist, it should
            # not cause an unnecessary repository update. That would turn the
            # conditional update into an unconditional update, which is not the
            # intention here :-)
            for signed_file in 'Release.gpg', 'InRelease':
                if os.path.isfile(os.path.join(directory,
                                               signed_file)) or gpg_key:
                    metadata_files.append(signed_file)
            metadata_last_updated = max(
                os.path.getmtime(os.path.join(directory, fn))
                for fn in metadata_files)
        except Exception:
            metadata_last_updated = 0
        # If the repository doesn't actually need to be updated we'll skip the update.
        if metadata_last_updated >= contents_last_updated:
            logger.info(
                "Contents of repository %s didn't change, so no need to update it.",
                directory)
            return
        # The generated files `Packages', `Packages.gz', `Release' and `Release.gpg'
        # are created in a temporary directory. Only once all of the files have been
        # successfully generated they are moved to the repository directory. There
        # are two reasons for this:
        #
        # 1. If the repository directory is being served to apt-get clients we
        #    don't want them to catch us in the middle of updating the repository
        #    because it will be in an inconsistent state.
        #
        # 2. If we fail to generate one of the files it's better not to have
        #    changed any of them, for the same reason as point one :-)
        logger.info("%s trivial repository %s ..",
                    "Updating" if metadata_last_updated else "Creating",
                    directory)
        temporary_directory = tempfile.mkdtemp(prefix='deb-pkg-tools-',
                                               suffix='-update-repo-stage')
        logger.debug("Using temporary directory: %s", temporary_directory)
        try:
            # Generate the `Packages' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Packages')))
            scan_packages(repository=directory,
                          packages_file=os.path.join(temporary_directory,
                                                     'Packages'),
                          cache=cache)
            # Generate the `Packages.gz' file by compressing the `Packages' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Packages.gz')))
            execute("gzip < Packages > Packages.gz",
                    directory=temporary_directory,
                    logger=logger)
            # Generate the `Release' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Release')))
            # Get APT::FTPArchive::Release::* options from configuration file.
            release_fields = dict(
                (k.lower(), v) for k, v in release_fields.items())
            for name, value in load_config(directory).items():
                if name.startswith('release-'):
                    name = re.sub('^release-', '', name)
                    if name not in release_fields:
                        release_fields[name] = value
            # Override APT::FTPArchive::Release::* options from configuration file
            # with options given to update_repository() explicitly by the caller.
            options = []
            for name, value in release_fields.items():
                name = 'APT::FTPArchive::Release::%s' % name.capitalize()
                options.append('-o %s' % pipes.quote('%s=%s' % (name, value)))
            command = "LANG= apt-ftparchive %s release ." % ' '.join(options)
            release_listing = execute(command,
                                      capture=True,
                                      directory=temporary_directory,
                                      logger=logger)
            with open(os.path.join(temporary_directory, 'Release'),
                      'w') as handle:
                handle.write(release_listing + '\n')
            # Generate the `Release.gpg' and `InRelease' files by signing the `Release' file with GPG?
            gpg_key_file = os.path.join(directory, 'Release.gpg')
            in_release_file = os.path.join(directory, 'InRelease')
            if gpg_key:
                initialize_gnupg()
                logger.debug("Generating file: %s", format_path(gpg_key_file))
                command = "{gpg} --armor --sign --detach-sign --output Release.gpg Release"
                execute(command.format(gpg=gpg_key.gpg_command),
                        directory=temporary_directory,
                        logger=logger)
                logger.debug("Generating file: %s",
                             format_path(in_release_file))
                command = "{gpg} --armor --sign --clearsign --output InRelease Release"
                execute(command.format(gpg=gpg_key.gpg_command),
                        directory=temporary_directory,
                        logger=logger)
            else:
                # XXX If 1) no GPG key was provided, 2) apt doesn't require the
                # repository to be signed and 3) `Release.gpg' exists from a
                # previous run, this file should be removed so we don't create an
                # inconsistent repository index (when `Release' is updated but
                # `Release.gpg' is not updated the signature becomes invalid).
                for stale_file in gpg_key_file, in_release_file:
                    if os.path.isfile(stale_file):
                        os.unlink(stale_file)
            # Move the generated files into the repository directory.
            for entry in os.listdir(temporary_directory):
                shutil.copy(os.path.join(temporary_directory, entry),
                            os.path.join(directory, entry))
            logger.info("Finished updating trivial repository in %s.", timer)
        finally:
            shutil.rmtree(temporary_directory)
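
Once updated, such a trivial repository is typically consumed through a one-line sources.list entry. A hedged sketch of activating it (pathnames illustrative; [trusted=yes] only makes sense when the repository is left unsigned, compare example #1):

# Hedged sketch: point apt at the trivial repository. The [trusted=yes]
# option (see example #1) skips GPG verification for unsigned repositories.
with open('/etc/apt/sources.list.d/local-repo.list', 'w') as handle:
    handle.write('deb [trusted=yes] file:///srv/repo ./\n')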
Example #42
def update_repository(directory, release_fields={}, gpg_key=None, cache=None):
    """
    Create or update a `trivial repository`_ using the Debian commands
    ``dpkg-scanpackages`` (reimplemented as :py:func:`scan_packages()`) and
    ``apt-ftparchive`` (also uses the external programs ``gpg`` and ``gzip``).
    Raises :py:exc:`.ResourceLockedException` when the given repository
    directory is being updated by another process.

    :param directory: The pathname of a directory with ``*.deb`` packages.
    :param release_fields: An optional dictionary with fields to set inside the
                           ``Release`` file.
    :param gpg_key: The :py:class:`.GPGKey` object used to sign the repository.
                    Defaults to the result of :py:func:`select_gpg_key()`.
    :param cache: The :py:class:`.PackageCache` to use (defaults to ``None``).
    """
    with atomic_lock(directory):
        timer = Timer()
        gpg_key = gpg_key or select_gpg_key(directory)
        # Figure out when the repository contents were last updated.
        contents_last_updated = os.path.getmtime(directory)
        for archive in find_package_archives(directory):
            contents_last_updated = max(contents_last_updated,
                                        os.path.getmtime(archive.filename))
        # Figure out when the repository metadata was last updated.
        try:
            metadata_files = ['Packages', 'Packages.gz', 'Release']
            # XXX If 1) no GPG key was provided, 2) apt doesn't require the
            # repository to be signed and 3) `Release.gpg' doesn't exist, it should
            # not cause an unnecessary repository update. That would turn the
            # conditional update into an unconditional update, which is not the
            # intention here :-)
            if os.path.isfile(os.path.join(directory,
                                           'Release.gpg')) or gpg_key:
                metadata_files.append('Release.gpg')
            metadata_last_updated = max(
                os.path.getmtime(os.path.join(directory, fn))
                for fn in metadata_files)
        except Exception:
            metadata_last_updated = 0
        # If the repository doesn't actually need to be updated we'll skip the update.
        if metadata_last_updated >= contents_last_updated:
            logger.info(
                "Contents of repository %s didn't change, so no need to update it.",
                directory)
            return
        # The generated files `Packages', `Packages.gz', `Release' and `Release.gpg'
        # are created in a temporary directory. Only once all of the files have been
        # successfully generated are they moved to the repository directory. There
        # are two reasons for this:
        #
        # 1. If the repository directory is being served to apt-get clients we
        #    don't want them to catch us in the middle of updating the repository
        #    because it will be in an inconsistent state.
        #
        # 2. If we fail to generate one of the files it's better not to have
        #    changed any of them, for the same reason as point one :-)
        logger.info("%s trivial repository %s ..",
                    "Updating" if metadata_last_updated else "Creating",
                    directory)
        temporary_directory = tempfile.mkdtemp()
        try:
            # Generate the `Packages' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Packages')))
            scan_packages(repository=directory,
                          packages_file=os.path.join(temporary_directory,
                                                     'Packages'),
                          cache=cache)
            # Generate the `Packages.gz' file by compressing the `Packages' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Packages.gz')))
            execute("gzip < Packages > Packages.gz",
                    directory=temporary_directory,
                    logger=logger)
            # Generate the `Release' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Release')))
            # Get APT::FTPArchive::Release::* options from configuration file.
            release_fields = dict(
                (k.lower(), v) for k, v in release_fields.items())
            for name, value in load_config(directory).items():
                if name.startswith('release-'):
                    name = re.sub('^release-', '', name)
                    if name not in release_fields:
                        release_fields[name] = value
            # Override APT::FTPArchive::Release::* options from configuration file
            # with options given to update_repository() explicitly by the caller.
            options = []
            for name, value in release_fields.items():
                name = 'APT::FTPArchive::Release::%s' % name.capitalize()
                options.append('-o %s' % pipes.quote('%s=%s' % (name, value)))
            command = "LANG= apt-ftparchive %s release ." % ' '.join(options)
            release_listing = execute(command,
                                      capture=True,
                                      directory=temporary_directory,
                                      logger=logger)
            with open(os.path.join(temporary_directory, 'Release'),
                      'w') as handle:
                handle.write(release_listing + '\n')
            # Generate the `Release.gpg' file by signing the `Release' file with GPG?
            gpg_key_file = os.path.join(directory, 'Release.gpg')
            if gpg_key:
                logger.debug("Generating file: %s", format_path(gpg_key_file))
                initialize_gnupg()
                command = "{gpg} --armor --sign --detach-sign --output Release.gpg Release"
                execute(command.format(gpg=gpg_key.gpg_command),
                        directory=temporary_directory,
                        logger=logger)
            elif os.path.isfile(gpg_key_file):
                # XXX If 1) no GPG key was provided, 2) apt doesn't require the
                # repository to be signed and 3) `Release.gpg' exists from a
                # previous run, this file should be removed so we don't create an
                # inconsistent repository index (when `Release' is updated but
                # `Release.gpg' is not updated the signature becomes invalid).
                os.unlink(gpg_key_file)
            # Move the generated files into the repository directory.
            for entry in os.listdir(temporary_directory):
                shutil.copy(os.path.join(temporary_directory, entry),
                            os.path.join(directory, entry))
            logger.info("Finished updating trivial repository in %s.", timer)
        finally:
            shutil.rmtree(temporary_directory)
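A minimal usage sketch for the function above (the pathname and Release fields are hypothetical; in deb-pkg-tools the function is importable from deb_pkg_tools.repo):

from deb_pkg_tools.repo import update_repository

# Hypothetical directory containing *.deb archives.
update_repository('/srv/apt/my-repo',
                  release_fields=dict(origin='Example', label='Example'))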
Example #43
def lookup_node_ip():
    """Look up this node's private IPv4 address via the EC2 instance metadata service."""
    node_ip = execute(
        "curl -s http://169.254.169.254/latest/meta-data/local-ipv4",
        capture=True)
    return node_ip
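For comparison, a sketch of the same lookup without shelling out to curl, using only the standard library (169.254.169.254 is the well-known EC2 instance metadata endpoint; this uses IMDSv1, which may be unavailable on instances that require IMDSv2):

import urllib.request

def lookup_node_ip_stdlib():
    # Query the EC2 instance metadata service directly.
    url = "http://169.254.169.254/latest/meta-data/local-ipv4"
    with urllib.request.urlopen(url, timeout=2) as response:
        return response.read().decode()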
Example #44
    def dashboard(self):
        execute("rq-dashboard", "--bind", "localhost", "--redis-url",
                self.config.redis_url)
Example #45
def main(M, N_list, k_list, scheme_list, workload_size, workload_type_list,
         workload_skew_list, metric_list, iterations):
    pool = mp.Pool(processes=mp.cpu_count())

    output_dir = os.path.join("results", "E42")
    executor.execute("mkdir -p %s" % output_dir)

    # workload parameters
    workload_list = {
        "uniform-base": {
            "type": "uniform"
        },
        "beta-base": {
            "type": "beta",
            "alpha": 2,
            "beta": 2
        },
        "normal-base": {
            "type": "normal",
            "loc": 0,
            "scale": 1
        },
        "powerlaw-base": {
            "type": "powerlaw",
            "shape": 3
        },
        "gamma-base": {
            "type": "gamma",
            "shape": 5
        },
    }

    # generate workloads
    workload_records = {}
    for i in range(iterations):
        for workload_type in workload_type_list:
            num_partitions = M * k_list[-1]
            workload_name = "{}-base".format(workload_type)
            af_list = generate_workload(workload_size, num_partitions,
                                        workload_list[workload_name])
            workload_records[(workload_name, i)] = af_list

    # put every simulation job into the queue
    simulation_results = {}
    for workload_type, workload_skew, scheme in itertools.product(
            workload_type_list, workload_skew_list, scheme_list):
        wid = (workload_type, workload_skew, scheme)
        simulation_results[wid] = {}
        for N in N_list:
            simulation_results[wid][N] = {}
            for k in k_list:
                simulation_run_list = []
                workload_name = "{}-{}".format(workload_type, workload_skew)
                for i in range(iterations):

                    # check existence
                    record_file = os.path.join(
                        output_dir, "{}_{}_M{}_N{}_k{}_{}.loads".format(
                            workload_type, scheme, M, N, k, i + 1))
                    if os.path.exists(record_file):
                        simulation_run_list.append(None)
                        continue

                    af_list = workload_records[(workload_name, i)]
                    k_largest = k_list[-1]
                    aggregate_ratio = int(k_largest / k)
                    num_partitions = M * k
                    partition_workload = [
                        sum(af_list[i * aggregate_ratio:(i + 1) *
                                    aggregate_ratio])
                        for i in range(num_partitions)
                    ]
                    simulation_run_list.append(
                        pool.apply_async(analyze_placement_worker,
                                         (M, N, k, scheme, partition_workload,
                                          workload_name, False)))
                simulation_results[wid][N][k] = simulation_run_list

    pool.close()
    pool.join()

    # get all the results
    simulation_data = {}
    for workload_type, workload_skew, scheme in itertools.product(
            workload_type_list, workload_skew_list, scheme_list):
        wid = (workload_type, workload_skew, scheme)
        simulation_data[wid] = {}
        simulation_output = {}
        for N in N_list:
            simulation_data[wid][N] = {}
            simulation_output[N] = {}
            for k in k_list:
                simulation_data[wid][N][k] = [
                    r.get()[0] if r is not None else None
                    for r in simulation_results[wid][N][k]
                ]

                raw_data_list = [
                    r.get()[1:] if r is not None else (None, None)
                    for r in simulation_results[wid][N][k]
                ]
                for i in range(len(raw_data_list)):
                    load_list, color_list = raw_data_list[i]
                    if load_list is None:
                        continue
                    load_output_path = os.path.join(
                        output_dir, "{}_{}_M{}_N{}_k{}_{}.loads".format(
                            workload_type, scheme, M, N, k, i + 1))
                    color_output_path = os.path.join(
                        output_dir, "{}_{}_M{}_N{}_k{}_{}.colors".format(
                            workload_type, scheme, M, N, k, i + 1))
                    with open(load_output_path, 'w') as f:
                        for load in load_list:
                            f.write("{}\n".format(load))
                    with open(color_output_path, 'w') as f:
                        for color in color_list:
                            f.write("{}\n".format(color))
Example #46
def clean():
    execute("find . -name '*.pyc' -delete")
    execute("find . -name '__pycache__' -delete")
Example #47
    def evaluate_model(self, model):
        while isinstance(
                model,
            (ResizingSaliencyMapModel, IgnoreColorChannelSaliencyMapModel)):
            model = model.parent_model

        tmp_root = 'tmp'
        os.makedirs(tmp_root, exist_ok=True)

        with TemporaryDirectory(dir=tmp_root) as temp_dir:
            if isinstance(model, SaliencyMapModelFromDirectory):
                saliency_map_directory = os.path.abspath(model.directory)

                exts = [
                    os.path.splitext(filename)[-1] for filename in model.files
                ]

            elif isinstance(model, SaliencyMapModelFromArchive):
                print("Extracting predictions")
                saliency_map_directory = os.path.abspath(
                    os.path.join(temp_dir, 'saliency_maps'))
                os.makedirs(saliency_map_directory)

                exts = []
                stimuli_filenames = get_minimal_unique_filenames(
                    self.stimuli.filenames)
                for i in tqdm(range(len(self.stimuli))):
                    filename = model.files[i]
                    basename = os.path.basename(filename)
                    ext = os.path.splitext(basename)[-1]

                    if ext.lower() in ['.mat', '.npy']:
                        saliency_map = model.saliency_map(self.stimuli[i])
                        saliency_map = saliency_map - saliency_map.min()
                        saliency_map /= saliency_map.max()
                        saliency_map *= 255
                        saliency_map = saliency_map.astype(np.uint8)
                        image = Image.fromarray(saliency_map)
                        target_filename = os.path.splitext(
                            stimuli_filenames[i])[0] + '.png'
                        target_filename = os.path.join(saliency_map_directory,
                                                       target_filename)
                        print(filename, target_filename)
                        os.makedirs(os.path.dirname(target_filename),
                                    exist_ok=True)
                        image.save(target_filename)
                        exts.append('.png')
                    else:
                        target_filename = os.path.splitext(
                            stimuli_filenames[i])[0] + ext
                        target_filename = os.path.join(saliency_map_directory,
                                                       target_filename)
                        print(filename, target_filename)
                        os.makedirs(os.path.dirname(target_filename),
                                    exist_ok=True)
                        with open(target_filename, 'wb') as out_file:
                            out_file.write(model.archive.open(filename).read())
                        # check for three channels
                        image = Image.open(target_filename)
                        if np.array(image).ndim == 3:
                            print("Converting to grayscale")
                            image.convert('L').save(target_filename)
                        exts.append(ext)
            elif isinstance(model, HDF5SaliencyMapModel):
                print("Saving predictions to images")
                saliency_map_directory = os.path.abspath(
                    os.path.join(temp_dir, 'saliency_maps'))
                os.makedirs(saliency_map_directory)

                for i in tqdm(range(len(self.stimuli))):
                    saliency_map = model.saliency_map(self.stimuli[i])

                    # NOTE: np.float was removed in NumPy 1.24; the builtin
                    # float covers the same (float64) case.
                    if saliency_map.dtype in [np.float32, np.float64, float]:
                        saliency_map -= saliency_map.min()
                        saliency_map /= saliency_map.max()
                        saliency_map *= 255
                        saliency_map = saliency_map.astype(np.uint8)

                    filename = self.stimuli.filenames[i]
                    basename = os.path.basename(filename)
                    stem = os.path.splitext(basename)[0]

                    target_filename = os.path.join(saliency_map_directory,
                                                   stem + '.png')
                    imwrite(target_filename, saliency_map)
                exts = ['.png']
            else:
                raise TypeError(
                    "Can't evaluate model of type {} with matlab".format(
                        type(model)))

            if len(set(exts)) > 1:
                raise ValueError(
                    "Matlab cannot handle submissions with different filetypes: {}"
                    .format(set(exts)))
            ext = exts[0].split('.')[-1]

            results_dir = os.path.abspath(os.path.join(temp_dir, 'results'))
            os.makedirs(results_dir)

            evaluation_command = f'TestNewModels(\'{saliency_map_directory}\', \'{results_dir}\', [], [], [], \'{ext}\')'
            evaluation_command = f'try, {evaluation_command}, catch me, fprintf(\'%s / %s\\n\',me.identifier,me.message), exit(1), end, exit'

            command = (f'matlab' + ' -nodisplay' + ' -nosplash' +
                       ' -nodesktop' + ' -r' + f' "{evaluation_command}"')
            print(command)

            execute(command, directory=self.code_directory)

            with open(os.path.join(results_dir, 'results.txt')) as f:
                results_txt = f.read()

            return self.extract_results(results_txt)
Example #48
def start_gpg_agent():
    """Start a new gpg-agent daemon in the background."""
    execute('gpg-agent', '--daemon', silent=True)
Example #49
def doc():
    execute("rm -rf build/ doc/build/ doc/api/")
    execute("sphinx-build -W -b singlehtml doc doc/build")
Example #50
def switch_data_placement(data_placement,
                          data_dir="/var/lib/HPCCSystems/hpcc-data/roxie",
                          storage_type='local'):

    logger = logging.getLogger('.'.join([__name__, "switch_data_placement"]))
    logger.info("Executing data placement")

    def hide_files(nodes, data_dir):
        with parallel.CommandAgent(concurrency=len(nodes),
                                   show_result=False) as agent:
            cmd = "for d in `find " + data_dir + " -type d`; do echo $d; ls -F $d | grep -v '[/@=|]$' | sudo xargs -I {} mv $d/{} $d/.{}; done"
            agent.submit_remote_commands(nodes, cmd, silent=True)

    def hide_link_files(nodes, data_dir):
        with parallel.CommandAgent(concurrency=len(nodes),
                                   show_result=False) as agent:
            cmd = "find " + data_dir + " -type l | grep roxie | sudo xargs -I {} unlink {}"
            # logger.info(cmd)
            agent.submit_remote_commands(nodes, cmd, silent=True)

    def hide_link_files2(nodes, data_dir):
        with parallel.CommandAgent(concurrency=len(nodes),
                                   show_result=False) as agent:
            cmd = "for d in `find " + data_dir + " -type d | grep roxie`; do echo $d; ls $d | grep sorted | sudo xargs -I {} mv $d/{} $d/.{}; done"
            #logger.info(cmd)
            agent.submit_remote_commands(nodes, cmd, silent=True)

    def show_index_files(nodes, data_dir):
        with parallel.CommandAgent(concurrency=len(nodes),
                                   show_result=False) as agent:
            cmd = "for d in `find " + data_dir + " -type d`; do echo $d; ls -a $d | grep '^\.idx' | cut -c 2- | xargs -I {} sudo mv $d/.{} $d/{}; done"
            agent.submit_remote_commands(nodes, cmd, silent=True)

    def get_hidden_partition(partition):
        return os.path.dirname(partition) + "/." + os.path.basename(partition)

    def hide_files_nfs(nodes, data_dir):
        with parallel.CommandAgent(concurrency=len(nodes),
                                   show_result=False) as agent:
            for node in nodes:
                node_data_dir = os.path.join(
                    data_dir, node, 'roxie')  # default = /dataset/ip/roxie
                cmd = "for d in `find " + node_data_dir + " -type d`; do echo $d; ls -F $d | grep -v '[/@=|]$' | sudo xargs -I {} mv $d/{} $d/.{}; done"
                #execute(cmd)
                agent.submit_command(cmd)

    def show_index_files_nfs(nodes, data_dir):
        with parallel.CommandAgent(concurrency=len(nodes),
                                   show_result=False) as agent:
            for node in nodes:
                node_data_dir = os.path.join(
                    data_dir, node, 'roxie')  # default = /dataset/ip/roxie
                cmd = "for d in `find " + node_data_dir + " -type d`; do echo $d; ls -a $d | grep '^\.idx' | cut -c 2- | xargs -I {} sudo mv $d/.{} $d/{}; done"
                #execute(cmd)
                agent.submit_command(cmd)

    def modify_nfs_path(node_ip, file_path):
        return os.path.join("/",
                            file_path.split('/')[1], node_ip,
                            *file_path.split('/')[2:])

    logger.info("Data storage type is {}".format(storage_type))
    logger.info("Data dir is {}".format(data_dir))

    if storage_type == 'nfs':
        logger.info("Hiding all data files")
        hide_files_nfs(data_placement.locations.keys(), data_dir=data_dir)
        logger.info("Showing all index files")
        show_index_files_nfs(data_placement.locations.keys(),
                             data_dir=data_dir)
        logger.info("Showing necessary data files")
        for node, partition_list in data_placement.locations.items():
            for partition in set(partition_list):
                partition_on_nfs = modify_nfs_path(node, partition)
                execute("sudo mv {} {}".format(
                    get_hidden_partition(partition_on_nfs), partition_on_nfs))
    elif storage_type == 'local_link':
        logger.info("Hiding all data files")
        hide_link_files(data_placement.locations.keys(), data_dir=data_dir)
        # logger.info("Showing all index files")
        # show_index_files(data_placement.locations.keys(), data_dir=data_dir)
        logger.info("Showing necessary data files")
        with parallel.CommandAgent(concurrency=8, show_result=False) as agent:
            for node, partition_list in data_placement.locations.items():
                for partition in set(partition_list):
                    if partition.startswith('/dataset'):
                        partition_rename = partition.replace(
                            "/dataset", data_dir)
                        # workaround
                        agent.submit_remote_command(
                            node,
                            "sudo ln -s /{}/roxie/mybenchmark/.data_sorted_people_firstname_0._1_of_1 {}"
                            .format(data_dir, partition_rename),
                            capture=False,
                            silent=True)
    elif storage_type == 'local_link_16':
        # hard coded here
        logger.info("Hiding all data files")
        hide_link_files(data_placement.locations.keys(), data_dir=data_dir)
        # no need because all index files are copied
        # logger.info("Showing all index files")
        # show_index_files(data_placement.locations.keys(), data_dir=data_dir)
        logger.info("Showing necessary data files")
        with parallel.CommandAgent(concurrency=8, show_result=False) as agent:
            for node, partition_list in data_placement.locations.items():
                for partition in set(partition_list):
                    if partition.startswith('/dataset'):
                        partition_rename = partition.replace(
                            "/dataset", data_dir)
                        partition_id = int(
                            partition_rename.split('.')[-2].split('_')[-1])

                        agent.submit_remote_command(
                            node,
                            "sudo mv {} {}".format(
                                get_hidden_partition(partition_rename),
                                partition_rename),
                            capture=False,
                            silent=True)

    else:
        logger.info("Hiding all data files")
        hide_files(data_placement.locations.keys(), data_dir=data_dir)
        logger.info("Showing all index files")
        show_index_files(data_placement.locations.keys(), data_dir=data_dir)
        logger.info("Showing necessary data files")
        with parallel.CommandAgent(concurrency=8, show_result=False) as agent:
            for node, partition_list in data_placement.locations.items():
                #logger.info("Host: {}".format(node))
                # remove duplicate partition to support monochromatic
                #logger.info(partition_list)
                for partition in set(partition_list):
                    #logger.info("\tpartition={}".format(partition))
                    agent.submit_remote_command(
                        node,
                        "sudo mv {} {}".format(get_hidden_partition(partition),
                                               partition),
                        capture=False,
                        silent=True)
Example #51
def coverage():
    execute("pytest --cov=tri_again")
Example #52
def doc_open():
    execute("open doc/build/index.html")
Example #53
def install():
    execute("poetry install --remove-untracked")
Example #54
def main():
    init.setup_logging(default_level=logging.DEBUG,
                       config_path="conf/logging.yaml",
                       log_dir="logs",
                       component="simulation")
    script_dir = os.path.dirname(os.path.realpath(__file__))

    workload_list = {
        #"uniform-base": {"type": "uniform"},
        # "beta-least": {"type": "beta", "alpha": 1, "beta": 1},
        # "beta-less": {"type": "beta", "alpha": 1.5, "beta": 1.5},
        #"beta-base": {"type": "beta", "alpha": 2, "beta": 2},
        # "beta-more": {"type": "beta", "alpha": 4, "beta": 4},
        # "beta-most": {"type": "beta", "alpha": 5, "beta": 5},
        #"normal-base": {"type": "normal", "loc": 0, "scale": 1},
        # "powerlaw-least": {"type": "powerlaw", "shape": 2},
        # "powerlaw-less": {"type": "powerlaw", "shape": 2.5},
        "powerlaw-base": {
            "type": "powerlaw",
            "shape": 3
        },
        # "powerlaw-more": {"type": "powerlaw", "shape": 4},
        # "powerlaw-most": {"type": "powerlaw", "shape": 5},
        # "gamma-least": {"type": "gamma", "shape": 7},
        # "gamma-less": {"type": "gamma", "shape": 6},
        #"gamma-base": {"type": "gamma", "shape": 5},
        # "gamma-more": {"type": "gamma", "shape": 4},
        # "gamma-most": {"type": "gamma", "shape": 3},
    }

    iterations = 1

    M = 4
    N = 8
    #scheme_list = ['rainbow', 'monochromatic']
    scheme_list = ['rainbow']
    #scheme_list = ['monochromatic']
    #workload_type_list = ['uniform', 'beta', 'normal', 'powerlaw', 'gamma']
    #workload_type_list = ['normal']
    workload_type_list = ['powerlaw']
    #workload_skew_list = ['least', 'less', 'base', 'more', 'most']
    workload_skew_list = ['base']

    workload_size = 30000
    #k_list = [1, 4, 8, 16, 32, 64, 128]
    #k_list = [1, 4, 8, 16]
    #k_list = [4, 8]
    #k_list = [8, 16]
    k_list = [1, 4]

    # start multi-iteration simulation

    # reuse workload
    workload_records = {}
    for i in range(iterations):
        for workload_type in workload_type_list:
            num_partitions = M * k_list[-1]
            workload_name = "{}-base".format(workload_type)
            af_list = generate_workload(workload_size, num_partitions,
                                        workload_list[workload_name])
            workload_records[(workload_name, i)] = af_list

    for scheme in scheme_list:
        skew_records = {}
        for k in k_list:
            skew_records[k] = {}
            for workload_type, workload_skew in itertools.product(
                    workload_type_list, workload_skew_list):
                skew_score_list = []
                for i in range(iterations):
                    workload_name = "{}-{}".format(workload_type,
                                                   workload_skew)
                    af_list = workload_records[(workload_name, i)]
                    print("##", sum(af_list))
                    k_largest = k_list[-1]
                    aggregate_ratio = int(k_largest / k)
                    num_partitions = M * k
                    partition_workload = [
                        sum(af_list[i * aggregate_ratio:(i + 1) *
                                    aggregate_ratio])
                        for i in range(num_partitions)
                    ]
                    print("$$", sum(partition_workload))
                    #skew_score = analyze_skew(M, N, k, scheme, partition_workload)
                    skew_score = analyze_partition_skew(
                        M, N, k, scheme, partition_workload)
                    skew_score_list.append(skew_score)
                skew_records[k][workload_name] = sum(skew_score_list) / len(
                    skew_score_list)

        print('=================')
        print(json.dumps(skew_records, indent=True, sort_keys=True))

        default_setting = ExperimentConfig.new()
        default_setting.set_config('experiment.id', 'E29')
        output_dir = os.path.join(default_setting['experiment.result_dir'],
                                  default_setting['experiment.id'])
        output_path = os.path.join(
            output_dir,
            "M{}_N{}_k{}_s{}_{}.json".format(M, N, k_list[-1], workload_size,
                                             scheme))
        #output_path = os.path.join(output_dir, "{}.json".format(scheme))

        executor.execute("mkdir -p %s" % output_dir)
        with open(output_path, 'w') as f:
            json.dump(skew_records, f, indent=True)
Example #55
def download_package(ctx, version):
    execute(
        "wget -O hpccsystems-platform-community_5.4.6-1trusty_amd64.deb http://wpc.423a.rhocdn.net/00423A/releases/CE-Candidate-5.4.6/bin/platform/hpccsystems-platform-community_5.4.6-1trusty_amd64.deb",
        silent=True)
Example #56
def build_package(directory,
                  repository=None,
                  check_package=True,
                  copy_files=True):
    """
    Create a Debian package using the ``dpkg-deb --build`` command. The
    ``dpkg-deb --build`` command requires a certain directory tree layout and
    specific files; for more information about this topic please refer to the
    `Debian Binary Package Building HOWTO`_. The :py:func:`build_package()`
    function performs the following steps to build a package:

    1. Copies the files in the source directory to a temporary build directory.
    2. Updates the Installed-Size_ field in the ``DEBIAN/control`` file
       based on the size of the given directory (using
       :py:func:`update_installed_size()`).
    3. Sets the owner and group of all files to ``root`` because this is the
       only user account guaranteed to always be available. This uses the
       ``fakeroot`` command so you don't actually need ``root`` access to use
       :py:func:`build_package()`.
    4. Runs the command ``fakeroot dpkg-deb --build`` to generate a Debian
       package from the files in the build directory.
    5. Runs Lintian_ to check the resulting package archive for possible
       issues. The result of Lintian is purely informational: If 'errors' are
       reported and Lintian exits with a nonzero status code, this is ignored
       by :py:func:`build_package()`.

    If any of the external commands invoked by this function fail,
    :py:exc:`executor.ExternalCommandFailed` is raised. If this function
    returns without raising an exception, the generated Debian package can be
    found in the parent directory of the directory given as the first
    argument.

    :param directory: The pathname of a directory tree suitable for packaging
                      with ``dpkg-deb --build``.
    :param repository: The pathname of an existing directory where the
                       generated ``*.deb`` archive should be stored (defaults
                       to the system wide temporary directory).
    :param check_package: If ``True`` (the default) Lintian_ is run to check
                          the resulting package archive for possible issues.
    :param copy_files: If ``True`` (the default) the package's files are copied
                       to a temporary directory before being modified. You can
                       set this to ``False`` if you're already working on a
                       copy and don't want yet another copy to be made.
    :returns: The pathname of the generated ``*.deb`` archive.

    .. _Debian Binary Package Building HOWTO: http://tldp.org/HOWTO/html_single/Debian-Binary-Package-Building-HOWTO/
    .. _Installed-Size: http://www.debian.org/doc/debian-policy/ch-controlfields.html#s-f-Installed-Size
    .. _Lintian: http://lintian.debian.org/
    """
    if not repository:
        repository = tempfile.gettempdir()
    package_file = os.path.join(repository,
                                determine_package_archive(directory))
    logger.debug("Preparing to build package: %s", format_path(package_file))
    try:
        if copy_files:
            build_directory = tempfile.mkdtemp()
            logger.debug("Created build directory: %s",
                         format_path(build_directory))
            # This no longer uses hard links because of all the file permission
            # magic going on further down in this function (permissions are
            # shared between all hard links pointing to an inode).
            copy_package_files(directory, build_directory, hard_links=False)
        else:
            build_directory = directory
        clean_package_tree(build_directory)
        update_conffiles(build_directory)
        update_installed_size(build_directory)
        # Sanitize the permission bits of the root directory. Most build
        # directories will have been created with tempfile.mkdtemp() which
        # creates the directory with mode 0700. The Debian packaging system
        # really doesn't care about any of this, but:
        #
        #  1. It looks weird in the output of ``deb-pkg-tools -i`` :-)
        #  2. When you convert a ``*.deb`` to ``*.rpm`` with Alien and install
        #     the RPM the 0700 mode is actually applied to the system where you
        #     install the package. As you can imagine, the results are
        #     disastrous...
        os.chmod(build_directory, 0o755)
        if ALLOW_CHOWN:
            # Make sure all files included in the package are owned by `root'
            # (the only account guaranteed to exist on all systems).
            root_user = os.environ.get('DPT_ROOT_USER', 'root')
            root_group = os.environ.get('DPT_ROOT_GROUP', 'root')
            user_spec = '%s:%s' % (root_user, root_group)
            logger.debug("Resetting file ownership (to %s) ..", user_spec)
            execute('chown',
                    '-R',
                    user_spec,
                    build_directory,
                    fakeroot=ALLOW_FAKEROOT_OR_SUDO,
                    logger=logger)
        # Reset the file modes of pre/post installation/removal scripts.
        for script_name in ('preinst', 'postinst', 'prerm', 'postrm'):
            script_path = os.path.join(build_directory, 'DEBIAN', script_name)
            if os.path.isfile(script_path):
                logger.debug("Resetting file modes (%s to 755) ..",
                             script_path)
                os.chmod(script_path, 0o755)
        # System packages generally install files that are read only and
        # readable (and possibly executable) for everyone (owner, group and
        # world) so we'll go ahead and remove some potentially harmful
        # permission bits (harmful enough that Lintian complains about them).
        logger.debug("Resetting file modes (go-w) ..")
        execute('chmod',
                '-R',
                'go-w',
                build_directory,
                fakeroot=ALLOW_FAKEROOT_OR_SUDO,
                logger=logger)
        # Remove the setgid bit from all directories in the package. Rationale:
        # In my situation package templates are stored in a directory where a
        # team of people have push access (I imagine that this is a common
        # setup). To facilitate shared push access a shared primary UNIX group
        # is used with the setgid bit on directories. However dpkg-deb *really*
        # doesn't like this, failing with the error "dpkg-deb: control
        # directory has bad permissions 2755 (must be >=0755 and <=0775)".
        if ALLOW_RESET_SETGID:
            logger.debug("Removing setgid bit from directories (g-s) ..")
            execute('find -type d -print0 | xargs -0 chmod g-s',
                    directory=build_directory,
                    fakeroot=ALLOW_FAKEROOT_OR_SUDO,
                    logger=logger)
        # Make sure files in /etc/sudoers.d have the correct permissions.
        sudoers_directory = os.path.join(build_directory, 'etc', 'sudoers.d')
        if os.path.isdir(sudoers_directory):
            for filename in os.listdir(sudoers_directory):
                pathname = os.path.join(sudoers_directory, filename)
                logger.debug("Resetting file modes (%s to 440) ..", pathname)
                os.chmod(pathname, 0o440)
        # Build the package using `dpkg-deb'.
        logger.info("Building package in %s ..", format_path(build_directory))
        execute('dpkg-deb',
                '--build',
                build_directory,
                package_file,
                fakeroot=ALLOW_FAKEROOT_OR_SUDO,
                logger=logger)
        # Check the package for possible issues using Lintian?
        if check_package:
            if not os.access('/usr/bin/lintian', os.X_OK):
                logger.warning(
                    "Lintian is not installed, skipping sanity check.")
            else:
                logger.info("Checking package for issues using Lintian ..")
                lintian_command = ['lintian']
                if os.getuid() == 0:
                    lintian_command.append('--allow-root')
                lintian_command.append('--color=auto')
                lintian_command.append(package_file)
                execute(*lintian_command, logger=logger, check=False)
        return package_file
    finally:
        if copy_files:
            logger.debug("Removing build directory: %s",
                         format_path(build_directory))
            shutil.rmtree(build_directory)
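A minimal usage sketch for build_package() (hypothetical paths; in deb-pkg-tools the function lives in deb_pkg_tools.package):

from deb_pkg_tools.package import build_package

# Hypothetical directory tree containing a DEBIAN/control file.
archive = build_package('/tmp/example-package-1.0',
                        repository='/srv/apt/incoming')
print(archive)  # pathname of the generated *.deb archive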
Example #57
def lookup_dfs_files(esp_ip):
    cmd = "/opt/HPCCSystems/bin/dfuplus server={} action=list name=*"\
          .format(esp_ip)
    output = execute(cmd, capture=True)
    file_list = output.strip().split('\n')[1:]
    return file_list
Example #58
def run_ecl(ecl_program, target_cluster, esp_ip, job_name='resize'):
    cmd = "/opt/HPCCSystems/bin/ecl run -v --server {} --target {} --name={} {}".format(
        esp_ip, target_cluster, job_name, ecl_program)
    return execute(cmd)
Example #59
def upload():
    execute("rm -rf dist/")
    execute("python setup.py sdist")
    execute("twine upload dist/*")
Example #60
def publish():
    execute("rm -rf dist/ build/")
    execute("poetry build")
    execute("twine upload dist/*")