Exemple #1
0
 def apply_and_restart(self):
     """Restart the supervisord 'x' process group and reset cached state."""
     # Tell supervisord to bounce every process in the 'x' group.
     gsp.check_call([
         'supervisorctl', '-c', '/etc/supervisor/supervisord.conf',
         'restart', 'x:'
     ])
     # Invalidate the cached width/height/health so they get re-read,
     # then let observers know something changed.
     self._w = None
     self._h = None
     self._health = None
     self.notify()
Exemple #2
0
def delete_rule(table, chain, rule_text):
    """Delete an iptables rule, best-effort.

    Builds ``iptables -t <table> -D <chain> <rule_text>`` and runs it.
    Failures are logged (with traceback) and swallowed so teardown can
    continue even when the rule no longer exists.
    """
    command = 'iptables -t %s -D %s %s' % (table, chain, rule_text)
    LOGGER.info('delete rule: %s' % command)
    try:
        subprocess.check_call(shlex.split(command))
    except (subprocess.CalledProcessError, OSError):
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; the call stays best-effort for real failures.
        LOGGER.exception('failed to delete rule: %s' % command)
Exemple #3
0
def delete_rule(table, chain, rule_text):
    """Delete an iptables rule, best-effort.

    Builds ``iptables -t <table> -D <chain> <rule_text>`` and runs it.
    Failures are logged (with traceback) and swallowed so teardown can
    continue even when the rule no longer exists.
    """
    command = 'iptables -t %s -D %s %s' % (table, chain, rule_text)
    LOGGER.info('delete rule: %s' % command)
    try:
        subprocess.check_call(shlex.split(command))
    except (subprocess.CalledProcessError, OSError):
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; the call stays best-effort for real failures.
        LOGGER.exception('failed to delete rule: %s' % command)
Exemple #4
0
def install_apt_packages(pkgs):
    """Install *pkgs* non-interactively via apt-get.

    Uses passwordless sudo (-n) and keeps existing config files on
    upgrade (--force-confold). Raises CalledProcessError on failure.
    """
    base_cmd = [
        "sudo", "-n", "apt-get", "-y",
        "--option=Dpkg::Options::=--force-confold", "install",
    ]
    subprocess.check_call(base_cmd + list(pkgs))
 def apply_and_restart(self):
     """Restart the supervisord 'x' process group, then clear cached state."""
     restart_cmd = [
         'supervisorctl', '-c', '/etc/supervisor/supervisord.conf',
         'restart', 'x:'
     ]
     gsp.check_call(restart_cmd)
     # Force re-reads of width/height/health and notify observers.
     self._w = self._h = self._health = None
     self.notify()
    def _notebook_copier(self, code_directory, task):
        """
        Copies main.ipynb to the S3 bucket for the given +task_id+.

        Runs forever (intended to be spawned as a greenlet — see the
        gevent.sleep at the bottom): every 3 seconds it polls
        __saved.ipynb in *code_directory* and, when its mtime has
        advanced, uploads the raw notebook plus an HTML rendering to the
        'ml-submissions' S3 bucket under results/<task_id>.
        """

        # mtime of the last successful upload; 0 forces the first push.
        previous_time = 0
        path = os.path.join(code_directory, "__saved.ipynb")

        while True:

            # Ensure that we've actually changed since the last time.
            if os.path.exists(path) and os.path.getmtime(path) > previous_time:

                # Log the current state.
                logging.info("Pushing notebook file to S3.")

                # Upload the ipynb file to S3.
                try:
                    connection = boto.connect_s3()
                    bucket = connection.get_bucket('ml-submissions')
                    key = bucket.new_key('results/{}.ipynb'.
                      format(task.task_id))
                    key.content_type = "text/json"

                    # Upload the resulting notebook.
                    key.set_contents_from_filename(path)

                    # Generate a rendered version of this notebook.
                    with tempfile.TemporaryFile() as fp:

                        # Convert notebook to an HTML file.
                        # NOTE(review): Python-2-era code — urllib.urlencode
                        # below only exists on Py2 (urllib.parse on Py3).
                        subprocess.check_call(["/usr/bin/env",
                          "ipython", "nbconvert",
                          "--to=html", "--stdout", path], stdout = fp)

                        # Construct a rendered URL.
                        render_url = urllib.urlencode(task.__dict__)

                        fp.write((
                          '<script type="script/javascript" src="https://lsda.c'
                          's.uchicago.edu/render.js?{}"></script>'
                        ).format(render_url))

                        fp.seek(0)

                        # Upload the HTML version of the notebook.
                        key = bucket.new_key('results/{}.html'.
                          format(task.task_id))
                        key.content_type = "text/html"
                        key.set_contents_from_file(fp)

                except Exception:
                    logging.exception("Unable to upload notebook.")

                else:
                    # Only advance the checkpoint on success so a failed
                    # upload is retried on the next poll.
                    previous_time = os.path.getmtime(path)


            gevent.sleep(3)
Exemple #7
0
 def fast_forward(path, remote_name, remote_branch):
     """Fast-forward the checkout at *path* to <remote_name>/<remote_branch>.

     Raises CalledProcessError when a fast-forward merge is not possible.
     """
     target = '%s/%s' % (remote_name, remote_branch)
     subprocess.check_call(
         ['git', 'merge', target, '--ff-only'],
         cwd=path
     )
Exemple #8
0
 def push(path, remote_name, local_branch, remote_branch):
     """Push *local_branch* in the repo at *path* to *remote_branch* on *remote_name*."""
     refspec = '%s:%s' % (local_branch, remote_branch)
     subprocess.check_call(
         ['git', 'push', remote_name, refspec],
         cwd=path
     )
Exemple #9
0
 def start_new_branch(path, branch_name, remote_name, remote_branch):
     """Create and check out *branch_name* starting at <remote_name>/<remote_branch>."""
     start_point = '%s/%s' % (remote_name, remote_branch)
     subprocess.check_call(
         ['git', 'checkout', '-b', branch_name, start_point],
         cwd=path
     )
 def run_script(self, *args, **kwargs):
     """Run the example script for this case and assert its wall-clock runtime
     falls inside self.time_range (or the module default)."""
     cmd = [sys.executable, join(examples_directory, self.path)]
     cmd.extend(args)
     start = time()
     subprocess.check_call(cmd, **kwargs)
     took = time() - start
     min_time, max_time = getattr(self, 'time_range', default_time_range)
     # Too-fast usually means the example bailed out early; too-slow hangs CI.
     assert took >= min_time, '%s exited too quickly %s %s' % (self.path, took, min_time)
     assert took <= max_time, '%s takes way too long %s %s' % (self.path, took, max_time)
Exemple #11
0
 def set_size(self, w, h):
     """Rewrite the Xvfb launch line in xvfb.sh to a {w}x{h}x16 screen (via sed)."""
     sed_script = ('s#'
                   '^exec /usr/bin/Xvfb.*$'
                   '#'
                   'exec /usr/bin/Xvfb :1 -screen 0 {}x{}x16'
                   '#').format(w, h)
     gsp.check_call("sed -i '" + sed_script + "' /usr/local/bin/xvfb.sh",
                    shell=True)
     self.size_changed_count += 1
Exemple #12
0
def add_apt_ppa(ppa):
    """Register the apt PPA *ppa* via apt-add-repository (non-interactive sudo)."""
    cmd = ["sudo", "-n", "apt-add-repository", "-y", ppa]
    subprocess.check_call(cmd)
 def set_size(self, w, h):
     """Patch xvfb.sh so the Xvfb screen becomes {0}x{1}x16, and count the change."""
     command = (
         'sed -i \'s#'
         '^exec /usr/bin/Xvfb.*$'
         '#'
         'exec /usr/bin/Xvfb :1 -screen 0 {0}x{1}x16'
         '#\' /usr/local/bin/xvfb.sh'
     ).format(w, h)
     gsp.check_call(command, shell=True)
     self.size_changed_count += 1
Exemple #14
0
def insert_rule(optional, table, chain, rule_text):
    """Insert an iptables rule at the head of *chain* in *table*.

    When *optional* is truthy a failure is logged and skipped; otherwise
    the failure propagates to the caller.
    """
    command = 'iptables -t %s -I %s %s' % (table, chain, rule_text)
    LOGGER.info('insert %s rule: %s' % ('optional' if optional else 'mandatory', command))
    try:
        subprocess.check_call(shlex.split(command))
    except (subprocess.CalledProcessError, OSError):
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are never swallowed or re-raised as rule failures.
        if optional:
            LOGGER.exception('skip optional iptables rule')
        else:
            raise
Exemple #15
0
    def slice(self, stl):
        """
        Slice the *stl* file with the configured slicer command.

        Writes output.gcode into self._workdir. Returns None when the
        slicer fails. NOTE(review): the success path also falls through
        to an implicit None — callers presumably check for the output
        file rather than the return value; confirm before changing.
        """
        out = os.path.join(self._workdir, "output.gcode")
        args = [self.cura_cmd, '--slice']
        # TODO: support passing a settings file via ['--ini', settingsfile].
        args += ['--output', out, stl]

        try:
            subprocess.check_call(args)
        except Exception:
            # Fixed the Python-2-only 'except Exception, e' syntax (a
            # SyntaxError on Python 3); the bound exception was unused.
            return None
def _shutdown_instance():
    """
    Shuts down this instance and removes it from the worker pool.
    """
    halt_cmd = ["/usr/bin/sudo", "/sbin/shutdown", "-h", "now"]

    # Ask the OS to halt immediately (requires passwordless sudo).
    subprocess.check_call(halt_cmd)

    # Sleep in hour-long increments until the machine actually goes down.
    while True:
        gevent.sleep(3600)
Exemple #17
0
    def slice(self, stl):
        """
        Slice the *stl* file with the configured slicer command.

        Writes output.gcode into self._workdir. Returns None when the
        slicer fails. NOTE(review): the success path also falls through
        to an implicit None — callers presumably check for the output
        file rather than the return value; confirm before changing.
        """
        out = os.path.join(self._workdir, "output.gcode")
        args = [self.cura_cmd, '--slice']
        # TODO: support passing a settings file via ['--ini', settingsfile].
        args += ['--output', out, stl]

        try:
            subprocess.check_call(args)
        except Exception:
            # Fixed the Python-2-only 'except Exception, e' syntax (a
            # SyntaxError on Python 3); the bound exception was unused.
            return None
Exemple #18
0
def insert_rule(optional, table, chain, rule_text):
    """Insert an iptables rule at the head of *chain* in *table*.

    When *optional* is truthy a failure is logged and skipped; otherwise
    the failure propagates to the caller.
    """
    command = 'iptables -t %s -I %s %s' % (table, chain, rule_text)
    LOGGER.info('insert %s rule: %s' %
                ('optional' if optional else 'mandatory', command))
    try:
        subprocess.check_call(shlex.split(command))
    except (subprocess.CalledProcessError, OSError):
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are never swallowed or re-raised as rule failures.
        if optional:
            LOGGER.exception('skip optional iptables rule')
        else:
            raise
Exemple #19
0
def set_dependencies(requirement):
    """pip-install the extra dependencies registered under *requirement*.

    Looks *requirement* up in the module-level ``extras_require`` mapping;
    prints an error and returns early when the name is unknown.
    Raises CalledProcessError when pip fails.
    """
    try:
        dependencies_needed = extras_require[requirement]
    except KeyError:
        print("ERROR: Incorrect requirement chosen")
        return
    cmds = [sys.executable, "-m", "pip", "install"]
    # Idiom fix: extend() replaces the manual append loop; the dead
    # trailing bare `return` is dropped (implicit None either way).
    cmds.extend(dependencies_needed)
    subprocess.check_call(cmds)
Exemple #20
0
 def fetch_remote(path, remote_name):
     """Run ``git fetch <remote_name>`` in the repo at *path*.

     Returns False when git exits with status 128 (the code git uses for
     e.g. unknown remotes), True on success; any other failure propagates.
     """
     try:
         subprocess.check_call(['git', 'fetch', remote_name], cwd=path)
     except subprocess.CalledProcessError as e:
         if e.returncode != 128:
             raise
         return False
     return True
Exemple #21
0
def regen_docs(repo):
    """Regenerate README.recipes.md for *repo*.

    Raises:
        subprocess.CalledProcessError: if doc generation fails.
    """
    recipes_py = os.path.join(repo.recipes_root_path, 'recipes.py')
    subprocess.check_call([VPYTHON, recipes_py, 'doc', '--kind', 'gen'])
Exemple #22
0
 def clone(path, remote_url, remote_branch, remote_name='origin'):
     """Clone *remote_url* (branch *remote_branch*) into *path* as *remote_name*.

     Returns False when git exits with status 128, True on success; any
     other failure propagates.
     """
     cmd = [
         'git', 'clone', remote_url, '-b', remote_branch,
         '-o', remote_name, path,
     ]
     try:
         subprocess.check_call(cmd)
     except subprocess.CalledProcessError as e:
         if e.returncode != 128:
             raise
         return False
     return True
Exemple #23
0
def check_call(args):
    """Run *args*, going through an interactive ``su`` shell when USE_SU is set.

    Mirrors subprocess.check_call: returns 0 on success, raises
    CalledProcessError otherwise. NOTE(review): under USE_SU the args are
    joined with spaces and fed to the shell unquoted — arguments containing
    shell metacharacters are not safe here.
    """
    if not USE_SU:
        return subprocess.check_call(args)
    proc = subprocess.Popen('su', stdin=subprocess.PIPE)
    # Feed the command and an explicit exit so the shell terminates.
    proc.stdin.write(' '.join(args) + '\nexit\n')
    proc.communicate()
    retcode = proc.poll()
    if retcode:
        raise subprocess.CalledProcessError(retcode, args)
    return 0
Exemple #24
0
def check_call(args):
    """Run *args*, going through an interactive ``su`` shell when USE_SU is set.

    Mirrors subprocess.check_call: returns 0 on success, raises
    CalledProcessError otherwise. NOTE(review): under USE_SU the args are
    joined with spaces and fed to the shell unquoted — arguments containing
    shell metacharacters are not safe here.
    """
    if USE_SU:
        proc = subprocess.Popen('su', stdin=subprocess.PIPE)
        proc.stdin.write(' '.join(args))
        proc.stdin.write('\n')
        proc.stdin.write('exit\n')
        proc.communicate()
        # Fix: poll() was called twice; capture the exit status once so the
        # raised error carries exactly the code that was tested.
        retcode = proc.poll()
        if retcode:
            raise subprocess.CalledProcessError(retcode, args)
        return 0
    else:
        return subprocess.check_call(args)
Exemple #25
0
def check_call(args):
    """Run *args*; when USE_SU is set, ``exec`` the command inside an ``su`` shell.

    ``proc.terminate`` is monkey-patched to ``sudo_kill`` so callers can
    still terminate the root-owned child. Returns 0 on success, raises
    CalledProcessError otherwise (mirroring subprocess.check_call).
    """
    if not USE_SU:
        return subprocess.check_call(args)
    proc = subprocess.Popen('su', stdin=subprocess.PIPE)
    proc.terminate = functools.partial(sudo_kill, proc.pid)
    # exec replaces the shell so the child gets the command's exit status.
    proc.stdin.write('exec ' + ' '.join(args) + '\n')
    proc.communicate()
    retcode = proc.poll()
    if retcode:
        raise subprocess.CalledProcessError(retcode, args)
    return 0
Exemple #26
0
def check_call(args):
    """Run *args*; when USE_SU is set, ``exec`` the command inside an ``su`` shell.

    ``proc.terminate`` is monkey-patched to ``sudo_kill``. Returns 0 on
    success; raises CalledProcessError on any other exit status except
    -11 (child killed by SIGSEGV), which is deliberately tolerated.
    """
    if not USE_SU:
        return subprocess.check_call(args)
    proc = subprocess.Popen("su", stdin=subprocess.PIPE)
    proc.terminate = functools.partial(sudo_kill, proc.pid)
    proc.stdin.write("exec " + " ".join(args) + "\n")
    proc.communicate()
    retcode = proc.poll()
    # A negative code is -signal; -11 == SIGSEGV is treated as success here.
    if retcode and retcode != -11:
        raise subprocess.CalledProcessError(retcode, args)
    return 0
Exemple #27
0
    def run(cmd, *args, **kwargs):
        """
        Fake invoke-style runner: executes *cmd* through the user's $SHELL
        and returns a FakeInvokeResult whose ``ok`` is True iff the exit
        code was 0. Unexpected errors (e.g. $SHELL unset) are logged and
        leave ``ok`` at the FakeInvokeResult default.
        """
        result = FakeInvokeResult()

        try:
            # Rebinds `cmd` to the argv list; if environ["SHELL"] raises,
            # the warning below still sees the original string.
            cmd = [environ["SHELL"], cmd]
            try:
                code = check_call(cmd)
            except CalledProcessError as e:
                code = e.returncode
            result.ok = code == 0
        except Exception as e:
            logger.warning((
                "pyinfra encountered an error loading SSH config match exec {0}: {1}"
            ).format(
                cmd,
                e,
            ), )

        return result
Exemple #28
0
 def checkout_branch(path, branch_name):
     """Check out an existing branch in the repo at *path*."""
     subprocess.check_call(['git', 'checkout', branch_name], cwd=path)
Exemple #29
0
 def pull(path):
     """Run ``git pull`` in the repo at *path*; raises on failure."""
     subprocess.check_call(['git', 'pull'], cwd=path)
Exemple #30
0
 def config_set(path, field, value):
     """Set git config *field* to *value* in the repo at *path*."""
     subprocess.check_call(['git', 'config', field, value], cwd=path)
def test_forward_bacnet_cov_value(volttron_instance, test_agent):
    """Tests the functionality of BACnet change of value forwarding in the
    Platform Driver and driver.py"""
    # Reset platform driver config store
    # NOTE(review): check_call raises on a non-zero exit, so the
    # `assert retcode == 0` checks below can only ever see 0.
    cmd = ['volttron-ctl', 'config', 'delete', PLATFORM_DRIVER, '--all']
    retcode = check_call(cmd,
                         env=volttron_instance.env,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    assert retcode == 0

    # Add fake device configuration
    fake_csv_infile = os.path.join(get_volttron_root(),
                                   'examples/configurations/drivers/fake.csv')
    cmd = [
        'volttron-ctl', 'config', 'store', PLATFORM_DRIVER, 'fake.csv',
        fake_csv_infile, '--csv'
    ]
    retcode = check_call(cmd,
                         env=volttron_instance.env,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    assert retcode == 0

    # Store the fake driver's device config under devices/fakedriver.
    fakedriver_infile = os.path.join(
        get_volttron_root(), 'examples/configurations/drivers/fake.config')
    cmd = [
        'volttron-ctl', 'config', 'store', PLATFORM_DRIVER,
        "devices/fakedriver", fakedriver_infile, '--json'
    ]
    retcode = check_call(cmd,
                         env=volttron_instance.env,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    assert retcode == 0

    # install platform driver, start the platform driver, which starts the device
    platform_uuid = volttron_instance.install_agent(
        agent_dir=get_services_core("PlatformDriverAgent"),
        config_file={},
        start=True)
    print("agent id: ", platform_uuid)

    # tell the platform driver to forward the value
    point_name = "PowerState"
    device_path = "fakedriver"
    result_dict = {"fake1": "test", "fake2": "test", "fake3": "test"}
    test_agent.vip.rpc.call(PLATFORM_DRIVER, 'forward_bacnet_cov_value',
                            device_path, point_name, result_dict)
    # wait for the publishes to make it to the bus
    gevent.sleep(2)

    # Mock checks
    # Should have one "PowerState" publish for each item in the result dict
    # Total all publishes likely will include regular scrapes
    assert test_agent.cov_callback.call_count >= 3
    test_count = 0
    # Count only the publishes whose payload carries a "PowerState" key.
    for call_arg in test_agent.cov_callback.call_args_list:
        if call_arg[0][5][0].get("PowerState", False):
            test_count += 1
    assert test_count == 3
 def _teardown(self):
     """ We need to scrub the directory if we were signalled """
     # NOTE(review): glob('*') skips dotfiles, so hidden files survive.
     leftovers = glob(self._dir + '/*')
     subprocess.check_call(['rm', '-f'] + leftovers)
Exemple #33
0
 def _repoAddInternal(self, arch, pathname):
     """Add *pathname* to the per-arch package database, serialized by the repo lock."""
     db_path = os.path.join(self._repo_dir, arch, self._db_name)
     with self._repo_lock[arch]:
         subprocess.check_call((self._command_add, db_path, pathname))
Exemple #34
0
 def _run_deployer(self, bundle):
     """Deploy *bundle* to the model ``self.model`` via ``juju deploy``.

     Raises Exception when the bundle file does not exist, and
     CalledProcessError when juju fails.
     """
     # Idiom fix: `if x is False` replaced with `if not x`
     # (os.path.isfile returns a plain bool).
     if not os.path.isfile(bundle):
         raise Exception("No such bundle file: %s" % bundle)
     args = ["juju", "deploy", "-m", self.model, bundle]
     subprocess.check_call(args)
Exemple #35
0
 def _repoRemoveInternal(self, arch, name):
     """Remove *name* from the per-arch package database, serialized by the repo lock."""
     db_path = os.path.join(self._repo_dir, arch, self._db_name)
     with self._repo_lock[arch]:
         subprocess.check_call((self._command_remove, db_path, name))
    def _run_in_sandbox(self, task, command):
        """
        Runs the given type of process inside a project sandbox.

        task: submission metadata — this method reads .task_id, .owner,
            .from_user, .sha1 and .file_name.
        command: argv fragment prepended to the sandbox.py invocation;
            when command[0] == "main" a notebook-copier greenlet is
            also started.
        Returns a status string "exit <code> <warnings...>".
        """

        # Ensure that we can write to /mnt (this is a band-aid).
        with Interruptable("Checking for /mnt", self):
            if not os.path.ismount("/mnt"):
                subprocess.check_call(["/usr/bin/sudo", "/bin/mount", "/mnt"])

            subprocess.check_call(
                ["/usr/bin/sudo", "/bin/chown", "lsda", "/mnt"])
            subprocess.check_call(
                ["/usr/bin/sudo", "/bin/chmod", "0777", "/mnt"])

        # Create a working directory for this project.
        code_directory = tempfile.mkdtemp(dir="/mnt")
        os.chmod(code_directory, 0o755)

        # Collect some per-task statistics.
        # NOTE(review): presumably kazoo-style ZooKeeper counters keyed by
        # task owner — confirm against the zookeeper client in use.
        quota_used = self.zookeeper.Counter(
            '/quota_used/compute_time/{0}'.format(task.owner),
            default=0.0)
        quota_limit = self.zookeeper.Counter(
            '/quota_limit/compute_time/{0}'.format(task.owner),
            default=0.0)

        # Retrieve the zip archive from S3 for this commit.
        with tempfile.TemporaryFile() as fp:

            # Connect to S3.
            bucket = boto.connect_s3().get_bucket("ml-checkpoints")

            # Download the given git tree.
            # NOTE(review): the error message says "in 30s" but the loop
            # has no sleep between its 30 get_key attempts — confirm
            # whether a delay was intended here.
            for i in xrange(30):
                key = bucket.get_key("submissions/{}/{}.zip".format(
                    task.from_user, task.sha1))
                if key is not None:
                    break
            else:
                raise ValueError("Unable to find submission ZIP in 30s.")

            key.get_contents_to_file(fp)

            # Extract the Zip archive to our working directory.
            zipfile.ZipFile(fp).extractall(code_directory)

        try:
            # Trigger main IPython job.
            main_job = subprocess.Popen(
                ["/usr/bin/env", "sudo", "/worker/sandbox.py"] + command +
                [task.task_id, task.owner, task.file_name],

                cwd=code_directory,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT
            )

            # Only "main" runs get their notebook mirrored to S3.
            if command[0] == "main":
                copy_notebook_to_s3 = gevent.spawn(
                   self._notebook_copier,
                   code_directory,
                   task
                )
            else:
                copy_notebook_to_s3 = None

            stderr_copier = gevent.spawn(self._stderr_copier, main_job, task)

            @gevent.spawn
            def drain_quarters():
                """
                Count total per-user execution time.
                """

                # Continue taking time in one-minute increments.
                while quota_used.value < quota_limit.value:
                    quota_used.__add__(60.0)
                    gevent.sleep(60)

                # Kill the job when we're out.
                self.apply_warning("OutOfQuota")

                logging.error('Job killed -- out of time: used {} of {}'.format(
                    quota_used.value, quota_limit.value))
                main_job.kill()
                stderr_copier.kill()

            # Actually wait for completion.
            stderr_copier.join()
            status = main_job.wait()

            return "exit {} {}".format(status,
              " ".join(self._compute_warnings()))

        finally:
            # Clean up main job.
            try:
                if main_job:
                    main_job.kill()
            except OSError:
                pass

            # Clean up helpers.
            if copy_notebook_to_s3:
                copy_notebook_to_s3.kill()

            if drain_quarters:
                drain_quarters.kill()

            if stderr_copier:
                stderr_copier.kill()
Exemple #37
0
 def set_remote_url(path, remote_name, remote_url):
     """Point the git remote *remote_name* at *remote_url* in the repo at *path*."""
     config_key = 'remote.%s.url' % remote_name
     subprocess.check_call(['git', 'config', config_key, remote_url], cwd=path)
Exemple #38
0
def apt_update():
    """Refresh the apt package lists (non-interactive sudo); raises on failure."""
    cmd = ["sudo", "-n", "apt-get", "update"]
    subprocess.check_call(cmd)
Exemple #39
0
import sys
import json
import time
import pytest
from errno import EEXIST
from shutil import rmtree
from tempfile import mkdtemp
from gevent import socket
from httplib import HTTPConnection
from urllib2 import build_opener, AbstractHTTPHandler
from gevent.subprocess import Popen, check_call

# Repository root: three directory levels above this test file.
# NOTE(review): `os` is used here but its import is not in the visible
# import block — presumably imported above; confirm.
SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
# Source directory of the copper-node command.
SRC_COPPER_NODE = os.path.join(SRC_ROOT, 'cmd/copper-node')

# Build/install copper-node once at import time so the fixtures can run it.
check_call(['go', 'install'], shell=False, cwd=SRC_COPPER_NODE)

@pytest.yield_fixture
def workdir():
    """Yield a fresh temporary directory and remove it after the test."""
    tmp = mkdtemp()
    try:
        yield tmp
    finally:
        # ignore_errors=True: best-effort cleanup.
        rmtree(tmp, True)

@pytest.yield_fixture
def copper_node(workdir):
    # Per-fixture file locations inside the temporary workdir.
    # NOTE(review): this fixture body appears truncated by extraction —
    # it computes paths but never starts the node or yields; the rest of
    # the definition is presumably missing from this view.
    logpath = os.path.join(workdir, 'copper.log')
    unixpath = os.path.join(workdir, 'copper.sock')
    httppath = os.path.join(workdir, 'copper.http')
    confpath = os.path.join(workdir, 'copper.conf')
 def _setup(self):
     """ We need to scrub the directory before we run """
     # NOTE(review): glob('*') skips dotfiles, so hidden files survive.
     leftovers = glob(self._data_dir + '/*')
     subprocess.check_call(['rm', '-f'] + leftovers)