Example #1
def get_bayesdb(venv_dir, versions, options):
    for package in ("crosscat", "bayeslite", "bdbcontrib"):
        pdir = os.path.join(venv_dir, package)
        need_repo = (options.run_tests or not options.from_pypi or
                     not re.search(r'^tags', versions[package]))
        if need_repo:
            check_git()
            if os.path.exists(pdir):
                run("cd -- %s && git checkout master && git pull" % (pdir,),
                    stdout=options.stdout)
            else:
                run("git clone https://github.com/probcomp/%s %s" %
                    (package, shellquote(pdir)),
                    stdout=options.stdout)
            versions['have_repo_for_'+package] = True
        if need_repo and versions[package] != "HEAD":
            venv_run(venv_dir,
                     "cd -- %s && git checkout %s" % (
                         pdir, shellquote(versions[package])),
                     stdout=options.stdout)
        if options.from_pypi and re.search(r'^tags', versions[package]):
            pypi_version = re.sub(r'.*v', '', versions[package])
            install_package(venv_dir, package+"=="+pypi_version, options)
        elif need_repo and options.install_bayesdb:
            venv_run(venv_dir,
                     "cd -- %s && pip install ." % (pdir,),
                     stdout=options.stdout)
        else:
            pass # Not requesting installation is fine.
    return versions
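get_bayesdb switches between a PyPI install and a git checkout depending on whether the pinned version looks like a tag (e.g. "tags/v0.1.6"), deriving the PyPI version by stripping everything up to and including the "v". A minimal self-contained sketch of just that selection logic, using hypothetical version pins:

import re

# Hypothetical version pins, mirroring the tag-vs-HEAD logic above.
versions = {"bayeslite": "tags/v0.1.6", "crosscat": "HEAD"}

for package, version in sorted(versions.items()):
    if re.search(r'^tags', version):
        # "tags/v0.1.6" -> "0.1.6", the form pip expects for package==version.
        pypi_version = re.sub(r'.*v', '', version)
        print("%s would be installed from PyPI as %s==%s"
              % (package, package, pypi_version))
    else:
        print("%s would be installed from its git checkout" % (package,))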
Example #2
def ADBKill(serial, process, kill_app=False):
    """ Kill a process running on an Android device.

  serial: string indicating the serial number of the target device
  process: string indicating the name of the process to kill
  kill_app: bool indicating whether the process is an Android app, as opposed
      to a normal executable process.
  """
    if kill_app:
        ADBShell(serial, ['am', 'kill', process])
    else:
        try:
            stdout = shell_utils.run('%s -s %s shell ps | grep %s' %
                                     (PATH_TO_ADB, serial, process),
                                     shell=True)
        except Exception:
            return
        for line in stdout.split('\n'):
            if line != '':
                split = shlex.split(line)
                if len(split) < 2:
                    continue
                pid = split[1]
                ADBShell(serial, ['kill', pid])
        # Raise an exception if any Skia processes are still running.
        try:
            stdout = shell_utils.run('%s -s %s shell ps | grep %s' %
                                     (PATH_TO_ADB, serial, process),
                                     shell=True)
        except Exception:
            return
        if stdout:
            raise Exception(
                'There are still some skia processes running:\n%s\n'
                'Maybe the device should be rebooted?' % stdout)
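ADBKill finds PIDs by running `adb shell ps`, grepping for the process name, and taking the second whitespace-separated field of each matching line. A self-contained sketch of that parsing step, using a hypothetical `ps` transcript instead of a live device:

import shlex

# Hypothetical `adb shell ps | grep skia` output; the PID is the second column.
stdout = ('u0_a77    4242  180  512344 35000 ffffffff 00000000 S com.skia.demo\n'
          'u0_a77    4243  180  512344 35000 ffffffff 00000000 S com.skia.demo:gpu\n')

pids = []
for line in stdout.split('\n'):
    if line != '':
        split = shlex.split(line)
        if len(split) < 2:
            continue
        pids.append(split[1])

print(pids)  # ['4242', '4243'] -- each one would then get an `adb shell kill <pid>`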
Example #3
def GetSCP(local_path,
           remote_path,
           username,
           host,
           port,
           recurse=False,
           options=None):
    """ Retrieve a file from the given host over SCP. Assumes that public key
  authentication is set up between the client and server.

  local_path: destination path for the file on the client
  remote_path: path to the file to retrieve on the server
  username: ssh login name
  host: hostname or ip address of the server
  port: port on the server to use
  recurse: boolean indicating whether to transmit everything in a folder
  options: list of extra options to pass to scp
  """
    # TODO(borenet): This will hang for a while if the host does not recognize
    # the client.
    cmd = ['scp']
    if options:
        cmd.extend(options)
    if recurse:
        cmd.append('-r')
    cmd.extend(
        ['-P', port,
         '%s@%s:%s' % (username, host, remote_path), local_path])
    shell_utils.run(cmd)
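GetSCP only assembles an argument list and hands it to shell_utils.run, so the port is used verbatim and callers are expected to pass it as a string. A sketch of the command it would build for hypothetical arguments, without invoking scp:

# Hypothetical arguments; this mirrors the list building above without running scp.
username, host, port = 'build', 'example.com', '22'
remote_path, local_path = '/home/build/results.txt', '.'
options, recurse = ['-o', 'ConnectTimeout=10'], True

cmd = ['scp']
if options:
    cmd.extend(options)
if recurse:
    cmd.append('-r')
cmd.extend(['-P', port, '%s@%s:%s' % (username, host, remote_path), local_path])
print(cmd)
# ['scp', '-o', 'ConnectTimeout=10', '-r', '-P', '22',
#  'build@example.com:/home/build/results.txt', '.']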
Example #4
def make_starter_script(macos_path):
  starter_script = '''#!/bin/bash

set -e
wd=`dirname -- "$0"`
cd -- "$wd"
wd=`pwd -P`
NAME=`basename -- "$(dirname -- "$(dirname -- "$wd")")" .app`

activate="$wd/venv/bin/activate"
ldpath="$wd/lib"

# Clear any user's PYTHONPATH setting, which may interfere with what
# we need.
unset PYTHONPATH
export PYTHONPATH="$wd/venv/lib/python2.7/site-packages"

source "$activate"
export DYLD_LIBRARY_PATH="$ldpath"
export MPLBACKEND=pdf

# Download and run the examples in someplace writeable:
"$wd/venv/bin/bayesdb-demo" --destination "$HOME/Documents"
'''
  startsh_path = os.path.join(macos_path, "start.sh")
  with open(startsh_path, "w") as startsh:
    startsh.write(starter_script)
  run("chmod +x %s" % (shellquote(startsh_path),))
Example #5
def test_dmg(name):
  needed = ['osx/bayeslite/*.scpt',
            'src/shell_utils.py',
            'osx/bayeslite/test_dmg.py',
            os.path.join(SCRATCH, name)]
  run("scp %s test@%s:Desktop/" %
      (" ".join(needed), HOST))
  test_run("python Desktop/test_dmg.py %s" % name)
Example #6
def build_dmg():
  run("scp build_dmg.py shell_utils.py build@%s:" % (HOST,))
  build_run('PATH="%s:$PATH" python build_dmg.py' % (HPATH,))
  run("scp build@%s:Desktop/Bayeslite*.dmg %s" % (HOST, SCRATCH))
  name = build_outputof("cd Desktop && ls -t Bayeslite*.dmg | tail -1").strip()
  echo("NAME:", name)
  build_run("/bin/rm -f Desktop/Bayeslite*.dmg")
  return name
Example #7
def wrap_as_macos_dir(build_dir, name):
  """Return the dmg root dir inside build_dir, and within that the MacOs dir."""
  dist_dir = os.path.join(build_dir, "dmgroot")
  macos_path = os.path.join(dist_dir, name + ".app", "Contents", "MacOS")
  os.makedirs(macos_path)
  run("/bin/ln -s /Applications %s" % (shellquote(dist_dir),))
  make_starter_script(macos_path)
  make_launcher_script(macos_path, name)
  return dist_dir, macos_path
Example #8
def IsSKPValid(path_to_skp, path_to_skpinfo):
    """Calls the skpinfo binary to see if the specified SKP is valid."""
    skp_info_cmd = [path_to_skpinfo, '-i', path_to_skp]
    try:
        shell_utils.run(skp_info_cmd)
        return True
    except shell_utils.CommandFailedException:
        # Mark SKP as invalid if the skpinfo command gives a non 0 ret code.
        return False
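The validity test is simply "run skpinfo and treat any non-zero exit as invalid". A standalone sketch of the same pattern using subprocess instead of shell_utils, assuming a hypothetical skpinfo binary on PATH:

import subprocess

def is_skp_valid(path_to_skp, path_to_skpinfo='skpinfo'):
    """Return True if `skpinfo -i <skp>` exits 0, False otherwise."""
    try:
        subprocess.check_call([path_to_skpinfo, '-i', path_to_skp])
        return True
    except (subprocess.CalledProcessError, OSError):
        # A non-zero exit code (or a missing binary) marks the SKP as invalid.
        return False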
Example #9
def make_dmg_on_desktop(dist_dir, name):
  dmg_path = os.path.join(os.environ['HOME'], 'Desktop', '%s.dmg' % (name,))
  naming_attempt = 0
  while os.path.exists(dmg_path):
    naming_attempt += 1
    dmg_path = os.path.join(os.environ['HOME'], 'Desktop',
                            "%s (%d).dmg" % (name, naming_attempt))
  run("hdiutil create -volname Bayeslite -format UDBZ -size 1g -srcfolder %s %s"
      % (shellquote(dist_dir), shellquote(dmg_path)))
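The while loop probes for a free file name by appending " (N)" until no file exists at the candidate path. A self-contained sketch of that naming strategy, run against a temporary directory rather than the Desktop:

import os
import tempfile

def unused_dmg_path(directory, name):
    """Return '<name>.dmg', or '<name> (N).dmg' for the first N that is free."""
    candidate = os.path.join(directory, '%s.dmg' % (name,))
    attempt = 0
    while os.path.exists(candidate):
        attempt += 1
        candidate = os.path.join(directory, '%s (%d).dmg' % (name, attempt))
    return candidate

d = tempfile.mkdtemp()
open(os.path.join(d, 'Bayeslite.dmg'), 'w').close()
print(unused_dmg_path(d, 'Bayeslite'))  # ends in 'Bayeslite (1).dmg'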
Example #10
def IsSKPValid(path_to_skp, path_to_skpinfo):
  """Calls the skpinfo binary to see if the specified SKP is valid."""
  skp_info_cmd = [path_to_skpinfo, '-i', path_to_skp]
  try:
    shell_utils.run(skp_info_cmd)
    return True
  except shell_utils.CommandFailedException:
    # Mark SKP as invalid if the skpinfo command gives a non 0 ret code.
    return False
Example #11
def Sync(revisions=None, force=False, delete_unversioned_trees=False,
         verbose=False, jobs=None, no_hooks=False, extra_args=None):
  """ Update the local checkout using gclient.

  Args:
      revisions: optional list of (branch, revision) tuples indicating which
          projects to sync to which revisions.
      force: whether to run with --force.
      delete_unversioned_trees: whether to run with --delete-unversioned-trees.
      verbose: whether to run with --verbose.
      jobs: optional argument for the --jobs flag.
      no_hooks: whether to run with --nohooks.
      extra_args: optional list; any additional arguments.
  """
  for branch, _ in (revisions or []):
    # Do whatever it takes to get up-to-date with origin/master.
    if os.path.exists(branch):
      with misc.ChDir(branch):
        # First, fix the git identity if needed.
        maybe_fix_identity()

        # If there are local changes, "git checkout" will fail.
        shell_utils.run([GIT, 'reset', '--hard', 'HEAD'])
        # In case HEAD is detached...
        shell_utils.run([GIT, 'checkout', 'master'])
        # Always fetch, in case we're unmanaged.
        shell_utils.run_retry([GIT, 'fetch'], attempts=5)
        # This updates us to origin/master even if master has diverged.
        shell_utils.run([GIT, 'reset', '--hard', 'origin/master'])

  cmd = ['sync', '--no-nag-max']
  if verbose:
    cmd.append('--verbose')
  if force:
    cmd.append('--force')
  if delete_unversioned_trees:
    cmd.append('--delete_unversioned_trees')
  if jobs:
    cmd.append('-j%d' % jobs)
  if no_hooks:
    cmd.append('--nohooks')
  for branch, revision in (revisions or []):
    if revision:
      cmd.extend(['--revision', '%s@%s' % (branch, revision)])
  if extra_args:
    cmd.extend(extra_args)
  output = _RunCmd(cmd)

  # "gclient sync" just downloads all of the commits. In order to actually sync
  # to the desired commit, we have to "git reset" to that commit.
  for branch, revision in (revisions or []):
    with misc.ChDir(branch):
      if revision:
        shell_utils.run([GIT, 'reset', '--hard', revision])
      else:
        shell_utils.run([GIT, 'reset', '--hard', 'origin/master'])
  return output
Example #12
def GotADB(adb):
  """ Returns True iff ADB exists at the given location.

  adb: string; possible path to the ADB executable.
  """
  try:
    shell_utils.run([adb, 'version'], echo=False)
    return True
  except Exception:
    return False
Example #13
 def __exit__(self, exc_type, _value, _traceback):
     if self._upload:
         # Only upload if no error occurred.
         try:
             if exc_type is None:
                 self.commit_and_upload(use_commit_queue=self._commit_queue)
         finally:
             shell_utils.run([GIT, 'checkout', 'master'])
             if self._delete_when_finished:
                 shell_utils.run([GIT, 'branch', '-D', self._branch_name])
Example #14
 def __exit__(self, exc_type, _value, _traceback):
   if self._upload:
     # Only upload if no error occurred.
     try:
       if exc_type is None:
         self.commit_and_upload(use_commit_queue=self._commit_queue)
     finally:
       shell_utils.run([GIT, 'checkout', 'master'])
       if self._delete_when_finished:
         shell_utils.run([GIT, 'branch', '-D', self._branch_name])
Example #15
def GotADB(adb):
    """ Returns True iff ADB exists at the given location.

  adb: string; possible path to the ADB executable.
  """
    try:
        shell_utils.run([adb, 'version'], echo=False)
        return True
    except Exception:
        return False
Example #16
def make_venv_dir(venv_dir, options):
    parent = os.path.dirname(venv_dir)
    if parent and not os.path.exists(parent):
        os.mkdir(parent, 0755)
    cmd = "virtualenv"
    if options.python:
        assert os.path.exists(options.python)
        cmd += " --python=%s" % (shellquote(options.python))
    cmd += " " + shellquote(venv_dir)
    run(cmd, stdout=options.stdout)
Example #17
def build_dmg():
  needed = ['osx/bayeslite/build_dmg.py',
            'src/shell_utils.py',
            'src/build_venv.py']
  run("scp %s build@%s:" % (" ".join(needed), HOST))
  build_run('PATH="%s:$PATH" python build_dmg.py -v HEAD' % (HPATH,))
  run("scp build@%s:Desktop/Bayeslite*.dmg %s" % (HOST, SCRATCH))
  name = build_outputof("cd Desktop && ls -t Bayeslite*.dmg | tail -1").strip()
  echo("NAME:", name)
  build_run("/bin/rm -f Desktop/Bayeslite*.dmg")
  return name
Example #18
 def __enter__(self):
   shell_utils.run([GIT, 'reset', '--hard', 'HEAD'])
   shell_utils.run([GIT, 'checkout', 'master'])
   if self._branch_name in shell_utils.run([GIT, 'branch']):
     shell_utils.run([GIT, 'branch', '-D', self._branch_name])
   shell_utils.run([GIT, 'checkout', '-b', self._branch_name,
                    '-t', 'origin/master'])
   return self
Example #19
 def __enter__(self):
     shell_utils.run([GIT, 'reset', '--hard', 'HEAD'])
     shell_utils.run([GIT, 'checkout', 'master'])
     if self._branch_name in shell_utils.run([GIT, 'branch']):
         shell_utils.run([GIT, 'branch', '-D', self._branch_name])
     shell_utils.run(
         [GIT, 'checkout', '-b', self._branch_name, '-t', 'origin/master'])
     return self
Example #20
  def test_git_executable(git):
    """Test the git executable.

    Args:
        git: git executable path.
    Returns:
        True if test is successful.
    """
    try:
      shell_utils.run([git, '--version'], echo=False)
      return True
    except (OSError,):
      return False
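The same probe-by-running idea, sketched standalone with subprocess; the only assumption is that a candidate git path is supplied:

import os
import subprocess

def git_works(git='git'):
    """Return True if `<git> --version` runs successfully, False otherwise."""
    with open(os.devnull, 'w') as devnull:
        try:
            subprocess.check_call([git, '--version'], stdout=devnull, stderr=devnull)
            return True
        except (OSError, subprocess.CalledProcessError):
            return False

print(git_works())                # True if git is on PATH
print(git_works('/no/such/git'))  # False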
Example #21
def basic_sanity_check(venv_dir):
  test_dir = tempfile.mkdtemp('bayeslite-test')
  try:
    getoptfile = 'from bdbcontrib.population import OPTFILE; print OPTFILE;'
    optfilename = venv_outputof(venv_dir, "python -c '%s'" % (getoptfile,))
    with open(os.path.join(test_dir, optfilename), "w") as optfile:
      optfile.write("False\n")
    envs = "MPLBACKEND=pdf"
    venv_run(venv_dir,
             "cd -- %s && %s bayesdb-demo --runipy" %
             (shellquote(test_dir), envs))
  finally:
    run("rm -rf -- %s" % (shellquote(test_dir),))
Example #22
def make_venv_truly_relocatable(venv_dir):
  relocable = '''VIRTUAL_ENV=$(dirname -- "$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" && pwd )")\n'''
  new_activate = tempfile.NamedTemporaryFile(delete=False)
  old_activate_path = os.path.join(venv_dir, "bin", "activate")
  with open(old_activate_path, "r") as old_activate:
    for line in old_activate:
      if line[:len("VIRTUAL_ENV=")] == "VIRTUAL_ENV=":
        new_activate.write(relocable)
      else:
        new_activate.write(line)
  new_activate.close()
  run("mv %s %s" %
      (shellquote(new_activate.name), shellquote(old_activate_path)))
Example #23
    def test_git_executable(git):
        """Test the git executable.

    Args:
        git: git executable path.
    Returns:
        True if test is successful.
    """
        try:
            shell_utils.run([git, '--version'], echo=False)
            return True
        except (OSError, ):
            return False
Example #24
def SetCPUScalingMode(serial, mode):
    """ Set the CPU scaling governor for the device with the given serial number
  to the given mode.

  serial: string indicating the serial number of the device whose scaling mode
          is to be modified
  mode:   string indicating the desired CPU scaling mode.  Acceptable values
          are listed in CPU_SCALING_MODES.
  """
    if mode not in CPU_SCALING_MODES:
        raise ValueError('mode must be one of: %s' % CPU_SCALING_MODES)
    cpu_dirs = shell_utils.run('%s -s %s shell ls /sys/devices/system/cpu' %
                               (PATH_TO_ADB, serial),
                               echo=False,
                               shell=True)
    cpu_dirs_list = cpu_dirs.split('\n')
    regex = re.compile('cpu\d')
    for cpu_dir_from_list in cpu_dirs_list:
        cpu_dir = cpu_dir_from_list.rstrip()
        if regex.match(cpu_dir):
            path = '/sys/devices/system/cpu/%s/cpufreq/scaling_governor' % cpu_dir
            path_found = shell_utils.run('%s -s %s shell ls %s' %
                                         (PATH_TO_ADB, serial, path),
                                         echo=False,
                                         shell=True).rstrip()
            if path_found == path:
                # Unfortunately, we can't directly change the scaling_governor file over
                # ADB. Instead, we write a script to do so, push it to the device, and
                # run it.
                old_mode = shell_utils.run('%s -s %s shell cat %s' %
                                           (PATH_TO_ADB, serial, path),
                                           echo=False,
                                           shell=True).rstrip()
                print 'Current scaling mode for %s is: %s' % (cpu_dir,
                                                              old_mode)
                filename = 'skia_cpuscale.sh'
                with open(filename, 'w') as script_file:
                    script_file.write('echo %s > %s\n' % (mode, path))
                os.chmod(filename, 0777)
                RunADB(serial, ['push', filename, '/system/bin'], echo=False)
                RunADB(serial, ['shell', filename], echo=True)
                RunADB(
                    serial,
                    ['shell', 'rm', '/system/bin/%s' % filename],
                    echo=False)
                os.remove(filename)
                new_mode = shell_utils.run('%s -s %s shell cat %s' %
                                           (PATH_TO_ADB, serial, path),
                                           echo=False,
                                           shell=True).rstrip()
                print 'New scaling mode for %s is: %s' % (cpu_dir, new_mode)
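Before touching any scaling_governor file, the function filters the `ls /sys/devices/system/cpu` listing for entries that look like cpu0, cpu1, and so on. A sketch of that filtering step on a hypothetical directory listing:

import re

# Hypothetical `adb shell ls /sys/devices/system/cpu` output.
cpu_dirs = 'cpu0\ncpu1\ncpu2\ncpu3\ncpufreq\ncpuidle\nkernel_max\n'

regex = re.compile(r'cpu\d')
cores = [d.rstrip() for d in cpu_dirs.split('\n') if regex.match(d.rstrip())]
print(cores)  # ['cpu0', 'cpu1', 'cpu2', 'cpu3']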
Example #25
def basic_sanity_check(venv_dir):
  test_dir = tempfile.mkdtemp('bayeslite-test')
  try:
    venv_run(venv_dir,
             "cd -- %s && bayesdb-demo fetch" % (shellquote(test_dir),))
    venv_run(venv_dir,
             "cd -- %s && "
             "MPLBACKEND=pdf PYTHONPATH=%s runipy %s" %
             (shellquote(test_dir),
              shellquote(os.path.join(venv_dir,
                                      "lib/python2.7/site-packages")),
              "Bayeslite-v*/satellites/Satellites.ipynb"))
  finally:
    run("rm -rf -- %s" % (shellquote(test_dir),))
Example #26
def GetGitRepoPOSIXTimestamp():
  """Returns the POSIX timestamp for the current Skia commit as in int."""
  git_show_command = [GIT, 'show', '--format=%at', '-s']
  raw_timestamp = shell_utils.run(
      git_show_command, log_in_real_time=False, echo=False,
      print_timestamps=False)
  return int(raw_timestamp)
Example #27
def IsMerge(commit):
    """Return True if the commit is a merge, False otherwise."""
    rev_parse = shell_utils.run(
        [GIT, 'rev-parse', commit, '--max-count=1', '--no-merges'])
    last_non_merge = rev_parse.split('\n')[0]
    # Get full hash since that is what was returned by rev-parse.
    return FullHash(commit) != last_non_merge
Example #28
def IsMerge(commit):
  """Return True if the commit is a merge, False otherwise."""
  rev_parse = shell_utils.run([GIT, 'rev-parse', commit, '--max-count=1',
                               '--no-merges'])
  last_non_merge = rev_parse.split('\n')[0]
  # Get full hash since that is what was returned by rev-parse.
  return FullHash(commit) != last_non_merge
Example #29
def GetCheckedOutHash():
  """ Determine what commit we actually got. If there are local modifications,
  raise an exception. """
  checkout_root, config_dict = _GetLocalConfig()

  # Get the checked-out commit hash for the first gclient solution.
  with misc.ChDir(os.path.join(checkout_root, config_dict[0]['name'])):
    # First, print out the remote from which we synced, just for debugging.
    cmd = [GIT, 'remote', '-v']
    try:
      shell_utils.run(cmd)
    except shell_utils.CommandFailedException as e:
      print e

    # "git rev-parse HEAD" returns the commit hash for HEAD.
    return shell_utils.run([GIT, 'rev-parse', 'HEAD'],
                           log_in_real_time=False).rstrip('\n')
Example #30
def GetGitRepoPOSIXTimestamp():
    """Returns the POSIX timestamp for the current Skia commit as in int."""
    git_show_command = [GIT, 'show', '--format=%at', '-s']
    raw_timestamp = shell_utils.run(git_show_command,
                                    log_in_real_time=False,
                                    echo=False,
                                    print_timestamps=False)
    return int(raw_timestamp)
Example #31
def GetCheckedOutHash():
    """ Determine what commit we actually got. If there are local modifications,
  raise an exception. """
    checkout_root, config_dict = _GetLocalConfig()

    # Get the checked-out commit hash for the first gclient solution.
    with misc.ChDir(os.path.join(checkout_root, config_dict[0]['name'])):
        # First, print out the remote from which we synced, just for debugging.
        cmd = [GIT, 'remote', '-v']
        try:
            shell_utils.run(cmd)
        except shell_utils.CommandFailedException as e:
            print e

        # "git rev-parse HEAD" returns the commit hash for HEAD.
        return shell_utils.run([GIT, 'rev-parse', 'HEAD'],
                               log_in_real_time=False).rstrip('\n')
Example #32
 def commit_and_upload(self, use_commit_queue=False):
   """Commit all changes and upload a CL, returning the issue URL."""
   try:
     shell_utils.run([GIT, 'commit', '-a', '-m', self._commit_msg])
   except shell_utils.CommandFailedException as e:
     if not 'nothing to commit' in e.output:
       raise
   upload_cmd = [GIT, 'cl', 'upload', '-f', '--bypass-hooks',
                 '--bypass-watchlists']
   self._patch_set += 1
   if self._patch_set > 1:
     upload_cmd.extend(['-t', 'Patch set %d' % self._patch_set])
   if use_commit_queue:
     upload_cmd.append('--use-commit-queue')
   shell_utils.run(upload_cmd)
   output = shell_utils.run([GIT, 'cl', 'issue']).rstrip()
   return re.match('^Issue number: (?P<issue>\d+) \((?P<issue_url>.+)\)$',
                   output).group('issue_url')
Example #33
def maybe_fix_identity(username='******', email='*****@*****.**'):
  """If either of user.name or user.email is not defined, define it."""
  try:
    shell_utils.run([GIT, 'config', '--get', 'user.name'])
  except shell_utils.CommandFailedException:
    shell_utils.run([GIT, 'config', 'user.name', '"%s"' % username])

  try:
    shell_utils.run([GIT, 'config', '--get', 'user.email'])
  except shell_utils.CommandFailedException:
    shell_utils.run([GIT, 'config', 'user.email', '"%s"' % email])
Example #34
def MultiPutSCP(local_paths, remote_path, username, host, port, options=None):
  """ Send files to the given host over SCP. Assumes that public key
  authentication is set up between the client and server.

  local_paths: list of paths of files and directories to send on the client
  remote_path: destination directory path on the server
  username: ssh login name
  host: hostname or ip address of the server
  port: port on the server to use
  options: list of extra options to pass to scp
  """
  # TODO: This will hang for a while if the host does not recognize the client
  cmd = ['scp']
  if options:
    cmd.extend(options)
  cmd.extend(['-r', '-P', port])
  cmd.extend(local_paths)
  cmd.append('%s@%s:%s' % (username, host, remote_path))
  shell_utils.run(cmd)
Example #35
def maybe_fix_identity(username='******',
                       email='*****@*****.**'):
    """If either of user.name or user.email is not defined, define it."""
    try:
        shell_utils.run([GIT, 'config', '--get', 'user.name'])
    except shell_utils.CommandFailedException:
        shell_utils.run([GIT, 'config', 'user.name', '"%s"' % username])

    try:
        shell_utils.run([GIT, 'config', '--get', 'user.email'])
    except shell_utils.CommandFailedException:
        shell_utils.run([GIT, 'config', 'user.email', '"%s"' % email])
Example #36
 def commit_and_upload(self, use_commit_queue=False):
     """Commit all changes and upload a CL, returning the issue URL."""
     try:
         shell_utils.run([GIT, 'commit', '-a', '-m', self._commit_msg])
     except shell_utils.CommandFailedException as e:
         if not 'nothing to commit' in e.output:
             raise
     upload_cmd = [
         GIT, 'cl', 'upload', '-f', '--bypass-hooks', '--bypass-watchlists'
     ]
     self._patch_set += 1
     if self._patch_set > 1:
         upload_cmd.extend(['-t', 'Patch set %d' % self._patch_set])
     if use_commit_queue:
         upload_cmd.append('--use-commit-queue')
     shell_utils.run(upload_cmd)
     output = shell_utils.run([GIT, 'cl', 'issue']).rstrip()
     return re.match('^Issue number: (?P<issue>\d+) \((?P<issue_url>.+)\)$',
                     output).group('issue_url')
Example #37
def SetCPUScalingMode(serial, mode):
  """ Set the CPU scaling governor for the device with the given serial number
  to the given mode.

  serial: string indicating the serial number of the device whose scaling mode
          is to be modified
  mode:   string indicating the desired CPU scaling mode.  Acceptable values
          are listed in CPU_SCALING_MODES.
  """
  if mode not in CPU_SCALING_MODES:
    raise ValueError('mode must be one of: %s' % CPU_SCALING_MODES)
  cpu_dirs = shell_utils.run('%s -s %s shell ls /sys/devices/system/cpu' % (
      PATH_TO_ADB, serial), echo=False, shell=True)
  cpu_dirs_list = cpu_dirs.split('\n')
  regex = re.compile('cpu\d')
  for cpu_dir_from_list in cpu_dirs_list:
    cpu_dir = cpu_dir_from_list.rstrip()
    if regex.match(cpu_dir):
      path = '/sys/devices/system/cpu/%s/cpufreq/scaling_governor' % cpu_dir
      path_found = shell_utils.run('%s -s %s shell ls %s' % (
                                       PATH_TO_ADB, serial, path),
                                   echo=False, shell=True).rstrip()
      if path_found == path:
        # Unfortunately, we can't directly change the scaling_governor file over
        # ADB. Instead, we write a script to do so, push it to the device, and
        # run it.
        old_mode = shell_utils.run('%s -s %s shell cat %s' % (
                                       PATH_TO_ADB, serial, path),
                                   echo=False, shell=True).rstrip()
        print 'Current scaling mode for %s is: %s' % (cpu_dir, old_mode)
        filename = 'skia_cpuscale.sh'
        with open(filename, 'w') as script_file:
          script_file.write('echo %s > %s\n' % (mode, path))
        os.chmod(filename, 0777)
        RunADB(serial, ['push', filename, '/system/bin'], echo=False)
        RunADB(serial, ['shell', filename], echo=True)
        RunADB(serial, ['shell', 'rm', '/system/bin/%s' % filename], echo=False)
        os.remove(filename)
        new_mode = shell_utils.run('%s -s %s shell cat %s' % (
                                       PATH_TO_ADB, serial, path),
                                   echo=False, shell=True).rstrip()
        print 'New scaling mode for %s is: %s' % (cpu_dir, new_mode)
Example #38
def MultiPutSCP(local_paths, remote_path, username, host, port, options=None):
    """ Send files to the given host over SCP. Assumes that public key
  authentication is set up between the client and server.

  local_paths: list of paths of files and directories to send on the client
  remote_path: destination directory path on the server
  username: ssh login name
  host: hostname or ip address of the server
  port: port on the server to use
  options: list of extra options to pass to scp
  """
    # TODO(borenet): This will hang for a while if the host does not recognize
    # the client.
    cmd = ['scp']
    if options:
        cmd.extend(options)
    cmd.extend(['-r', '-P', port])
    cmd.extend(local_paths)
    cmd.append('%s@%s:%s' % (username, host, remote_path))
    shell_utils.run(cmd)
Example #39
def make_launcher_script(macos_path, name):
  launcher_script = '''#!/bin/bash

wd=`dirname -- "$0"`
cd -- "$wd"
wd=`pwd -P`

osascript -e '
    on run argv
        set wd to item 1 of argv
        set cmd to "/bin/bash -- " & quoted form of wd & "/start.sh"
        tell application "Terminal" to do script cmd
    end run
' -- "$wd"
'''

  launchsh_path = os.path.join(macos_path, name)
  with open(launchsh_path, "w") as launchsh:
    launchsh.write(launcher_script)
  run("chmod +x %s" % (shellquote(launchsh_path),))
Example #40
def GetGitNumber(commit_hash):
  """Returns the GIT number for the current Skia commit as in int."""
  try:
    git_show_command = [GIT, 'number']
    git_number = shell_utils.run(
        git_show_command, log_in_real_time=False, echo=False,
        print_timestamps=False)
    return int(git_number)
  except shell_utils.CommandFailedException:
    print 'GetGitNumber: Unable to get git number, returning -1'
    return -1
Example #41
def GetGitNumber(commit_hash):
    """Returns the GIT number for the current Skia commit as in int."""
    try:
        git_show_command = [GIT, 'number']
        git_number = shell_utils.run(git_show_command,
                                     log_in_real_time=False,
                                     echo=False,
                                     print_timestamps=False)
        return int(git_number)
    except shell_utils.CommandFailedException:
        print 'GetGitNumber: Unable to get git number, returning -1'
        return -1
Example #42
def RunSkia(serial, cmd, release, device):
  """ Run the given command through skia_launcher on a given device.

  serial: string indicating the serial number of the target device.
  cmd: list of strings; the command line to run.
  release: bool; whether or not to run the app in Release mode.
  device: string indicating the target device.
  """
  RunADB(serial, ['logcat', '-c'])
  try:
    os.environ['SKIA_ANDROID_VERBOSE_SETUP'] = '1'
    cmd_to_run = [os.path.join('platform_tools', 'android', 'bin',
                               'android_run_skia')]
    if release:
      cmd_to_run.extend(['--release'])
    cmd_to_run.extend(['-d', device])
    cmd_to_run.extend(['-s', serial])
    cmd_to_run.extend(cmd)
    shell_utils.run(cmd_to_run)
  finally:
    RunADB(serial, ['logcat', '-d', '-v', 'time'])
Example #43
def main():
  start_time = time.time()
  check_python()

  build_dir = tempfile.mkdtemp(prefix='BayesLite-app-')
  os.chdir(build_dir)
  echo("Building in", build_dir)
  echo("PATH is", os.environ["PATH"])

  venv_dir = os.path.join(build_dir, "venv")
  run('virtualenv %s' % (shellquote(venv_dir),))

  do_pre_installs(build_dir, venv_dir)
  do_main_installs(build_dir, venv_dir)
  do_post_installs(build_dir, venv_dir)
  fix_python_and_its_path(venv_dir)
  make_venv_truly_relocatable(venv_dir)

  name="Bayeslite-%s" % (composite_version(build_dir),)
  (dist_dir, macos_dir) = wrap_as_macos_dir(build_dir, name)
  run("mv -f %s %s" % (shellquote(venv_dir), shellquote(macos_dir)))
  venv_dir = os.path.join(macos_dir, "venv")

  basic_sanity_check(venv_dir)
  pause_to_modify(macos_dir)
  make_dmg_on_desktop(dist_dir, name)
  run("/bin/rm -fr %s" % (shellquote(build_dir),))
  echo("Done. %d seconds elapsed" % (time.time() - start_time,))
Example #44
def make_venv_truly_relocatable(venv_dir):
  relocable = '''VIRTUAL_ENV=$(dirname -- "$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" && pwd )")\n'''
  new_activate = tempfile.NamedTemporaryFile(delete=False)
  old_activate_path = os.path.join(venv_dir, "bin", "activate")
  with open(old_activate_path, "r") as old_activate:
    for line in old_activate:
      if line[:len("VIRTUAL_ENV=")] == "VIRTUAL_ENV=":
        new_activate.write(relocable)
        ppath = os.path.join('$VIRTUAL_ENV', "lib",
                             "python2.7", "site-packages")
        if 'PYTHONPATH' in os.environ:
            ppath += ":" + os.environ['PYTHONPATH']
        new_activate.write('PYTHONPATH=%s\n' % (ppath,))
        new_activate.write('export PYTHONPATH\n')
      else:
        new_activate.write(line)
  new_activate.close()
  for scriptname in os.listdir(os.path.join(venv_dir, "bin")):
    if scriptname in ('python', 'activate'):
      continue
    scriptfile = os.path.join(venv_dir, "bin", scriptname)
    modified_scriptfile = tempfile.NamedTemporaryFile(delete=False)
    fully_qualified_python = os.path.join(venv_dir, "bin", "python")
    with open(scriptfile, "r") as source:
      with open(modified_scriptfile.name, "w") as destin:
        for line in source.readlines():
          modded = re.sub(fully_qualified_python,
                          "/usr/bin/env python", line)
          destin.write(modded)
    run("mv -f %s %s" % (shellquote(modified_scriptfile.name),
                         shellquote(scriptfile)))
    run("chmod +x %s" % (shellquote(scriptfile),))
  run("mv %s %s" %
      (shellquote(new_activate.name), shellquote(old_activate_path)))
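The second half of the function rewrites every script in venv/bin so that the baked-in absolute interpreter path becomes "/usr/bin/env python", which is what makes the tree relocatable. A sketch of that substitution on a single hypothetical shebang line:

import re

# Hypothetical interpreter path baked into a console script inside the venv.
fully_qualified_python = '/tmp/BayesLite-app-x/venv/bin/python'
line = '#!%s\n' % (fully_qualified_python,)

modded = re.sub(fully_qualified_python, '/usr/bin/env python', line)
print(modded.rstrip())  # -> #!/usr/bin/env python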
Example #45
def check_app(app_location, output_path):
  print "check_app(%r, %r)" % (app_location, output_path)
  run("open %s" % (shellquote(app_location),))
  with open(OPTFILE, "w") as optfile:
    optfile.write("False\n")
  time.sleep(30)
  run("osascript -e 'tell application \"Safari\" to activate'")
  run("osascript %s/run-the-notebook.scpt" % (SCRIPTSDIR,))
  result = None
  count = None
  start_time = time.time()
  while not count:
    time.sleep(20)
    result = outputof(
      "osascript %s/grab-safari-tab-contents.scpt" % (SCRIPTSDIR,))
    count = check_result(app_location, result)
    elapsed = time.time() - start_time
    echo("%d seconds elapsed." % (elapsed,))
    assert elapsed < 1200
  echo("That took less than %d wall-clock seconds" % (elapsed,))
  with open(output_path, "w") as outfile:
    print "Writing result [%r] for [%r]." % (output_path, app_location)
    outfile.write(result)
  assert count and count > 10, "%s\n%s" % (app_location, result)
  return result
Example #46
def SSHAdd(key_file):
    """ Call ssh-add, and call ssh-agent if necessary.
  """
    assert os.path.isfile(key_file)
    try:
        shell_utils.run(['ssh-add', key_file], log_in_real_time=False)
        return
    except shell_utils.CommandFailedException:
        ssh_agent_output = shell_utils.run(['ssh-agent', '-s'],
                                           log_in_real_time=False)
        if not ssh_agent_output:
            raise Exception('ssh-agent did not print anything')
        ssh_auth_sock = search_within_string(
            ssh_agent_output, r'SSH_AUTH_SOCK=(?P<return>[^;]*);')
        ssh_agent_pid = search_within_string(
            ssh_agent_output, r'SSH_AGENT_PID=(?P<return>[^;]*);')
        if not (ssh_auth_sock and ssh_agent_pid):
            raise Exception('ssh-agent did not print meaningful data')
        os.environ['SSH_AUTH_SOCK'] = ssh_auth_sock
        os.environ['SSH_AGENT_PID'] = ssh_agent_pid
        atexit.register(os.kill, int(ssh_agent_pid), signal.SIGTERM)
        shell_utils.run(['ssh-add', key_file])
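When the first ssh-add fails, SSHAdd launches ssh-agent itself and scrapes SSH_AUTH_SOCK and SSH_AGENT_PID out of its shell-formatted output. A sketch of that parsing with a hypothetical `ssh-agent -s` transcript and a local stand-in for search_within_string:

import re

def search_within_string(input_string, pattern):
    """Stand-in for the helper used above: return the named 'return' group or None."""
    match = re.search(pattern, input_string)
    return match.group('return') if match else None

# Hypothetical `ssh-agent -s` output.
ssh_agent_output = ('SSH_AUTH_SOCK=/tmp/ssh-abc123/agent.4242; export SSH_AUTH_SOCK;\n'
                    'SSH_AGENT_PID=4243; export SSH_AGENT_PID;\n'
                    'echo Agent pid 4243;\n')

print(search_within_string(ssh_agent_output, r'SSH_AUTH_SOCK=(?P<return>[^;]*);'))
print(search_within_string(ssh_agent_output, r'SSH_AGENT_PID=(?P<return>[^;]*);'))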
Example #47
def RunSkia(serial, cmd, release, device):
    """ Run the given command through skia_launcher on a given device.

  serial: string indicating the serial number of the target device.
  cmd: list of strings; the command line to run.
  release: bool; whether or not to run the app in Release mode.
  device: string indicating the target device.
  """
    RunADB(serial, ['logcat', '-c'])
    try:
        os.environ['SKIA_ANDROID_VERBOSE_SETUP'] = '1'
        cmd_to_run = [
            os.path.join('platform_tools', 'android', 'bin',
                         'android_run_skia')
        ]
        if release:
            cmd_to_run.extend(['--release'])
        cmd_to_run.extend(['-d', device])
        cmd_to_run.extend(['-s', serial])
        cmd_to_run.extend(cmd)
        shell_utils.run(cmd_to_run)
    finally:
        RunADB(serial, ['logcat', '-d', '-v', 'time'])
Example #48
def RunSSHCmd(username, host, port, command, echo=True, options=None):
    """ Login to the given host and run the given command.

  username: ssh login name
  host: hostname or ip address of the server
  port: port on the server to use
  command: (string) command to run on the server
  options: list of extra options to pass to ssh
  """
    # TODO(borenet): This will hang for a while if the host does not recognize
    # the client.
    cmd = ['ssh']
    if options:
        cmd.extend(options)
    cmd.extend(['-p', port, '%s@%s' % (username, host), command])
    return shell_utils.run(cmd, echo=echo)
Example #49
def ADBShell(serial, cmd, echo=True):
    """ Runs 'cmd' in the ADB shell on an Android device and returns the exit
  code.

  serial: string indicating the serial number of the target device
  cmd: string; the command to issue on the device
  """
    # ADB doesn't exit with the exit code of the command we ran. It only exits
    # non-zero when ADB itself encountered a problem. Therefore, we have to use
    # the shell to print the exit code for the command and parse that from stdout.
    adb_cmd = '%s -s %s shell "%s; echo \$?"' % (PATH_TO_ADB, serial,
                                                 ' '.join(cmd))
    output = shell_utils.run(adb_cmd, shell=True, echo=echo)
    output_lines = output.splitlines()
    try:
        real_exitcode = int(output_lines[-1].rstrip())
    except ValueError:
        real_exitcode = -1
    if real_exitcode != 0:
        raise Exception('Command failed with code %s' % real_exitcode)
    return '\n'.join(output_lines[:-1])
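Because adb does not propagate the remote command's exit status, the command is wrapped with `; echo $?` and the last line of output is parsed as the exit code. A sketch of that parsing on a hypothetical transcript:

# Hypothetical output of: adb -s <serial> shell "ls /sdcard; echo $?"
output = 'DCIM\nDownload\n0\n'

output_lines = output.splitlines()
try:
    real_exitcode = int(output_lines[-1].rstrip())
except ValueError:
    real_exitcode = -1

print(real_exitcode)                  # 0 means the remote command succeeded
print('\n'.join(output_lines[:-1]))   # the command's own stdout, exit-code line stripped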
Example #50
def GetRemoteMasterHash(git_url):
  return shell_utils.run(['git', 'ls-remote', git_url, '--verify',
                          'refs/heads/master']).rstrip()
Example #51
def Revert():
    shell_utils.run([GIT, 'clean', '-f', '-d'])
    shell_utils.run([GIT, 'reset', '--hard', 'HEAD'])
Example #52
def FullHash(commit):
    """Return full hash of specified commit."""
    return shell_utils.run([GIT, 'rev-parse', '--verify', commit]).rstrip()
Example #53
def MergeAbort():
    """Abort in process merge."""
    shell_utils.run([GIT, 'merge', '--abort'])
Example #54
def ShortHash(commit):
    """Return short hash of the specified commit."""
    return shell_utils.run([GIT, 'show', commit, '--format=%h', '-s']).rstrip()
Example #55
def Fetch(remote=None):
    """Run "git fetch". """
    cmd = [GIT, 'fetch']
    if remote:
        cmd.append(remote)
    shell_utils.run(cmd)
Example #56
def GetCurrentBranch():
    return shell_utils.run([GIT, 'rev-parse', '--abbrev-ref', 'HEAD']).rstrip()
Example #57
def Sync(revisions=None,
         force=False,
         delete_unversioned_trees=False,
         verbose=False,
         jobs=None,
         no_hooks=False,
         extra_args=None):
    """ Update the local checkout using gclient.

  Args:
      revisions: optional list of (branch, revision) tuples indicating which
          projects to sync to which revisions.
      force: whether to run with --force.
      delete_unversioned_trees: whether to run with --delete-unversioned-trees.
      verbose: whether to run with --verbose.
      jobs: optional argument for the --jobs flag.
      no_hooks: whether to run with --nohooks.
      extra_args: optional list; any additional arguments.
  """
    for branch, _ in (revisions or []):
        # Do whatever it takes to get up-to-date with origin/master.
        if os.path.exists(branch):
            with misc.ChDir(branch):
                # First, fix the git identity if needed.
                maybe_fix_identity()

                # If there are local changes, "git checkout" will fail.
                shell_utils.run([GIT, 'reset', '--hard', 'HEAD'])
                # In case HEAD is detached...
                shell_utils.run([GIT, 'checkout', 'master'])
                # Always fetch, in case we're unmanaged.
                shell_utils.run_retry([GIT, 'fetch'], attempts=5)
                # This updates us to origin/master even if master has diverged.
                shell_utils.run([GIT, 'reset', '--hard', 'origin/master'])

    cmd = ['sync', '--no-nag-max']
    if verbose:
        cmd.append('--verbose')
    if force:
        cmd.append('--force')
    if delete_unversioned_trees:
        cmd.append('--delete_unversioned_trees')
    if jobs:
        cmd.append('-j%d' % jobs)
    if no_hooks:
        cmd.append('--nohooks')
    for branch, revision in (revisions or []):
        if revision:
            cmd.extend(['--revision', '%s@%s' % (branch, revision)])
    if extra_args:
        cmd.extend(extra_args)
    output = _RunCmd(cmd)

    # "gclient sync" just downloads all of the commits. In order to actually sync
    # to the desired commit, we have to "git reset" to that commit.
    for branch, revision in (revisions or []):
        with misc.ChDir(branch):
            if revision:
                shell_utils.run([GIT, 'reset', '--hard', revision])
            else:
                shell_utils.run([GIT, 'reset', '--hard', 'origin/master'])
    return output
Example #58
def AIsAncestorOfB(a, b):
    """Return true if a is an ancestor of b."""
    return shell_utils.run([GIT, 'merge-base', a, b]).rstrip() == FullHash(a)
Example #59
def Sync(skia_revision=SKIA_REV_MASTER, chrome_revision=CHROME_REV_LKGR,
         fetch_target=DEFAULT_FETCH_TARGET,
         gyp_defines=None, gyp_generators=None):
  """ Create and sync a checkout of Skia inside a checkout of Chrome. Returns
  a tuple containing the actually-obtained revision of Skia and the actually-
  obtained revision of Chrome.

  skia_revision: revision of Skia to sync. Should be a commit hash or one of
      (SKIA_REV_DEPS, SKIA_REV_MASTER).
  chrome_revision: revision of Chrome to sync. Should be a commit hash or one
      of (CHROME_REV_LKGR, CHROME_REV_MASTER).
  fetch_target: string; Calls the fetch tool in depot_tools with the specified
      argument. Default is DEFAULT_FETCH_TARGET.
  gyp_defines: optional string; GYP_DEFINES to be passed to Gyp.
  gyp_generators: optional string; which GYP_GENERATORS to use.
  """
  # Figure out what revision of Skia we should use.
  if skia_revision == SKIA_REV_MASTER:
    output = GetRemoteMasterHash(SKIA_GIT_URL)
    if output:
      skia_revision = shlex.split(output)[0]
    if not skia_revision:
      raise Exception('Could not determine current Skia revision!')
  skia_revision = str(skia_revision)

  # Use Chrome LKGR, since gclient_utils will force a sync to origin/master.
  if chrome_revision == CHROME_REV_LKGR:
    chrome_revision = urllib2.urlopen(CHROME_LKGR_URL).read()
  elif chrome_revision == CHROME_REV_MASTER:
    chrome_revision = shlex.split(
        GetRemoteMasterHash(CHROME_GIT_URL))[0]

  # Run "fetch chromium". The initial run is allowed to fail after it does some
  # work. At the least, we expect the .gclient file to be present when it
  # finishes.
  if not os.path.isfile(GCLIENT_FILE):
    try:
      shell_utils.run([FETCH, fetch_target, '--nosvn=True'])
    except shell_utils.CommandFailedException:
      pass
  if not os.path.isfile(GCLIENT_FILE):
    raise Exception('Could not fetch %s!' % fetch_target)

  # Run "gclient sync"
  revisions = [('src', chrome_revision)]
  if skia_revision != SKIA_REV_DEPS:
    revisions.append(('src/third_party/skia', skia_revision))

  try:
    # Hack: We have to set some GYP_DEFINES, or upstream scripts will complain.
    os.environ['GYP_DEFINES'] = os.environ.get('GYP_DEFINES') or ''
    gclient_utils.Sync(
        revisions=revisions,
        jobs=1,
        no_hooks=True,
        force=True)
  except shell_utils.CommandFailedException as e:
    # We frequently see sync failures because a lock file wasn't deleted. In
    # that case, delete the lock file and try again.
    pattern = r".*fatal: Unable to create '(\S+)': File exists\..*"
    match = re.search(pattern, e.output)
    if not match:
      raise e
    file_to_delete = match.groups()[0]
    try:
      print 'Attempting to remove %s' % file_to_delete
      os.remove(file_to_delete)
    except OSError:
      # If the file no longer exists, just try again.
      pass
    gclient_utils.Sync(
        revisions=revisions,
        jobs=1,
        no_hooks=True,
        force=True)

  # Find the actually-obtained Chrome revision.
  os.chdir('src')
  actual_chrome_rev = shell_utils.run([GIT, 'rev-parse', 'HEAD'],
                                      log_in_real_time=False).rstrip()


  # Find the actually-obtained Skia revision.
  with misc.ChDir(os.path.join('third_party', 'skia')):
    actual_skia_rev = shell_utils.run([GIT, 'rev-parse', 'HEAD'],
                                      log_in_real_time=False).rstrip()

  # Run gclient hooks
  gclient_utils.RunHooks(gyp_defines=gyp_defines, gyp_generators=gyp_generators)

  # Fix the submodules so that they don't show up in "git status"
  # This fails on Windows...
  if os.name != 'nt':
    submodule_cmd = ('\'git config -f '
                     '$toplevel/.git/config submodule.$name.ignore all\'')
    shell_utils.run(' '.join([GIT, 'submodule', 'foreach', submodule_cmd]),
                    shell=True)

  # Verify that we got the requested revisions of Chrome and Skia.
  if (skia_revision != actual_skia_rev[:len(skia_revision)] and
      skia_revision != SKIA_REV_DEPS):
    raise Exception('Requested Skia revision %s but got %s!' % (
        skia_revision, actual_skia_rev))
  if (chrome_revision and
      chrome_revision != actual_chrome_rev[:len(chrome_revision)]):
    raise Exception('Requested Chrome revision %s but got %s!' % (
        chrome_revision, actual_chrome_rev))

  return (actual_skia_rev, actual_chrome_rev)
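The retry branch recognizes the stale-lock failure by regex and deletes the offending file before re-running gclient_utils.Sync. A sketch of that matching step on a hypothetical error message:

import re

# Hypothetical failure output of the kind the retry branch above looks for.
output = ("Syncing projects...\n"
          "fatal: Unable to create '/b/src/.git/index.lock': File exists.\n"
          "Another git process seems to be running in this repository.")

pattern = r".*fatal: Unable to create '(\S+)': File exists\..*"
match = re.search(pattern, output)
if match:
    file_to_delete = match.groups()[0]
    print('Would remove %s and retry the sync' % (file_to_delete,))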
Example #60
def Add(addition):
    """Run 'git add <addition>'"""
    shell_utils.run([GIT, 'add', addition])