def download_package(destination, product, version, compiler):
  remove_existing_package(destination, product, version)

  label = get_release_label()
  file_name = "{0}-{1}-{2}-{3}.tar.gz".format(product, version, compiler, label)
  url_path="/{0}/{1}-{2}/{0}-{1}-{2}-{3}.tar.gz".format(product, version, compiler, label)
  download_path = HOST + url_path

  print "URL {0}".format(download_path)
  print "Downloading {0} to {1}".format(file_name, destination)
  # --no-clobber avoids downloading the file if a file with the name already exists
  sh.wget(download_path, directory_prefix=destination, no_clobber=True)
  print "Extracting {0}".format(file_name)
  sh.tar(z=True, x=True, f=os.path.join(destination, file_name), directory=destination)
  sh.rm(os.path.join(destination, file_name))

  if product == "kudu":
    # The Kudu tarball is actually a renamed parcel. Rename the contents to match the
    # naming convention.
    kudu_dirs = glob.glob("{0}/KUDU*{1}*".format(destination, version))
    if not kudu_dirs:
      raise Exception("Could not find contents of Kudu tarball")
    if len(kudu_dirs) > 1:
      raise Exception("Found too many Kudu folders: %s" % (kudu_dirs, ))
    new_dir = "{0}/{1}-{2}".format(destination, product, version)
    if os.path.exists(new_dir):
      shutil.rmtree(new_dir)
    os.rename(kudu_dirs[0], new_dir)

  write_version_file(destination, product, version, compiler, label)
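Note how sh converts Python keyword arguments into command-line flags: a single-character keyword becomes a short option and a multi-character keyword becomes a long option, with underscores turned into dashes. A minimal sketch of that convention (the URL and paths below are placeholders, not taken from the example above):

import sh

# directory_prefix="/tmp" becomes --directory-prefix=/tmp; no_clobber=True becomes --no-clobber
sh.wget("http://example.com/pkg.tar.gz", directory_prefix="/tmp", no_clobber=True)
# z=True, x=True, f=... become short flags: tar -z -x -f /tmp/pkg.tar.gz --directory=/tmp
sh.tar(z=True, x=True, f="/tmp/pkg.tar.gz", directory="/tmp")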
Example #2
    def setup(self):
        self.work_dir = tempfile.mkdtemp("carpyncho_loader_test")
        self.teardown()

        if not os.path.exists(self.EXAMPLE_DATA_PATH):
            os.makedirs(self.EXAMPLE_DATA_PATH)
            sh.tar(j=True,
                   x=True,
                   f=self.TAR_DATA_PATH,
                   directory=self.EXAMPLE_DATA_PATH)

        shutil.copytree(self.EXAMPLE_DATA_PATH, self.work_dir)
        self.input_path = os.path.join(self.work_dir, "example_data")
        self.data_path = os.path.join(self.work_dir, "stored")
        self.test_cache = os.path.join(self.work_dir, "test_cache")

        dirs = {
            "INPUT_PATH": self.input_path,
            "DATA_PATH": self.data_path,
            "RAW_TILES_DIR": os.path.join(self.data_path, "raw_tiles"),
            "NPY_TILES_DIR": os.path.join(self.data_path, "npy_tiles"),
            "RAW_PAWPRINTS_DIR": os.path.join(self.data_path, "raw_pawprints"),
            "NPY_PAWPRINTS_DIR": os.path.join(self.data_path, "npy_pawprints"),
            "MATCHS_DIR": os.path.join(self.data_path, "matchs"),
            "LC_DIR": os.path.join(self.data_path, "light_curves"),
            "SAMPLES_DIR": os.path.join(self.data_path, "samples")
        }

        for k, d in dirs.items():
            if not os.path.exists(d):
                os.makedirs(d)
            self.patch("corral.conf.settings.{}".format(k), d)

        self.runned = self.run_another_tests(self.run_before)
Example #4
    def fetch(self, path):
        mkdir('-p', self._own_workdir)

        # download sources
        archive_filename = str(Path(self._workdir).joinpath('kernel.tar.gz'))
        if not Path(archive_filename).is_file():
            wget('-O{}'.format(archive_filename),
                 path,
                 _out=sys.stdout,
                 _err=sys.stderr)

        # extract sources
        extracted_dir = str(Path(self._own_workdir).joinpath('_extracted'))
        mkdir('-p', extracted_dir)
        tar('-xf',
            archive_filename,
            '-C',
            extracted_dir,
            _out=sys.stdout,
            _err=sys.stderr)

        kernel_src_dir = [
            d for d in Path(extracted_dir).iterdir() if d.is_dir()
        ]
        if len(kernel_src_dir) != 1:
            raise RuntimeError('Linux Kernel sources failed to extract')

        self._dir = kernel_src_dir[0]
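The `_out` and `_err` keywords used above are sh's special redirection arguments rather than options forwarded to wget or tar; they accept file objects, filenames, or callables. A small sketch, assuming an archive.tar exists in the current directory:

import sys
from sh import tar

# stream the listing to the terminal as it is produced
tar('-tvf', 'archive.tar', _out=sys.stdout, _err=sys.stderr)

# or hand each line of output to a callback instead
tar('-tvf', 'archive.tar', _out=lambda line: sys.stdout.write(line))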
Example #5
def file_backup_restore(process, action):
    if action == 'backup':
        with sh.pushd('/opt/anaconda'):
            sh.tar(
                "--exclude=storage/pgdata",
                "--exclude=storage/object/anaconda-repository",
                "-czvf",
                process.storage_backup_name,
                "storage"
            )

        sh.mv(
            f'/opt/anaconda/{process.storage_backup_name}',
            f'{process.backup_directory}/'
        )
    elif action == 'restore':
        sh.cp(
            f'{process.backup_directory}/{process.storage_backup_name}',
            '/opt/anaconda'
        )
        with sh.pushd('/opt/anaconda'):
            sh.tar(
                '-xzvf',
                f'/opt/anaconda/{process.storage_backup_name}'
            )
            sh.rm(f'{process.storage_backup_name}')
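sh.pushd is a context manager: it changes the working directory for the duration of the with block and restores it afterwards, which is why the tar calls above can use paths relative to /opt/anaconda. A minimal illustration with hypothetical paths:

import sh

with sh.pushd('/tmp'):
    # runs with cwd=/tmp, so the archive and its contents are addressed relatively
    sh.tar('-czf', 'backup.tar.gz', 'some_dir')
# the previous working directory is restored here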
Example #6
def initialize():
    # noinspection PyUnresolvedReferences
    from sh import wget, tar, rm, shasum
    if not os.path.exists(prefix):
        os.makedirs(prefix)

    if (not os.path.exists(dirs['inputs'])) or (not os.path.exists(
            dirs['intermediates'])):
        try:
            if not os.path.exists(prefix):
                logger.info("Creating {DIR}".format(DIR=prefix))
                os.makedirs(prefix)
            logger.info("Downloading data from {URL} to {DIR}".format(
                URL=data_url, DIR=prefix))
            tar(wget(data_url, "-qO-", _piped=True), "xz", _cwd=prefix)
            logger.info("Checking checksums of downloaded files")
            for line in shasum("-c", _cwd=prefix, _in=checksums, _iter=True):
                logger.info(line)
        except Exception as e:
            logger.info("Error: {}".format(e.message))
            logger.info("Deleting {DIR}".format(DIR=dirs['inputs']))
            rm(dirs['inputs'], '-rf')
            logger.info("Deleting {DIR}".format(DIR=dirs['intermediates']))
            rm(dirs['intermediates'], '-rf')
            raise

    # make sure all those directories exist
    for d in (dirs['outputs'], dirs['plots']):
        if not os.path.exists(d):
            logger.info("Creating {DIR}".format(DIR=d))
            os.makedirs(d)
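This snippet leans on three of sh's streaming hooks at once: `_piped=True` lets the wget command's stdout stream into tar (passing one sh command as the first argument of another builds the pipeline), `_in` feeds a string to shasum's stdin, and `_iter=True` yields output line by line. A condensed sketch of the pipeline half, with a placeholder URL:

from sh import wget, tar

# roughly: wget -qO- http://example.com/data.tar.gz | tar xz  (run in /tmp)
tar(wget('http://example.com/data.tar.gz', '-qO-', _piped=True), 'xz', _cwd='/tmp')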
Example #7
def cleanup_and_restore_files(process):
    timestamp = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H%M%S')
    # Compress and timestamp the existing files minus the repos
    with sh.pushd('/opt/anaconda'):
        sh.tar(
            "--exclude=storage/object/anaconda-repository",
            "-czvf",
            f"git_pgdata.snapshot_{timestamp}.tar.gz",
            "storage"
        )

    # Cleanup directories as things will get restored
    sh.rm('-Rf', '/opt/anaconda/storage/git')
    sh.rm('-Rf', '/opt/anaconda/storage/pgdata')
    sh.rm('-Rf', '/opt/anaconda/storage/object/anaconda-objects')
    sh.rm('-Rf', '/opt/anaconda/storage/object/anaconda-projects')

    # Restore the files
    file_backup_restore(process, 'restore')

    # Recreate the postgres directory and set permissions
    sh.mkdir(process.postgres_system_backup)
    sh.chown('999:root', f'{process.postgres_system_backup}')
    sh.chmod('700', f'{process.postgres_system_backup}')

    return
Example #8
def packaging_lib(libmace_output_dir, project_name):
    print("* Package libs for %s" % project_name)
    tar_package_name = "libmace_%s.tar.gz" % project_name
    project_dir = "%s/%s" % (libmace_output_dir, project_name)
    tar_package_path = "%s/%s" % (project_dir, tar_package_name)
    if os.path.exists(tar_package_path):
        sh.rm("-rf", tar_package_path)

    print("Start packaging '%s' libs into %s" % (project_name,
                                                 tar_package_path))
    which_sys = platform.system()
    if which_sys == "Linux":
        sh.tar(
            "cvzf",
            "%s" % tar_package_path,
            glob.glob("%s/*" % project_dir),
            "--exclude",
            "%s/_tmp" % project_dir,
            _fg=True)
    elif which_sys == "Darwin":
        sh.tar(
            "--exclude",
            "%s/_tmp" % project_dir,
            "-cvzf",
            "%s" % tar_package_path,
            glob.glob("%s/*" % project_dir),
            _fg=True)
    print("Packaging Done!\n")
    return tar_package_path
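`_fg=True` runs the command in the foreground, connected directly to the terminal, so tar's verbose progress is visible but nothing is captured in Python. The Linux/Darwin split presumably exists because GNU tar and BSD tar differ in how they accept --exclude relative to the create flags. A small sketch of foreground mode with hypothetical paths:

import sh

# verbose output goes straight to the terminal; the call blocks until tar exits
sh.tar('cvzf', '/tmp/out.tar.gz', '/tmp/project', _fg=True)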
Example #9
def main():
    tar_files = sys.argv[1]
    sample_ratio = float(sys.argv[2])

    np.random.seed(1234)

    tar_files = glob.glob(tar_files)
    print(tar_files)

    work_dir = os.path.dirname(tar_files[0])
    os.system(f'mkdir -p {work_dir}/binary_files/')

    for tar_file in tar_files:
        print(f'extracting {tar_file}')
        tar('-C', f'{work_dir}/binary_files/', '-xvf', tar_file)

    all_files = glob.glob(f'{work_dir}/binary_files/*.jsonl')
    all_files.sort()

    print(f'{len(all_files)} in total')
    sampled_files = np.random.choice(all_files,
                                     replace=False,
                                     size=int(sample_ratio * len(all_files)))
    print(f'{len(sampled_files)} sampled files')

    os.chdir(work_dir)
    with open('sampled_binaries.txt', 'w') as f:
        for fname in sampled_files:
            fname = os.path.basename(fname)
            f.write(fname + '\n')

    print('creating tar file')
    os.chdir('binary_files/')
    tar('-cf', f'../sampled_binaries_{sample_ratio}.tar', '-T',
        '../sampled_binaries.txt')
Example #10
def extract_reviews(input_file_name, output_file_name):
    # extracts to folder "sorted_data"
    sh.tar("xvf", input_file_name)

    reviews = []
    raw_dir = "sorted_data"
    categories = [
        name for name in os.listdir(raw_dir)
        if os.path.isdir(os.path.join(raw_dir, name))
    ]
    for category in categories:
        positive_file_name = os.path.join(raw_dir, category, "positive.review")
        negative_file_name = os.path.join(raw_dir, category, "negative.review")

        positive_reviews = get_reviews(positive_file_name,
                                       label=':)',
                                       categroy=category)
        negative_reviews = get_reviews(negative_file_name,
                                       label=':(',
                                       categroy=category)

        reviews.extend(positive_reviews)
        reviews.extend(negative_reviews)

    # This folder is really big, and we still have the compressed version of this anyway so there's
    # no need to keep it around.
    sh.rm("-rf", raw_dir)

    with open(output_file_name, 'wb') as output_file:
        json.dump(reviews, output_file)
        output_file.write(u"\n")
Example #11
def sh_asterisk():
    '''
    How to pass an argument of * to a command
    https://stackoverflow.com/questions/32923189/how-to-pass-an-asterisk-to-module-sh-in-python
    '''
    # Equivalent to the Linux command: tar czf 1.tar.gz *
    sh.tar("czf", "1.tar.gz", sh.glob("*"))
Example #12
def copy_proguard_mapping(flavor, version_name):
    folder_path = 'releases'
    sh.mkdir("-p", folder_path)
    output_file = '%s/wikipedia-%s.mapping.tar.gz' % (folder_path, version_name)
    input_file = 'wikipedia/build/outputs/mapping/%s/release/mapping.txt' % flavor
    sh.tar('czf', output_file, input_file)
    print ' proguard mapping: %s' % output_file
Example #14
def build_stress(stress_revision, name=None):
    # Build a stress revision

    try:
        git_id = sh.git('--git-dir={home}/fab/cassandra.git'.format(home=HOME),
                        'rev-parse', stress_revision).strip()
    except sh.ErrorReturnCode:
        raise AssertionError(
            'Invalid stress_revision: {}'.format(stress_revision))

    path = os.path.join(CASSANDRA_STRESS_PATH, git_id)
    if not os.path.exists(path):
        logger.info("Building cassandra-stress '{}' in '{}'.".format(
            stress_revision, path))
        os.makedirs(path)
        sh.tar(
            sh.git("--git-dir={home}/fab/cassandra.git".format(home=HOME),
                   "archive", git_id), 'x', '-C', path)
        antcmd('-Dbasedir={}'.format(path),
               '-f',
               '{}/build.xml'.format(path),
               'realclean',
               'jar',
               _env={
                   "JAVA_TOOL_OPTIONS": "-Dfile.encoding=UTF8",
                   "JAVA_HOME": JAVA_HOME
               })

    name = name if name else stress_revision
    return {name: git_id}
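Passing a finished sh command as the first argument of another, as in sh.tar(sh.git(...), 'x', '-C', path) above, pipes the inner command's stdout into the outer command's stdin. A stripped-down sketch of the idiom with a hypothetical repository path:

import sh

# roughly: git --git-dir=/repo/.git archive HEAD | tar x -C /tmp/export
sh.tar(sh.git('--git-dir=/repo/.git', 'archive', 'HEAD'), 'x', '-C', '/tmp/export')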
Example #15
    def add_files(self):
        files = ["pytorch_model.bin", "tf_model.h5", "config.json"]
        files.extend(self.ORIGINAL_FILES)

        files += ["README.md", "model_card"]
        with cd(self.git_path):
            sh.git("add", *files, _fg=True)

        if "nbest_predictions.json" in self.TASK_INFO[self.task]["eval_files"]:
            with cd(self.git_path / "eval"):
                sh.tar("-cvzf", "nbest_predictions.json.tgz",
                       "nbest_predictions.json")
                sh.rm("nbest_predictions.json")

        to_copy = self.get_copy_list()
        for files, dest in to_copy:
            with cd(dest):
                for f in files:
                    if f == "nbest_predictions.json":
                        f += ".tgz"
                    sh.git("add", f, _fg=True)

        assert ("checkpoint_dir" not in self.checkpoint_info)
        self.checkpoint_info["checkpoint_path"] = self.checkpoint_path

        with (self.git_path / "model_info.json").open("w") as f:
            f.write(pretty_json(self.checkpoint_info))

        with cd(self.git_path):
            sh.git("add", "model_info.json")
Example #16
    def fetch(self, output):
        dl_path = self.get_latest_tarball()
        raw_path, _ = urllib.urlretrieve(dl_path)

        # The downloaded file is a gzip'd tarball.
        extract_path = self._make_temp_directory("rootfetch-apple-extracted")
        sh.tar("-xzv", "-f", raw_path, "-C", extract_path, strip_components=1)

        # We now have a directory with all the apple files. We need to find the
        # roots directory, parse out all the different formats, then generate a
        # single file that has PEMs in it.
        certificates_path = os.path.join(extract_path, "certificates", "roots")
        for f in os.listdir(certificates_path):
            full_path = os.path.join(certificates_path, f)
            if not os.path.isfile(full_path):
                continue
            # Skip hidden files, such as .cvsignore
            if f.startswith('.'):
                continue
            pem = self.make_pem(full_path)
            output.write("# ")
            output.write(f)
            output.write("\n")
            output.write("\n".join(pem))
            output.write("\n\n")
Example #17
def zip_it(args, sourcedir, destinationdir):
    tail = sourcedir.split('/')[-1]
    dest_dir = destinationdir + tail
    dest_file = dest_dir + ".tgz"
    if args.zipname:
        dest_file = args.zipname + ".tgz"

    #print('tail = ' + tail + ' dest_file=' + dest_file)
    if args.zip or args.zipdel:
        sh.tar("-cvzf", dest_file, "-C", destinationdir, tail)
        logging.info("Created " + dest_file)

    if args.zipdel:
        sh.rm("-rf", dest_dir)

    if args.useremail:
        print("Encrypting " + dest_file + "...")
        gpg_file = dest_file + ".gpg"
        gpg_home = os.getenv("HOME") + "/.gpghome"
        gpg = gnupg.GPG(gnupghome=gpg_home)

        with open(dest_file, 'rb') as f:
            status = gpg.encrypt_file(file=f,
                                      recipients=[args.useremail],
                                      output=gpg_file)
        print('encryption ok: ', status.ok)
        #print('encryption status: ', status.status)
        #print('encryption stderr: ', status.stderr)
        print('~' * 50)
Example #18
    def _extract_keyring(self):
        ''' extract the parabola keyring '''
        cache = next(iter(self._repos['libre'].pkgentries_cache.values()))
        keyring_pkgentries = cache['parabola-keyring']
        keyring_pkgentry = keyring_pkgentries[0]
        keyring_pkgfile = next(iter(keyring_pkgentry.pkgfiles.values()))[0]

        src = keyring_pkgfile.path
        dst = self._keyring_dir

        if not os.path.isdir(
                dst) or os.path.getmtime(dst) <= os.path.getmtime(src):
            os.makedirs(dst, exist_ok=True)
            shutil.rmtree(dst)
            os.makedirs(dst, exist_ok=True)
            sh.tar('xf', src, _cwd=dst)

        keyring_file = os.path.join(dst, 'usr', 'share', 'pacman', 'keyrings',
                                    'parabola.gpg')
        self._keyring = GPG_PACMAN.scan_keys(keyring_file)
        for key in self._keyring:
            key['packages'] = []
            self._key_cache[key['keyid']] = key
            for key_id, subkey in key['subkey_info'].items():
                if 's' in subkey['cap']:
                    subkey['packages'] = []
                    self._key_cache[key_id] = subkey
                    self._key_cache[key_id]['master_key'] = key['keyid']
Example #19
    def archive(self, ref, destination, *archive_args):
        """
        Archive the specified GIT ref to the specified destination

        Any extra args are passed to the sh command directly so you can
        add extra flags for `git archive` should you desire.
        """
        tmp_dir = tempfile.mkdtemp()
        tar_file = None

        try:
            # create our git interface via the sh module
            # see:
            # github.com/stackstrap/stackstrap/blob/master/scripts/loose_ssh.sh
            git = sh.git.bake(_cwd=tmp_dir, _env={'GIT_SSH': "loose_ssh.sh"})

            self.log.debug("Cloning %s to %s" % (self.url, tmp_dir))
            git('clone', '--recursive', self.url, tmp_dir)

            self.log.debug("Archiving '{ref}' to {destination}".format(
                ref=ref, destination=destination))

            settings.mkdir_p(destination)

            (fd, tar_file) = tempfile.mkstemp()
            git.archive('origin/%s' % ref, *archive_args, _out=tar_file)
            sh.tar("xpf", tar_file, _cwd=destination)
        finally:
            if tar_file:
                os.remove(tar_file)

            if os.path.isdir(tmp_dir):
                shutil.rmtree(tmp_dir)
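sh.git.bake(...) returns a partially applied command: positional arguments and special keywords such as _cwd and _env are remembered and merged into every subsequent call, so git('clone', ...) and git.archive(...) above both run inside tmp_dir with GIT_SSH set. A brief sketch with a hypothetical directory:

import sh

git = sh.git.bake(_cwd='/tmp/checkout')  # every call below runs in /tmp/checkout
git('status')                            # same as sh.git('status', _cwd='/tmp/checkout')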
Example #21
    def _load_pkgentries(self):
        ''' extract and then load the entries in the db.tar.xz '''
        arches_dir = os.path.join(self._pkgfiles_dir, 'os')
        for arch in os.scandir(arches_dir):
            if arch.name not in CONFIG.parabola.arches:
                continue

            repo_file = os.path.join(arch.path, '%s.db' % self._name)
            if os.path.exists(repo_file):
                mtime = os.path.getmtime(repo_file)

                dst = os.path.join(self._pkgentries_dir, 'os', arch.name)
                if os.path.isdir(dst) and os.path.getmtime(dst) > mtime:
                    continue

                os.makedirs(dst, exist_ok=True)
                shutil.rmtree(dst)
                os.makedirs(dst, exist_ok=True)

                sh.tar('xf', repo_file, _cwd=dst)

        i = 0
        arches_dir = os.path.join(self._pkgentries_dir, 'os')
        for arch in os.scandir(arches_dir):
            if arch.name not in CONFIG.parabola.arches:
                continue

            for pkgentry_dir in os.scandir(arch.path):
                pkgentry = PkgEntry(self, pkgentry_dir.path, arch.name)

                i += 1
                if sys.stdout.isatty():
                    sys.stdout.write(' %s pkgentries: %i\r' % (self._name, i))
                    sys.stdout.flush()

                self._pkgentries.append(pkgentry)
                if pkgentry.arch not in self._pkgentries_cache:
                    self._pkgentries_cache[pkgentry.arch] = {}
                if pkgentry.pkgname not in self._pkgentries_cache[
                        pkgentry.arch]:
                    self._pkgentries_cache[pkgentry.arch][
                        pkgentry.pkgname] = []
                self._pkgentries_cache[pkgentry.arch][pkgentry.pkgname].append(
                    pkgentry)

                for provides in pkgentry.provides.union([pkgentry.pkgname]):
                    if pkgentry.arch not in self._provides_cache:
                        self._provides_cache[pkgentry.arch] = {}

                    splits = ['==', '>=', '<=', '>', '<', '=']
                    for split in splits:
                        if split in provides:
                            provides = provides.split(split)[0]
                            break

                    if provides not in self._provides_cache[pkgentry.arch]:
                        self._provides_cache[pkgentry.arch][provides] = []
                    self._provides_cache[pkgentry.arch][provides].append(
                        pkgentry)
Example #22
def wget_and_unpack_package(download_path, file_name, destination, wget_no_clobber):
  print "URL {0}".format(download_path)
  print "Downloading {0} to {1}".format(file_name, destination)
  # --no-clobber avoids downloading the file if a file with the name already exists
  sh.wget(download_path, directory_prefix=destination, no_clobber=wget_no_clobber)
  print "Extracting {0}".format(file_name)
  sh.tar(z=True, x=True, f=os.path.join(destination, file_name), directory=destination)
  sh.rm(os.path.join(destination, file_name))
Example #23
def _archive(includes):
    tar_args = [git.archive('--format=tar', 'HEAD'), 'czf', 'archive.tar.gz']
    for pttrn in includes:
        tar_args.append('--include')
        tar_args.append(pttrn)
    tar_args.append('@-')
    # git archive's output is piped in as tar's stdin; '@-' tells (BSD) tar to
    # copy the entries of that archive into the one being created
    tar(*tar_args)
Example #24
def unzip_single_event(event_directory):
    """
    unzip the sac files in a directory
    """
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    sh.cd(event_directory)
    sh.tar("xvf", "SAC.tar.gz")
    sh.cd(current_path)
Example #26
def copy_proguard_mapping(flavor, target):
    folder_path = 'releases'
    sh.mkdir("-p", folder_path)
    output_file = '%s/wikipedia-%s.mapping.tar.gz' % (folder_path,
                                                      get_release_name(target))
    input_file = 'wikipedia/build/outputs/mapping/%s/release/mapping.txt' % flavor
    sh.tar('czf', output_file, input_file)
    print ' proguard mapping: %s' % output_file
Example #27
    def input_dir(self):

        if not os.path.exists(self.test_data_dir):
            if not os.path.exists(self.test_data_tarball):
                sh.wget('-P', self.test_dir, data_tarball_url)
            sh.tar('zxvf', self.test_data_tarball, '-C', self.test_dir)

        return os.path.join(self.test_data_dir, 'input')
Example #28
 def archive_repository(self, destination, *archive_args):
     try:
         (fd, tar_file) = tempfile.mkstemp()
         self.git.archive("remotes/origin/%s" % self.git_ref, *archive_args, _out=tar_file)
         sh.tar("xf", tar_file, _cwd=destination)
     finally:
         if tar_file:
             os.remove(tar_file)
Example #29
def install_cmake(build_dir, prefix):
    cmake_archive = 'cmake-2.8.11.2'
    sh.cd(build_dir)
    sh.wget('-nc', 'http://www.cmake.org/files/v2.8/%s.tar.gz' % cmake_archive)
    sh.tar('xvzf', '%s.tar.gz' % cmake_archive)
    sh.cd(cmake_archive)
    # pass the argument list directly; with shell=True only './configure' would run
    subprocess.check_call(['./configure', '--prefix', prefix])
    sh.make('-j4')
    sh.make.install()
Example #31
def unpack(plugin):
    fname = "../{pname}_{uversion}*.orig-{name}.tar*".format(pname=c.package, uversion=UPSTREAM_VERSION, name=plugin["Plugin"])
    fname = glob.glob(fname)[0]
    try:
        os.mkdir(plugin["Plugin"])
    except FileExistsError:
        shutil.rmtree(plugin["Plugin"])
        os.mkdir(plugin["Plugin"])
    sh.tar(["-C", plugin["Plugin"], "--strip-components=1", "-axf", fname])
Example #32
    def test_make_tarball(self):
        """
        Can we make tarballs for all versions in the git repository.
        """

        experiment_repository = models.ExperimentRepository.objects.create(**self.setup_fields)

        for _hash, tstamp in experiment_repository.get_all_commits():
            experiment_details, tarball_path = utils.make_tarball(experiment_repository.path, _hash)

            ########
            # Test #
            ########

            # Do the experiment details look right?
            # Are the experiments mentioned in the settings file in the
            # experiment_details dir?
            experiments_settings = configobj.ConfigObj(
                os.path.join(experiment_repository.path, conf.repository_settings_filename)
            )

            for class_name in experiments_settings["experiments"]:
                self.assertTrue(experiment_details.has_key(class_name))

            ########
            # Test #
            ########

            # Are the contents of the tarball as they should be?
            tmpdir = tempfile.mkdtemp()

            if conf.tarball_compression_method == "bz2":
                tar_cmd = "-xjf"
            elif conf.tarball_compression_method == "gz":
                tar_cmd = "-xzf"

            sh.tar(tar_cmd, tarball_path, "-C", tmpdir)

            checksum_list = self.mock_repository.commit_dictionary[_hash]
            self.assertTrue(sys.check_directory_checksums(checksum_list, tmpdir))

            # Delete the exported directory.
            shutil.rmtree(tmpdir)

            ########
            # Test #
            ########

            # Check the contents of the tarball; does it import.
            tarball_model = utils.ExperimentArchiveTarball(tarball_path)
            self.assertTrue(tarball_model.integrity_check())
            self.assertTrue(tarball_model.import_check())

            # Delete the tarball
            os.unlink(tarball_path)

        models.ExperimentRepository.objects.all().delete()
Example #33
def setup_test_input_dir():
    data_tarball_url = 'http://s3-ap-southeast-2.amazonaws.com/dp-drop/esmgrids/test/test_data.tar.gz'
    test_data_tarball = os.path.join(test_dir, 'test_data.tar.gz')

    if not os.path.exists(test_data_dir):
        if not os.path.exists(test_data_tarball):
            sh.wget('-P', test_dir, data_tarball_url)
        sh.tar('zxvf', test_data_tarball, '-C', test_dir)

    return os.path.join(test_data_dir, 'input')
Example #34
    def setUp(self, *args, **kwargs):

        super(ChefPluginSoloTest, self).setUp(*args, **kwargs)

        self.blueprint_dir = self.copy_blueprint('chef-plugin')

        # Get resources
        with self.blueprint_dir:
            for res in 'cookbooks', 'data_bags', 'environments', 'roles':
                sh.tar('czf', res + '.tar.gz', res)
Example #37
 def format(self, project: Project):
     """
     Take the original code from archive, extract it to a temp directory,
     run the formatter, and then copy the formatter result to the project
     root
     """
     with tempfile.TemporaryDirectory() as temp_dir:
         sh.tar("-C", temp_dir, "-x", "-f", str(project.archive_path))
         self.do_format(temp_dir, project)
         sh.rsync("-a", temp_dir + "/", str(project.root))
Example #39
 def run(self, repos, nightly):
     self.log("Tarballing it...")
     cd(self._basedir)
     version = get_version(repos, nightly)
     import platform
     bits = platform.architecture()[0][:2]
     bundle_name = "Bitmask-linux%s-%s" % (bits, version)
     mv("Bitmask", bundle_name)
     tar("cjf", bundle_name + ".tar.bz2", bundle_name)
     self.log("Done")
Example #40
    def extract_tar_archive(self, to_directory='/opt'):
        if self.backup_directory != '/opt/anaconda_backup':
            temp_path = pathlib.Path(self.backup_directory)
            to_directory = temp_path.parent

        if not tarfile.is_tarfile(self.restore_file):
            raise exceptions.NotValidTarfile(
                'tar archive file is not a valid tar file')

        # Ensure the backup is extracted to the right place
        sh.tar('-xzvf', self.restore_file, '-C', to_directory)
Example #41
    def get_heroku_client_path() -> str:
        """
        Get the path to the heroku executable client, download a new one if it
        doesnt exist.
        """
        print("Locating heroku...")
        # Install Heroku CLI
        os_name = None
        bit_architecture = None

        # Get the platform we are working on
        if sys.platform == "darwin":  # Mac OS X
            os_name = "darwin"
        elif sys.platform.startswith("linux"):  # Linux
            os_name = "linux"
        else:
            os_name = "windows"

        # Find our architecture
        bit_architecture_info = platform.architecture()[0]
        if "64bit" in bit_architecture_info:
            bit_architecture = "x64"
        else:
            bit_architecture = "x86"

        # Find existing heroku files to use
        existing_heroku_directory_names = glob.glob(
            os.path.join(HEROKU_TMP_DIR, "heroku-cli-*")
        )
        if len(existing_heroku_directory_names) == 0:
            print("Getting heroku")
            if os.path.exists(os.path.join(HEROKU_TMP_DIR, "heroku.tar.gz")):
                os.remove(os.path.join(HEROKU_TMP_DIR, "heroku.tar.gz"))

            # Get the heroku client and unzip
            tar_path = os.path.join(HEROKU_TMP_DIR, "heroku.tar.gz")
            sh.wget(
                shlex.split(
                    "{}-{}-{}.tar.gz -O {}".format(
                        HEROKU_CLIENT_URL, os_name, bit_architecture, tar_path
                    )
                )
            )
            sh.tar(shlex.split(f"-xvzf {tar_path} -C {HEROKU_TMP_DIR}"))

            # Clean up the tar
            if os.path.exists(tar_path):
                os.remove(tar_path)

        heroku_directory_name = os.path.basename(
            glob.glob(os.path.join(HEROKU_TMP_DIR, "heroku-cli-*"))[0]
        )
        heroku_directory_path = os.path.join(HEROKU_TMP_DIR, heroku_directory_name)
        return os.path.join(heroku_directory_path, "bin", "heroku")
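Building argument vectors with shlex.split works here because sh expands a list argument into individual command-line arguments, so the calls above are equivalent to passing each piece separately; the caveat is that shlex.split splits on unquoted whitespace, so paths containing spaces would need quoting or explicit arguments. The tar call above could equally be written as:

import sh

# tar_path and HEROKU_TMP_DIR as defined in the example above
sh.tar('-xvzf', tar_path, '-C', HEROKU_TMP_DIR)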
Example #42
def script():
    """Run 'cfy logs get' and extract output to CONFIGURATION_DIR/logs"""
    logs_dir = cosmo.dir / 'logs'
    if logs_dir.exists():
        shutil.rmtree(logs_dir, ignore_errors=True)
    logs_dir.mkdir()
    logs_tar = logs_dir / 'logs.tar.gz'
    with logs_dir:
        cfy.logs.get(destination_path=logs_tar).wait()
        tar('xf', logs_tar, strip_components=1)
        logs_tar.remove()
Example #43
def archiveDownload(url, destination, archiveType):
    logging.info('Now downloading archive file from URL %s to %s' % (url, destination))
    filename = wget.download(url)
    if archiveType == 'zip':
        logging.info('Unzipping zip file from: %s', filename)
        sh.unzip(filename)
    elif archiveType == 'tar.gz':
        logging.info('Untarring tar.gz file from: %s', filename)
        sh.tar('-xvzf', filename)
    logging.info('Removing archive file.')
    sh.rm(filename)
    return
Example #44
def download_package(name, destination, compiler=""):
    label = map_release_label()
    if len(compiler) > 0:
        compiler = "-" + compiler
    url = "{0}/{1}/label={2}/artifact/toolchain/build/{3}{4}.tar.gz".format(HOST, BUILD, label, name, compiler)

    # Download the file
    print "Downloading {0}".format(name)
    sh.wget(url, directory_prefix=destination, no_clobber=True)
    # Extract
    print "Extracting {0}".format(name)
    sh.tar(z=True, x=True, f="{0}/{1}{2}.tar.gz".format(destination, name, compiler), directory=destination)
    sh.rm("{0}/{1}{2}.tar.gz".format(destination, name, compiler))
Example #45
    def handle(self, file_name, dest):
        dest = os.path.abspath(dest)
        from sh import unzip
        if not os.path.exists(dest):
            os.makedirs(dest)

        # Extract the zip archive into dest (tar has no -d extract flag;
        # the unzip imported above is the command actually needed here)
        unzip(file_name, '-d', dest)

        if len(os.listdir(dest)) == 1:
            # somewhat properly packaged tarball
            dest = os.path.join(dest, os.listdir(dest).pop())
        return dest
Example #46
def download_package(destination, product, version, compiler):
  label = get_release_label()
  file_name = "{0}-{1}-{2}-{3}.tar.gz".format(product, version, compiler, label)
  url_path="/{0}/{1}-{2}/{0}-{1}-{2}-{3}.tar.gz".format(product, version, compiler, label)
  download_path = HOST + url_path

  print "URL {0}".format(download_path)
  print "Downloading {0} to {1}".format(file_name, destination)
  # --no-clobber avoids downloading the file if a file with the name already exists
  sh.wget(download_path, directory_prefix=destination, no_clobber=True)
  print "Extracting {0}".format(file_name)
  sh.tar(z=True, x=True, f=os.path.join(destination, file_name), directory=destination)
  sh.rm(os.path.join(destination, file_name))
Example #47
def tgz_handle(self, file_name, dest):
    dest = os.path.abspath(dest)
    from sh import tar
    ops_flags = "zxvf"
    if not os.path.exists(dest):
        os.makedirs(dest)

    # Make moves
    tar(ops_flags, file_name, "-C", dest)

    if len(os.listdir(dest)) == 1:
        # somewhat properly packaged tarball
        dest = os.path.join(dest, os.listdir(dest).pop())
    return dest
Example #48
def gitCheckout(version):
  directory = versionDirectory(version)
  if os.path.exists(directory):
    # already checked out
    # (since version is always a commit ID, the code can never have changed)
    return False
  else:
    os.makedirs(directory)
    # Make sure we're inside the git repository first
    os.chdir(config.PROJECT_ROOT)
    # how to checkout branch to new working directory, see:
    # http://blog.jessitron.com/2013/10/git-checkout-multiple-branches-at-same.html
    sh.tar(sh.git.archive(version), "-xC", directory)
    return True
Example #49
    def setup_tomcat(self, args):
        try:
            log("Downloading Tomcat ...")
            result = wget(dict['TOMCAT_DOWNLOAD_URL'])
        except:
            log("Error getting Tomcat from: " + dict['TOMCAT_DOWNLOAD_URL'])

        try:
            log("Extracting Tomcat ...")
            result = tar("xvzf", dict['TOMCAT_VERSION'] + ".tar.gz")
        except:
            log("Error extracting Tomcat ..." + dict['TOMCAT_VERSION'] + ".tar.gz")

        setup_airavata_server(args)

        try:
            log("Copying the Airavata war files to Tomcat's webapp directory ...")
            result = cp(dict['AIRAVATA_VERSION'] + "/*.war", dict['TOMCAT_VERSION'] + "/webapps", "-v")
        except:
            log("Error copying the Airavata war files to Tomcat's webapp directory ...")

        try:
            log("Granting executable permissions to the scripts")
            result = chmod("a+x", dict['TOMCAT_VERSION'] + "/*.sh")
        except:
            log("Error granting executable permissions to " + dict['TOMCAT_VERSION'] + "/*.sh")
Example #50
def git_export(project, _hash):
    '''
    Export commit `_hash` of git project `project` into a temporary directory.
    Return the name of the temporary directory.
    The process requires tar-ing and untar-ing.
    '''
    tmpdir = tempfile.mkdtemp()
    tarball_name = os.path.join(tmpdir, 'exported_git_project.tar')
    assert os.path.exists(project) and os.path.isdir(project)
    git = sh.git.bake(_cwd=project)
    git('archive', _hash, '-o', tarball_name)
    export_tmpdir = tempfile.mkdtemp()
    sh.tar('-xf', tarball_name, '-C', export_tmpdir)
    shutil.rmtree(tmpdir) # Delete the tmpdir.

    return export_tmpdir 
Example #51
  def postClone(self, cloned_files, target_dir, version):
    """
    Extracts the compressed archives.

    .. versionadded:: 0.3.0
    """
    f = cloned_files[0]

    if self.newer("hg18", version):
      # Extract the gzipped tarball into the target directory
      sh.tar("-xzf", f, "-C", target_dir)

    else:
      # Rename to ".zip"
      sh.mv(f, f.replace("tar.gz", "zip"))

      # GunZIP the file (and remove the archive)
      sh.gunzip(f)
Example #52
def packaging_lib(libmace_output_dir, project_name):
    print("* Package libs for %s" % project_name)
    tar_package_name = "libmace_%s.tar.gz" % project_name
    project_dir = "%s/%s" % (libmace_output_dir, project_name)
    tar_package_path = "%s/%s" % (project_dir, tar_package_name)
    if os.path.exists(tar_package_path):
        sh.rm("-rf", tar_package_path)

    print("Start packaging '%s' libs into %s" % (project_name,
                                                 tar_package_path))
    sh.tar(
        "cvzf",
        "%s" % tar_package_path,
        glob.glob("%s/*" % project_dir),
        "--exclude",
        "%s/_tmp" % project_dir,
        _fg=True)
    print("Packaging Done!\n")
Example #53
    def place(self, cache, path):
        data = self.cache(cache)
        opts = []

        if self.url.fragment is not None:
            slashes = self.url.fragment.count('/')
            # If the fragment is `a/b/d/`, then we get three slashes and pass
            # three to `--strip-components`. The effect is similar to using
            # `rsync` or `cp -a` with trailing slashes.
            if slashes > 0:
                opts += ['--strip-components', str(slashes)]
            # Append fragment to arguments list, so only this file/dir is
            # extracted.
            opts += ['--', self.url.fragment]

        mkdir('-p', path.dirname)
        if self.url.fragment is None or self.url.fragment.endswith('/'):
            tar('-xf', str(data), '-C', str(path), *opts)
        else:
            tar('-xf', str(data), '--to-stdout', *opts, _out=str(path))
Example #54
    def untar_bundle(self, bundle_path):
        """Unpack tarred bundle

        :param bundle_path: path to bundle to unzip
        :returns unpacked_bundle: path to unpacked bundle
        """
        sh.tar("xfz", bundle_path)
        unpacked_bundle = path(self.config["unpacked_bundle"])
        if not unpacked_bundle.exists():
            raise RuntimeError("Could not find unpacked bundle \"{}\"".
                               format(unpacked_bundle))
        return unpacked_bundle
Example #55
def setup_stress(stress_revision):
    stress_path = None

    try:
        git_id = sh.git('--git-dir={home}/fab/cassandra.git'
                        .format(home=HOME), 'rev-parse', stress_revision).strip()
    except sh.ErrorReturnCode:
        raise AssertionError('Invalid stress_revision: {}'.format(stress_revision))

    path = os.path.join(CASSANDRA_STRESS_PATH, git_id)
    if not os.path.exists(path):
        logger.info("Building cassandra-stress '{}' in '{}'.".format(stress_revision, path))
        os.makedirs(path)
        sh.tar(
            sh.git("--git-dir={home}/fab/cassandra.git".format(home=HOME), "archive", git_id),
            'x', '-C', path
        )
        antcmd('-Dbasedir={}'.format(path), '-f', '{}/build.xml'.format(path), 'realclean', 'jar')

    stress_path = os.path.join(path, 'tools/bin/cassandra-stress')

    return stress_path
Example #56
 def retrieve(self):
     if self.do_retrieve:
         sh.rm("-fr", self.dirname)
         os.mkdir(self.dirname)
         self.pushd(self.dirname)
         retrieved = FileBuildConfiguration.download(self.url)
         if ".tar.gz" in retrieved:
             sh.tar("xvzf", retrieved)
         if ".tar.bz2" in retrieved:
             sh.tar("xjvf", retrieved)
         if ".zip" in retrieved:
             sh.unzip(retrieved)
     else:
         self.pushd(self.dirname)
     # Either one directory *OR* one directory + a README.
     if len(os.listdir(".")) <= 3:
         # we can assume that we need to chdir before we can build, so set that to the local build path
         for curr in os.listdir("."):
             if os.path.isdir(curr):
                 self.buildpath = curr
     if not getattr(self, 'buildpath', None):
         self.buildpath = "."
     self.popd()
Example #57
def _install_ckeditor(stdout=None):
    """Callback to install necessary library for the IMCE module"""
    arg = 'xf'

    if stdout:
        arg += 'v'

    http.dl(CKEDITOR_URI, 'ckeditor.tar.gz')
    output = tar(arg, 'ckeditor.tar.gz')

    if stdout and output:
        stdout.write(str(output).strip() + '\n')

    rm('ckeditor.tar.gz')
Example #60
    def _get_code(self, nmpi_job, job_desc):
        """
        Obtain the code and place it in the working directory.

        If the experiment description is the URL of a Git repository, try to clone it.
        If it is the URL of a zip or .tar.gz archive, download and unpack it.
        Otherwise, the content of "code" is the code: write it to a file.
        """
        url_candidate = urlparse(nmpi_job['code'])
        logger.debug("Get code: %s %s", url_candidate.netloc, url_candidate.path)
        if url_candidate.scheme and url_candidate.path.endswith((".tar.gz", ".zip", ".tgz")):
            self._create_working_directory(job_desc.working_directory)
            target = os.path.join(job_desc.working_directory, os.path.basename(url_candidate.path))
            #urlretrieve(nmpi_job['code'], target) # not working via KIP https proxy
            curl(nmpi_job['code'], '-o', target)
            logger.info("Retrieved file from {} to local target {}".format(nmpi_job['code'], target))
            if url_candidate.path.endswith((".tar.gz", ".tgz")):
                tar("xzf", target, directory=job_desc.working_directory)
            elif url_candidate.path.endswith(".zip"):
                try:
                    # -o for auto-overwrite
                    unzip('-o', target, d=job_desc.working_directory)
                except:
                    logger.error("Could not unzip file {}".format(target))
        else:
            try:
                # Check the "code" field for a git url (clone it into the workdir) or a script (create a file into the workdir)
                # URL: use git clone
                git.clone('--recursive', nmpi_job['code'], job_desc.working_directory)
                logger.info("Cloned repository {}".format(nmpi_job['code']))
            except (sh.ErrorReturnCode_128, sh.ErrorReturnCode):
                # SCRIPT: create file (in the current directory)
                logger.info("The code field appears to be a script.")
                self._create_working_directory(job_desc.working_directory)
                with codecs.open(job_desc.arguments[0], 'w', encoding='utf8') as job_main_script:
                    job_main_script.write(nmpi_job['code'])
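The except clause on sh.ErrorReturnCode_128 above works because sh generates an ErrorReturnCode_N subclass for every non-zero exit status, all inheriting from sh.ErrorReturnCode, so code can catch either a specific status or the whole family. A minimal sketch with a deliberately missing tarball:

import sh

try:
    sh.tar('xzf', '/tmp/probably-missing.tar.gz')
except sh.ErrorReturnCode as exc:
    # exc.exit_code, exc.stdout and exc.stderr describe the failed command
    print('tar failed with exit code', exc.exit_code)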