示例#1
0
def _update_settings_of_user(
    path: str,
    tar_file: TarFile,
    user_tars: List[TarInfo],
    user: str,
    local_users: Dict[str, Optional[str]],
) -> None:
    """Update files within a user directory during a sync.

    Users come in two tiers:

    * Customer-users (present in ``local_users`` with a customer id) only
      work on the GUI of their remote site and may customize bookmarks,
      views, dashboards, reports, etc.  Their local "user_*" files are
      therefore retained during sync.

    * Non-customer-users (e.g. GLOBAL users) normally work on the central
      site and should see their customizations on remote sites too, so
      every file is synced for them.

    No backup of the remote site dir happens during sync; data is removed,
    added or skipped in place to avoid collisions.
    """

    customer_bound = local_users.get(user) is not None
    _cleanup_user_dir(path, customer_bound)

    members = user_tars
    if customer_bound:
        # Keep the customer user's local "user_*" customizations.
        members = [member for member in user_tars if not is_user_file(member.name)]

    tar_file.extractall(os.path.dirname(path), members=members)
示例#2
0
    def install_repo(self, repo):
        """Install a plugin repository from a known name, a git URL or a
        URL pointing to a ``.tar.gz`` archive.

        :param repo: known public repo name, git URL, or tarball URL.
        :returns: an error-message tuple on failure, otherwise the result
            of ``update_dynamic_plugins()``.
        """
        if repo in KNOWN_PUBLIC_REPOS:
            repo = KNOWN_PUBLIC_REPOS[repo]['path']  # replace it by the url
        git_path = which('git')

        if not git_path:
            return ('git command not found: You need to have git installed on '
                    'your system to be able to install git based plugins.', )

        # TODO: Update download path of plugin.
        if repo.endswith('tar.gz'):
            tar = TarFile(fileobj=urlopen(repo))
            try:
                tar.extractall(path=self.plugin_dir)
            finally:
                tar.close()  # don't leak the archive handle
            s = repo.split(':')[-1].split('/')[-2:]
            human_name = '/'.join(s)
            # BUG FIX: str.rstrip() strips a *set of characters*, not a
            # suffix, so rstrip('.tar.gz') could also eat trailing 'a', 'g',
            # 'r', 't', 'z' or '.' characters of the repo name.
            if human_name.endswith('.tar.gz'):
                human_name = human_name[:-len('.tar.gz')]
        else:
            human_name = human_name_for_git_url(repo)
            p = subprocess.Popen([git_path, 'clone', repo, human_name], cwd=self.plugin_dir, stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            feedback = p.stdout.read().decode('utf-8')
            error_feedback = p.stderr.read().decode('utf-8')
            if p.wait():
                return "Could not load this plugin: \n\n%s\n\n---\n\n%s" % (feedback, error_feedback),
        self.add_plugin_repo(human_name, repo)
        return self.update_dynamic_plugins()
示例#3
0
文件: errBot.py 项目: glenbot/err
    def install(self, mess, args):
        """ install a plugin repository from the given source or a known public repo (see !repos to find those).
        for example from a known repo : !install err-codebot
        for example a git url : [email protected]:gbin/plugin.git
        or an url towards a tar.gz archive : http://www.gootz.net/plugin-latest.tar.gz
        """
        if not args.strip():
            return "You should have an urls/git repo argument"
        if args in KNOWN_PUBLIC_REPOS:
            args = KNOWN_PUBLIC_REPOS[args][0]  # a known name maps to its url
        git_path = which('git')
        if not git_path:
            return 'git command not found: You need to have git installed on your system to by able to install git based plugins.'

        if args.endswith('tar.gz'):
            # Tarball install: stream the archive into the plugin directory.
            TarFile(fileobj=urlopen(args)).extractall(path=PLUGIN_DIR)
            human_name = args.split('/')[-1][:-7]
        else:
            # Git install: clone the repository into the plugin directory.
            human_name = human_name_for_git_url(args)
            proc = subprocess.Popen([git_path, 'clone', args, human_name], cwd=PLUGIN_DIR, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out = proc.stdout.read()
            err = proc.stderr.read()
            if proc.wait():
                return "Could not load this plugin : \n%s\n---\n%s" % (out, err)
        self.add_plugin_repo(human_name, args)
        errors = self.update_dynamic_plugins()
        if errors:
            self.send(mess.getFrom(), 'Some plugins are generating errors:\n' + '\n'.join(errors), message_type=mess.getType())
        else:
            self.send(mess.getFrom(), "A new plugin repository named %s has been installed correctly from %s. Refreshing the plugins commands..." % (human_name, args), message_type=mess.getType())
        self.activate_non_started_plugins()
        return "Plugin reload done."
示例#4
0
    def repos_install(self, mess, args):
        """ install a plugin repository from the given source or a known public repo (see !repos to find those).
        for example from a known repo : !install err-codebot
        for example a git url : [email protected]:gbin/plugin.git
        or an url towards a tar.gz archive : http://www.gootz.net/plugin-latest.tar.gz
        """
        if not args.strip():
            return "You should have an urls/git repo argument"
        if args in KNOWN_PUBLIC_REPOS:
            args = KNOWN_PUBLIC_REPOS[args][0]  # a known name maps to its url
        git_path = which('git')
        if not git_path:
            return 'git command not found: You need to have git installed on your system to by able to install git based plugins.'

        if args.endswith('tar.gz'):
            # Tarball install: stream the archive into the plugin directory.
            TarFile(fileobj=urlopen(args)).extractall(path=self.plugin_dir)
            human_name = args.split('/')[-1][:-7]
        else:
            # Git install: clone the repository into the plugin directory.
            human_name = human_name_for_git_url(args)
            proc = subprocess.Popen([git_path, 'clone', args, human_name], cwd=self.plugin_dir,
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out = proc.stdout.read().decode('utf-8')
            err = proc.stderr.read().decode('utf-8')
            if proc.wait():
                return "Could not load this plugin : \n%s\n---\n%s" % (out, err)
        self.add_plugin_repo(human_name, args)
        errors = self.update_dynamic_plugins()
        if errors:
            self.send(mess.getFrom(), 'Some plugins are generating errors:\n' + '\n'.join(errors), message_type=mess.getType())
        else:
            self.send(mess.getFrom(), "A new plugin repository named %s has been installed correctly from %s. Refreshing the plugins commands..." % (human_name, args), message_type=mess.getType())
        self.activate_non_started_plugins()
        return "Plugin reload done."
def getDataFromTarfile(tarfile):
    """Return member names of *tarfile*, extracting them first if any are
    missing on disk.

    :param tarfile: path to an (uncompressed) tar archive.
    :returns: list of member names in archive order.
    """
    tf = TarFile(tarfile)
    try:
        members = [m.name for m in tf.getmembers()]
        # Extract only when at least one member is missing on disk.
        if not all(os.path.exists(name) for name in members):
            tf.extractall()
    finally:
        # BUG FIX: the original said ``tf.close`` without calling it, so the
        # archive handle was never closed.
        tf.close()
    return members
示例#6
0
def _get_checkpoint_dir():
    """Return the local directory holding the ohbm2018 model, downloading
    and unpacking the archive on first use.

    :returns: path to the checkpoint directory.
    :raises Exception: if the download ends before the announced size.
    """
    from appdirs import AppDirs
    dirs = AppDirs(appname="nimare", appauthor="neurostuff", version="1.0")
    checkpoint_dir = os.path.join(dirs.user_data_dir, "ohbm2018_model")
    if not os.path.exists(checkpoint_dir):
        LGR.info('Downloading the model (this is a one-off operation)...')
        url = "https://zenodo.org/record/1257721/files/ohbm2018_model.tar.xz?download=1"
        # Streaming, so we can iterate over the response.
        r = requests.get(url, stream=True)
        f = BytesIO()

        # Total size in bytes.
        total_size = int(r.headers.get('content-length', 0))
        block_size = 1024 * 1024
        wrote = 0
        # BUG FIX: ceil(a // b) is a no-op (// already floors); use true
        # division so the progress-bar total rounds *up*.
        for data in tqdm(r.iter_content(block_size), total=math.ceil(total_size / block_size),
                         unit='MB', unit_scale=True):
            wrote = wrote + len(data)
            f.write(data)
        if total_size != 0 and wrote != total_size:
            raise Exception("Download interrupted")

        f.seek(0)
        # BUG FIX: the original mixed %-style with str.format
        # ('...%s...'.format(x)), which logged a literal "%s"; use
        # logging's lazy %-argument substitution instead.
        LGR.info('Uncompressing the model to %s...', checkpoint_dir)
        # Don't shadow the ``tarfile`` module name, and close the archive.
        with TarFile(fileobj=LZMAFile(f), mode="r") as tar:
            tar.extractall(dirs.user_data_dir)
    return checkpoint_dir
示例#7
0
    def install_repo(self, repo):
        """Install a plugin repository from a known name, a git URL or a
        URL pointing to a ``.tar.gz`` archive.

        :param repo: known public repo name, git URL, or tarball URL.
        :returns: an error-message tuple on failure, otherwise the result
            of ``update_dynamic_plugins()``.
        """
        if repo in KNOWN_PUBLIC_REPOS:
            repo = KNOWN_PUBLIC_REPOS[repo]['path']  # replace it by the url
        git_path = which('git')

        if not git_path:
            return ('git command not found: You need to have git installed on '
                    'your system to be able to install git based plugins.', )

        # TODO: Update download path of plugin.
        if repo.endswith('tar.gz'):
            tar = TarFile(fileobj=urlopen(repo))
            try:
                tar.extractall(path=self.plugin_dir)
            finally:
                tar.close()  # don't leak the archive handle
            s = repo.split(':')[-1].split('/')[-2:]
            human_name = '/'.join(s)
            # BUG FIX: str.rstrip() strips a character *set*, not a suffix;
            # rstrip('.tar.gz') could eat trailing letters of the repo name.
            if human_name.endswith('.tar.gz'):
                human_name = human_name[:-len('.tar.gz')]
        else:
            human_name = human_name_for_git_url(repo)
            p = subprocess.Popen([git_path, 'clone', repo, human_name],
                                 cwd=self.plugin_dir,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            feedback = p.stdout.read().decode('utf-8')
            error_feedback = p.stderr.read().decode('utf-8')
            if p.wait():
                return "Could not load this plugin: \n\n%s\n\n---\n\n%s" % (
                    feedback, error_feedback),
        self.add_plugin_repo(human_name, repo)
        return self.update_dynamic_plugins()
示例#8
0
def analyze():
    """Handle an uploaded tar archive: save it, extract it, and render the
    analysis page.  The uploaded file is always removed afterwards.

    :returns: the rendered template on success, an alert snippet for a
        non-tar upload, a 500 response on error, or None for non-POST.
    """
    # One temp file per client address; computed outside try so cleanup in
    # ``finally`` always has a well-defined name.
    fn = 'temp/{}.tar'.format(md5(request.remote_addr.encode()).hexdigest())
    try:
        if request.method == 'POST':
            fp = request.files['file']
            fp.save(fn)

            if not is_tarfile(fn):
                return '<script>alert("Uploaded file is not \'tar\' file.");history.back(-1);</script>'

            # SECURITY NOTE: extracting an untrusted upload with extractall()
            # is vulnerable to path traversal ("../" member names); member
            # paths should be validated before extraction.
            tf = TarFile(fn)
            try:
                tf.extractall(fn.split('.')[0])
                names = tf.getnames()
            finally:
                tf.close()
            base_dir = fn.split('/')[1].split('.')[0]

            return render_template('analyze',
                                   path=base_dir,
                                   fn=base_dir,
                                   files=names)

    except Exception:
        return response('Error', 500)

    finally:
        # BUG FIX: the original *returned* from ``finally`` when os.remove
        # failed, silently replacing any successful response (and swallowing
        # any pending exception).  Best-effort cleanup must not alter the
        # function's result.
        try:
            os.remove(fn)
        except OSError:
            pass
示例#9
0
    def _prepare_dataset(cls):
        """Unpack the dataset tarball and synthesize rotated copies of every
        image for the 'left', 'up' and 'right' orientations.

        :returns: dict mapping orientation name -> list of image file names.
        """
        # BUG FIX: close the archive handle via a context manager (the
        # original never closed it).
        with TarFile(cls.dataset_tar_local) as tar:
            tar.extractall(cls.dataset_dir)

        # Get rid of non-image files
        for f in os.listdir(cls.dataset_dir):
            if f[-3:] != 'jpg':
                os.remove(os.path.join(cls.dataset_dir, f))

        # Originals are treated as the 'down' orientation.
        images = {
            'up': [],
            'down': os.listdir(cls.dataset_dir),
            'left': [],
            'right': []
        }

        # Each rotation of the originals produces one new orientation class.
        for rot_deg, orientation in zip((90, 180, 270),
                                        ('left', 'up', 'right')):
            for img_file in images['down']:
                img = cv2.imread(os.path.join(cls.dataset_dir, img_file))
                new_img_file = orientation + '_' + img_file
                cv2.imwrite(os.path.join(cls.dataset_dir, new_img_file),
                            rotate_image(img, rot_deg))
                images[orientation].append(new_img_file)

        return images
示例#10
0
def _extract_and_upload(archive: tarfile.TarFile, dry_run: bool) -> None:
    """Extract the client assets from *archive* into the working directory,
    drop demo folders, then upload everything (unless *dry_run*)."""
    archive.extractall(members=strip_member_components(archive, 1))
    # TODO(cyrille): Also drop Showcase demos.
    subprocess.run('rm -r *Demo', shell=True, check=True)
    assets = glob.glob('*', recursive=True)
    if not dry_run:
        swift_cmd = ('swift', 'upload', _OPEN_STACK_CONTAINER, '--skip-identical', *assets)
        subprocess.run(swift_cmd, check=True)
        aws_cmd = ('aws', 's3', 'cp', os.getcwd(), f's3://{_S3_BUCKET}/', '--recursive')
        subprocess.run(aws_cmd, check=True)
    else:
        logging.info('Client assets:\n%s', '\n'.join(assets))
示例#11
0
    def uncompress_groupby_to_df(self,
                                 result_tar,
                                 groupby_col_list,
                                 agg_list,
                                 where_terms_list,
                                 aggregate=False):
        """Unpack a tar-of-tars groupby result into a single DataFrame.

        :param result_tar: raw tar payload returned by the groupby workers
            (wrapped in StringIO, so presumably a str on Python 2 --
            TODO confirm).
        :param groupby_col_list: columns to group by in the final aggregate.
        :param agg_list: aggregation spec; each entry's third element is the
            output column name that gets sum-aggregated here.
        :param where_terms_list: not used inside this method --
            NOTE(review): looks kept only for call-signature parity; confirm.
        :param aggregate: when True, re-aggregate the concatenated shard
            results with a final groupby (sum only).
        :returns: a pandas DataFrame (empty if no shards were returned).
        :raises ValueError: if the tar payload cannot be extracted.
        """
        # uncompress result returned by the groupby and convert it to a Pandas DataFrame
        tmp_dir = None
        try:
            try:
                tar_file = TarFile(fileobj=StringIO(result_tar))
                tmp_dir = tempfile.mkdtemp(prefix='tar_dir_')
                tar_file.extractall(tmp_dir)
            except TarError:
                self.logger.exception("Could not create/extract tar.")
                raise ValueError(result_tar)
            # Drop references to the (potentially large) payload early.
            del result_tar
            del tar_file

            ct = None

            # now untar and aggregate the individual shard results
            for i, sub_tar in enumerate(glob.glob(os.path.join(tmp_dir, '*'))):
                new_dir = os.path.join(tmp_dir, 'bcolz_' + str(i))
                rm_file_or_dir(new_dir)
                with tarfile.open(sub_tar, mode='r') as tar_file:
                    tar_file.extractall(new_dir)
                # rm_file_or_dir(sub_tar)
                ctable_dir = glob.glob(os.path.join(new_dir, '*'))[0]
                new_ct = ctable(rootdir=ctable_dir, mode='a')
                # First shard becomes the accumulator; the rest are appended.
                if i == 0:
                    ct = new_ct
                else:
                    ct.append(new_ct)

            # aggregate by groupby parameters
            if ct is None:
                result_df = pd.DataFrame()
            elif aggregate:
                new_dir = os.path.join(tmp_dir, 'end_result')
                rm_file_or_dir(new_dir)
                # we can only sum now
                new_agg_list = [[x[2], 'sum', x[2]] for x in agg_list]
                result_ctable = ct.groupby(groupby_col_list,
                                           new_agg_list,
                                           rootdir=new_dir)
                result_df = result_ctable.todataframe()
            else:
                result_df = ct.todataframe()
        finally:
            # Always clean up the scratch directory, even on failure.
            rm_file_or_dir(tmp_dir)

        return result_df
示例#12
0
    def download(self, src, dest, extract_here=False):
        """Copy *src* out of the container and extract the resulting tar
        archive under *dest*.

        :param src: path inside the container to copy out.
        :param dest: local directory to extract into.
        :param extract_here: when True, strip the leading "<basename>/"
            component so members land directly inside *dest*.
        """
        client = connect()

        with SpooledTemporaryFile() as file:
            file.write(client.copy(self.container_id, src).read())
            file.seek(0)
            tfile = TarFile(fileobj=file)
            # BUG FIX: close the TarFile (the original leaked the handle).
            try:
                if extract_here:
                    base = len(os.path.basename(src)) + 1
                    for member in tfile.getmembers():
                        member.name = member.name[base:]
                tfile.extractall(path=dest)
            finally:
                tfile.close()
示例#13
0
    def install_repo(self, repo):
        """
        Install the repository from repo

        :param repo:
            The url, git url or path on disk of a repository. It can point to either a git repo or
             a .tar.gz of a plugin
        :returns:
            The path on disk where the repo has been installed on.
        :raises: :class:`~RepoException` if an error occured.
        """
        self.check_for_index_update()

        # try to find if we have something with that name in our index
        if repo in self[REPO_INDEX]:
            human_name = repo
            repo_url = next(iter(self[REPO_INDEX][repo].values()))['repo']
        else:
            # This is a repo url, make up a plugin definition for it
            # TODO read the definition if possible.
            human_name = human_name_for_git_url(repo)
            repo_url = repo

        git_path = which('git')
        if not git_path:
            raise RepoException(
                'git command not found: You need to have git installed on '
                'your system to be able to install git based plugins.', )

        # TODO: Update download path of plugin.
        if repo_url.endswith('tar.gz'):
            tar = TarFile(fileobj=urllib.urlopen(repo_url))
            try:
                tar.extractall(path=self.plugin_dir)
            finally:
                tar.close()  # don't leak the archive handle
            tail = '/'.join(repo_url.split(':')[-1].split('/')[-2:])
            # BUG FIX: str.rstrip() strips a character *set*, not a suffix;
            # rstrip('.tar.gz') could eat trailing letters of the name.
            if tail.endswith('.tar.gz'):
                tail = tail[:-len('.tar.gz')]
            human_name = human_name or tail
        else:
            human_name = human_name or human_name_for_git_url(repo_url)
            p = subprocess.Popen([git_path, 'clone', repo_url, human_name],
                                 cwd=self.plugin_dir,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            feedback = p.stdout.read().decode('utf-8')
            error_feedback = p.stderr.read().decode('utf-8')
            if p.wait():
                raise RepoException(
                    "Could not load this plugin: \n\n%s\n\n---\n\n%s" %
                    (feedback, error_feedback))

        self.add_plugin_repo(human_name, repo_url)
        return os.path.join(self.plugin_dir, human_name)
示例#14
0
    def ensure_template(self,
                        layer_stack,
                        connection=None):  # connection needs to be here
        """Create the base image for a container.

        Walks ``layer_stack`` (delta ids in write order), reusing the
        deepest snapshot chain already present, then builds each missing
        layer by cloning the previous snapshot, fetching the delta over
        HTTP and untarring it into the clone.

        :param layer_stack: list of sha256 deltas in the order they were
            written.
        :param connection: unused here, but required by the caller's
            signature convention (see inline comment).
        :returns: name of the snapshot representing the complete stack.
        """
        # find how many of the intermediate snapshots we already have
        last, success_count = self.find_furthest(layer_stack)
        logging.debug("Delta tree creating with head start: " +
                      str(success_count))

        # layer stack is a list of deltas in the order they were written in
        layers_so_far = '/'.join(layer_stack[:success_count])
        for layer in layer_stack[success_count:]:
            logging.debug("Creating layer: " + layer)

            # the / delimited list of sha256 deltas used to create this layer
            layers_so_far += ('/' if len(layers_so_far) != 0 else '') + layer

            # the next layer is a inherited (cloned) from a snapshot
            dest_fs = 'tf/layer-' + shortuuid.uuid(
            )  # need a uuid for namespace collisions
            call([
                'zfs', 'clone', '-o', 'recordsize=8k', '-o', 'compression=on',
                last.snapshot, dest_fs
            ],
                 stdout=DEVNULL)

            # fetch via http then untar a pretend file
            layer_http = requests.get('http://%s:1025/%s' %
                                      (self.location_ip, layer))
            with io.BytesIO(layer_http.content) as f:
                tar = TarFile(fileobj=f)
                tar.extractall('/' + dest_fs)

            # create the snapshot and mark it so we know what it represents
            call(['zfs', 'snapshot', '%s@final' % dest_fs], stdout=DEVNULL)
            call([
                'zfs', 'set', ':layer_stack=' + layers_so_far,
                '%s@final' % dest_fs
            ])

            # clean the mess up
            call(['zfs', 'unmount', dest_fs], stdout=DEVNULL)
            call(['rmdir', '/' + dest_fs])

            # let the delta tree know
            new_node = DtNode(dest_fs + "@final")
            last.deltas[layer] = new_node
            last = new_node

        return last.snapshot
示例#15
0
 def writer(self):
     """Yield a buffer whose written bytes are treated as a tarball.

     When the caller is done writing, the archive replaces the contents
     of ``self.path``.
     """
     buffer = BytesIO()
     yield buffer
     buffer.seek(0, 0)
     try:
         archive = TarFile(fileobj=buffer, mode="r")
         if self.path.exists():
             self.path.remove()
         self.path.createDirectory()
         archive.extractall(self.path.path)
     except:
         # Deliberately best-effort; this should really be logged:
         # https://clusterhq.atlassian.net/browse/FLOC-122
         pass
示例#16
0
 def writer(self):
     """Yield a buffer whose written bytes are treated as a tarball.

     When the caller is done writing, the archive replaces the contents
     of ``self.path``.
     """
     buffer = BytesIO()
     yield buffer
     buffer.seek(0, 0)
     try:
         archive = TarFile(fileobj=buffer, mode="r")
         if self.path.exists():
             self.path.remove()
         self.path.createDirectory()
         archive.extractall(self.path.path)
     except:
         # Deliberately best-effort; this should really be logged:
         # https://github.com/ClusterHQ/flocker/issues/122
         pass
示例#17
0
文件: memory.py 项目: verchol/flocker
 def writer(self):
     """Yield a buffer whose written bytes are treated as a tarball.

     When the caller is done writing, the archive replaces the contents
     of ``self.path``.
     """
     buffer = BytesIO()
     yield buffer
     buffer.seek(0, 0)
     try:
         archive = TarFile(fileobj=buffer, mode="r")
         if self.path.exists():
             self.path.remove()
         self.path.createDirectory()
         archive.extractall(self.path.path)
     except:
         # Deliberately best-effort; this should really be logged:
         # https://clusterhq.atlassian.net/browse/FLOC-122
         pass
示例#18
0
 def writer(self):
     """Yield a buffer whose written bytes are treated as a tarball.

     When the caller is done writing, the archive replaces the contents
     of ``self.path``.
     """
     buffer = BytesIO()
     yield buffer
     buffer.seek(0, 0)
     try:
         archive = TarFile(fileobj=buffer, mode="r")
         if self.path.exists():
             self.path.remove()
         self.path.createDirectory()
         archive.extractall(self.path.path)
     except:
         # Deliberately best-effort; this should really be logged:
         # https://github.com/ClusterHQ/flocker/issues/122
         pass
 def __handle_file(self, plugin_file):
     """Uncompress *plugin_file* into the system temp directory and delegate
     to ``__handle_dir``.

     Handles zip, tar.bz2 and tar.gz archives, falling back to manual
     decompression when the tarfile module cannot read the file directly.
     Returns None when decompression fails entirely.
     (Python 2 code -- note the print statements.)
     """
     # Uncompress the file.
     temp_dir = tempfile.gettempdir()
     if is_zipfile(plugin_file):
         compress_fd = zipfile.ZipFile(plugin_file, allowZip64=True)
         compress_fd.extractall(path=temp_dir)
     elif is_bz2file(plugin_file):
         #first check if we can handle as tar.bz2 (big chances)
         try:
             compress_fd = TarFile(name=plugin_file, mode="r:bz2")
             compress_fd.extractall(path=temp_dir)
         except CompressionError:
             print "Upz!, fail in compressed file handling, Retrying"
             # Fallback: decompress to a temp .tar first, then untar that.
             try:
                 compress_fd = bz2.BZ2File(plugin_file)
                 tmp_fd = tempfile.TemporaryFile(suffix=".tar",
                                                 prefix="ncmprs")
                 tmp_fd.file.write(compress_fd.read())
                 tmp_fd.file.flush()
                 tar_fd = TarFile.taropen(name=None, fileobj=tmp_fd)
                 tar_fd.extractall(path=temp_dir)
                 tar_fd.close()
                 tmp_fd.close()
             except:
                 print "Upz!, fail in compressed file handling, Again! :("
                 return None
     elif is_gzipfile(plugin_file):
         #first check if we can handle as tar.gz (big chances)
         try:
             compress_fd = TarFile(name=plugin_file, mode="r:gz")
             compress_fd.extractall(path=temp_dir)
         except CompressionError:
             print "Upz!, fail in compressed file handling, Retrying"
             # Fallback: gunzip to a temp .tar first, then untar that.
             try:
                 compress_fd = gzip.GzipFile(plugin_file)
                 tmp_fd = tempfile.TemporaryFile(suffix=".tar",
                                                 prefix="ncmprs")
                 tmp_fd.file.write(compress_fd.read())
                 tmp_fd.file.flush()
                 tar_fd = TarFile.taropen(name=None, fileobj=tmp_fd)
                 tar_fd.extractall(path=temp_dir)
                 tar_fd.close()
                 tmp_fd.close()
             except:
                 print "Upz!, fail in compressed file handling, Again! :("
                 return None
     return self.__handle_dir(temp_dir)
 def __handle_file(self, plugin_file):
     """Uncompress *plugin_file* into the system temp directory and delegate
     to ``__handle_dir``.

     Handles zip, tar.bz2 and tar.gz archives, falling back to manual
     decompression when the tarfile module cannot read the file directly.
     Returns None when decompression fails entirely.
     (Python 2 code -- note the print statements.)
     """
     # Uncompress the file.
     temp_dir = tempfile.gettempdir()
     if is_zipfile(plugin_file):
         compress_fd = zipfile.ZipFile(plugin_file, allowZip64=True)
         compress_fd.extractall(path=temp_dir)
     elif is_bz2file(plugin_file):
         #first check if we can handle as tar.bz2 (big chances)
         try:
             compress_fd = TarFile(name=plugin_file, mode="r:bz2")
             compress_fd.extractall(path=temp_dir)
         except CompressionError:
             print "Upz!, fail in compressed file handling, Retrying"
             # Fallback: decompress to a temp .tar first, then untar that.
             try:
                 compress_fd = bz2.BZ2File(plugin_file)
                 tmp_fd = tempfile.TemporaryFile(suffix=".tar", prefix="ncmprs")
                 tmp_fd.file.write(compress_fd.read())
                 tmp_fd.file.flush()
                 tar_fd = TarFile.taropen(name=None, fileobj=tmp_fd)
                 tar_fd.extractall(path=temp_dir)
                 tar_fd.close()
                 tmp_fd.close()
             except:
                 print "Upz!, fail in compressed file handling, Again! :("
                 return None
     elif is_gzipfile(plugin_file):
         #first check if we can handle as tar.gz (big chances)
         try:
             compress_fd = TarFile(name=plugin_file, mode="r:gz")
             compress_fd.extractall(path=temp_dir)
         except CompressionError:
             print "Upz!, fail in compressed file handling, Retrying"
             # Fallback: gunzip to a temp .tar first, then untar that.
             try:
                 compress_fd = gzip.GzipFile(plugin_file)
                 tmp_fd = tempfile.TemporaryFile(suffix=".tar", prefix="ncmprs")
                 tmp_fd.file.write(compress_fd.read())
                 tmp_fd.file.flush()
                 tar_fd = TarFile.taropen(name=None, fileobj=tmp_fd)
                 tar_fd.extractall(path=temp_dir)
                 tar_fd.close()
                 tmp_fd.close()
             except:
                 print "Upz!, fail in compressed file handling, Again! :("
                 return None
     return self.__handle_dir(temp_dir)
示例#21
0
    def install_repo(self, repo):
        """
        Install the repository from repo

        :param repo:
            The url, git url or path on disk of a repository. It can point to either a git repo or
             a .tar.gz of a plugin
        :returns:
            The path on disk where the repo has been installed on.
        :raises: :class:`~RepoException` if an error occured.
        """
        self.check_for_index_update()

        # try to find if we have something with that name in our index
        if repo in self[REPO_INDEX]:
            human_name = repo
            repo_url = next(iter(self[REPO_INDEX][repo].values()))['repo']
        else:
            # This is a repo url, make up a plugin definition for it
            # TODO read the definition if possible.
            human_name = human_name_for_git_url(repo)
            repo_url = repo

        git_path = which('git')
        if not git_path:
            raise RepoException('git command not found: You need to have git installed on '
                                'your system to be able to install git based plugins.', )

        # TODO: Update download path of plugin.
        if repo_url.endswith('tar.gz'):
            tar = TarFile(fileobj=urllib.urlopen(repo_url))
            try:
                tar.extractall(path=self.plugin_dir)
            finally:
                tar.close()  # don't leak the archive handle
            tail = '/'.join(repo_url.split(':')[-1].split('/')[-2:])
            # BUG FIX: str.rstrip() strips a character *set*, not a suffix;
            # rstrip('.tar.gz') could eat trailing letters of the name.
            if tail.endswith('.tar.gz'):
                tail = tail[:-len('.tar.gz')]
            human_name = human_name or tail
        else:
            human_name = human_name or human_name_for_git_url(repo_url)
            p = subprocess.Popen([git_path, 'clone', repo_url, human_name], cwd=self.plugin_dir, stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            feedback = p.stdout.read().decode('utf-8')
            error_feedback = p.stderr.read().decode('utf-8')
            if p.wait():
                raise RepoException("Could not load this plugin: \n\n%s\n\n---\n\n%s" % (feedback, error_feedback))

        self.add_plugin_repo(human_name, repo_url)
        return os.path.join(self.plugin_dir, human_name)
示例#22
0
    def set_up(self):
        """Unpack the repository fixture next to its ``.tar`` archive and
        start the matching serving daemon for the configured scheme.

        :raises NotImplementedError: for ``+git`` schemes (not supported).
        """
        if self._repo_path.is_dir():
            shutil.rmtree(self._repo_path)
        # BUG FIX: use a context manager so the archive handle is closed
        # (the original never closed it).
        with TarFile(self._repo_path.with_suffix('.tar')) as archiver:
            archiver.extractall(self._repo_path.parent)

        if self.test_parameters.work_directory.is_dir():
            shutil.rmtree(self.test_parameters.work_directory)

        if self._scheme.endswith('+git'):
            # TODO: We would start the daemon here
            raise NotImplementedError("Git protocol not supported")
        elif self._scheme.endswith('+svn'):
            command: List[str] = [
                'svnserve', '--root',
                str(self._repo_path), '-X', '--foreground'
            ]
            self._server = subprocess.Popen(command)
示例#23
0
 def update(self, name, version, target):
   """Fetch *name*/*version* from the update server and unpack it into
   *target*.

   Wipes *target* first.  Returns True on success, False on any failure
   (Python 2 code; errors are printed rather than raised).
   """
   shutil.rmtree(target)
   os.mkdir(target)
   try:
     # Authenticate with the admin password from the local config and
     # fetch the compressed tar payload over RPyC.
     updateServer=rpyc.connect(updateServerName,18862)
     validNames = config.readConfig("login.cfg")
     password = validNames['admin']['password']
     tar = updateServer.root.get(password, name, version)

     s = StringIO(decompress(tar))
     f = TarFile(mode='r', fileobj=s)

     f.extractall(target)
     print "updated",name
     return True
   except:
     # Best-effort: any failure (connection, auth, bad archive) is reported
     # and swallowed.
     traceback.print_exc()
     print "Failed to connet to the update server :("
     return False
示例#24
0
def extract_filesystem_bundle(docker_driver, container_id=None, image_name=None):
    """Export a container's (or image's) filesystem into a fresh temp dir.

    :param docker_driver: driver exposing ``get_docker_client()``.
    :param container_id: export this container when given.
    :param image_name: otherwise export this image (and untar its layers).
    :returns: path of the temporary directory holding the extracted tree.
    """
    temporary_dir = tempfile.mkdtemp()
    # Get and save the filesystem bundle.
    if container_id is not None:
        data = docker_driver.get_docker_client().export(container=container_id).data
        name = container_id
    else:
        data = docker_driver.get_docker_client().get_image(image=image_name).data
        name = image_name.replace('/', '_').replace(':', '_')
    tar_path = os.path.join(temporary_dir, name + ".tar")
    with open(tar_path, "wb") as file:
        file.write(data)
    # Untar the filesystem bundle.  BUG FIX: close the archive handle (the
    # original leaked it and shadowed the ``tarfile`` module name).
    tar = TarFile(tar_path)
    try:
        tar.extractall(temporary_dir)
    finally:
        tar.close()
    os.remove(tar_path)
    if image_name is not None:
        layers = _get_layers_from_manifest(temporary_dir)
        _untar_layers(temporary_dir, layers)
    # Return
    return temporary_dir
示例#25
0
    def train_x(self):
        '''Iterates over training data.

        Yields:
            data: A numpy array for each training video.
        '''
        for name in self.train_names():
            datafile = self.open(f'data/{name}.tar', 'rb')

            extraction_dir = self.cache_dir / 'unpacked'
            tar = TarFile(fileobj=datafile)
            tar.extractall(extraction_dir)

            # Member names sorted so frames come out in a deterministic order.
            data = sorted(x.name for x in tar)
            data = (f'{extraction_dir}/{name}' for name in data)
            data = (cv2.imread(name, cv2.IMREAD_GRAYSCALE) for name in data)
            data = np.stack(data)

            yield data
            # NOTE(review): resources are only released after the consumer
            # resumes the generator; abandoning it mid-iteration would leak
            # the open handles.
            tar.close()
            datafile.close()
    def test_something(self):
        """Extract the backup archive and verify its expected layout."""
        archive = TarFile("ressources/20200414-090526_archive.tar")
        archive.extractall(path="ressources")
        archive.close()

        # BUG FIX: Path.joinpath() *returns* a new path -- the original
        # discarded that result (``sql = Path(); sql.joinpath("ressources")``),
        # so the rglob calls searched the whole current directory instead of
        # the extracted "ressources" tree.
        sql = Path("ressources")

        for i in sql.rglob("logicaldoc.sql"):
            self.assertTrue((str(i)).__contains__("ressources/home/cibo/src/backup"))

        for i in sql.rglob("repository/index/"):
            self.assertTrue((str(i)).__contains__("community/repository/index"))

        for i in sql.rglob("repository/docs/"):
            self.assertTrue((str(i)).__contains__("community/repository/docs"))

        for i in sql.rglob("conf/context.properties"):
            self.assertTrue((str(i)).__contains__("conf/context"))
            ret = Path(i)
            self.assertTrue((str(ret.parent)).endswith("conf"))
示例#27
0
    def optional_extract(self, output, tarname):
        """Extract test repository data if missing or stale.

        The archive is unpacked into DATA_DIR whenever *output* does not
        exist yet or is older than the archive file.
        """

        tarname = get_test_file(tarname)

        stale = (
            not os.path.exists(output)
            or os.path.getmtime(output) < os.path.getmtime(tarname)
        )
        if stale:
            # Drop any outdated copy before unpacking the fresh one
            if os.path.exists(output):
                shutil.rmtree(output)

            archive = TarFile(tarname)
            archive.extractall(settings.DATA_DIR)
            archive.close()

            # Bump the directory mtime so the freshness check passes next time
            os.utime(output, None)
示例#28
0
    def optional_extract(self, output, tarname):
        """Extract test repository data if needed.

        The archive is unpacked into DATA_DIR whenever *output* does not
        exist yet or is older than the archive file; the output is then
        registered as an updated base repo either way.
        """
        tarname = get_test_file(tarname)

        stale = (
            not os.path.exists(output)
            or os.path.getmtime(output) < os.path.getmtime(tarname)
        )
        if stale:
            # Drop any outdated copy before unpacking the fresh one
            if os.path.exists(output):
                remove_tree(output)

            archive = TarFile(tarname)
            archive.extractall(settings.DATA_DIR)
            archive.close()

            # Bump the directory mtime so the freshness check passes next time
            os.utime(output, None)
        self.updated_base_repos.add(output)
示例#29
0
文件: utils.py 项目: z0x010/weblate
    def optional_extract(output, tarname):
        """Extract test repository data if missing or stale.

        The archive is unpacked into DATA_DIR whenever *output* does not
        exist yet or is older than the archive file.
        """

        tarname = get_test_file(tarname)

        stale = (
            not os.path.exists(output)
            or os.path.getmtime(output) < os.path.getmtime(tarname)
        )
        if stale:
            # Drop any outdated copy before unpacking the fresh one
            if os.path.exists(output):
                shutil.rmtree(output, onerror=remove_readonly)

            archive = TarFile(tarname)
            archive.extractall(settings.DATA_DIR)
            archive.close()

            # Bump the directory mtime so the freshness check passes next time
            os.utime(output, None)
def load_mailset(mailset):
    """Download and extract the test mailset archive if absent, then load it.

    The *mailset* argument is currently unused by this body; the medium
    tagged set is always fetched — presumably callers pass a set name,
    verify against caller.
    """
    import os
    from tarfile import TarFile
    from gzip import GzipFile
    mbox_root = os.path.join(os.environ['HOME'], 'mailsets')
    if not os.path.isdir(mbox_root):
        os.mkdir(mbox_root)

    if len(os.listdir(mbox_root)) == 0:
        # NOTE(review): verify=False disables TLS certificate checking —
        # confirm this is intended for the download host.
        response = requests.get(MEDIUM_TAGGED_URL, verify=False)
        mbox_archive_path = os.path.join(mbox_root, 'py-mediumtagged.tar.gz')
        # Binary mode: response.content is bytes; text mode 'w' fails on
        # Python 3 and corrupts data on Windows. Context manager closes
        # the handle even if write() raises.
        with open(mbox_archive_path, 'wb') as mbox_archive:
            mbox_archive.write(response.content)
        gzippedfile = GzipFile(filename=mbox_archive_path)
        tarfile = TarFile(fileobj=gzippedfile)
        tarfile.extractall(path=mbox_root)

    mail_service.reset()
    mail_service.load_mailset()

    return respond_json(None)
示例#31
0
def main():
    """Download a Neo4j distribution archive and unpack it into the
    current directory.

    Options: ``-h`` prints help, ``-v VERSION`` selects the enterprise
    archive for the current platform, ``-l URL`` downloads an explicit
    archive location.
    """
    is_windows = (name == 'nt')
    archive_url = WIN_URL if is_windows else DEFAULT_URL
    archive_name = path.split(urlparse(archive_url).path)[-1]

    try:
        opts, args = getopt(argv[1:], "hv:l:")
    except getopt.GetoptError as err:
        # NOTE(review): `getopt` is called as a function above, yet
        # `getopt.GetoptError` needs the module object — confirm both
        # the module and the function are imported at file top.
        print(str(err))
        print_help()
        exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print_help()
            exit()
        elif opt == '-v':
            if is_windows:
                archive_name = "neo4j-enterprise-%s-windows.zip" % arg
            else:
                archive_name = "neo4j-enterprise-%s-unix.tar.gz" % arg
            archive_url = "%s/%s" % (DIST, archive_name)
        elif opt == '-l':
            archive_url = arg
            archive_name = path.split(urlparse(archive_url).path)[-1]

    stdout.write("Downloading %s...\n" % archive_url)
    urlretrieve(archive_url, archive_name)

    if archive_name.endswith('.zip'):
        stdout.write("Unzipping %s...\n" % archive_name)
        zip_ref = ZipFile(archive_name, 'r')
        zip_ref.extractall(".")
        zip_ref.close()
    elif archive_name.endswith('.tar.gz'):
        stdout.write("Unarchiving %s...\n" % archive_name)
        # Plain TarFile(name, 'r') cannot read gzip compression; the
        # open() classmethod with an explicit 'r:gz' mode is required
        # for .tar.gz archives.
        tar_ref = TarFile.open(archive_name, 'r:gz')
        tar_ref.extractall(".")
        tar_ref.close()
示例#32
0
文件: setup.py 项目: jgraff/buildbot
    def run(self):
        """
        Interesting magic to get a source dist and running trial on it.

        NOTE: there is magic going on here! If you know a better way feel
              free to update it.

        Flow: empty out dist/, import ``setup`` with "sdist" appended to
        sys.argv (the import itself triggers the sdist build), then unpack
        the generated tarball into dist/sdist_test and run the test suite
        from the extracted tree. Python 2 syntax (print statement,
        ``except IndexError, ie``).
        """
        # Clean out dist/ — walk bottom-up so every directory is already
        # empty when os.rmdir reaches it
        if os.path.exists("dist"):
            for root, dirs, files in os.walk("dist", topdown=False):
                for name in files:
                    os.remove(os.path.join(root, name))
                for name in dirs:
                    os.rmdir(os.path.join(root, name))
        # Import setup making it as if we ran setup.py with the sdist arg
        sys.argv.append("sdist")
        import setup  # @Reimport @UnresolvedImport @UnusedImport

        try:
            # attempt to extract the sdist data
            from gzip import GzipFile
            from tarfile import TarFile

            # We open up the gzip as well as using the first item as the sdist
            # (assumes dist/ now holds exactly the freshly built .tar.gz)
            gz = GzipFile(os.path.join("dist", os.listdir("dist")[0]))
            tf = TarFile(fileobj=gz)
            # Make the output dir and generate the extract path
            os.mkdir(os.path.join("dist", "sdist_test"))
            ex_path = os.path.join("dist", "sdist_test", tf.getmembers()[0].name, "buildbot", "test")
            # Extract the data and run tests
            print "Extracting to %s" % ex_path
            tf.extractall(os.path.join("dist", "sdist_test"))
            print "Executing tests ..."
            self._run(os.path.normpath(os.path.abspath(ex_path)))
        except IndexError, ie:
            # We get called twice and the IndexError is OK
            pass
示例#33
0
    def run(self):
        """
        Interesting magic to get a source dist and running trial on it.

        NOTE: there is magic going on here! If you know a better way feel
              free to update it.

        Cleans dist/, imports ``setup`` with 'sdist' appended to sys.argv
        so the import builds the sdist, extracts the resulting tarball
        under dist/sdist_test and runs the tests from the extracted tree.
        Python 2 syntax (print statement, ``except IndexError, ie``).
        """
        # Clean out dist/ — bottom-up walk so directories are empty
        # before os.rmdir is called on them
        if os.path.exists('dist'):
            for root, dirs, files in os.walk('dist', topdown=False):
                for name in files:
                    os.remove(os.path.join(root, name))
                for name in dirs:
                    os.rmdir(os.path.join(root, name))
        # Import setup making it as if we ran setup.py with the sdist arg
        sys.argv.append('sdist')
        import setup  #@Reimport @UnresolvedImport @UnusedImport
        try:
            # attempt to extract the sdist data
            from gzip import GzipFile
            from tarfile import TarFile
            # We open up the gzip as well as using the first item as the sdist
            # (assumes dist/ now holds exactly the freshly built .tar.gz)
            gz = GzipFile(os.path.join('dist', os.listdir('dist')[0]))
            tf = TarFile(fileobj=gz)
            # Make the output dir and generate the extract path
            os.mkdir(os.path.join('dist', 'sdist_test'))
            ex_path = os.path.join('dist', 'sdist_test',
                                   tf.getmembers()[0].name, 'buildbot', 'test')
            # Extract the data and run tests
            print "Extracting to %s" % ex_path
            tf.extractall(os.path.join('dist', 'sdist_test'))
            print "Executing tests ..."
            self._run(os.path.normpath(os.path.abspath(ex_path)))
        except IndexError, ie:
            # We get called twice and the IndexError is OK
            pass
def prepare_tarball(url, app):
    ''' Prepare a tarball with app.json from the source URL.
    '''
    response = get(url, allow_redirects=True)
    decompressed = GzipFile(fileobj=StringIO(response.content))
    archive = TarFile(fileobj=decompressed)

    try:
        dirpath = mkdtemp(prefix='display-screen-')
        rootdir = join(dirpath, commonprefix(archive.getnames()))
        archive.extractall(dirpath)

        if not isdir(rootdir):
            raise Exception('"{0}" is not a directory'.format(rootdir))

        # Drop the generated app.json alongside the extracted sources
        with open(join(rootdir, 'app.json'), 'w') as out:
            json.dump(app, out)

        # Repack the tree (including app.json) as a fresh gzipped tarball
        tarpath = make_archive(dirpath, 'gztar', rootdir, '.')

    finally:
        rmtree(dirpath)

    return tarpath
def prepare_tarball(url, app):
    """ Prepare a tarball with app.json from the source URL.
    """
    resp = get(url, allow_redirects=True)
    gz_stream = GzipFile(fileobj=StringIO(resp.content))
    tar_archive = TarFile(fileobj=gz_stream)

    try:
        dirpath = mkdtemp(prefix="display-screen-")
        rootdir = join(dirpath, commonprefix(tar_archive.getnames()))
        tar_archive.extractall(dirpath)

        if not isdir(rootdir):
            raise Exception('"{0}" is not a directory'.format(rootdir))

        # Write the app manifest into the extracted tree
        with open(join(rootdir, "app.json"), "w") as out:
            json.dump(app, out)

        # Rebuild a gzipped tarball containing the tree plus app.json
        tarpath = make_archive(dirpath, "gztar", rootdir, ".")

    finally:
        rmtree(dirpath)

    return tarpath
示例#36
0
import os
from tarfile import TarFile

# First-run bootstrap: unpack the bundled package only when run.py is
# missing. The context manager closes the archive handle (the original
# leaked it).
if not os.path.exists('run.py'):
	with TarFile('package.tar') as fp:
		fp.extractall()

import run
示例#37
0
def _untar_layers(dir, layers):
    """Extract each layer's filesystem bundle into *dir*.

    After extraction, clean_up is called on the layer path minus its last
    10 characters — presumably stripping a "/layer.tar" suffix, TODO
    confirm against the caller.
    """
    for layer in layers:
        # Untar layer filesystem bundle. The local is no longer named
        # `tarfile` (which shadowed the module name) and the handle is
        # now closed even when extraction fails.
        layer_tar = TarFile(dir + "/" + layer)
        try:
            layer_tar.extractall(dir)
        finally:
            layer_tar.close()
        clean_up(dir + "/" + layer[:-10])
示例#38
0
def configure(request):
    '''
    Configure a course according to the gitmanager protocol.

    Validates the POST payload, replaces the stored files for the course,
    writes per-exercise configuration JSONs, and returns the exported
    exercise defaults as JSON.
    '''
    if request.method != "POST":
        return HttpResponse(status=405)

    if request.POST.get("publish"):
        return publish(request)

    if "exercises" not in request.POST or "course_id" not in request.POST:
        return HttpResponse("Missing exercises or course_id", status=400)

    try:
        exercises = json.loads(request.POST["exercises"])
    except (JSONDecodeError, ValueError) as e:
        LOGGER.info(f"Invalid exercises field: {e}")
        return HttpResponse(f"Invalid exercises field: {e}", status=400)

    course_id = request.POST["course_id"]
    try:
        access_write_check(request, course_id)
    except PermissionDenied as e:
        SecurityLog.reject(request, "CONFIGURE",
                           f"course_id={course_id}: {e}")
        raise
    except ValueError as e:
        LOGGER.info(f"Invalid course_id field: {e}")
        return HttpResponse(f"Invalid course_id field: {e}", status=400)

    SecurityLog.accept(request, "CONFIGURE", f"course_id={course_id}")

    # Wipe any previously stored course tree before writing the new one
    root_dir = Path(settings.COURSE_STORE)
    course_path = root_dir / course_id
    if course_path.exists():
        try:
            rmtree(course_path)
        except OSError:
            LOGGER.exception("Failed to remove old stored course files")
            return HttpResponse("Failed to remove old stored course files",
                                status=500)

    course_files_path = course_path / EXTERNAL_FILES_DIR
    course_exercises_path = course_path / EXTERNAL_EXERCISES_DIR
    version_id_path = root_dir / (course_id + ".version")
    course_files_path.mkdir(parents=True, exist_ok=True)
    course_exercises_path.mkdir(parents=True, exist_ok=True)

    if "files" in request.FILES:
        tar_file = request.FILES["files"].file
        # NOTE(review): extractall on an uploaded tar allows path
        # traversal with a hostile archive — confirm uploads come only
        # from trusted gitmanager instances.
        tarh = TarFile(fileobj=tar_file)
        tarh.extractall(course_files_path)

    course_config = {
        "name": course_id,
        "exercises": [ex["key"] for ex in exercises],
        "exercise_loader": "access.config._ext_exercise_loader",
    }

    try:
        with open(course_path / "index.json", "w") as f:
            json.dump(course_config, f)

        for info in exercises:
            with open(course_exercises_path / (info["key"] + ".json"),
                      "w") as f:
                json.dump(info["config"], f)
    except OSError as e:
        LOGGER.exception("Failed to dump configuration JSONs to files")
        # f-prefix was missing here: the literal "{e}" was sent to clients
        return HttpResponse(f"Failed to dump configuration JSONs to files: {e}",
                            status=500)

    if "version_id" in request.POST:
        try:
            with open(version_id_path, "w") as f:
                f.write(request.POST["version_id"])
        except OSError as e:
            LOGGER.exception("Failed to write version id file")
            # f-prefix was missing here as well
            return HttpResponse(f"Failed to write version id file: {e}",
                                status=500)
    elif version_id_path.exists():
        try:
            rm_path(version_id_path)
        except OSError as e:
            LOGGER.exception("Failed to remove version id file")
            # f-prefix was missing here as well
            return HttpResponse(f"Failed to remove version id file: {e}",
                                status=500)

    course_config = config._course_root_from_root_dir(course_id, root_dir)

    # Export each exercise's defaults; only exercises carrying a config
    # are resolved through the course root
    defaults = {}
    for info in exercises:
        of = info["spec"]
        if info.get("config"):
            of["config"] = info["key"] + ".json"
            course, exercise = config.exercise_entry(course_config,
                                                     info["key"], "_root")
            of = export.exercise(request, course, exercise, of)
        defaults[of["key"]] = of

    return JsonResponse(defaults)
示例#39
0
#!/usr/bin/env python
# Extract a tar archive streamed on standard input into the current
# working directory.
from tarfile import TarFile
from sys import stdin
# NOTE(review): on Python 3, sys.stdin is a text stream; reading a tar
# normally requires the binary buffer (sys.stdin.buffer) — confirm the
# targeted Python version.
tar = TarFile(fileobj=stdin, mode="r")
tar.extractall()
示例#40
0
def untar(archive, path):
	"""Extract the uncompressed tar *archive* into directory *path*.

	The archive handle is now closed via a context manager (the
	original leaked it).
	"""
	from tarfile import TarFile
	with TarFile(archive, "r") as t:
		t.extractall(path)
示例#41
0
def untar(archive, path):
    """Extract the uncompressed tar *archive* into directory *path*.

    The archive handle is now closed via a context manager (the
    original leaked it).
    """
    from tarfile import TarFile
    with TarFile(archive, "r") as t:
        t.extractall(path)
示例#42
0
def tar_extract_all(file, target_dir):
    """Extract every member of the uncompressed tar archive *file* into
    *target_dir*.

    The handle is now closed even when extraction fails (the original
    leaked it). The parameter name `file` shadows the Python 2 builtin
    but is kept for caller compatibility.
    """
    with TarFile(file, 'r') as tar:
        tar.extractall(target_dir)