Example #1
    def _do_update(self, statepath: str, logger: Logger) -> bool:
        old_head = get_subprocess_output(['git', 'rev-parse', 'HEAD'],
                                         cwd=statepath,
                                         logger=logger).strip()

        run_subprocess([
            'timeout',
            str(self.fetch_timeout), 'git', 'fetch', '--progress', '--depth=1'
        ],
                       cwd=statepath,
                       logger=logger)
        run_subprocess(
            ['git', 'checkout'], cwd=statepath, logger=logger
        )  # needed for reset to not fail on changed sparse checkout
        self._setup_sparse_checkout(statepath, logger)
        run_subprocess(['git', 'reset', '--hard', 'origin/' + self.branch],
                       cwd=statepath,
                       logger=logger)
        run_subprocess(['git', 'reflog', 'expire', '--expire=0', '--all'],
                       cwd=statepath,
                       logger=logger)
        run_subprocess(['git', 'prune'], cwd=statepath, logger=logger)

        new_head = get_subprocess_output(['git', 'rev-parse', 'HEAD'],
                                         cwd=statepath,
                                         logger=logger).strip()

        if new_head == old_head:
            logger.log('HEAD has not changed: {}'.format(new_head))
            return False

        logger.log('HEAD was updated from {} to {}'.format(old_head, new_head))
        return True
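
The helpers used throughout these snippets (run_subprocess, get_subprocess_output) are not shown on this page. A minimal sketch of what they might look like, assuming they are thin wrappers around Python's subprocess module that log the command before running it (the real project's helpers may add retries, error handling or output prefixing):

import subprocess
from typing import List, Optional


def run_subprocess(command: List[str], logger: 'Logger', cwd: Optional[str] = None) -> None:
    # Log the command, run it, and fail loudly on a non-zero exit status.
    logger.log('running "{}"'.format(' '.join(command)))
    subprocess.run(command, cwd=cwd, check=True)


def get_subprocess_output(command: List[str], logger: 'Logger', cwd: Optional[str] = None) -> str:
    # Same as above, but capture and return the command's stdout as text.
    logger.log('running "{}"'.format(' '.join(command)))
    return subprocess.run(command, cwd=cwd, check=True,
                          stdout=subprocess.PIPE, encoding='utf-8').stdout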
Example #2
    def fetch(self,
              statepath: str,
              update: bool = True,
              logger: Logger = NoopLogger()) -> bool:
        if os.path.exists(statepath) and not update:
            logger.log('no update requested, skipping')
            return False

        args = [
            '--info=stats2',
            '--archive',
            '--compress',
            '--delete',
            '--delete-excluded',
            '--safe-links',
        ]

        if self.fetch_timeout is not None:
            args += ['--timeout', str(self.fetch_timeout)]

        if self.rsync_include is not None:
            args += ['--include', self.rsync_include]

        if self.rsync_exclude is not None:
            args += ['--exclude', self.rsync_exclude]

        run_subprocess(['rsync'] + args + [self.url, statepath], logger)

        return True
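
The Logger type that appears in every signature (and NoopLogger, used as the default argument above) is likewise only referenced here. A minimal sketch, assuming the interface is a single log() method and that NoopLogger simply discards messages:

class Logger:
    # Base interface assumed by the fetchers: one method taking a message string.
    def log(self, message: str) -> None:
        raise NotImplementedError


class NoopLogger(Logger):
    # Default logger: silently drops everything.
    def log(self, message: str) -> None:
        pass


class PrintLogger(Logger):
    # Hypothetical convenience implementation for interactive use.
    def log(self, message: str) -> None:
        print(message)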
Example #3
    def _do_fetch(self, statepath: str, logger: Logger) -> bool:
        run_subprocess([
            'timeout',
            str(self.fetch_timeout), 'svn', 'checkout', self.url, statepath
        ],
                       logger=logger)

        return True
Example #4
    def _do_fetch(self, statepath: str, logger: Logger) -> bool:
        run_subprocess([
            'timeout',
            str(self.fetch_timeout), 'git', 'clone', '--progress',
            '--no-checkout', '--depth=1', '--branch', self.branch, self.url,
            statepath
        ],
                       logger=logger)
        self._setup_sparse_checkout(statepath, logger)
        run_subprocess(['git', 'checkout'], cwd=statepath, logger=logger)

        return True
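
The _do_fetch/_do_update pair in these git and svn examples suggests a template-method style driver that clones on the first run and updates afterwards, much like the rsync fetch() method in Example #2. A hypothetical sketch of such a driver, reusing the Logger sketch above (the upstream project's actual dispatch logic may differ):

import os


class HypotheticalVcsFetcher:
    # Illustrative only; a concrete subclass would provide the _do_fetch and
    # _do_update methods shown in the examples on this page.

    def fetch(self, statepath: str, update: bool = True,
              logger: Logger = NoopLogger()) -> bool:
        if not os.path.exists(statepath):
            # First run: create the state directory from scratch.
            return self._do_fetch(statepath, logger)
        if not update:
            logger.log('no update requested, skipping')
            return False
        # State already exists: refresh it in place.
        return self._do_update(statepath, logger)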
Example #5
    def _setup_sparse_checkout(self, statepath: str, logger: Logger) -> None:
        sparse_checkout_path = os.path.join(statepath, '.git', 'info',
                                            'sparse-checkout')

        # We always enable sparse checkout, as it's harder to
        # properly disable sparse checkout and restore all files
        # than to leave it enabled with all files whitelisted
        #
        # See https://stackoverflow.com/questions/36190800/how-to-disable-sparse-checkout-after-enabled/36195275
        run_subprocess(['git', 'config', 'core.sparsecheckout', 'true'],
                       cwd=statepath,
                       logger=logger)
        with open(sparse_checkout_path, 'w') as sparse_checkout_file:
            if self.sparse_checkout:
                for item in self.sparse_checkout:
                    print(item, file=sparse_checkout_file)
            else:
                print('/*', file=sparse_checkout_file)
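
For illustration, the effect of the writer above can be reproduced standalone: with a hypothetical sparse_checkout of ['/README.md', '/packages/'] the file ends up containing those two patterns, one per line, while an empty or unset sparse_checkout yields a single '/*' line that whitelists everything:

# Standalone illustration of the file _setup_sparse_checkout writes.
patterns = ['/README.md', '/packages/']  # hypothetical configuration value
with open('sparse-checkout-demo', 'w') as sparse_checkout_file:
    # '/*' matches every path when no patterns are configured.
    for item in patterns or ['/*']:
        print(item, file=sparse_checkout_file)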
Example #6
    def _do_update(self, statepath: str, logger: Logger) -> bool:
        old_rev = get_subprocess_output(
            ['svn', 'info', '--show-item', 'revision', statepath],
            logger=logger).strip()

        run_subprocess(
            ['timeout',
             str(self.fetch_timeout), 'svn', 'up', statepath],
            logger=logger)

        new_rev = get_subprocess_output(
            ['svn', 'info', '--show-item', 'revision', statepath],
            logger=logger).strip()

        if new_rev == old_rev:
            logger.log('Revision has not changed: {}'.format(new_rev))
            return False

        logger.log('Revision was updated from {} to {}'.format(
            old_rev, new_rev))
        return True
Example #7
    def _do_fetch(self, statedir: AtomicDir, persdata: PersistentData,
                  logger: Logger) -> bool:
        tarpath = os.path.join(statedir.get_path(), '.temporary.tar')

        headers = {}

        if persdata.get('last-modified'):
            headers['if-modified-since'] = persdata.get('last-modified')
            logger.log('using if-modified-since: {}'.format(
                headers['if-modified-since']))

        logger.log('fetching {}'.format(self.url))

        try:
            with open(tarpath, 'wb') as tarfile:
                response = save_http_stream(self.url,
                                            tarfile,
                                            headers=headers,
                                            timeout=self.fetch_timeout)
        except NotModifiedException:
            logger.log('got 304 not modified')
            return False

        # XXX: may be unportable; FreeBSD tar detects the compression type
        # automatically, which may not be the case on Linux
        # XXX: this extracts tarball permissions, which is not desirable and may
        # produce non-readable files and dirs (blackarch). GNU tar has --mode, BSD tar
        # lacks it. We should probably require GNU tar, and handle the binary name,
        # which may differ on BSD.
        run_subprocess(
            ['tar', '-x', '-z', '-f', tarpath, '-C',
             statedir.get_path()], logger)
        os.remove(tarpath)

        if response.headers.get('last-modified'):
            persdata['last-modified'] = response.headers['last-modified']
            logger.log('storing last-modified: {}'.format(
                persdata['last-modified']))

        return True
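
save_http_stream and NotModifiedException are also used without being defined on this page. A minimal sketch under the assumption that save_http_stream streams the response body into the given file object, returns the response so the caller can read its headers, and raises NotModifiedException on a 304 answer (built here on the requests library; the real helper may use a different HTTP client):

import requests


class NotModifiedException(Exception):
    # Raised when the server answers 304 Not Modified.
    pass


def save_http_stream(url, fileobj, headers=None, timeout=None):
    # Stream the body into fileobj and hand the response back to the caller.
    response = requests.get(url, headers=headers, timeout=timeout, stream=True)
    if response.status_code == 304:
        raise NotModifiedException()
    response.raise_for_status()
    for chunk in response.iter_content(chunk_size=65536):
        fileobj.write(chunk)
    return response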
Example #8
    def _do_fetch(self, statedir: AtomicDir, persdata: PersistentData,
                  logger: Logger) -> bool:
        tarpath = os.path.join(statedir.get_path(), '.temporary.tar')

        headers = {}

        if persdata.get('last-modified'):
            headers['if-modified-since'] = persdata.get('last-modified')
            logger.log('using if-modified-since: {}'.format(
                headers['if-modified-since']))

        logger.log('fetching {}'.format(self.url))

        try:
            with open(tarpath, 'wb') as tarfile:
                response = save_http_stream(self.url,
                                            tarfile,
                                            headers=headers,
                                            timeout=self.fetch_timeout)
        except NotModifiedException:
            logger.log('got 304 not modified')
            return False

        # XXX: may be unportable; FreeBSD tar detects the compression type
        # automatically, which may not be the case on Linux
        run_subprocess(
            ['tar', '-x', '-z', '-f', tarpath, '-C',
             statedir.get_path()], logger)
        os.remove(tarpath)

        if response.headers.get('last-modified'):
            persdata['last-modified'] = response.headers['last-modified']
            logger.log('storing last-modified: {}'.format(
                persdata['last-modified']))

        return True
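
AtomicDir and PersistentData are likewise only used here. Judging from the calls above, AtomicDir exposes a working directory via get_path() and PersistentData behaves like a dict whose contents survive between runs; a purely illustrative sketch under those assumptions:

import json
import os
import tempfile


class AtomicDir:
    # Sketch: work in a temporary directory, rename it into place on success.

    def __init__(self, target_path: str) -> None:
        self._target_path = target_path
        self._temp_path = tempfile.mkdtemp(dir=os.path.dirname(target_path) or '.')

    def get_path(self) -> str:
        return self._temp_path

    def commit(self) -> None:
        # Sketch only: assumes the final path does not exist yet.
        os.rename(self._temp_path, self._target_path)


class PersistentData(dict):
    # Sketch: dict-like state loaded from and saved to a JSON file.

    def __init__(self, path: str) -> None:
        self._path = path
        if os.path.exists(path):
            with open(path) as statefile:
                super().__init__(json.load(statefile))

    def save(self) -> None:
        with open(self._path, 'w') as statefile:
            json.dump(dict(self), statefile)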