def test_changes_get_chga_oscrc(self):
    '''Test if getting changesauthor from .oscrc works'''
    # Remember the previous value so a failing call or assertion
    # cannot leak VC_MAILADDR into subsequent tests.
    saved = os.environ.get("VC_MAILADDR")
    os.environ["VC_MAILADDR"] = '*****@*****.**'
    try:
        chg = Changes()
        author = chg.get_changesauthor(self.cli)
        self.assertEqual(author, '*****@*****.**')
    finally:
        # Restore the original value (original code always reset to '').
        os.environ["VC_MAILADDR"] = '' if saved is None else saved
 def test_changes_get_chga_args(self):
     '''Test if getting changesauthor from cli args works'''
     # Work on a copy so the shared cli fixture stays untouched.
     cli_args = copy.copy(self.cli)
     cli_args.changesauthor = '*****@*****.**'
     result = Changes().get_changesauthor(cli_args)
     self.assertEqual(result, '*****@*****.**')
Example #3
0
    def __init__(self, args, task):
        """Store cli args and owning task, and set up per-checkout state."""
        # Placeholders filled in by later fetch/prepare steps.
        # arch_dir is the directory used for the archive,
        # e.g. myproject-2.0
        self.arch_dir = self.repocachedir = None
        self.clone_dir = self.lock_file = None
        self.basename = self.repodir = None

        # mandatory arguments
        self.args, self.task = args, task
        self.url = args.url

        # optional arguments
        self.revision = args.revision

        # helper objects required later on
        self.helpers = Helpers()
        if self.args.changesgenerate:
            self.changes = Changes()

        self._final_rename_needed = False
        self._calc_repocachedir()

        # proxy support
        self.httpproxy = self.httpsproxy = self.noproxy = None
        self._calc_proxies()
 def test_changes_get_chga_args(self):
     '''Test if getting changesauthor from cli args works'''
     # Copy the fixture so the author override does not leak
     # into other tests.
     local_cli = copy.copy(self.cli)
     local_cli.changesauthor = '*****@*****.**'
     self.assertEqual(
         Changes().get_changesauthor(local_cli),
         '*****@*****.**')
Example #5
0
 def __init__(self, args):
     """Set up empty task bookkeeping for the given cli arguments."""
     self.args = args
     self.task_list = []
     self.cleanup_dirs = []
     self.scm_object = None
     self.data_map = None
     self.helpers = Helpers()
     self.changes = Changes()
Example #6
0
 def test_changes_get_chga_default(self):
     '''Test if getting changesauthor from .oscrc'''
     home = os.environ['HOME']
     os.environ['HOME'] = '/nir/va/na'
     try:
         chg = Changes()
         author = chg.get_changesauthor(self.cli)
     finally:
         # Restore HOME even if get_changesauthor raises, so a
         # failure here cannot break unrelated tests.
         os.environ['HOME'] = home
     self.assertEqual(author, '*****@*****.**')
Example #7
0
 def test_changes_get_chga_oscrc(self):
     '''Test if getting changesauthor from .oscrc works'''
     tc_name = inspect.stack()[0][3]
     home = os.environ['HOME']
     os.environ['HOME'] = os.path.join(self.fixtures_dir, tc_name)
     try:
         chg = Changes()
         author = chg.get_changesauthor(self.cli)
     finally:
         # Restore HOME even on failure so later tests see the
         # real home directory again.
         os.environ['HOME'] = home
     self.assertEqual(author, '*****@*****.**')
Example #8
0
    def __init__(self, args, task):
        """Initialize SCM state from parsed cli arguments.

        :param args: parsed command line arguments (provides url, revision,
                     user/keyring_passphrase, changesgenerate, ...)
        :param task: the task object owning this SCM instance
        :raises SystemExit: when keyring credentials were requested but the
                            keyrings.alt.file module could not be imported
        """
        # default settings
        # arch_dir - Directory which is used for the archive
        # e.g. myproject-2.0
        self.arch_dir = None
        self.repocachedir = None
        self.clone_dir = None
        self.lock_file = None
        self.basename = None
        self.repodir = None
        # credentials resolved from the keyring (if any)
        self.user = None
        self.password = None
        self._parent_tag = None
        self._backup_gnupghome = None

        # mandatory arguments
        self.args = args
        self.task = task
        self.url = args.url

        # optional arguments
        self.revision = args.revision
        # Credentials are only looked up when both a user and a keyring
        # passphrase were passed on the command line.
        if args.user and args.keyring_passphrase:
            # keyring_import_error presumably is a module-level flag set at
            # import time when keyrings.alt.file failed to import -- TODO
            # confirm against the file header.
            if keyring_import_error == 1:
                raise SystemExit('Error while importing keyrings.alt.file but '
                                 '"--user" and "--keyring_passphrase" are set.'
                                 ' Please install keyrings.alt.file!')
            # Point the keyring backend at the system-wide service config.
            os.environ['XDG_DATA_HOME'] = '/etc/obs/services/tar_scm.d'
            _kr = keyrings.alt.file.EncryptedKeyring()
            _kr.keyring_key = args.keyring_passphrase
            try:
                self.password = _kr.get_password(self.url, args.user)
                if not self.password:
                    raise Exception(
                        'No user {u} in keyring for service {s}'.format(
                            u=args.user, s=self.url))
            except AssertionError:
                # The keyring backend signals a bad passphrase via
                # AssertionError; translate it into a readable error.
                raise Exception('Wrong keyring passphrase')
            self.user = args.user

        # preparation of required attributes
        self.helpers = Helpers()
        if self.args.changesgenerate:
            self.changes = Changes()

        self._calc_repocachedir()
        self._final_rename_needed = False

        # proxy support
        self.httpproxy = None
        self.httpsproxy = None
        self.noproxy = None
        self._calc_proxies()

        # GPG settings are only prepared when maintainer key checking
        # was requested on the command line.
        if self.args.maintainers_asc:
            self._prepare_gpg_settings()
 def test_changes_get_chga_default(self):
     '''Test if getting default changesauthor if running inside OBS'''
     daemon = os.environ.get('OBS_SERVICE_DAEMON')
     home = os.environ['HOME']
     os.environ['OBS_SERVICE_DAEMON'] = "1"
     os.environ['HOME'] = '/nir/va/na'
     try:
         chg = Changes()
         author = chg.get_changesauthor(self.cli)
     finally:
         # Always restore the environment, even when the call raises,
         # so this test cannot poison the environment of later tests.
         os.environ['HOME'] = home
         os.environ['OBS_SERVICE_DAEMON'] = "0" if daemon is None else daemon
     self.assertEqual(author, 'obs-service-tar-scm@invalid')
Example #10
0
    def __init__(self, args, task):
        """Keep cli args/task and initialize per-checkout attributes.

        arch_dir is the directory which is used for the archive,
        e.g. myproject-2.0
        """
        # placeholders populated during fetch/prepare
        self.arch_dir = None
        self.repocachedir = None
        self.clone_dir = None
        self.lock_file = None
        self.basename = None
        self.repodir = None

        # mandatory arguments
        self.args = args
        self.task = task
        self.url = args.url

        # optional arguments
        self.revision = args.revision

        # helper objects needed by later processing steps
        self.helpers = Helpers()
        if self.args.changesgenerate:
            self.changes = Changes()

        self._final_rename_needed = False
        self._calc_repocachedir()
Example #11
0
 def __init__(self, args):
     """Initialize the task runner with empty bookkeeping state."""
     self.args = args
     self.helpers = Helpers()
     self.changes = Changes()
     self.task_list = []
     self.cleanup_dirs = []
     self.scm_object = None
     self.data_map = None
Example #12
0
class Tasks():
    '''
    Class to create a task list for formats which can contain more then one scm
    job like snapcraft or appimage
    '''
    def __init__(self, args):
        self.task_list = []
        self.cleanup_dirs = []
        self.helpers = Helpers()
        self.changes = Changes()
        self.scm_object = None
        self.data_map = None
        self.args = args

    def cleanup(self):
        """Cleaning temporary directories."""
        if self.args.skip_cleanup:
            logging.debug("Skipping cleanup")
            return

        logging.debug("Cleaning: %s", ' '.join(self.cleanup_dirs))

        for dirname in self.cleanup_dirs:
            if not os.path.exists(dirname):
                continue
            shutil.rmtree(dirname)
        self.cleanup_dirs = []
        # Unlock to prevent dead lock in cachedir if exception
        # gets raised
        if self.scm_object:
            self.scm_object.unlock_cache()
            # calls the corresponding cleanup routine
            self.scm_object.cleanup()

    def generate_list(self):
        '''
        Generate list of scm jobs from appimage.yml, snapcraft.yaml or a single
        job from cli arguments.
        '''
        args = self.args
        scms = ['git', 'tar', 'svn', 'bzr', 'hg']

        if args.appimage:
            # we read the SCM config from appimage.yml
            # (context manager closes the file even if yaml parsing raises)
            with open('appimage.yml') as filehandle:
                self.data_map = yaml.safe_load(filehandle)
            args.use_obs_scm = True
            build_scms = ()
            try:
                build_scms = self.data_map['build'].keys()
            except TypeError:
                pass
            # run for each scm an own task
            for scm in scms:
                if scm not in build_scms:
                    continue
                for url in self.data_map['build'][scm]:
                    args.url = url
                    args.scm = scm
                    self.task_list.append(copy.copy(args))

        elif args.snapcraft:
            # we read the SCM config from snapcraft.yaml instead
            # getting it via parameters
            with open('snapcraft.yaml') as filehandle:
                self.data_map = yaml.safe_load(filehandle)
            args.use_obs_scm = True
            # run for each part an own task
            for part in self.data_map['parts'].keys():
                args.filename = part
                if 'source-type' not in self.data_map['parts'][part].keys():
                    continue
                pep8_1 = self.data_map['parts'][part]['source-type']
                if pep8_1 not in scms:
                    continue
                # avoid conflicts with files
                args.clone_prefix = "_obs_"
                args.url = self.data_map['parts'][part]['source']
                self.data_map['parts'][part]['source'] = part
                args.scm = self.data_map['parts'][part]['source-type']
                del self.data_map['parts'][part]['source-type']
                self.task_list.append(copy.copy(args))

        else:
            self.task_list.append(args)

    def process_list(self):
        '''
        process tasks from the task_list
        '''
        for task in self.task_list:
            self.process_single_task(task)

    def finalize(self):
        '''
        final steps after processing task list
        '''
        args = self.args
        if args.snapcraft:
            # write the new snapcraft.yaml file
            # we prefix our own here to be sure to not overwrite user files,
            # if he is using us in "disabled" mode
            new_file = args.outdir + '/_service:snapcraft:snapcraft.yaml'
            with open(new_file, 'w') as outfile:
                outfile.write(yaml.dump(self.data_map,
                                        default_flow_style=False))

    def process_single_task(self, args):
        '''
        do the work for a single task
        '''
        self.args = args

        logging.basicConfig(format="%(message)s", stream=sys.stderr,
                            level=logging.INFO)
        if args.verbose:
            logging.getLogger().setLevel(logging.DEBUG)

        # force cleaning of our workspace on exit
        # NOTE(review): registered once per task; cleanup() tolerates
        # repeated invocation because cleanup_dirs is reset each run.
        atexit.register(self.cleanup)

        scm2class = {
            'git': 'Git',
            'bzr': 'Bzr',
            'hg':  'Hg',
            'svn': 'Svn',
            'tar': 'Tar',
        }

        # create objects for TarSCM.<scm> and TarSCM.helpers
        try:
            scm_class = getattr(TarSCM.scm, scm2class[args.scm])
        except (KeyError, AttributeError):
            # unknown --scm value (KeyError from the map, AttributeError
            # from getattr); keep the original user-facing message
            raise OptionsError("Please specify valid --scm=... options")

        # self.scm_object is need to unlock cache in cleanup
        # if exception occurs
        self.scm_object = scm_object = scm_class(args, self)

        try:
            scm_object.check_scm()
        except OSError:
            print("Please install '%s'" % scm_object.scm)
            sys.exit(1)

        scm_object.fetch_upstream()

        if args.filename:
            dstname = basename = args.filename
        else:
            dstname = basename = os.path.basename(scm_object.clone_dir)

        version = self.get_version()
        changesversion = version
        # do not append the version when only generating an obscpio /
        # snapcraft / appimage artifact
        if version and not sys.argv[0].endswith("/tar") \
           and not sys.argv[0].endswith("/snapcraft") \
           and not sys.argv[0].endswith("/appimage"):
            dstname += '-' + version

        logging.debug("DST: %s", dstname)

        detected_changes = scm_object.detect_changes()

        scm_object.prep_tree_for_archive(args.subdir, args.outdir,
                                         dstname=dstname)
        self.cleanup_dirs.append(scm_object.arch_dir)

        if args.use_obs_scm:
            arch = TarSCM.archive.ObsCpio()
        else:
            arch = TarSCM.archive.Tar()

        arch.extract_from_archive(scm_object.arch_dir, args.extract,
                                  args.outdir)

        arch.create_archive(scm_object,
                            basename=basename,
                            dstname=dstname,
                            version=version,
                            cli=args)

        if detected_changes:
            changesauthor = self.changes.get_changesauthor(args)

            logging.debug("AUTHOR: %s", changesauthor)

            if not version:
                args.version = "_auto_"
                changesversion = self.get_version()

            for filename in glob.glob('*.changes'):
                new_changes_file = os.path.join(args.outdir, filename)
                shutil.copy(filename, new_changes_file)
                self.changes.write_changes(new_changes_file,
                                           detected_changes['lines'],
                                           changesversion, changesauthor)
            self.changes.write_changes_revision(args.url, args.outdir,
                                                detected_changes['revision'])

        scm_object.finalize()

    def get_version(self):
        '''
        Generate final version number by detecting version from scm if not
        given as cli option and applying versionrewrite_pattern and
        versionprefix if given as cli option
        '''
        version = self.args.version
        if version == '_none_':
            return ''
        if version == '_auto_' or self.args.versionformat:
            version = self.detect_version()
        if self.args.versionrewrite_pattern:
            regex = re.compile(self.args.versionrewrite_pattern)
            version = regex.sub(self.args.versionrewrite_replacement, version)
        if self.args.versionprefix:
            version = "%s.%s" % (self.args.versionprefix, version)

        logging.debug("VERSION(auto): %s", version)
        return version

    def detect_version(self):
        """Automatic detection of version number for checked-out repository."""

        version = self.scm_object.detect_version(self.args.__dict__).strip()
        logging.debug("VERSION(auto): %s", version)
        return version
Example #13
0
class Tasks():
    '''
    Class to create a task list for formats which can contain more then one scm
    job like snapcraft or appimage
    '''
    def __init__(self, args):
        self.task_list = []
        self.cleanup_dirs = []
        self.helpers = Helpers()
        self.changes = Changes()
        self.scm_object = None
        self.data_map = None
        self.args = args

    def cleanup(self):
        """Cleaning temporary directories."""
        if self.args.skip_cleanup:
            logging.debug("Skipping cleanup")
            return

        logging.debug("Cleaning: %s", ' '.join(self.cleanup_dirs))

        for dirname in self.cleanup_dirs:
            if not os.path.exists(dirname):
                continue
            shutil.rmtree(dirname)
        self.cleanup_dirs = []
        # Unlock to prevent dead lock in cachedir if exception
        # gets raised
        if self.scm_object:
            self.scm_object.unlock_cache()

    def generate_list(self):
        '''
        Generate list of scm jobs from appimage.yml, snapcraft.yaml or a single
        job from cli arguments.
        '''
        args = self.args
        scms = ['git', 'tar', 'svn', 'bzr', 'hg']

        if args.appimage:
            # we read the SCM config from appimage.yml
            # (context manager closes the file even if yaml parsing raises)
            with open('appimage.yml') as filehandle:
                self.data_map = yaml.safe_load(filehandle)
            args.use_obs_scm = True
            build_scms = ()
            try:
                build_scms = self.data_map['build'].keys()
            except TypeError:
                pass
            # run for each scm an own task
            for scm in scms:
                if scm not in build_scms:
                    continue
                for url in self.data_map['build'][scm]:
                    args.url = url
                    args.scm = scm
                    self.task_list.append(copy.copy(args))

        elif args.snapcraft:
            # we read the SCM config from snapcraft.yaml instead
            # getting it via parameters
            with open('snapcraft.yaml') as filehandle:
                self.data_map = yaml.safe_load(filehandle)
            args.use_obs_scm = True
            # run for each part an own task
            for part in self.data_map['parts'].keys():
                args.filename = part
                if 'source-type' not in self.data_map['parts'][part].keys():
                    continue
                pep8_1 = self.data_map['parts'][part]['source-type']
                if pep8_1 not in scms:
                    continue
                # avoid conflicts with files
                args.clone_prefix = "_obs_"
                args.url = self.data_map['parts'][part]['source']
                self.data_map['parts'][part]['source'] = part
                args.scm = self.data_map['parts'][part]['source-type']
                del self.data_map['parts'][part]['source-type']
                self.task_list.append(copy.copy(args))

        else:
            self.task_list.append(args)

    def process_list(self):
        '''
        process tasks from the task_list
        '''
        for task in self.task_list:
            self.process_single_task(task)

    def finalize(self):
        '''
        final steps after processing task list
        '''
        args = self.args
        if args.snapcraft:
            # write the new snapcraft.yaml file
            # we prefix our own here to be sure to not overwrite user files,
            # if he is using us in "disabled" mode
            new_file = args.outdir + '/_service:snapcraft:snapcraft.yaml'
            with open(new_file, 'w') as outfile:
                outfile.write(
                    yaml.dump(self.data_map, default_flow_style=False))

    def process_single_task(self, args):
        '''
        do the work for a single task
        '''
        self.args = args

        logging.basicConfig(format="%(message)s",
                            stream=sys.stderr,
                            level=logging.INFO)
        if args.verbose:
            logging.getLogger().setLevel(logging.DEBUG)

        # force cleaning of our workspace on exit
        atexit.register(self.cleanup)

        scm2class = {
            'git': 'Git',
            'bzr': 'Bzr',
            'hg': 'Hg',
            'svn': 'Svn',
            'tar': 'Tar',
        }

        # create objects for TarSCM.<scm> and TarSCM.helpers
        try:
            scm_class = getattr(TarSCM.scm, scm2class[args.scm])
        except (KeyError, AttributeError):
            # unknown --scm value (KeyError from the map, AttributeError
            # from getattr); keep the original user-facing message
            raise OptionsError("Please specify valid --scm=... options")

        # self.scm_object is need to unlock cache in cleanup
        # if exception occurs
        self.scm_object = scm_object = scm_class(args, self)

        scm_object.fetch_upstream()

        if args.filename:
            dstname = basename = args.filename
        else:
            dstname = basename = os.path.basename(scm_object.clone_dir)

        version = self.get_version()
        changesversion = version
        # do not append the version when only generating an obscpio /
        # snapcraft / appimage artifact
        if version and not sys.argv[0].endswith("/tar") \
           and not sys.argv[0].endswith("/snapcraft") \
           and not sys.argv[0].endswith("/appimage"):
            dstname += '-' + version

        logging.debug("DST: %s", dstname)

        detected_changes = scm_object.detect_changes()

        scm_object.prep_tree_for_archive(args.subdir,
                                         args.outdir,
                                         dstname=dstname)
        self.cleanup_dirs.append(scm_object.arch_dir)

        if args.use_obs_scm:
            arch = TarSCM.archive.ObsCpio()
        else:
            arch = TarSCM.archive.Tar()

        arch.extract_from_archive(scm_object.arch_dir, args.extract,
                                  args.outdir)

        arch.create_archive(scm_object,
                            basename=basename,
                            dstname=dstname,
                            version=version,
                            cli=args)

        if detected_changes:
            changesauthor = self.changes.get_changesauthor(args)

            logging.debug("AUTHOR: %s", changesauthor)

            if not version:
                args.version = "_auto_"
                changesversion = self.get_version()

            for filename in glob.glob('*.changes'):
                new_changes_file = os.path.join(args.outdir, filename)
                shutil.copy(filename, new_changes_file)
                self.changes.write_changes(new_changes_file,
                                           detected_changes['lines'],
                                           changesversion, changesauthor)
            self.changes.write_changes_revision(args.url, args.outdir,
                                                detected_changes['revision'])

        scm_object.finalize()

    def get_version(self):
        '''
        Generate final version number by detecting version from scm if not
        given as cli option and applying versionrewrite_pattern and
        versionprefix if given as cli option
        '''
        version = self.args.version
        if version == '_none_':
            return ''
        if version == '_auto_' or self.args.versionformat:
            version = self.detect_version()
        if self.args.versionrewrite_pattern:
            regex = re.compile(self.args.versionrewrite_pattern)
            version = regex.sub(self.args.versionrewrite_replacement, version)
        if self.args.versionprefix:
            version = "%s.%s" % (self.args.versionprefix, version)

        logging.debug("VERSION(auto): %s", version)
        return version

    def detect_version(self):
        """Automatic detection of version number for checked-out repository."""

        version = self.scm_object.detect_version(self.args.__dict__).strip()
        logging.debug("VERSION(auto): %s", version)
        return version
Example #14
0
class Scm():
    """Base class for the individual SCM implementations.

    Subclasses are expected to set ``self.scm`` and implement the
    scm-specific methods (``fetch_upstream_scm``, ``update_cache``,
    ``detect_changes_scm``, ...).
    """
    def __init__(self, args, task):
        # default settings
        # arch_dir - Directory which is used for the archive
        # e.g. myproject-2.0
        self.arch_dir = None
        self.repocachedir = None
        self.clone_dir = None
        self.lock_file = None
        self.basename = None
        self.repodir = None

        # mandatory arguments
        self.args = args
        self.task = task
        self.url = args.url

        # optional arguments
        self.revision = args.revision

        # preparation of required attributes
        self.helpers = Helpers()
        if self.args.changesgenerate:
            self.changes = Changes()

        self._calc_repocachedir()
        self._final_rename_needed = False

    def switch_revision(self):
        '''Switch sources to revision. Dummy implementation for version control
        systems that change revision during fetch/update.
        '''
        return

    def fetch_upstream(self):
        """Fetch sources from repository and checkout given revision."""
        logging.debug("CACHEDIR: '%s'", self.repocachedir)
        logging.debug("SCM: '%s'", self.scm)
        clone_prefix = ""
        if 'clone_prefix' in self.args.__dict__:
            clone_prefix = self.args.__dict__['clone_prefix']

        self._calc_dir_to_clone_to(clone_prefix)
        self.prepare_clone_dir()

        self.lock_cache()

        if not os.path.isdir(self.clone_dir):
            # initial clone
            logging.debug(
                "[fetch_upstream] Initial checkout/clone to directory: '%s'",
                self.clone_dir
            )
            os.mkdir(self.clone_dir)
            self.fetch_upstream_scm()
        else:
            logging.info("Detected cached repository...")
            self.update_cache()

        self.prepare_working_copy()

        # switch_to_revision
        self.switch_revision()

        # git specific: after switching to desired revision its necessary to
        # update
        # submodules since they depend on the actual version of the selected
        # revision
        self.fetch_submodules()

        self.unlock_cache()

    def fetch_submodules(self):
        """NOOP in other scm's than git"""
        pass

    def detect_changes(self):
        """Detect changes between revisions."""
        if not self.args.changesgenerate:
            return None

        chgs = self.changes.read_changes_revision(self.url, os.getcwd(),
                                                  self.args.outdir)

        logging.debug("CHANGES: %s", repr(chgs))

        chgs = self.detect_changes_scm(self.args.subdir, chgs)
        logging.debug("Detected changes:\n%s", repr(chgs))
        return chgs

    def detect_changes_scm(self, subdir, chgs):
        # overridden by SCMs that support changes generation
        sys.exit("changesgenerate not supported with %s SCM" % self.scm)

    def get_repocache_hash(self, subdir):
        """Calculate hash fingerprint for repository cache."""
        u_url = self.url
        u_url = u_url.encode("utf-8")
        return hashlib.sha256(u_url).hexdigest()

    def get_current_commit(self):
        # NOOP for SCMs without a commit notion; git overrides this
        return None

    def _calc_repocachedir(self):
        # check for enabled caches in this order (first wins):
        #   1. local .cache
        #   2. environment
        #   3. user config
        #   4. system wide
        repocachedir = None
        cwd = os.getcwd()
        if os.path.isdir(os.path.join(cwd, '.cache')):
            repocachedir = os.path.join(cwd, '.cache')

        if repocachedir is None:
            repocachedir = os.getenv('CACHEDIRECTORY')

        if repocachedir is None:
            repocachedir = Config().get('tar_scm', 'CACHEDIRECTORY')

        if repocachedir:
            logging.debug("REPOCACHE: %s", repocachedir)
            self.repohash = self.get_repocache_hash(self.args.subdir)
            self.repocachedir = os.path.join(repocachedir, self.repohash)

    def prepare_clone_dir(self):
        # special case when using osc and creating an obscpio, use
        # current work directory to allow the developer to work inside
        # of the git repo and fetch local changes
        if sys.argv[0].endswith("snapcraft") or \
           (self.args.use_obs_scm and os.getenv('OSC_VERSION')):
            self.repodir = os.getcwd()

        # construct repodir (the parent directory of the checkout)
        logging.debug("REPOCACHEDIR = '%s'", self.repocachedir)
        if self.repocachedir:
            if not os.path.isdir(self.repocachedir):
                os.makedirs(self.repocachedir)

    def _calc_dir_to_clone_to(self, prefix):
        # separate path from parameters etc.
        try:
            url_path = urlparse(self.url)[2].rstrip('/')
        except NameError:
            # bare urlparse is only available with the python2-style
            # import; fall back to the python3 module path
            url_path = urllib.parse.urlparse(self.url)[2].rstrip('/')

        # remove trailing scm extension
        logging.debug("Stripping '%s' extension from '%s'", self.scm, url_path)
        url_path = re.sub(r'\.%s$' % self.scm, '', url_path)
        logging.debug(" - New  url_path: '%s'", url_path)

        # special handling for cloning bare repositories (../repo/.git/)
        url_path = url_path.rstrip('/')

        self.basename = os.path.basename(os.path.normpath(url_path))
        self.basename = prefix + self.basename

        # os.environ.get avoids the KeyError the old try/except swallowed
        osc_version = os.environ.get('OSC_VERSION', 0)

        if osc_version == 0:
            tempdir = tempfile.mkdtemp(dir=self.args.outdir)
            self.task.cleanup_dirs.append(tempdir)
        else:
            # inside osc: work in the current directory so local changes
            # are picked up
            tempdir = os.getcwd()

        self.repodir = os.path.join(tempdir, self.basename)

        if self.repocachedir:
            # Update atime and mtime of repocachedir to make it easier
            # for cleanup script
            if os.path.isdir(self.repocachedir):
                os.utime(self.repocachedir, (time.time(), time.time()))
            self.clone_dir = os.path.abspath(os.path.join(self.repocachedir,
                                                          self.basename))
        else:
            self.clone_dir = os.path.abspath(self.repodir)

        logging.debug("[_calc_dir_to_clone_to] CLONE_DIR: %s", self.clone_dir)

    def is_sslverify_enabled(self):
        """Returns ``True`` if the ``sslverify`` option has been enabled or
        not been set (default enabled) ``False`` otherwise."""
        return \
            'sslverify' not in self.args.__dict__ or \
            self.args.__dict__['sslverify']

    def version_iso_cleanup(self, version):
        """Reformat timestamp value."""
        # "YYYY-MM-DD hh:mm:ss +ZZZZ" -> "YYYYMMDDThhmmss", then strip any
        # remaining '-' and ':' characters
        version = re.sub(r'([0-9]{4})-([0-9]{2})-([0-9]{2}) +'
                         r'([0-9]{2})([:]([0-9]{2})([:]([0-9]{2}))?)?'
                         r'( +[-+][0-9]{3,4})', r'\1\2\3T\4\6\8', version)
        version = re.sub(r'[-:]', '', version)
        return version

    def prepare_working_copy(self):
        # NOOP by default; overridden by SCMs that need extra preparation
        pass

    def prep_tree_for_archive(self, subdir, outdir, dstname):
        """Prepare directory tree for creation of the archive by copying the
        requested sub-directory to the top-level destination directory.
        """
        src = os.path.join(self.clone_dir, subdir)
        if not os.path.exists(src):
            raise Exception("%s: No such file or directory" % src)

        self.arch_dir = dst = os.path.join(outdir, dstname)
        if os.path.exists(dst) and \
            (os.path.samefile(src, dst) or
             os.path.samefile(os.path.dirname(src), dst)):
            return

        shutil.copytree(src, dst, symlinks=True)

    def lock_cache(self):
        # exclusive lock on <clone_dir>/../.lock protects the shared cache
        pdir = os.path.join(self.clone_dir, os.pardir, '.lock')
        self.lock_file = open(os.path.abspath(pdir), 'w')
        fcntl.lockf(self.lock_file, fcntl.LOCK_EX)

    def unlock_cache(self):
        if self.lock_file and os.path.isfile(self.lock_file.name):
            fcntl.lockf(self.lock_file, fcntl.LOCK_UN)
            self.lock_file.close()
            self.lock_file = None

    def finalize(self):
        # NOOP by default; overridden by SCMs that need post-processing
        pass
Example #15
0
class Scm():
    """Base class for SCM backends (git, svn, hg, bzr, tar).

    Holds the state shared by all SCM implementations (cache directory,
    clone directory, proxy settings, lock file) and drives the generic
    fetch/lock/archive workflow; subclasses provide the SCM specific
    operations such as ``fetch_upstream_scm`` and ``update_cache``.
    """

    def __init__(self, args, task):
        # default settings
        # arch_dir - Directory which is used for the archive
        # e.g. myproject-2.0
        self.arch_dir = None
        self.repocachedir = None
        self.clone_dir = None
        self.lock_file = None
        self.basename = None
        self.repodir = None

        # mandatory arguments
        self.args = args
        self.task = task
        self.url = args.url

        # optional arguments
        self.revision = args.revision

        # preparation of required attributes
        self.helpers = Helpers()
        if self.args.changesgenerate:
            self.changes = Changes()

        self._calc_repocachedir()
        self._final_rename_needed = False

        # proxy support
        self.httpproxy = None
        self.httpsproxy = None
        self.noproxy = None
        self._calc_proxies()

    def check_scm(self):
        '''check version of scm to proof, it is installed and executable'''
        subprocess.Popen([self.scm, '--version'],
                         stdout=subprocess.PIPE).communicate()

    def switch_revision(self):
        '''Switch sources to revision. Dummy implementation for version control
        systems that change revision during fetch/update.
        '''
        return

    def fetch_upstream(self):
        """Fetch sources from repository and checkout given revision."""
        logging.debug("CACHEDIR: '%s'", self.repocachedir)
        logging.debug("SCM: '%s'", self.scm)
        clone_prefix = ""
        if 'clone_prefix' in self.args.__dict__:
            clone_prefix = self.args.__dict__['clone_prefix']

        self._calc_dir_to_clone_to(clone_prefix)
        self.prepare_clone_dir()

        self.lock_cache()

        if not os.path.isdir(self.clone_dir):
            # initial clone
            logging.debug(
                "[fetch_upstream] Initial checkout/clone to directory: '%s'",
                self.clone_dir)
            os.mkdir(self.clone_dir)
            self.fetch_upstream_scm()
        else:
            logging.info("Detected cached repository...")
            self.update_cache()

        self.prepare_working_copy()

        # switch_to_revision
        self.switch_revision()

        # git specific: after switching to desired revision its necessary to
        # update
        # submodules since they depend on the actual version of the selected
        # revision
        self.fetch_submodules()

        self.unlock_cache()

    def fetch_submodules(self):
        """NOOP in other scm's than git"""
        pass

    def detect_changes(self):
        """Detect changes between revisions."""
        if not self.args.changesgenerate:
            return None

        # restore state saved by a previous service run, if present
        old_servicedata = os.path.join(os.getcwd(), '.old', '_servicedata')
        old_changes_glob = os.path.join(os.getcwd(), '.old', '*.changes')
        if (os.path.isfile(old_servicedata)):
            shutil.copy2(old_servicedata, os.getcwd())
            for filename in glob.glob(old_changes_glob):
                shutil.copy2(filename, os.getcwd())

        chgs = self.changes.read_changes_revision(self.url, os.getcwd(),
                                                  self.args.outdir)

        logging.debug("CHANGES: %s", repr(chgs))

        chgs = self.detect_changes_scm(self.args.subdir, chgs)
        logging.debug("Detected changes:\n%s", repr(chgs))
        return chgs

    def detect_changes_scm(self, subdir, chgs):
        """Overridden by SCMs supporting change detection; base aborts."""
        sys.exit("changesgenerate not supported with %s SCM" % self.scm)

    def get_repocache_hash(self, subdir):
        """Calculate hash fingerprint for repository cache."""
        # 'subdir' is unused here but the parameter is kept so subclasses
        # may include it in the fingerprint
        return hashlib.sha256(self.url.encode("utf-8")).hexdigest()

    def get_current_commit(self):
        return None

    def _calc_repocachedir(self):
        # check for enabled caches in this order (first wins):
        #   1. local .cache
        #   2. environment
        #   3. user config
        #   4. system wide
        repocachedir = None
        cwd = os.getcwd()
        if os.path.isdir(os.path.join(cwd, '.cache')):
            repocachedir = os.path.join(cwd, '.cache')

        if repocachedir is None:
            repocachedir = os.getenv('CACHEDIRECTORY')

        if repocachedir is None:
            repocachedir = Config().get('tar_scm', 'CACHEDIRECTORY')

        if repocachedir:
            logging.debug("REPOCACHE: %s", repocachedir)
            self.repohash = self.get_repocache_hash(self.args.subdir)
            self.repocachedir = os.path.join(repocachedir, self.repohash)

    def _calc_proxies(self):
        # check for standard http/https proxy variables
        #   - http_proxy
        #   - https_proxy
        #   - no_proxy
        httpproxy = os.getenv('http_proxy')
        httpsproxy = os.getenv('https_proxy')
        noproxy = os.getenv('no_proxy')

        if httpproxy:
            logging.debug("HTTP proxy found: %s", httpproxy)
            self.httpproxy = httpproxy

        if httpsproxy:
            logging.debug("HTTPS proxy found: %s", httpsproxy)
            self.httpsproxy = httpsproxy

        if noproxy:
            logging.debug("HTTP no proxy found: %s", noproxy)
            self.noproxy = noproxy

    def prepare_clone_dir(self):
        # special case when using osc and creating an obscpio, use
        # current work directory to allow the developer to work inside
        # of the git repo and fetch local changes
        is_snap = sys.argv[0].endswith("snapcraft")
        is_obs_scm = self.args.use_obs_scm
        in_osc = bool(os.getenv('OSC_VERSION'))
        in_git = os.path.isdir('.git')
        if is_snap or (is_obs_scm and in_osc and in_git):
            self.repodir = os.getcwd()

        # construct repodir (the parent directory of the checkout)
        logging.debug("REPOCACHEDIR = '%s'", self.repocachedir)
        if self.repocachedir:
            if not os.path.isdir(self.repocachedir):
                os.makedirs(self.repocachedir)

    def _calc_dir_to_clone_to(self, prefix):
        # separate path from parameters etc.
        try:
            url_path = urlparse(self.url)[2].rstrip('/')
        except NameError:
            # narrow except: the fallback exists only for environments
            # where 'urlparse' was not imported directly
            url_path = urllib.parse.urlparse(self.url)[2].rstrip('/')

        # remove trailing scm extension
        logging.debug("Stripping '%s' extension from '%s'", self.scm, url_path)
        url_path = re.sub(r'\.%s$' % self.scm, '', url_path)
        logging.debug(" - New  url_path: '%s'", url_path)

        # special handling for cloning bare repositories (../repo/.git/)
        url_path = url_path.rstrip('/')

        self.basename = os.path.basename(os.path.normpath(url_path))
        self.basename = prefix + self.basename

        # os.environ.get instead of try/bare-except: a missing variable
        # simply keeps the 0 default
        osc_version = os.environ.get('OSC_VERSION', 0)

        if osc_version == 0:
            tempdir = tempfile.mkdtemp(dir=self.args.outdir)
            self.task.cleanup_dirs.append(tempdir)
        else:
            tempdir = os.getcwd()

        self.repodir = os.path.join(tempdir, self.basename)

        if self.repocachedir:
            # Update atime and mtime of repocachedir to make it easier
            # for cleanup script
            if os.path.isdir(self.repocachedir):
                os.utime(self.repocachedir, (time.time(), time.time()))
            self.clone_dir = os.path.abspath(
                os.path.join(self.repocachedir, self.basename))
        else:
            self.clone_dir = os.path.abspath(self.repodir)

        logging.debug("[_calc_dir_to_clone_to] CLONE_DIR: %s", self.clone_dir)

    def is_sslverify_enabled(self):
        """Returns ``True`` if the ``sslverify`` option has been enabled or
        not been set (default enabled) ``False`` otherwise."""
        return \
            'sslverify' not in self.args.__dict__ or \
            self.args.__dict__['sslverify']

    def version_iso_cleanup(self, version):
        """Reformat timestamp value."""
        version = re.sub(
            r'([0-9]{4})-([0-9]{2})-([0-9]{2}) +'
            r'([0-9]{2})([:]([0-9]{2})([:]([0-9]{2}))?)?'
            r'( +[-+][0-9]{3,4})', r'\1\2\3T\4\6\8', version)
        version = re.sub(r'[-:]', '', version)
        return version

    def prepare_working_copy(self):
        """Hook for subclasses; base implementation does nothing."""
        pass

    def prep_tree_for_archive(self, subdir, outdir, dstname):
        """Prepare directory tree for creation of the archive by copying the
        requested sub-directory to the top-level destination directory.
        """
        src = os.path.join(self.clone_dir, subdir)
        if not os.path.exists(src):
            raise Exception("%s: No such file or directory" % src)

        self.arch_dir = dst = os.path.join(outdir, dstname)
        if os.path.exists(dst):
            same = os.path.samefile(src, dst) or \
                os.path.samefile(os.path.dirname(src), dst)
            if same:
                return

        # refuse a subdir that resolves outside of the clone directory
        r_path = os.path.realpath(src)
        c_dir = os.path.realpath(self.clone_dir)
        if not r_path.startswith(c_dir):
            sys.exit("--subdir %s tries to escape repository." % subdir)

        logging.debug("copying tree: '%s' to '%s'", src, dst)
        src = src.encode('utf-8')
        dst = dst.encode('utf-8')
        shutil.copytree(src, dst, symlinks=True)

    def lock_cache(self):
        """Take an exclusive lock via a '.lock' file next to clone_dir."""
        pdir = os.path.join(self.clone_dir, os.pardir, '.lock')
        self.lock_file = open(os.path.abspath(pdir), 'w')
        fcntl.lockf(self.lock_file, fcntl.LOCK_EX)

    def unlock_cache(self):
        """Release the cache lock, if one is held."""
        if self.lock_file and os.path.isfile(self.lock_file.name):
            fcntl.lockf(self.lock_file, fcntl.LOCK_UN)
            self.lock_file.close()
            self.lock_file = None

    def finalize(self):
        self.cleanup()

    def check_url(self):
        return True
Example #16
0
class Scm():
    """Base class for SCM backends (git, svn, hg, bzr, tar).

    Extends the generic fetch/lock/archive workflow with optional keyring
    based authentication (``user:password`` injected into the URL) and GPG
    keyring preparation for maintainer signature checking.
    """

    def __init__(self, args, task):
        # default settings
        # arch_dir - Directory which is used for the archive
        # e.g. myproject-2.0
        self.arch_dir = None
        self.repocachedir = None
        self.clone_dir = None
        self.lock_file = None
        self.basename = None
        self.repodir = None
        self.user = None
        self.password = None
        self._parent_tag = None
        self._backup_gnupghome = None

        # mandatory arguments
        self.args = args
        self.task = task
        self.url = args.url

        # optional arguments
        self.revision = args.revision
        if args.user and args.keyring_passphrase:
            if keyring_import_error == 1:
                raise SystemExit('Error while importing keyrings.alt.file but '
                                 '"--user" and "--keyring_passphrase" are set.'
                                 ' Please install keyrings.alt.file!')
            os.environ['XDG_DATA_HOME'] = '/etc/obs/services/tar_scm.d'
            _kr = keyrings.alt.file.EncryptedKeyring()
            _kr.keyring_key = args.keyring_passphrase
            try:
                self.password = _kr.get_password(self.url, args.user)
                if not self.password:
                    raise Exception(
                        'No user {u} in keyring for service {s}'.format(
                            u=args.user, s=self.url))
            except AssertionError:
                # keyrings.alt signals a bad passphrase via AssertionError
                raise Exception('Wrong keyring passphrase')
            self.user = args.user

        # preparation of required attributes
        self.helpers = Helpers()
        if self.args.changesgenerate:
            self.changes = Changes()

        self._calc_repocachedir()
        self._final_rename_needed = False

        # proxy support
        self.httpproxy = None
        self.httpsproxy = None
        self.noproxy = None
        self._calc_proxies()

        if self.args.maintainers_asc:
            self._prepare_gpg_settings()

    def __del__(self):
        if self.args.maintainers_asc:
            self._revert_gpg_settings()

    def auth_url(self):
        """Inject ``user:password`` keyring credentials into ``self.url``
        for SCMs that support URL based authentication."""
        if self.scm not in ('bzr', 'git', 'hg'):
            return
        auth_patterns = {}
        auth_patterns['bzr'] = {}
        auth_patterns['bzr']['proto'] = r'^(ftp|bzr|https?)://.*'
        auth_patterns['bzr']['already'] = r'^(ftp|bzr|https?)://.*:.*@.*'
        auth_patterns['bzr']['sub'] = r'^((ftp|bzr|https?)://)(.*)'
        auth_patterns['bzr']['format'] = r'\g<1>{user}:{pwd}@\g<3>'
        auth_patterns['git'] = {}
        auth_patterns['git']['proto'] = r'^(ftps?|https?)://.*'
        auth_patterns['git']['already'] = r'^(ftps?|https?)://.*:.*@.*'
        auth_patterns['git']['sub'] = r'^((ftps?|https?)://)(.*)'
        auth_patterns['git']['format'] = r'\g<1>{user}:{pwd}@\g<3>'
        auth_patterns['hg'] = {}
        auth_patterns['hg']['proto'] = r'^https?://.*'
        auth_patterns['hg']['already'] = r'^https?://.*:.*@.*'
        auth_patterns['hg']['sub'] = r'^(https?://)(.*)'
        auth_patterns['hg']['format'] = r'\g<1>{user}:{pwd}@\g<2>'

        if self.user and self.password:
            # only rewrite URLs with a supported protocol that do not
            # already carry credentials
            pattern_proto = re.compile(auth_patterns[self.scm]['proto'])
            pattern = re.compile(auth_patterns[self.scm]['already'])
            if pattern_proto.match(self.url) and not pattern.match(self.url):
                logging.debug('[auth_url] settings credentials from keyring')
                self.url = re.sub(
                    auth_patterns[self.scm]['sub'],
                    auth_patterns[self.scm]['format'].format(
                        user=self.user, pwd=self.password), self.url)

    def check_scm(self):
        '''check version of scm to proof, it is installed and executable'''
        subprocess.Popen([self.scm, '--version'],
                         stdout=subprocess.PIPE).communicate()

    def switch_revision(self):
        '''Switch sources to revision. Dummy implementation for version control
        systems that change revision during fetch/update.
        '''
        return

    def fetch_upstream(self):
        """Fetch sources from repository and checkout given revision."""
        logging.debug("CACHEDIR: '%s'", self.repocachedir)
        logging.debug("SCM: '%s'", self.scm)
        clone_prefix = ""
        if 'clone_prefix' in self.args.__dict__:
            clone_prefix = self.args.__dict__['clone_prefix']

        self._calc_dir_to_clone_to(clone_prefix)
        self.prepare_clone_dir()

        self.lock_cache()

        if not os.path.isdir(self.clone_dir):
            # initial clone
            logging.debug(
                "[fetch_upstream] Initial checkout/clone to directory: '%s'",
                self.clone_dir)
            os.mkdir(self.clone_dir)
            self.fetch_upstream_scm()
        else:
            logging.info("Detected cached repository...")
            self.update_cache()

        self.prepare_working_copy()

        # switch_to_revision
        self.switch_revision()

        # git specific: after switching to desired revision its necessary to
        # update
        # submodules since they depend on the actual version of the selected
        # revision
        self.fetch_submodules()

        # obs_scm specific: do not allow running git-lfs to prevent storage
        #  duplication with tar_scm
        if self.args.use_obs_scm:
            self.fetch_lfs()

        self.unlock_cache()

    def fetch_submodules(self):
        """NOOP in other scm's than git"""
        pass

    def fetch_lfs(self):
        """NOOP in other scm's than git"""
        pass

    def detect_changes(self):
        """Detect changes between revisions."""
        if not self.args.changesgenerate:
            return None

        # restore state saved by a previous service run, if present
        old_servicedata = os.path.join(os.getcwd(), '.old', '_servicedata')
        old_changes_glob = os.path.join(os.getcwd(), '.old', '*.changes')
        if (os.path.isfile(old_servicedata)):
            shutil.copy2(old_servicedata, os.getcwd())
            for filename in glob.glob(old_changes_glob):
                shutil.copy2(filename, os.getcwd())

        chgs = self.changes.read_changes_revision(self.url, os.getcwd(),
                                                  self.args.outdir)

        logging.debug("CHANGES: %s", repr(chgs))

        chgs = self.detect_changes_scm(self.args.subdir, chgs)
        logging.debug("Detected changes:\n%s", repr(chgs))
        return chgs

    def detect_changes_scm(self, subdir, chgs):
        """Overridden by SCMs supporting change detection; base aborts."""
        sys.exit("changesgenerate not supported with %s SCM" % self.scm)

    def get_repocache_hash(self, subdir):
        """Calculate hash fingerprint for repository cache."""
        # 'subdir' is unused here but the parameter is kept so subclasses
        # may include it in the fingerprint
        u_url = self.url.encode()
        return hashlib.sha256(u_url).hexdigest()

    def get_current_commit(self):
        return None

    def _calc_repocachedir(self):
        # check for enabled caches in this order (first wins):
        #   1. local .cache
        #   2. environment
        #   3. user config
        #   4. system wide
        repocachedir = None
        cwd = os.getcwd()
        if os.path.isdir(os.path.join(cwd, '.cache')):
            repocachedir = os.path.join(cwd, '.cache')

        if repocachedir is None:
            repocachedir = os.getenv('CACHEDIRECTORY')

        if repocachedir is None:
            repocachedir = Config().get('tar_scm', 'CACHEDIRECTORY')

        if repocachedir:
            logging.debug("REPOCACHE: %s", repocachedir)
            self.repohash = self.get_repocache_hash(self.args.subdir)
            self.repocachedir = os.path.join(repocachedir, self.repohash)

    def _calc_proxies(self):
        # check for standard http/https proxy variables
        #   - http_proxy
        #   - https_proxy
        #   - no_proxy
        httpproxy = os.getenv('http_proxy')
        httpsproxy = os.getenv('https_proxy')
        noproxy = os.getenv('no_proxy')

        if httpproxy:
            logging.debug("HTTP proxy found: %s", httpproxy)
            self.httpproxy = httpproxy

        if httpsproxy:
            logging.debug("HTTPS proxy found: %s", httpsproxy)
            self.httpsproxy = httpsproxy

        if noproxy:
            logging.debug("HTTP no proxy found: %s", noproxy)
            self.noproxy = noproxy

    def prepare_clone_dir(self):
        # special case when using osc and creating an obscpio, use
        # current work directory to allow the developer to work inside
        # of the git repo and fetch local changes
        is_snap = sys.argv[0].endswith("snapcraft")
        is_obs_scm = self.args.use_obs_scm
        in_osc = bool(os.getenv('OSC_VERSION'))
        in_git = os.path.isdir('.git')
        if is_snap or (is_obs_scm and in_osc and in_git):
            self.repodir = os.getcwd()

        # construct repodir (the parent directory of the checkout)
        logging.debug("REPOCACHEDIR = '%s'", self.repocachedir)
        if self.repocachedir:
            if not os.path.isdir(self.repocachedir):
                os.makedirs(self.repocachedir)

    def _calc_dir_to_clone_to(self, prefix):
        # separate path from parameters etc.
        url_path = urlparse(self.url)[2].rstrip('/')

        # remove trailing scm extension
        logging.debug("Stripping '%s' extension from '%s'", self.scm, url_path)
        url_path = re.sub(r'\.%s$' % self.scm, '', url_path)
        logging.debug(" - New  url_path: '%s'", url_path)

        # special handling for cloning bare repositories (../repo/.git/)
        url_path = url_path.rstrip('/')

        self.basename = os.path.basename(os.path.normpath(url_path))
        self.basename = prefix + self.basename

        # os.environ.get instead of try/bare-except: a missing variable
        # simply keeps the 0 default
        osc_version = os.environ.get('OSC_VERSION', 0)

        if osc_version == 0:
            tempdir = tempfile.mkdtemp(dir=self.args.outdir)
            self.task.cleanup_dirs.append(tempdir)
        else:
            tempdir = os.getcwd()

        self.repodir = os.path.join(tempdir, self.basename)

        if self.repocachedir:
            # Update atime and mtime of repocachedir to make it easier
            # for cleanup script
            if os.path.isdir(self.repocachedir):
                os.utime(self.repocachedir, (time.time(), time.time()))
            self.clone_dir = os.path.abspath(
                os.path.join(self.repocachedir, self.basename))
        else:
            self.clone_dir = os.path.abspath(self.repodir)

        logging.debug("[_calc_dir_to_clone_to] CLONE_DIR: %s", self.clone_dir)

    def is_sslverify_enabled(self):
        """Returns ``True`` if the ``sslverify`` option has been enabled or
        not been set (default enabled) ``False`` otherwise."""
        return \
            'sslverify' not in self.args.__dict__ or \
            self.args.__dict__['sslverify']

    def version_iso_cleanup(self, version, debian=False):
        """Reformat timestamp value."""
        version = re.sub(
            r'([0-9]{4})-([0-9]{2})-([0-9]{2}) +'
            r'([0-9]{2})([:]([0-9]{2})([:]([0-9]{2}))?)?'
            r'( +[-+][0-9]{3,4})', r'\1\2\3T\4\6\8', version)
        # avoid removing "-" for Debian packages, which use it to split the
        # upstream vs downstream version
        # for RPM it has to be stripped instead, as it's an illegal character
        if not debian:
            version = re.sub(r'[-:]', '', version)
        return version

    def prepare_working_copy(self):
        """Hook for subclasses; base implementation does nothing."""
        pass

    def prep_tree_for_archive(self, subdir, outdir, dstname):
        """Prepare directory tree for creation of the archive by copying the
        requested sub-directory to the top-level destination directory.
        """
        src = os.path.join(self.clone_dir, subdir)
        if not os.path.exists(src):
            raise Exception("%s: No such file or directory" % src)

        self.arch_dir = dst = os.path.join(outdir, dstname)
        if os.path.exists(dst):
            same = os.path.samefile(src, dst) or \
                os.path.samefile(os.path.dirname(src), dst)
            if same:
                return

        # refuse a subdir that resolves outside of the clone directory
        r_path = os.path.realpath(src)
        c_dir = os.path.realpath(self.clone_dir)
        if not r_path.startswith(c_dir):
            sys.exit("--subdir %s tries to escape repository." % subdir)

        # lazy %-args instead of eager string formatting
        logging.debug("copying tree: '%s' to '%s'", src, dst)

        shutil.copytree(src, dst, symlinks=True)

    def lock_cache(self):
        """Take an exclusive lock via a '.lock' file next to clone_dir."""
        pdir = os.path.join(self.clone_dir, os.pardir, '.lock')
        self.lock_file = open(os.path.abspath(pdir), 'w')
        fcntl.lockf(self.lock_file, fcntl.LOCK_EX)

    def unlock_cache(self):
        """Release the cache lock, if one is held."""
        if self.lock_file and os.path.isfile(self.lock_file.name):
            fcntl.lockf(self.lock_file, fcntl.LOCK_UN)
            self.lock_file.close()
            self.lock_file = None

    def finalize(self):
        self.cleanup()

    def check_url(self):
        return True

    def _prepare_gpg_settings(self):
        """Point GNUPGHOME at a fresh private tempdir and import the
        maintainer keys given via ``--maintainers-asc``."""
        logging.debug("preparing gpg settings")
        self._backup_gnupghome = os.getenv('GNUPGHOME')
        gpgdir = tempfile.mkdtemp()
        mode = int('700', 8)
        os.chmod(gpgdir, mode)
        # assign via os.environ (not os.putenv) so the change is visible
        # to this process via os.getenv as well as to child processes
        os.environ['GNUPGHOME'] = gpgdir
        # supply the arguments that this debug call was missing
        logging.debug("Importing file '%s' to gnupghome: '%s'.",
                      self.args.maintainers_asc, gpgdir)
        self.helpers.safe_run(['gpg', '--import', self.args.maintainers_asc],
                              cwd=self.clone_dir,
                              interactive=sys.stdout.isatty())

    def _revert_gpg_settings(self):
        """Restore the GNUPGHOME that was active before
        :meth:`_prepare_gpg_settings` ran (if there was one)."""
        if self._backup_gnupghome:
            os.environ['GNUPGHOME'] = self._backup_gnupghome
Example #17
0
class Tasks():
    '''
    Class to create a task list for formats which can contain more then one scm
    job like snapcraft or appimage
    '''
    def __init__(self, args):
        """Set up empty task bookkeeping and the shared helper objects."""
        self.args = args
        self.task_list = []
        self.cleanup_dirs = []
        self.scm_object = None
        self.data_map = None
        self.helpers = Helpers()
        self.changes = Changes()

    def cleanup(self):
        """Cleaning temporary directories."""
        if self.args.skip_cleanup:
            logging.debug("Skipping cleanup")
            return

        logging.debug("Cleaning: %s", ' '.join(self.cleanup_dirs))

        for dirname in self.cleanup_dirs:
            if not os.path.exists(dirname):
                continue
            ploc = locale.getpreferredencoding()
            shutil.rmtree(dirname.encode(ploc))
        self.cleanup_dirs = []
        # Unlock to prevent dead lock in cachedir if exception
        # gets raised
        if self.scm_object:
            self.scm_object.unlock_cache()
            # calls the corresponding cleanup routine
            self.scm_object.cleanup()

    def generate_list(self):
        '''
        Generate list of scm jobs from appimage.yml, snapcraft.yaml or a single
        job from cli arguments.

        Each queued job is a (shallow) copy of ``self.args`` with ``url`` and
        ``scm`` adjusted per entry; without appimage/snapcraft, the cli args
        themselves become the single task.
        '''
        args = self.args
        # SCM types recognised in appimage/snapcraft configuration files
        scms = ['git', 'tar', 'svn', 'bzr', 'hg']

        if args.appimage:
            # we read the SCM config from appimage.yml
            filehandle = open('appimage.yml')
            self.data_map = yaml.safe_load(filehandle)
            filehandle.close()
            args.use_obs_scm = True
            build_scms = ()
            try:
                build_scms = self.data_map['build'].keys()
            except (TypeError, KeyError):
                # no (or non-mapping) 'build' section: leave build_scms empty
                pass
            # run for each scm an own task
            for scm in scms:
                if scm not in build_scms:
                    continue
                for url in self.data_map['build'][scm]:
                    # NOTE(review): 'args' is mutated then shallow-copied, so
                    # all queued tasks share every other attribute object
                    args.url = url
                    args.scm = scm
                    self.task_list.append(copy.copy(args))

        elif args.snapcraft:
            # we read the SCM config from snapcraft.yaml instead
            # getting it via parameters
            filehandle = open('snapcraft.yaml')
            self.data_map = yaml.safe_load(filehandle)
            filehandle.close()
            args.use_obs_scm = True
            # run for each part an own task
            for part in self.data_map['parts'].keys():
                args.filename = part
                if 'source-type' not in self.data_map['parts'][part].keys():
                    continue
                pep8_1 = self.data_map['parts'][part]['source-type']
                if pep8_1 not in scms:
                    continue
                # avoid conflicts with files
                args.clone_prefix = "_obs_"
                args.url = self.data_map['parts'][part]['source']
                # rewrite the part's source to its local name; the
                # source-type key is consumed here and removed
                self.data_map['parts'][part]['source'] = part
                args.scm = self.data_map['parts'][part]['source-type']
                del self.data_map['parts'][part]['source-type']
                self.task_list.append(copy.copy(args))

        else:
            # plain invocation: the cli arguments are the single task
            self.task_list.append(args)

    def process_list(self):
        '''
        process tasks from the task_list
        '''
        for task in self.task_list:
            self.process_single_task(task)

    def finalize(self):
        '''
        final steps after processing task list
        '''
        args = self.args
        if args.snapcraft:
            # write the new snapcraft.yaml file
            # we prefix our own here to be sure to not overwrite user files,
            # if he is using us in "disabled" mode
            new_file = args.outdir + '/_service:snapcraft:snapcraft.yaml'
            with open(new_file, 'w') as outfile:
                outfile.write(
                    yaml.dump(self.data_map, default_flow_style=False))

        # execute also download_files for downloading single sources
        if args.snapcraft or args.appimage:
            download_files = '/usr/lib/obs/service/download_files'
            if os.path.exists(download_files):
                cmd = [download_files, '--outdir', args.outdir]
                rcode, output = self.helpers.run_cmd(cmd, None)

                if rcode != 0:
                    raise RuntimeError("download_files has failed:%s" % output)

    def check_for_branch_request(self, args):
        # we may have a _branch_request file. In that case we life in a
        # branch create by a webhook from github or gitlab pull/merge request
        # the source supposed to be merged is more important then the code
        # referenced in the _service file.
        if not os.path.exists('_branch_request'):
            return args

        # is it a branch request?
        import json
        f = open("_branch_request", "r")
        j = json.load(f)
        if j['object_kind'] == 'merge_request':
            # gitlab merge request
            args.url = j['project']['http_url']
            args.revision = j['object_attributes']['source']['default_branch']
        elif j['action'] == 'opened':
            # github pull request
            args.url = "https://github.com/"
            args.url += j['pull_request']['head']['repo']['full_name']
            args.revision = j['pull_request']['head']['sha']

        return args

    def process_single_task(self, args):
        '''
        do the work for a single task

        Resolves the SCM backend class, fetches the sources, prepares the
        destination tree and creates the archive, then writes detected
        changes and finalizes the SCM object.
        '''
        # a pending branch request (webhook) overrides url/revision
        self.args = self.check_for_branch_request(args)

        logging.basicConfig(format="%(message)s",
                            stream=sys.stderr,
                            level=logging.INFO)

        # force cleaning of our workspace on exit
        atexit.register(self.cleanup)

        # map --scm values to TarSCM backend class names
        scm2class = {
            'git': 'Git',
            'bzr': 'Bzr',
            'hg': 'Hg',
            'svn': 'Svn',
            'tar': 'Tar',
        }

        # create objects for TarSCM.<scm> and TarSCM.helpers
        try:
            scm_class = getattr(TarSCM.scm, scm2class[args.scm])
        except:
            msg = "Please specify valid --scm=... options"
            raise OptionsError(msg)  # pylint: disable=W0707

        # self.scm_object is need to unlock cache in cleanup
        # if exception occurs
        self.scm_object = scm_object = scm_class(args, self)

        # skip the remote-url check in test mode
        tmode = bool(os.getenv('TAR_SCM_TESTMODE'))
        if not tmode and not scm_object.check_url():
            sys.exit("--url does not match remote repository")

        try:
            scm_object.check_scm()
        except OSError:
            # SCM binary is missing or not executable
            print("Please install '%s'" % scm_object.scm)
            sys.exit(1)

        scm_object.fetch_upstream()

        # archive/destination name: explicit --filename wins, otherwise the
        # clone directory's basename
        if args.filename:
            dstname = basename = args.filename
        else:
            dstname = basename = os.path.basename(scm_object.clone_dir)

        version = self.get_version()
        changesversion = version
        # append the version to the file name, except for the plain
        # tar/snapcraft/appimage entry points
        if version and args.filename_add_version \
           and not sys.argv[0].endswith("/tar") \
           and not sys.argv[0].endswith("/snapcraft") \
           and not sys.argv[0].endswith("/appimage"):
            # NOTE(review): when dstname is bytes, "'-' + version" below
            # mixes str and bytes and would raise TypeError — confirm
            # whether args.filename can actually be bytes here
            if isinstance(dstname, bytes):
                version = version.encode('UTF-8')
            dstname += '-' + version

        logging.debug("DST: %s", dstname)

        detected_changes = scm_object.detect_changes()

        if not args.use_obs_gbp:
            scm_object.prep_tree_for_archive(args.subdir,
                                             args.outdir,
                                             dstname=dstname)
            self.cleanup_dirs.append(scm_object.arch_dir)

        # For the GBP service there is no copy in arch_dir, so use clone_dir
        # which has the same content
        extract_src = scm_object.arch_dir
        if args.use_obs_scm:
            arch = TarSCM.archive.ObsCpio()
        elif args.use_obs_gbp:
            arch = TarSCM.archive.Gbp()
            extract_src = scm_object.clone_dir
        else:
            arch = TarSCM.archive.Tar()

        # extract single files requested via --extract, then build the archive
        arch.extract_from_archive(extract_src, args.extract, args.outdir)

        arch.create_archive(scm_object,
                            basename=basename,
                            dstname=dstname,
                            version=version,
                            cli=args)

        if detected_changes:
            self._process_changes(args, version, changesversion,
                                  detected_changes)

        scm_object.finalize()

    def _process_changes(self, args, ver, changesversion, detected_changes):
        """Propagate detected SCM changes into the package changes files.

        Copies every ``*.changes`` file from the current working directory
        into ``args.outdir``, appends the detected change lines under the
        resolved version and author, and finally records the detected
        revision for the repository URL.
        """
        author = self.changes.get_changesauthor(args)
        logging.debug("AUTHOR: %s", author)

        # No version resolved so far: fall back to auto-detection so the
        # changes entry still carries a meaningful version string.
        if not ver:
            args.version = "_auto_"
            changesversion = self.get_version()

        logging.debug("Searching for '*.changes' in %s", os.getcwd())
        for changes_file in glob.glob('*.changes'):
            copied = os.path.join(args.outdir, changes_file)
            shutil.copy(changes_file, copied)
            self.changes.write_changes(copied,
                                       detected_changes['lines'],
                                       changesversion, author)

        self.changes.write_changes_revision(args.url, args.outdir,
                                            detected_changes['revision'])

    def get_version(self):
        '''
        Generate final version number by detecting version from scm if not
        given as cli option and applying versionrewrite_pattern and
        versionprefix if given as cli option
        '''
        opts = self.args
        version = opts.version

        # '_none_' explicitly disables any version handling.
        if version == '_none_':
            return ''

        # Auto-detect when requested or when a custom format is given.
        if opts.versionformat or version == '_auto_':
            version = self.detect_version()

        if opts.versionrewrite_pattern:
            # A user-supplied rewrite pattern replaces the default cleanup.
            version = re.sub(opts.versionrewrite_pattern,
                             opts.versionrewrite_replacement,
                             version)
        else:
            debian = opts.__dict__.get('use_obs_gbp', False)
            version = self.scm_object.version_iso_cleanup(version, debian)

        if opts.versionprefix:
            version = "%s.%s" % (opts.versionprefix, version)

        logging.debug("VERSION(auto): %s", version)
        return version

    def detect_version(self):
        """Automatic detection of version number for checked-out repository."""
        raw = self.scm_object.detect_version(self.args.__dict__)
        # The SCM backend may return surrounding whitespace; normalize it.
        version = raw.strip()
        logging.debug("VERSION(auto): %s", version)
        return version