Example #1
def get_conf(args, parser=None):
    if not parser:
        parser = optparse.OptionParser()

    parser.add_option('--config',
                      dest='config',
                      help='configuration file to use')
    parser.add_option('--opensuse',
                      dest='opensuse',
                      action='store_true',
                      default=False,
                      help='use the openSUSE config as a basis')
    parser.add_option('--log',
                      dest='log',
                      help='log file to use (default: stderr)')

    (options, args) = parser.parse_args()

    if options.log:
        path = os.path.realpath(options.log)
        util.safe_mkdir_p(os.path.dirname(path))
        sys.stderr = open(options.log, 'a')

    try:
        conf = config.Config(options.config, use_opensuse=options.opensuse)
    except config.ConfigException, e:
        print >> sys.stderr, e
        return (args, options, None)
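Most of the examples in this section call util.safe_mkdir_p and util.safe_unlink, two project helpers whose definitions are not included here. A minimal sketch of what such helpers typically look like, assuming they simply tolerate an already-existing directory and an already-missing file:

import errno
import os

def safe_mkdir_p(path):
    # Hypothetical sketch: behave like `mkdir -p`, ignoring directories that already exist.
    if not path:
        return
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

def safe_unlink(path):
    # Hypothetical sketch: remove a file, ignoring the case where it does not exist.
    try:
        os.unlink(path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise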
Example #2
def debug_thread(context, state, indent="", use_remaining=False):
    global USE_DEBUG
    global DEBUG_DIR

    if not USE_DEBUG:
        return

    # compatibility with old versions of python (< 2.6)
    if hasattr(threading.currentThread(), "name"):
        name = threading.currentThread().name
    else:
        name = threading.currentThread().getName()

    if context == "main":
        print "%s%s: %s" % (indent, name, state)
        return

    try:
        util.safe_mkdir_p(DEBUG_DIR)
        fout = open(os.path.join(DEBUG_DIR, "buildservice-" + name), "a")

        # ignore indent since we write in files
        fout.write("[%s] %s %s\n" % (context, time.strftime("%H:%M:%S", time.localtime()), state))

        if use_remaining:
            remaining = ""
            for i in threading.enumerate():
                remaining += i.name + ", "
            fout.write("Remaining: %s\n" % (remaining,))
        fout.close()
    except Exception, e:
        print >> sys.stderr, "Exception in debug_thread: %s" % (e,)
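debug_thread is gated on the module-level USE_DEBUG and DEBUG_DIR globals and writes one log file per thread. A hedged usage sketch; the global values and the worker function below are illustrative assumptions, not part of the original module:

# Hypothetical setup; the real module defines these globals elsewhere.
USE_DEBUG = True
DEBUG_DIR = '/tmp/obs-db-debug'

def worker():
    # Each call appends a timestamped line to DEBUG_DIR/buildservice-<thread name>.
    debug_thread('checkout', 'start: openSUSE:Factory')
    # ... do the actual work ...
    debug_thread('checkout', 'done: openSUSE:Factory', use_remaining=True)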
Example #3
    def check_project(self, project, try_again=True):
        """ Checks if the current checkout of a project is up-to-date, and queue task if necessary. """
        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, "_status")

        try:
            url = osc_copy.makeurl(self.conf.apiurl, ["status", "project", project])
            length = self._download_url_to_file(url, filename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(filename)
                    return self.check_project(project, False)

        except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
            util.safe_unlink(filename)

            if type(e) == urllib2.HTTPError:
                if e.code == 404:
                    print >> sys.stderr, "Project %s doesn't exist." % (project,)
                elif e.code == 400:
                    # the status page doesn't always work :/
                    self.queue_checkout_project(project, primary=False, force_simple_checkout=True, no_config=True)
            elif try_again:
                self.check_project(project, False)
            else:
                print >> sys.stderr, "Cannot get status of %s: %s" % (project, e)

            return
Example #4
    def _get_packages_in_project(self, project, try_again=True):
        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, '_pkglist')

        try:
            url = osc_copy.makeurl(self.conf.apiurl,
                                   ['public', 'source', project])
            length = self._download_url_to_file(url, filename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(filename)
                    return self._get_packages_in_project(project, False)

        except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
            util.safe_unlink(filename)

            if type(e) == urllib2.HTTPError and e.code == 404:
                return (None, 'Project %s doesn\'t exist.' % (project, ))
            elif try_again:
                return self._get_packages_in_project(project, False)
            else:
                return (None, str(e))
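_get_packages_in_project returns a (packages, error) pair: a list of package names with None on success, or None with an error message on failure (the complete version in Example #14 shows both paths). A hedged caller sketch; the checkout variable and project name are assumptions for illustration:

# Hypothetical usage; `checkout` stands for the object owning the method.
(packages, error) = checkout._get_packages_in_project('openSUSE:Factory')
if packages is None:
    print('Cannot list packages: %s' % error)
else:
    for package in packages:
        print(package)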
Example #5
    def checkout_project_pkgmeta(self, project, try_again=True):
        """ Checks out the packages metadata of all packages in a project. """
        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, '_pkgmeta')
        tmpfilename = filename + '.new'

        try:
            url = osc_copy.makeurl(
                self.conf.apiurl, ['search', 'package'],
                ['match=%s' % urllib.quote('@project=\'%s\'' % project)])
            length = self._download_url_to_file(url, tmpfilename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(tmpfilename)
                    return self.checkout_project_pkgmeta(project, False)

            os.rename(tmpfilename, filename)

        except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
            util.safe_unlink(tmpfilename)

            if type(e) == urllib2.HTTPError and e.code == 404:
                print >> sys.stderr, 'Project %s doesn\'t exist.' % (project, )
            elif try_again:
                self.checkout_project_pkgmeta(project, False)
            else:
                print >> sys.stderr, 'Cannot get packages metadata of %s: %s' % (
                    project, e)

            return
Example #6
    def checkout_project_pkgmeta(self, project, try_again=True):
        """ Checks out the packages metadata of all packages in a project. """
        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, "_pkgmeta")
        tmpfilename = filename + ".new"

        try:
            url = osc_copy.makeurl(
                self.conf.apiurl, ["search", "package"], ["match=%s" % urllib.quote("@project='%s'" % project)]
            )
            length = self._download_url_to_file(url, tmpfilename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(tmpfilename)
                    return self.checkout_project_pkgmeta(project, False)

            os.rename(tmpfilename, filename)

        except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
            util.safe_unlink(tmpfilename)

            if type(e) == urllib2.HTTPError and e.code == 404:
                print >> sys.stderr, "Project %s doesn't exist." % (project,)
            elif try_again:
                self.checkout_project_pkgmeta(project, False)
            else:
                print >> sys.stderr, "Cannot get packages metadata of %s: %s" % (project, e)

            return
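All of these checkout helpers delegate the actual HTTP work to a private _download_url_to_file(url, filename) method that is not shown in these examples. A minimal stand-in sketch, assuming the method simply writes the response body to the given path and returns the number of bytes written (urllib2 is used here to match the Python 2 examples):

import urllib2

def download_url_to_file(url, filename):
    # Hypothetical stand-in for the private method: fetch the URL and save it locally.
    fin = urllib2.urlopen(url)
    try:
        data = fin.read()
    finally:
        fin.close()
    fout = open(filename, 'wb')
    try:
        fout.write(data)
    finally:
        fout.close()
    return len(data)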
Example #7
def debug_thread(context, state, indent='', use_remaining=False):
    global USE_DEBUG
    global DEBUG_DIR

    if not USE_DEBUG:
        return

    # compatibility with old versions of python (< 2.6)
    if hasattr(threading.currentThread(), 'name'):
        name = threading.currentThread().name
    else:
        name = threading.currentThread().getName()

    if context == 'main':
        print('%s%s: %s' % (indent, name, state))
        return

    try:
        util.safe_mkdir_p(DEBUG_DIR)
        fout = open(os.path.join(DEBUG_DIR, 'buildservice-' + name), 'a')

        # ignore indent since we write in files
        fout.write(
            '[%s] %s %s\n' %
            (context, time.strftime("%H:%M:%S", time.localtime()), state))

        if use_remaining:
            remaining = ''
            for i in threading.enumerate():
                remaining += i.name + ', '
            fout.write('Remaining: %s\n' % (remaining, ))
        fout.close()
    except Exception as e:
        print('Exception in debug_thread: %s' % (e, ), file=sys.stderr)
Example #8
    def _get_packages_in_project(self, project, try_again=True):
        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, "_pkglist")

        try:
            url = osc_copy.makeurl(self.conf.apiurl, ["public", "source", project])
            length = self._download_url_to_file(url, filename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(filename)
                    return self._get_packages_in_project(project, False)

        except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
            util.safe_unlink(filename)

            if type(e) == urllib2.HTTPError and e.code == 404:
                return (None, "Project %s doesn't exist." % (project,))
            elif try_again:
                return self._get_packages_in_project(project, False)
            else:
                return (None, str(e))
Example #9
    def _copy_project_config(self, project, copy_from):
        from_file = os.path.join(self.dest_dir, copy_from, '_obs-db-options')
        if not os.path.exists(from_file):
            return

        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, '_obs-db-options')
        shutil.copy(from_file, filename)
Example #10
    def _copy_project_config(self, project, copy_from):
        from_file = os.path.join(self.dest_dir, copy_from, "_obs-db-options")
        if not os.path.exists(from_file):
            return

        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, "_obs-db-options")
        shutil.copy(from_file, filename)
Example #11
    def run(self, cursor, changed_projects=None):
        """ Creates the XML files for all projects.

            changed_projects -- The list of projects for which we need to
                                generate an XML file. "None" means all projects.

        """
        if not cursor:
            raise InfoXmlException(
                'Database needed to create XML files is not available.')

        util.safe_mkdir_p(self.dest_dir)

        cursor.execute('''SELECT name FROM %(Project)s;''' % SQL_TABLES)
        projects = [row['name'] for row in cursor]

        self._create_version_cache(cursor, projects)

        if changed_projects is not None:
            # We have a specific list of projects for which we need to create
            # the XML. Note that None and [] don't have the same meaning.
            if not changed_projects:
                return

            # Get the list of projects containing a package which links to a
            # changed project, or which has a devel project that has changed
            where = ' OR '.join([
                'B.link_project = ? OR B.devel_project = ?'
                for i in range(len(changed_projects))
            ])
            where_args = []
            for changed_project in changed_projects:
                where_args.append(changed_project)
                where_args.append(changed_project)

            mapping = SQL_TABLES.copy()
            mapping['where'] = where

            cursor.execute(
                '''SELECT A.name FROM %(Project)s as A, %(SrcPackage)s as B
                              WHERE A.id = B.project AND (%(where)s)
                              GROUP BY A.name
                              ;''' % mapping, where_args)

            changed_projects = set(changed_projects)
            for (project, ) in cursor:
                changed_projects.add(project)

            projects = changed_projects

        for project in projects:
            self._debug_print('Writing XML for %s' % project)
            self._write_xml_for_project(cursor, project)
Example #12
    def run(self, cursor, changed_projects = None):
        """ Creates the XML files for all projects.

            changed_projects -- The list of projects for which we need to
                                generate an XML file. "None" means all projects.

        """
        if not cursor:
            raise InfoXmlException('Database needed to create XML files is not available.')

        util.safe_mkdir_p(self.dest_dir)

        cursor.execute('''SELECT name FROM %(Project)s;''' % SQL_TABLES)
        projects = [ row['name'] for row in cursor ]

        self._create_version_cache(cursor, projects)

        if changed_projects is not None:
            # We have a specific list of projects for which we need to create
            # the XML. Note that None and [] don't have the same meaning.
            if not changed_projects:
                return

            # Get the list of projects containing a package which links to a
            # changed project, or which has a devel project that has changed
            where = ' OR '.join([ 'B.link_project = ? OR B.devel_project = ?' for i in range(len(changed_projects)) ])
            where_args = []
            for changed_project in changed_projects:
                where_args.append(changed_project)
                where_args.append(changed_project)

            mapping = SQL_TABLES.copy()
            mapping['where'] = where

            cursor.execute('''SELECT A.name FROM %(Project)s as A, %(SrcPackage)s as B
                              WHERE A.id = B.project AND (%(where)s)
                              GROUP BY A.name
                              ;''' % mapping, where_args)

            changed_projects = set(changed_projects)
            for (project,) in cursor:
                changed_projects.add(project)

            projects = changed_projects

        for project in projects:
            self._debug_print('Writing XML for %s' % project)
            self._write_xml_for_project(cursor, project)
Example #13
    def _write_project_config(self, project):
        """ We need to write the project config to a file, because nothing
            remembers if a project is a devel project, and from which project
            it is, so it's impossible to know what settings should apply
            without such a file. """
        if not self.conf.projects.has_key(project):
            return

        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, "_obs-db-options")

        fout = open(filename, "w")
        fout.write("parent=%s\n" % self.conf.projects[project].parent)
        fout.write("branches=%s\n" % ",".join(self.conf.projects[project].branches))
        fout.write("force-project-parent=%d\n" % self.conf.projects[project].force_project_parent)
        fout.write("lenient-delta=%d\n" % self.conf.projects[project].lenient_delta)
        fout.close()
Example #14
    def _get_packages_in_project(self, project, try_again=True):
        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, '_pkglist')

        try:
            url = osc_copy.makeurl(self.conf.apiurl,
                                   ['public', 'source', project])
            length = self._download_url_to_file(url, filename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(filename)
                    return self._get_packages_in_project(project, False)

        except (urllib.error.HTTPError, urllib.error.URLError,
                socket.error) as e:
            util.safe_unlink(filename)

            if type(e) == urllib.error.HTTPError and e.code == 404:
                return (None, 'Project %s doesn\'t exist.' % (project, ))
            elif try_again:
                return self._get_packages_in_project(project, False)
            else:
                return (None, str(e))

        try:
            root = ET.parse(filename).getroot()
        except SyntaxError as e:
            util.safe_unlink(filename)

            if try_again:
                return self._get_packages_in_project(project, False)
            else:
                return (None, 'Cannot parse list of packages in %s: %s' %
                        (project, e))

        packages = [node.get('name') for node in root.findall('entry')]
        util.safe_unlink(filename)

        return (packages, None)
Example #15
    def checkout_package_meta(self, project, package, try_again=True):
        """ Checks out the metadata of a package.
        
            If we're interested in devel projects of this project, and the
            devel package is not in a checked out devel project, then we queue
            a checkout of this devel project.

        """
        package_dir = os.path.join(self.dest_dir, project, package)
        util.safe_mkdir_p(package_dir)

        filename = os.path.join(package_dir, "_meta")
        tmpfilename = filename + ".new"

        try:
            url = osc_copy.makeurl(self.conf.apiurl, ["public", "source", project, package, "_meta"])
            length = self._download_url_to_file(url, tmpfilename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(tmpfilename)
                    return self.checkout_package_meta(project, package, False)

            os.rename(tmpfilename, filename)

        except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
            util.safe_unlink(tmpfilename)

            if type(e) == urllib2.HTTPError and e.code == 404:
                print >> sys.stderr, "Package %s of project %s doesn't exist." % (package, project)
            elif try_again:
                self.checkout_package_meta(project, package, False)
            else:
                print >> sys.stderr, "Cannot get metadata of package %s in %s: %s (queueing for next run)" % (
                    package,
                    project,
                    e,
                )
                self.error_queue.put((project, package))

            return
Example #16
    def checkout_package_meta(self, project, package, try_again=True):
        """ Checks out the metadata of a package.
        
            If we're interested in devel projects of this project, and the
            devel package is not in a checked out devel project, then we queue
            a checkout of this devel project.

        """
        package_dir = os.path.join(self.dest_dir, project, package)
        util.safe_mkdir_p(package_dir)

        filename = os.path.join(package_dir, '_meta')
        tmpfilename = filename + '.new'

        try:
            url = osc_copy.makeurl(
                self.conf.apiurl,
                ['public', 'source', project, package, '_meta'])
            length = self._download_url_to_file(url, tmpfilename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(tmpfilename)
                    return self.checkout_package_meta(project, package, False)

            os.rename(tmpfilename, filename)

        except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
            util.safe_unlink(tmpfilename)

            if type(e) == urllib2.HTTPError and e.code == 404:
                print >> sys.stderr, 'Package %s of project %s doesn\'t exist.' % (
                    package, project)
            elif try_again:
                self.checkout_package_meta(project, package, False)
            else:
                print >> sys.stderr, 'Cannot get metadata of package %s in %s: %s (queueing for next run)' % (
                    package, project, e)
                self.error_queue.put((project, package))

            return
Example #17
def get_conf(args, parser=None):
    if not parser:
        parser = optparse.OptionParser()

    parser.add_option('--config',
                      dest='config',
                      help='configuration file to use')
    parser.add_option('--opensuse',
                      dest='opensuse',
                      action='store_true',
                      default=False,
                      help='use the openSUSE config as a basis')
    parser.add_option('--log',
                      dest='log',
                      help='log file to use (default: stderr)')

    (options, args) = parser.parse_args()

    if options.log:
        path = os.path.realpath(options.log)
        util.safe_mkdir_p(os.path.dirname(path))
        sys.stderr = open(options.log, 'a')

    try:
        conf = config.Config(options.config, use_opensuse=options.opensuse)
    except config.ConfigException as e:
        print(e, file=sys.stderr)
        return (args, options, None)

    if conf.sockettimeout > 0:
        # we have a setting for the default socket timeout to not hang forever
        socket.setdefaulttimeout(conf.sockettimeout)

    try:
        os.makedirs(conf.cache_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            print('Cannot create cache directory.', file=sys.stderr)
            return (args, options, None)

    return (args, options, conf)
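A hedged sketch of how get_conf might be driven from a script's entry point; the main() wrapper below is an assumption for illustration, not part of the original code:

import sys

def main(args):
    # Hypothetical entry point: give up early if the configuration could not be loaded.
    (args, options, conf) = get_conf(args)
    if conf is None:
        return 1
    # ... from here on, conf.apiurl, conf.cache_dir, etc. are available ...
    return 0

if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))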
Example #18
    def _open_db(self, create_if_needed = False):
        """ Open a database file, and sets up everything. """
        if self.db:
            return True

        create = False
        if not os.path.exists(self._dbfile):
            if not create_if_needed:
                return False
            else:
                util.safe_mkdir_p(os.path.dirname(self._dbfile))
                create = True

        self.db = sqlite3.connect(self._dbfile)
        self.db.row_factory = sqlite3.Row
        self.cursor = self.db.cursor()

        if create:
            self._sql_setup()

        return True
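_open_db sets sqlite3.Row as the row factory, which is what lets later code address rows by column name (as in row['name'] in Examples #11 and #12). A small self-contained illustration of that behavior:

import sqlite3

db = sqlite3.connect(':memory:')
db.row_factory = sqlite3.Row
cursor = db.cursor()
cursor.execute('CREATE TABLE project (id INTEGER PRIMARY KEY, name TEXT);')
cursor.execute('INSERT INTO project (name) VALUES (?);', ('openSUSE:Factory',))
cursor.execute('SELECT name FROM project;')
for row in cursor:
    print(row['name'])  # rows can be indexed by column name thanks to sqlite3.Row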
Example #19
    def _write_project_config(self, project):
        """ We need to write the project config to a file, because nothing
            remembers if a project is a devel project, and from which project
            it is, so it's impossible to know what settings should apply
            without such a file. """
        if project not in self.conf.projects:
            return

        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, '_obs-db-options')

        fout = open(filename, 'w')
        fout.write('parent=%s\n' % self.conf.projects[project].parent)
        fout.write('branches=%s\n' %
                   ','.join(self.conf.projects[project].branches))
        fout.write('force-project-parent=%d\n' %
                   self.conf.projects[project].force_project_parent)
        fout.write('lenient-delta=%d\n' %
                   self.conf.projects[project].lenient_delta)
        fout.close()
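_write_project_config stores a simple key=value file (parent, branches, force-project-parent, lenient-delta). A hedged sketch of the reverse operation, reading such a file back into a dictionary; this reader is an illustration, not the project's actual parser:

def read_project_config(filename):
    # Hypothetical reader for the key=value format written above.
    options = {}
    fin = open(filename)
    try:
        for line in fin:
            line = line.strip()
            if not line or '=' not in line:
                continue
            (key, value) = line.split('=', 1)
            options[key] = value
    finally:
        fin.close()
    return options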
Example #20
    def check_project(self, project, try_again=True):
        """ Checks if the current checkout of a project is up-to-date, and queue task if necessary. """
        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, '_status')

        try:
            url = osc_copy.makeurl(self.conf.apiurl,
                                   ['status', 'project', project])
            length = self._download_url_to_file(url, filename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(filename)
                    return self.check_project(project, False)

        except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
            util.safe_unlink(filename)

            if type(e) == urllib2.HTTPError:
                if e.code == 404:
                    print >> sys.stderr, 'Project %s doesn\'t exist.' % (
                        project, )
                elif e.code == 400:
                    # the status page doesn't always work :/
                    self.queue_checkout_project(project,
                                                primary=False,
                                                force_simple_checkout=True,
                                                no_config=True)
            elif try_again:
                self.check_project(project, False)
            else:
                print >> sys.stderr, 'Cannot get status of %s: %s' % (project,
                                                                      e)

            return
Example #21
def get_conf(args, parser = None):
    if not parser:
        parser = optparse.OptionParser()

    parser.add_option('--config', dest='config',
                      help='configuration file to use')
    parser.add_option('--opensuse', dest='opensuse',
                      action='store_true', default=False,
                      help='use the openSUSE config as a basis')
    parser.add_option('--log', dest='log',
                      help='log file to use (default: stderr)')

    (options, args) = parser.parse_args()

    if options.log:
        path = os.path.realpath(options.log)
        util.safe_mkdir_p(os.path.dirname(path))
        sys.stderr = open(options.log, 'a')

    try:
        conf = config.Config(options.config, use_opensuse = options.opensuse)
    except config.ConfigException, e:
        print >>sys.stderr, e
        return (args, options, None)
Example #22
    def checkout_package(self, project, package):
        """ Checks out a package.

            We use the files already checked out as a cache, to avoid
            downloading the same files again if possible.

            This means we need to make sure to remove all files that shouldn't
            be there when leaving this function. This is done with the calls to
            _cleanup_package_old_files().

        """
        if not package:
            print >> sys.stderr, "Internal error: checkout_package called instead of checkout_project_pkgmeta"
            self.checkout_project_pkgmeta(project)
            return

        package_dir = os.path.join(self.dest_dir, project, package)
        util.safe_mkdir_p(package_dir)

        # Never remove _meta files, since they're not handled by the checkout process
        downloaded_files = ["_meta"]

        metadata_cache = self._get_package_metadata_cache(project, package)

        # find files we're interested in from the metadata
        root = self._get_files_metadata(project, package, "_files")
        downloaded_files.append("_files")
        if root is None:
            self._cleanup_package_old_files(project, package, downloaded_files)
            return

        is_link = False
        link_error = False
        # revision to expand a link
        link_md5 = None

        # detect if the package is a link package
        linkinfos_nb = len(root.findall("linkinfo"))
        if linkinfos_nb == 1:
            link_node = root.find("linkinfo")
            # The logic is taken from islink() in osc/core.py
            is_link = link_node.get("xsrcmd5") not in [None, ""] or link_node.get("lsrcmd5") not in [None, ""]
            link_error = link_node.get("error") not in [None, ""]
            link_md5 = link_node.get("xsrcmd5")
        elif linkinfos_nb > 1:
            print >> sys.stderr, "Ignoring link in %s from %s: more than one <linkinfo>" % (package, project)

        if is_link:
            # download the _link file first. This makes it possible to know if
            # the project has a delta compared to the target of the link
            for node in root.findall("entry"):
                filename = node.get("name")
                md5 = node.get("md5")
                mtime = node.get("mtime")
                size = node.get("size")
                if filename == "_link":
                    if not self._get_package_file_checked_out(project, package, filename, metadata_cache, md5, mtime):
                        self._get_file(project, package, filename, size)
                    downloaded_files.append(filename)

            # if the link has an error, then we can't do anything else since we
            # won't be able to expand
            if link_error:
                self._cleanup_package_old_files(project, package, downloaded_files)
                return

            # look if we need to download the metadata of the expanded package
            if metadata_cache.has_key("_files-expanded") and metadata_cache["_files-expanded"][0] == link_md5:
                files = os.path.join(self.dest_dir, project, package, "_files-expanded")
                try:
                    root = ET.parse(files).getroot()
                except SyntaxError:
                    root = None
            else:
                root = self._get_files_metadata(project, package, "_files-expanded", link_md5)

            if root is None:
                self._cleanup_package_old_files(project, package, downloaded_files)
                return

            downloaded_files.append("_files-expanded")

        # look at all files and download what might be interesting
        for node in root.findall("entry"):
            filename = node.get("name")
            md5 = node.get("md5")
            mtime = node.get("mtime")
            size = node.get("size")
            # download .spec files
            if filename.endswith(".spec"):
                if not self._get_package_file_checked_out(project, package, filename, metadata_cache, md5, mtime):
                    self._get_file(project, package, filename, size, link_md5)
                downloaded_files.append(filename)

        self._cleanup_package_old_files(project, package, downloaded_files)
Example #23
    def checkout_package_meta(self, project, package, try_again=True):
        """ Checks out the metadata of a package.
        
            If we're interested in devel projects of this project, and the
            devel package is not in a checked out devel project, then we queue
            a checkout of this devel project.

        """
        package_dir = os.path.join(self.dest_dir, project, package)
        util.safe_mkdir_p(package_dir)

        filename = os.path.join(package_dir, '_meta')
        tmpfilename = filename + '.new'

        try:
            url = osc_copy.makeurl(
                self.conf.apiurl,
                ['public', 'source', project, package, '_meta'])
            length = self._download_url_to_file(url, tmpfilename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(tmpfilename)
                    return self.checkout_package_meta(project, package, False)

            os.rename(tmpfilename, filename)

        except (urllib.error.HTTPError, urllib.error.URLError,
                socket.error) as e:
            util.safe_unlink(tmpfilename)

            if type(e) == urllib.error.HTTPError and e.code == 404:
                print('Package %s of project %s doesn\'t exist.' %
                      (package, project),
                      file=sys.stderr)
            elif try_again:
                self.checkout_package_meta(project, package, False)
            else:
                print(
                    'Cannot get metadata of package %s in %s: %s (queueing for next run)'
                    % (package, project, e),
                    file=sys.stderr)
                self.error_queue.put((project, package))

            return

        # Are we interested in devel projects of this project, and if yes,
        # should we check out the devel project if needed?
        if project not in self.conf.projects:
            return
        if not self.conf.projects[project].checkout_devel_projects:
            return

        try:
            package_node = ET.parse(filename).getroot()
        except SyntaxError:
            return

        devel_node = package_node.find('devel')
        if devel_node is None:
            return

        devel_project = devel_node.get('project')
        project_dir = os.path.join(self.dest_dir, devel_project)
        if not os.path.exists(project_dir):
            self.queue_checkout_project(devel_project,
                                        parent=project,
                                        primary=False)
Example #24
    def checkout_package(self, project, package):
        """ Checks out a package.

            We use the files already checked out as a cache, to avoid
            downloading the same files again if possible.

            This means we need to make sure to remove all files that shouldn't
            be there when leaving this function. This is done with the calls to
            _cleanup_package_old_files().

        """
        if not package:
            print(
                'Internal error: checkout_package called instead of checkout_project_pkgmeta',
                file=sys.stderr)
            self.checkout_project_pkgmeta(project)
            return

        package_dir = os.path.join(self.dest_dir, project, package)
        util.safe_mkdir_p(package_dir)

        # Never remove _meta files, since they're not handled by the checkout process
        downloaded_files = ['_meta']

        metadata_cache = self._get_package_metadata_cache(project, package)

        # find files we're interested in from the metadata
        root = self._get_files_metadata(project, package, '_files')
        downloaded_files.append('_files')
        if root is None:
            self._cleanup_package_old_files(project, package, downloaded_files)
            return

        is_link = False
        link_error = False
        # revision to expand a link
        link_md5 = None

        # detect if the package is a link package
        linkinfos_nb = len(root.findall('linkinfo'))
        if linkinfos_nb == 1:
            link_node = root.find('linkinfo')
            # The logic is taken from islink() in osc/core.py
            is_link = link_node.get('xsrcmd5') not in [
                None, ''
            ] or link_node.get('lsrcmd5') not in [None, '']
            link_error = link_node.get('error') not in [None, '']
            link_md5 = link_node.get('xsrcmd5')
        elif linkinfos_nb > 1:
            print('Ignoring link in %s from %s: more than one <linkinfo>' %
                  (package, project),
                  file=sys.stderr)

        if is_link:
            # download the _link file first. This makes it possible to know if
            # the project has a delta compared to the target of the link
            for node in root.findall('entry'):
                filename = node.get('name')
                md5 = node.get('md5')
                mtime = node.get('mtime')
                size = node.get('size')
                if filename == '_link':
                    if not self._get_package_file_checked_out(
                            project, package, filename, metadata_cache, md5,
                            mtime):
                        self._get_file(project, package, filename, size)
                    downloaded_files.append(filename)

            # if the link has an error, then we can't do anything else since we
            # won't be able to expand
            if link_error:
                self._cleanup_package_old_files(project, package,
                                                downloaded_files)
                return

            # look if we need to download the metadata of the expanded package
            if '_files-expanded' in metadata_cache and metadata_cache[
                    '_files-expanded'][0] == link_md5:
                files = os.path.join(self.dest_dir, project, package,
                                     '_files-expanded')
                try:
                    root = ET.parse(files).getroot()
                except SyntaxError:
                    root = None
            else:
                root = self._get_files_metadata(project, package,
                                                '_files-expanded', link_md5)

            if root is None:
                self._cleanup_package_old_files(project, package,
                                                downloaded_files)
                return

            downloaded_files.append('_files-expanded')

        # look at all files and download what might be interesting
        for node in root.findall('entry'):
            filename = node.get('name')
            md5 = node.get('md5')
            mtime = node.get('mtime')
            size = node.get('size')
            # download .spec files
            if filename.endswith('.spec'):
                if not self._get_package_file_checked_out(
                        project, package, filename, metadata_cache, md5,
                        mtime):
                    self._get_file(project, package, filename, size, link_md5)
                downloaded_files.append(filename)

        self._cleanup_package_old_files(project, package, downloaded_files)
Example #25
    def check_project(self, project, try_again=True):
        """ Checks if the current checkout of a project is up-to-date, and queue task if necessary. """
        project_dir = os.path.join(self.dest_dir, project)
        util.safe_mkdir_p(project_dir)

        filename = os.path.join(project_dir, '_status')

        try:
            url = osc_copy.makeurl(self.conf.apiurl,
                                   ['status', 'project', project])
            length = self._download_url_to_file(url, filename)

            if length == 0:
                # metadata files should never be empty
                if try_again:
                    util.safe_unlink(filename)
                    return self.check_project(project, False)

        except (urllib.error.HTTPError, urllib.error.URLError,
                socket.error) as e:
            util.safe_unlink(filename)

            if type(e) == urllib.error.HTTPError:
                if e.code == 404:
                    print('Project %s doesn\'t exist.' % (project, ),
                          file=sys.stderr)
                elif e.code == 400:
                    # the status page doesn't always work :/
                    self.queue_checkout_project(project,
                                                primary=False,
                                                force_simple_checkout=True,
                                                no_config=True)
            elif try_again:
                self.check_project(project, False)
            else:
                print('Cannot get status of %s: %s' % (project, e),
                      file=sys.stderr)

            return

        try:
            packages_node = ET.parse(filename).getroot()
        except SyntaxError as e:
            util.safe_unlink(filename)

            if try_again:
                return self.check_project(project, False)
            else:
                print('Cannot parse status of %s: %s' % (project, e),
                      file=sys.stderr)

            return

        # We will have to remove all subdirectories that just don't belong to
        # this project anymore.
        subdirs_to_remove = [
            file for file in os.listdir(project_dir)
            if os.path.isdir(os.path.join(project_dir, file))
        ]

        # Here's what we check to know if a package needs to be checked out again:
        #  - if there's no subdir
        #  - if it's a link:
        #    - check that the md5 from the status is the xsrcmd5 from the file
        #      list
        #    - check that we have _files-expanded and that all spec files are
        #      checked out
        #  - if it's not a link: check that the md5 from the status is the
        #    srcmd5 from the file list
        for node in packages_node.findall('package'):
            name = node.get('name')
            srcmd5 = node.get('srcmd5')
            is_link = len(node.findall('link')) > 0

            try:
                subdirs_to_remove.remove(name)
            except ValueError:
                pass

            files = os.path.join(project_dir, name, '_files')
            if not os.path.exists(files):
                self.queue_checkout_package(project, name, primary=False)
                continue

            try:
                files_root = ET.parse(files).getroot()
            except SyntaxError:
                self.queue_checkout_package(project, name, primary=False)
                continue

            if is_link:
                previous_srcmd5 = files_root.get('xsrcmd5')
            else:
                previous_srcmd5 = files_root.get('srcmd5')

            if srcmd5 != previous_srcmd5:
                self.queue_checkout_package(project, name, primary=False)

            # make sure we have all spec files

            if is_link:
                # for links, we open the list of files when expanded
                files = os.path.join(project_dir, name, '_files-expanded')
                if not os.path.exists(files):
                    self.queue_checkout_package(project, name, primary=False)
                    continue

                try:
                    files_root = ET.parse(files).getroot()
                except SyntaxError:
                    self.queue_checkout_package(project, name, primary=False)
                    continue

            cont = False
            for entry in files_root.findall('entry'):
                filename = entry.get('name')
                if filename.endswith('.spec'):
                    specfile = os.path.join(project_dir, name, filename)
                    if not os.path.exists(specfile):
                        self.queue_checkout_package(project,
                                                    name,
                                                    primary=False)
                        cont = True
                        break
            if cont:
                continue

        # Remove useless subdirectories
        for subdir in subdirs_to_remove:
            shutil.rmtree(os.path.join(project_dir, subdir))

        util.safe_unlink(filename)