Example #1
def sizeof_fmt(num):
    for unit in ['B', 'KB', 'MB', 'GB']:
        if abs(num) < 1024.0:
            if unit == 'GB':
                log.error('The size of this file is bigger than 1 GB!')
                log.error('Please check')
            return str(int(round(float("%3f" % num), 0))) + "%s" % (unit)
        num /= 1024.0
    return str(int(round(float("%f" % num), 0))) + "%s" % ('Yi')
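A quick illustration of the rounding and unit selection above (hypothetical calls, not part of the original script):

print(sizeof_fmt(512))      # -> '512B'
print(sizeof_fmt(2500000))  # -> '2MB' (2500000 / 1024 / 1024 ≈ 2.4, rounded)
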
def get_level(self, stage):
    try:
        return int(LIMITS[self.queue][self.arch][self.suite][stage][0])
    except KeyError:
        log.error('No limit defined for the %s queue on %s/%s stage %s. '
                  'Returning 1', self.queue, self.suite, self.arch, stage)
        return 1
    except IndexError:
        log.critical('The limit is not in the format "(level, limit)". '
                     'I can\'t guess what you want, giving up')
        sys.exit(1)
def get_limit(self, stage):
    try:
        limit = LIMITS[self.queue][self.arch][self.suite][stage]
        limit = limit[1]
    except KeyError:
        log.error('No limit defined for the %s queue on %s/%s stage %s. '
                  'Returning 1', self.queue, self.suite, self.arch, stage)
        return 1
    except IndexError:
        log.critical('The limit is not in the format "(level, limit)". '
                     'I can\'t guess what you want, giving up')
        sys.exit(1)
    except TypeError:
        # this is the case of the default target
        if isinstance(limit, int):
            pass
        else:
            raise
    return int(limit)
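Both helpers above index a nested LIMITS mapping keyed by queue, arch, suite and stage. The error messages suggest each leaf is a (level, limit) tuple, with a plain integer allowed for the default target (the case caught by the TypeError branch). A minimal hypothetical sketch of that structure, only to illustrate the lookups:

LIMITS = {
    'reproducible': {                  # queue
        'amd64': {                     # arch
            'unstable': {              # suite
                'stage1': (1, 40),     # (level, limit)
                'default': 250,        # plain int: limit[1] raises TypeError
            },
        },
    },
}
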
def process_pkg(package, deactivate):
    if deactivate:
        _good('Deactivating notification for package ' + str(package))
        flag = 0
    else:
        _good('Activating notification for package ' + str(package))
        flag = 1

    sources_table = db_table('sources')
    update_query = sources_table.update().\
                   where(sources_table.c.name == package).\
                   values(notify_maintainer=flag)
    rows = conn_db.execute(update_query).rowcount

    if rows == 0:
        log.error(bcolors.FAIL + str(package) + ' does not exist' + bcolors.ENDC)
        sys.exit(1)
    if DEBUG:
        log.debug('Double check the change:')
        query = "SELECT * FROM sources WHERE name='{}'".format(package)
        log.debug(query_db(query))
Example #5
def load_notes():
    """
    format:
    { 'package_name': [
        {'suite': 'unstable', 'version': '0.0', 'comments': None,
         'bugs': [1234, 5678], 'issues': ['blalba','auauau']},
        {'suite': 'stretch', 'version': None, 'comments': 'strstr',
          'bugs': [], 'issues': []}],
      'package_name':<etc> }
    """
    with open(NOTES) as fd:
        original = yaml.load(fd, Loader=yaml.SafeLoader)  # newer PyYAML requires an explicit Loader
    log.info("notes loaded. There are " + str(len(original)) +
             " packages listed")
    notes = {}
    for pkg in sorted(original):
        assert isinstance(pkg, str)
        try:
            assert 'version' in original[pkg]
        except AssertionError:
            print_critical_message(pkg + ' does not include a version')
            irc_msg('The note for ' + pkg + ' does not include a version.')
        query = """SELECT s.id, s.version, s.suite
                FROM results AS r JOIN sources AS s ON r.package_id=s.id
                WHERE s.name='{pkg}' AND r.status != ''"""
        # AND s.architecture='amd64'"""
        query = query.format(pkg=pkg)
        result = query_db(query)
        if not result:
            log.info('Warning: This query produces no results: ' + query +
                     '\nThis means there is no tested ' +
                     'package with the name ' + pkg)
            try:
                irc_msg(
                    "There is problem with the note for {} (it may "
                    "have been removed from the archive). Please check {} and {}"
                    .format(pkg, os.environ['BUILD_URL'],
                            "https://tracker.debian.org/pkg/" + pkg))
            except KeyError:
                log.error(
                    'There is a problem with the note for %s - please '
                    'check.', pkg)
        else:
            notes[pkg] = []
            for suite in result:
                pkg_details = {}
                # https://image-store.slidesharecdn.com/c2c44a06-5e28-4296-8d87-419529750f6b-original.jpeg
                try:
                    if apt_pkg.version_compare(str(original[pkg]['version']),
                                               str(suite[1])) > 0:
                        continue
                except KeyError:
                    pass
                pkg_details['suite'] = suite[2]
                try:
                    pkg_details['version'] = original[pkg]['version']
                except KeyError:
                    pkg_details['version'] = ''
                pkg_details['comments'] = original[pkg]['comments'] if \
                    'comments' in original[pkg] else None
                pkg_details['bugs'] = original[pkg]['bugs'] if \
                    'bugs' in original[pkg] else []
                pkg_details['issues'] = original[pkg]['issues'] if \
                    'issues' in original[pkg] else []
                pkg_details['id'] = int(suite[0])
                log.debug('adding %s => %s', pkg, pkg_details)
                notes[pkg].append(pkg_details)

    log.info("notes checked. There are " + str(len(notes)) +
             " packages listed")
    return notes
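The NOTES file read at the top of load_notes() is YAML keyed by source package name. A minimal hypothetical entry that would satisfy the checks above (field names taken from the function, values invented):

somepackage:
  version: 1.2.3-1
  comments: short human-readable explanation
  bugs: [123456]
  issues: [some_issue_tag]
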
Example #6
    log.info('Found schema updates.')
    for update in range(current + 1, last + 1):
        log.info('Applying database update #' + str(update) + '. Queries:')
        startTime = datetime.now()
        for query in schema_updates[update]:
            log.info('\t' + query)
            query_db(query)
        log.info(
            str(len(schema_updates[update])) + ' queries executed in ' +
            str(datetime.now() - startTime))
    return True
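The update loop above assumes that schema_updates maps an integer schema version to the list of SQL statements bringing the database up to that version, roughly like this (hypothetical contents):

schema_updates = {
    2: ['ALTER TABLE sources ADD COLUMN notify_maintainer INTEGER'],
    3: ['CREATE INDEX results_package_id_idx ON results (package_id)'],
}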


if __name__ == '__main__':
    changed_created = False
    if table_exists('rb_schema'):
        if not query_db('SELECT * FROM rb_schema'):
            # table exists but there is nothing in it
            changed_created = db_create_tables()
    else:
        log.error('There is no rb_schema table in the database.')
        log.error('Will run a full db_create_tables().')
        changed_created = db_create_tables()
    changed = db_update()
    if changed or changed_created:
        log.info('Total execution time: ' +
                 str(datetime.now() -
                     datetime.strptime(now, "%Y-%m-%d-%H-%M-%S")))
    else:
        log.info('No pending updates.')
Example #7
def build_leading_text_section(section, rows, suite, arch):
    html = '<p>\n' + tab
    total = len(rows)
    count_total = int(
        query_db(queries['count_total'].params({
            'suite': suite,
            'arch': arch
        }))[0][0])
    try:
        percent = round(((total / count_total) * 100), 1)
    except ZeroDivisionError:
        log.error('Looks like there are either no tested packages or no ' +
                  'packages available at all. Maybe it\'s a new database?')
        percent = 0.0
    try:
        html += '<a href="' + section['icon_link'] + '" target="_parent">'
        no_icon_link = False
    except KeyError:
        no_icon_link = True  # to avoid closing the </a> tag below
    if section.get('icon_status'):
        html += '<img src="/static/' + section['icon_status']
        html += '" alt="reproducible icon" />'
    if not no_icon_link:
        html += '</a>'
    html += '\n' + tab
    if section.get('text') and section.get('timespan'):
        count = len(
            query_db(queries[section['query2']].params({
                'suite': suite,
                'arch': arch
            })))
        percent = round(((count / count_total) * 100), 1)
        timespan = section['timespan']
        timespan_date = timespan_date_map[timespan]
        timespan_count = int(
            query_db(queries['count_timespan'].params({
                'suite': suite,
                'arch': arch,
                'timespan_date': timespan_date
            }))[0][0])
        try:
            timespan_percent = round(((total / timespan_count) * 100), 1)
        except ZeroDivisionError:
            log.error('Looks like there are either no tested packages or no ' +
                      'packages available at all. Maybe it\'s a new database?')
            timespan_percent = 0

        html += section['text'].substitute(tot=total,
                                           percent=percent,
                                           timespan_percent=timespan_percent,
                                           timespan_count=timespan_count,
                                           count_total=count_total,
                                           count=count,
                                           suite=suite,
                                           arch=arch)
    elif section.get('text'):
        html += section['text'].substitute(tot=total,
                                           percent=percent,
                                           suite=suite,
                                           arch=arch)
    else:
        log.warning('There is no text for this section')
    html += '\n</p>\n'
    return html
def rest(scheduling_args, requester, local, suite, arch):
    "Actually schedule a package for a single suite on a single arch."

    # Shorter names
    reason = scheduling_args.message
    issue = scheduling_args.issue
    status = scheduling_args.status
    built_after = scheduling_args.after
    built_before = scheduling_args.before
    packages = scheduling_args.packages
    artifacts = scheduling_args.keep_artifacts
    notify = scheduling_args.notify
    notify_on_start = scheduling_args.notify_on_start
    dry_run = scheduling_args.dry_run

    log.info("Scheduling packages in %s/%s", arch, suite)

    ids = []
    pkgs = []

    query1 = """SELECT id FROM sources WHERE name='{pkg}' AND suite='{suite}'
                AND architecture='{arch}'"""
    query2 = """SELECT p.date_build_started
                FROM sources AS s JOIN schedule as p ON p.package_id=s.id
                WHERE p.package_id='{id}'"""
    for pkg in set(packages):
        # test whether the package actually exists
        result = query_db(query1.format(pkg=pkg, suite=suite, arch=arch))
        # tests whether the package is already building
        try:
            result2 = query_db(query2.format(id=result[0][0]))
        except IndexError:
            log.error('%sThe package %s is not available in %s/%s%s',
                      bcolors.FAIL, pkg, suite, arch, bcolors.ENDC)
            continue
        try:
            if not result2[0][0]:
                ids.append(result[0][0])
                pkgs.append(pkg)
            else:
                log.warning(bcolors.WARN + 'The package ' + pkg + ' is ' +
                            'already building, not scheduling it.' +
                            bcolors.ENDC)
        except IndexError:
            # it's not in the schedule
            ids.append(result[0][0])
            pkgs.append(pkg)

    def compose_irc_message():
        "One-shot closure to limit scope of the following local variables."
        blablabla = '✂…' if len(' '.join(pkgs)) > 257 else ''
        packages_txt = str(len(ids)) + ' packages ' if len(pkgs) > 1 else ''
        trailing = ' - artifacts will be preserved' if artifacts else ''
        trailing += ' - with irc notification' if notify else ''
        trailing += ' - notify on start too' if notify_on_start else ''

        message = requester + ' scheduled ' + packages_txt + \
            'in ' + suite + '/' + arch
        if reason:
            message += ', reason: \'' + reason + '\''
        message += ': ' + ' '.join(pkgs)[0:256] + blablabla + trailing
        return message

    info_msg = compose_irc_message()
    del compose_irc_message

    # these packages are manually scheduled, so should have high priority,
    # so schedule them in the past, so they are picked earlier :)
    # the current date is subtracted twice, so it sorts before early scheduling
    # schedule on the full hour so we can recognize them easily
    epoch = int(time.time())
    now = datetime.now()
    days = int(now.strftime('%j')) * 2
    hours = int(now.strftime('%H')) * 2
    minutes = int(now.strftime('%M'))
    time_delta = timedelta(days=days, hours=hours, minutes=minutes)
    date = (now - time_delta).strftime('%Y-%m-%d %H:%M')
    log.debug('date_scheduled = ' + date + ' time_delta = ' + str(time_delta))
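    # For example (hypothetical run): on July 1st at 12:30, '%j' gives 182 and
    # '%H' gives 12, so time_delta = 364 days + 24 hours + 30 minutes and the
    # scheduled date lands roughly one year in the past.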

    # a single person can't schedule more than 500 packages in the same day; this
    # is actually easy to bypass, but let's give some trust to the Debian people
    query = """SELECT count(*) FROM manual_scheduler
               WHERE requester = '{}' AND date_request > '{}'"""
    try:
        amount = int(
            query_db(query.format(requester, int(time.time() - 86400)))[0][0])
    except IndexError:
        amount = 0
    log.debug(requester + ' already scheduled ' + str(amount) +
              ' packages today')
    if amount + len(ids) > 500 and not local:
        log.error(
            bcolors.FAIL + 'You have exceeded the maximum number of manual ' +
            'reschedulings allowed for a day. Please ask in ' +
            '#debian-reproducible if you need to schedule more packages.' +
            bcolors.ENDC)
        sys.exit(1)

    # do the actual scheduling
    add_to_schedule = []
    update_schedule = []
    save_schedule = []
    artifacts_value = 1 if artifacts else 0
    if notify_on_start:
        do_notify = 2
    elif notify or artifacts:
        do_notify = 1
    else:
        do_notify = 0

    schedule_table = db_table('schedule')
    if ids:
        existing_pkg_ids = dict(
            query_db(
                sql.select([
                    schedule_table.c.package_id,
                    schedule_table.c.id,
                ]).where(schedule_table.c.package_id.in_(ids))))

    for id in ids:
        if id in existing_pkg_ids:
            update_schedule.append({
                'update_id': existing_pkg_ids[id],
                'package_id': id,
                'date_scheduled': date,
                'save_artifacts': artifacts_value,
                'notify': str(do_notify),
                'scheduler': requester,
            })
        else:
            add_to_schedule.append({
                'package_id': id,
                'date_scheduled': date,
                'save_artifacts': artifacts_value,
                'notify': str(do_notify),
                'scheduler': requester,
            })

        save_schedule.append({
            'package_id': id,
            'requester': requester,
            'date_request': epoch,
        })

    log.debug('Packages about to be scheduled: ' + str(add_to_schedule) +
              str(update_schedule))

    update_schedule_query = schedule_table.update().\
                            where(schedule_table.c.id == sql.bindparam('update_id'))
    insert_schedule_query = schedule_table.insert()
    insert_manual_query = db_table('manual_scheduler').insert()

    if not dry_run:
        transaction = conn_db.begin()
        if add_to_schedule:
            conn_db.execute(insert_schedule_query, add_to_schedule)
        if update_schedule:
            conn_db.execute(update_schedule_query, update_schedule)
        if save_schedule:
            conn_db.execute(insert_manual_query, save_schedule)
        transaction.commit()
    else:
        log.info('Ran with --dry-run, scheduled nothing')

    log.info(bcolors.GOOD + info_msg + bcolors.ENDC)
    if not (local
            and requester == "jenkins maintenance job") and len(ids) != 0:
        if not dry_run:
            # Always announce on -changes
            irc_msg(info_msg, 'debian-reproducible-changes')
            # Announce some messages on main channel
            if notify_on_start or artifacts:
                irc_msg(info_msg)
Example #9
from rblib.const import PGDATABASE

parser = argparse.ArgumentParser(
    description='Create new Postgres database (reproducibledb) from backup.',
    epilog='This script creates a database and populates it with the result'
    ' of a "pg_dump". It will not run if the database already exists.'
    ' Database name and database user are defined in'
    ' reproducible_common.py .')
parser.add_argument("-f",
                    "--backup-file",
                    required=True,
                    help='result of a "pg_dump"')
args, unknown_args = parser.parse_known_args()
BACKUP_FILE = args.backup_file
if not os.access(BACKUP_FILE, os.R_OK):
    log.error("Backup file does not exist.")
    sys.exit(1)

# We skip the database connection because the database
# may not exist yet, but we would like to use the constants
# available in reproducible_common.py
sys.argv.append('--skip-database-connection')
from rblib.utils import print_critical_message

# Get database defined in reproducible_common.py
# Note: this script will ONLY run on a completely new DB. The backup
# file will be used to re-create the schema and populate tables. If
# run on a database with existing information, it will error.
DB_NAME = PGDATABASE
DB_USER = '******'
from rblib.html import create_main_navigation, write_html_page


arch = 'amd64' # the arch is only relevant for link targets here
mirror = 'http://deb.debian.org/debian'

for suite in SUITES:
    remotefile = mirror + '/dists/' + suite + '/main/source/Sources.xz'
    os.makedirs('/tmp/reproducible', exist_ok=True)
    with NamedTemporaryFile(dir='/tmp/reproducible') as sources:
        log.info('Downloading sources file for ' + suite + ': ' + remotefile)
        xfile = lzma.decompress(urlopen(remotefile).read())
        if xfile:
            sources.write(xfile)
        else:
            log.error('Failed to get the ' + suite + ' sources')
            continue
        query = "SELECT s.name " + \
                "FROM results AS r JOIN sources AS s ON r.package_id=s.id " + \
                "WHERE r.status='unreproducible' AND s.suite='{suite}'"
        try:
            pkgs = [x[0] for x in query_db(query.format(suite=suite))]
        except IndexError:
            log.error('Looks like there are no unreproducible packages...')
            pkgs = []  # keep going with an empty list so dd-list below still runs
        p = Popen(('dd-list --stdin --sources ' + sources.name).split(),
                  stdout=PIPE, stdin=PIPE, stderr=PIPE)
        out, err = p.communicate(input=('\n'.join(pkgs)).encode())
        if err:
            log.error('dd-list printed some errors:\n' + err.decode())
        log.debug('dd-list output:\n' + out.decode())
Example #11
def gen_suitearch_details(package, version, suite, arch, status, spokenstatus,
                          build_date):
    eversion = strip_epoch(version)  # epoch_free_version is too long
    pkg = Package(package)
    build = pkg.builds[suite][arch]

    context = {}
    default_view = ''

    # Make notes the default default view
    notes_file = NOTES_PATH + '/' + package + '_note.html'
    notes_uri = NOTES_URI + '/' + package + '_note.html'
    if os.access(notes_file, os.R_OK):
        default_view = notes_uri

    # Get summary context
    context['status_html'] = gen_status_link_icon(status, spokenstatus, None,
                                                  suite, arch)
    context['build_date'] = build_date

    # Get diffoscope differences context
    dbd_links = get_dbd_links(package, eversion, suite, arch)
    dbd_uri = dbd_links.get('dbd_uri', '')
    if dbd_uri:
        context['dbd'] = {
            'dbd_page_uri': dbd_links['dbd_page_uri'],
            'dbdtxt_uri': dbd_links.get('dbdtxt_uri', ''),
            'dbdjson_uri': dbd_links.get('dbdjson_uri', ''),
        }
        default_view = default_view if default_view else dbd_uri

    # Get buildinfo context
    if build.buildinfo:
        context['buildinfo_uri'] = build.buildinfo.url
        default_view = default_view if default_view else build.buildinfo.url
    elif not args.ignore_missing_files and status not in \
        ('untested', 'blacklisted', 'FTBFS', 'NFU', 'depwait', '404'):
        log.critical('buildinfo not detected at ' + build.buildinfo.path)

    # Get rbuild, build2 and build diffs context
    if build.rbuild:
        context['rbuild_uri'] = build.rbuild.url
        context['rbuild_size'] = sizeof_fmt(build.rbuild.size)
        default_view = default_view if default_view else build.rbuild.url
        context['buildlogs'] = {}
        if build.build2 and build.logdiff:
            context['buildlogs']['build2_uri'] = build.build2.url
            context['buildlogs']['build2_size'] = build.build2.size
            context['buildlogs']['diff_uri'] = build.logdiff.url
        else:
            log.error('Either {} or {} is missing'.format(
                build.build2.path, build.logdiff.path))
    elif status not in ('untested', 'blacklisted') and \
         not args.ignore_missing_files:
        log.critical(
            DISTRO_URL + '/' + suite + '/' + arch + '/' + package +
            ' didn\'t produce a buildlog, even though it has been built.')

    context['has_buildloginfo'] = 'buildinfo_uri' in context or \
                                  'buildlogs' in context or \
                                  'rbuild_uri' in context

    default_view = '/untested.html' if not default_view else default_view
    suitearch_details_html = renderer.render(suitearch_details_template,
                                             context)
    return (suitearch_details_html, default_view)