Example No. 1
    # download the channel database for this repository
    update_channel_db(repo)
    # Latest OpenLoops process API version for which processes
    # are available in the repository
    latest_api_version = int(OLToolbox.import_dictionary(
        latest_api_version_url % repo)['process_api_version'])
    max_latest_api_version = max(max_latest_api_version, latest_api_version)
    # This fails if the repository does not contain processes
    # with the API of the installed OpenLoops version.
    process_dbs[repo] = OLToolbox.ProcessDB(db=(version_db_url % repo))
    # scan all repositories for collections to download
    for coll in collections:
        if coll == 'all':
            process_coll = process_dbs[repo].content.keys()
        else:
            process_coll = OLToolbox.import_list(
                (collection_url % repo) + '/' + coll, fatal=False)
        if process_coll:
            process_list.update(process_coll)
        else:
            if args.ignore:
                print 'IGNORED: process collection \'%s\' not found.' % coll
            else:
                print 'ERROR: process collection \'%s\' not found.' % coll
                sys.exit(1)

if local_api_version == max_latest_api_version:
    print 'process API version: %d' % local_api_version
elif local_api_version < max_latest_api_version:
    print 'local process API version: %d (server: %d)' % (
          local_api_version, max_latest_api_version)
    print '******************************************'
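
This snippet leans on OLToolbox.import_dictionary, whose implementation is not shown. Judging from its use here, and from the '%-25s %s' format that export_dictionary receives in Example No. 3, it reads one whitespace-separated key/value pair per line and returns them as a dict. A minimal stand-in under those assumptions (the name, the urllib2 transport and the error handling below are illustrative, not OLToolbox's actual code):

import urllib2

def import_dictionary_sketch(src, fatal=True):
    # Minimal stand-in for OLToolbox.import_dictionary: read
    # 'key value' lines from a URL and return them as a dict;
    # return None if the source is unavailable and fatal=False.
    try:
        fh = urllib2.urlopen(src)
    except (urllib2.URLError, ValueError):
        if fatal:
            raise
        return None
    result = {}
    for line in fh:
        fields = line.split(None, 1)
        if len(fields) == 2:
            result[fields[0]] = fields[1].strip()
    fh.close()
    return result

With such a helper, the API lookup above amounts to int(result['process_api_version']).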
Example No. 2
    max_latest_api_version = max(max_latest_api_version, latest_api_version)
    # This fails if the repository does not contain processes
    # with the API of the installed OpenLoops version.
    process_dbs[repo] = OLToolbox.ProcessDB(db=(version_db_url % repo))
    libname_maps[repo] = OLToolbox.import_dictionary(libmappins_url % repo,
                                                     fatal=False)
    # scan all repositories for collections to download
    # Note that when the downloader is invoked by the build script,
    # the collections will already be resolved.
    for coll in collections:
        if coll == 'all.coll' or coll == repo_name + '.coll':
            process_coll = list(process_dbs[repo].content.keys())
        else:
            if first_repo:
                # check if the collection is available locally
                process_coll = OLToolbox.import_list(coll, fatal=False)
            else:
                process_coll = None
            if process_coll is None:
                # check if the collection is available in the repository
                process_coll = OLToolbox.import_list(
                    (collection_url % repo) + '/' + coll, fatal=False)
        if process_coll is not None:
            found_colls.add(coll)
        if process_coll:
            process_list.update(process_coll)
    first_repo = False

not_found_colls = [coll for coll in collections if coll not in found_colls]
if not_found_colls:
    if args.ignore:
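
Note the two-stage lookup above: on the first repository each collection file is tried locally before falling back to the repository URL. Both paths go through OLToolbox.import_list, which evidently returns a list of entries, or None when the source is missing and fatal=False. A minimal stand-in for the local-file branch, consistent with that behaviour (the name and details are assumptions):

import os

def import_list_sketch(src, fatal=True):
    # Minimal stand-in for the local-file case of OLToolbox.import_list:
    # return the non-empty lines of a text file as a list,
    # or None if the file is missing and fatal=False.
    if not os.path.isfile(src):
        if fatal:
            raise IOError('list not found: %s' % src)
        return None
    with open(src, 'r') as fh:
        return [line.strip() for line in fh if line.strip()]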
Example No. 3
def upload_process(process, db, ch_db, api):
    """Compress a process code folder and upload the archive
    to a repository on the web server."""
    # need: repository_path, deprecated_path, backup_old_processes
    print '- upload process:', process, '...',
    sys.stdout.flush()
    old_date, old_hash, old_descr = db.get(process, (None, None, None))
    process_dir = os.path.join(config['process_src_dir'], process)
    process_version_file = os.path.join(process_dir, 'version.info')
    local_process_archive = process_dir + '.tar.gz'
    process_version = OLToolbox.import_dictionary(process_version_file,
                                                  fatal=False)

    server_process_archive = os.path.join(repository_path, process + '.tar.gz')
    local_process_definition = os.path.join(config['process_def_dir'],
                                            process + '.m')
    server_process_definition = os.path.join(repository_path, process + '.m')
    server_backup_archive = os.path.join(
        deprecated_path,
        str(api) + '_' + process + '_' + str(old_date) + '_' + str(old_hash) +
        '.tar.gz')
    server_backup_definition = os.path.join(
        deprecated_path,
        str(api) + '_' + process + '_' + str(old_date) + '_' + str(old_hash) +
        '.m')

    if not process_version:
        if args.ignore:
            print 'IGNORED: not available'
            return
        else:
            print 'ERROR: not available'
            sys.exit(1)
    to_upload_api = int(process_version['process_api_version'])
    old_local_hash = process_version.get('hash', None)
    old_local_date = process_version.get('date', None)

    if to_upload_api != api:
        if args.ignore:
            print 'IGNORED: process to upload does not match installed',
            print '         OpenLoops version (process: %d, OpenLoops: %d)' % (
                to_upload_api, api)
            return
        else:
            print 'ERROR: process to upload does not match installed',
            print '       OpenLoops version (process: %d, OpenLoops: %d)' % (
                to_upload_api, api)
            sys.exit(1)

    if old_local_hash:
        # the local process was downloaded or uploaded before
        if old_local_hash == old_hash:
            print 'skipped: is up-to-date'
            return
        elif old_date is not None and (time.strptime(
                old_local_date, OLToolbox.timeformat) < time.strptime(
                    old_date, OLToolbox.timeformat)):
            print 'skipped: process on server is newer'
            print '         (local: %s, server: %s)' % (old_local_date,
                                                        old_date)
            return

    if backup_old_processes and old_hash is not None:
        try:
            os.rename(server_process_archive, server_backup_archive)
        except OSError:
            print '[process backup failed]',
            sys.stdout.flush()
        try:
            os.rename(server_process_definition, server_backup_definition)
        except OSError:
            print '[definition backup failed]',
            sys.stdout.flush()

    # create process archive
    archive = tarfile.open(local_process_archive, 'w:gz')
    archive.add(process_dir, arcname=process)
    archive.close()
    # calculate archive hash and get upload time
    with open(local_process_archive, 'rb') as fh:
        archive_hash = hashlib.md5(fh.read()).hexdigest()
    upload_date = time.strftime(OLToolbox.timeformat)
    # store hash and upload time in local process directory
    process_version['date'] = upload_date
    process_version['hash'] = archive_hash
    OLToolbox.export_dictionary(process_version_file,
                                process_version,
                                form='%-25s %s')
    # get process description from process definition file
    with open(local_process_definition, 'r') as fh:
        description = OLToolbox.ProcessDB.no_description
        for line in fh:
            line = line.strip()
            if line.startswith('(*') and line.endswith('*)'):
                line = line[2:-2].strip()
                if line.startswith('description'):
                    description = line.split('=', 1)[1].strip()
                    break
    # update process database
    db.update({process: (upload_date, archive_hash, description)})
    # update channel database
    info_options = OLToolbox.import_list(
        os.path.join(process_dir, 'info_' + process + '.txt'))
    info_options = [opt for opt in info_options if opt.startswith('options ')]
    if info_options:
        info_options = info_options[0].split()[1:]
    else:
        info_options = []
    info_files = OLToolbox.import_list(
        os.path.join(process_dir, "process_definition", "subprocesses.list"))
    info_files = [
        os.path.join(process_dir, "info_" + proc + ".txt")
        for proc in info_files
    ]
    info_files_extra = OLToolbox.import_list(
        os.path.join(process_dir, "process_definition",
                     "subprocesses_extra.list"))
    info_files_extra = [
        os.path.join(process_dir, "info_" + proc + ".txt")
        for proc in info_files_extra
    ]
    channels = []
    for inf in info_files:
        channels.extend([
            line.split() + info_options for line in OLToolbox.import_list(inf)
        ])
    channels.sort(key=lambda el: el[1])
    channels_extra = []
    for inf in info_files_extra:
        channels_extra.extend([
            line.split() + info_options for line in OLToolbox.import_list(inf)
        ])
    channels_extra.sort(key=lambda el: el[1])
    ch_db.update({process: channels + channels_extra})
    # upload process archive and definition, delete temporary local archive
    shutil.copyfile(local_process_archive, server_process_archive)
    os.remove(local_process_archive)
    shutil.copyfile(local_process_definition, server_process_definition)
    print 'done'
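
upload_process reads several module-level names (config, repository_path, deprecated_path, backup_old_processes) that the surrounding script defines before calling it. A hypothetical invocation, in which every path, file name and the API number are chosen purely for illustration:

# Illustrative setup only; the real script derives these values
# from its configuration and command-line arguments.
config = {'process_src_dir': 'process_src',
          'process_def_dir': 'process_def'}
repository_path = '/srv/repo/processes'
deprecated_path = '/srv/repo/deprecated'
backup_old_processes = True
version_db_file = 'processes.db'
channel_db_file = 'channels.db'

process_db = OLToolbox.ProcessDB(db=version_db_file)
channel_db = OLToolbox.ChannelDB(db=channel_db_file)
api = 2  # installed process API version (illustrative)

upload_process('ppll', process_db, channel_db, api)
process_db.export_db(version_db_file)
channel_db.export_db(channel_db_file)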
Example No. 4
        os.mkdir(repository_path)
        process_db = OLToolbox.ProcessDB()
        channel_db = OLToolbox.ChannelDB()
        process_db.export_db(version_db_file)
        channel_db.export_db(channel_db_file)
    else:
        process_db = OLToolbox.ProcessDB(db=version_db_file)
        channel_db = OLToolbox.ChannelDB(db=channel_db_file)

for coll in collections:
    # Add content of collections to the process list.
    # The special collection 'all' selects all processes in the repository.
    if coll == 'all':
        process_coll = process_db.content.keys()
    else:
        process_coll = OLToolbox.import_list(
            os.path.join(collection_path, coll), fatal=False)
    if process_coll:
        process_list.update(process_coll)
    else:
        if args.ignore:
            print 'IGNORED: process collection \'%s\' not found.' % coll
        else:
            print 'ERROR: process collection \'%s\' not found.' % coll
            sys.exit(1)

for process in process_list:
    if args.delete:
        if args.api > 0:
            delete_api = args.api
        else:
            delete_api = latest_api_version
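
Across these examples OLToolbox.ProcessDB is used through a small, consistent surface: a db= constructor argument, a content dict, get and update, export_db, and the class attribute no_description. A minimal stand-in consistent with that usage, assuming a one-entry-per-line on-disk format (the actual OLToolbox format may differ):

class ProcessDBSketch(object):
    # Minimal stand-in for OLToolbox.ProcessDB: maps process names
    # to (date, hash, description) tuples, stored one per line.
    no_description = '(no description)'

    def __init__(self, db=None):
        self.content = {}
        if db is not None:
            with open(db, 'r') as fh:
                for line in fh:
                    fields = line.rstrip('\n').split(None, 3)
                    if len(fields) == 4:
                        self.content[fields[0]] = tuple(fields[1:])

    def get(self, process, default=None):
        return self.content.get(process, default)

    def update(self, entries):
        self.content.update(entries)

    def export_db(self, db):
        with open(db, 'w') as fh:
            for name in sorted(self.content):
                fh.write('%s %s %s %s\n' % ((name,) + self.content[name]))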