Example #1
def create_repository(repo, secret=False):
    if secret:
        import string
        import random
        seed_chars = string.ascii_lowercase + string.digits
        repo = (repo + '_' +
                ''.join(random.choice(seed_chars) for n in range(16)))
    repo_dir = os.path.join(config['local_server_path'], repo)
    process_dir = os.path.join(config['local_server_path'], repo, 'processes')
    deprecated_dir = os.path.join(config['local_server_path'], repo,
                                  'deprecated')
    collection_dir = os.path.join(config['local_server_path'], repo,
                                  'collections')
    latest_api_version_file = os.path.join(process_dir, 'latest_version')
    if not os.path.isdir(repo_dir):
        os.mkdir(repo_dir)
    if not os.path.isdir(process_dir):
        os.mkdir(process_dir)
    if not os.path.isdir(deprecated_dir):
        os.mkdir(deprecated_dir)
    if not os.path.isdir(collection_dir):
        os.mkdir(collection_dir)
    OLToolbox.export_dictionary(latest_api_version_file,
                                {'process_api_version': 0})
    return repo
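
The only difference between a public and a "secret" repository name in the example above is a random 16-character suffix appended to the repository name. The snippet below isolates that step so it can be tried on its own; make_secret_name is a stand-in helper, not part of the original script.

import random
import string

def make_secret_name(repo):
    # same character pool and suffix length as in create_repository above
    seed_chars = string.ascii_lowercase + string.digits
    return repo + '_' + ''.join(random.choice(seed_chars) for _ in range(16))

print(make_secret_name('myrepo'))  # e.g. myrepo_k3f0q9x2b7m1d5z8
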
Example #2
def create_repository(repo, secret=False):
    if secret:
        import string
        import random
        seed_chars = string.ascii_lowercase + string.digits
        repo = (repo + '_' +
                ''.join(random.choice(seed_chars) for n in range(16)))
    repo_dir = os.path.join(config['local_server_path'], repo)
    process_dir = os.path.join(config['local_server_path'],
                               repo, 'processes')
    deprecated_dir = os.path.join(config['local_server_path'],
                                  repo, 'deprecated')
    collection_dir = os.path.join(config['local_server_path'],
                                  repo, 'collections')
    latest_api_version_file = os.path.join(process_dir, 'latest_version')
    if not os.path.isdir(repo_dir):
        os.mkdir(repo_dir)
    if not os.path.isdir(process_dir):
        os.mkdir(process_dir)
    if not os.path.isdir(deprecated_dir):
        os.mkdir(deprecated_dir)
    if not os.path.isdir(collection_dir):
        os.mkdir(collection_dir)
    OLToolbox.export_dictionary(latest_api_version_file,
                                {'process_api_version': 0})
    return repo
Example #3
def update_channel_db(repo):
    # get repository name of secret repository
    repo_name = OLToolbox.repo_name(repo)
    local_channel_file = channel_db_file % repo_name
    remote_channel_url = channel_db_url % repo
    if os.path.isfile(local_channel_file):
        fh = open(local_channel_file)
        local_hash = fh.readline().split()[0]
        fh.close()
    else:
        local_hash = None
    try:
        if remote_channel_url.startswith('/'):
            rfh = open(remote_channel_url, 'rb')
        else:
            rfh = urlopen(remote_channel_url)
    except (URLError, IOError) as e:
        print('Warning: Channel database update for repository ' + repo_name +
              ' failed (' + str(e) + '). Skip this repository.')
        return False
    hash_line = rfh.readline().decode()
    if local_hash != hash_line.split()[0]:
        local_hash = hashlib.md5()
        tmp_file = local_channel_file + '.~' + str(os.getpid())
        lfh = open(tmp_file, 'w')
        lfh.write(hash_line.strip() + '  ' +
                  time.strftime(OLToolbox.timeformat) + '\n')
        for line in rfh:
            lfh.write(line.decode())
            local_hash.update(line.strip())
        lfh.close()
        local_hash = local_hash.hexdigest()
        if local_hash == hash_line.split()[0]:
            os.rename(tmp_file, local_channel_file)
            print('updated channel database for repository', repo_name)
        else:
            print('ERROR: downloaded channel database inconsistent ' +
                  'for repository ' + repo_name)
            sys.exit(1)
    rfh.close()
    return True
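
The first line of the channel database carries an MD5 digest of the remaining lines, which update_channel_db recomputes before accepting the download. A minimal, self-contained sketch of that check; the sample lines are made up, and the server-side layout is an assumption inferred from the client code above.

import hashlib

body = [b'channel_a 1 2\n', b'channel_b 3 4\n']   # made-up database lines

# server side (assumption): the first line stores the MD5 of the body lines
digest = hashlib.md5()
for line in body:
    digest.update(line.strip())
hash_line = digest.hexdigest()

# client side, mirroring the verification in update_channel_db above
check = hashlib.md5()
for line in body:
    check.update(line.strip())
assert check.hexdigest() == hash_line.split()[0]
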
Example #4
def update_channel_db(repo):
    # get repository name of secret repository
    repo_name = OLToolbox.repo_name(repo)
    local_channel_file = channel_db_file % repo_name
    remote_channel_url = channel_db_url % repo
    if os.path.isfile(local_channel_file):
        fh = open(local_channel_file)
        local_hash = fh.readline().split()[0]
        fh.close()
    else:
        local_hash = None
    try:
        rfh = urllib2.urlopen(remote_channel_url)
    except urllib2.HTTPError:
        print ('*** Channel database update for repository ' + repo_name +
               ' failed ***')
        sys.exit(1)
    hash_line = rfh.readline()
    if local_hash != hash_line.split()[0]:
        local_hash = hashlib.md5()
        tmp_file = local_channel_file + '.~' + str(os.getpid())
        lfh = open(tmp_file, 'w')
        lfh.write(hash_line.strip() + '  ' +
                  time.strftime(OLToolbox.timeformat) + '\n')
        for line in rfh:
            lfh.write(line)
            local_hash.update(line.strip())
        lfh.close()
        local_hash = local_hash.hexdigest()
        if local_hash == hash_line.split()[0]:
            os.rename(tmp_file, local_channel_file)
            print 'updated channel database for repository', repo_name
        else:
            print ('ERROR: downloaded channel database inconsistent ' +
                   'for repository ' + repo_name)
            sys.exit(1)
    rfh.close()
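
Both variants of update_channel_db write the downloaded database to a PID-suffixed temporary file and only rename it into place after the digest check passes, so an interrupted or corrupted download never overwrites the existing database. A generic sketch of that pattern; atomic_write is a stand-in name.

import os

def atomic_write(path, data):
    # write to a temporary file next to the target ...
    tmp = path + '.~' + str(os.getpid())
    with open(tmp, 'w') as fh:
        fh.write(data)
    # ... and only move it into place once the write succeeded
    # (os.rename is atomic within one filesystem on POSIX)
    os.rename(tmp, path)
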
Example #5
def download(process, dbs):
    # download process if one of the following conditions is met:
    # - the process API of the installed OpenLoops version
    #   differs from the one of the installed process;
    # - the version of the installed process (if any)
    #   differs from the version available on the server;
    # - download is forced by '--force' option;
    print '- process:', process, '...',
    local_process_dir = os.path.join(config['process_src_dir'], process)
    version_installed = OLToolbox.import_dictionary(
        os.path.join(local_process_dir, 'version.info'), fatal = False)
    if version_installed is None:
        version_installed = {}
    try:
        api_installed = int(version_installed.get('process_api_version', None))
    except (AttributeError, TypeError):
        api_installed = None
    hash_installed = version_installed.get('hash', None)
    date_installed = version_installed.get('date', None)
    available = [(cont.content[process][0], cont.content[process][1], repo)
                 for repo, cont in dbs.items() if process in cont.content]
    if not available:
        if args.ignore:
            print 'IGNORED: not available (installed: %s)' % version_installed
            return
        else:
            print 'ERROR: not available (installed: %s)' % version_installed
            sys.exit(1)
    # Only download, if the hash of the process to download differs
    # from the hash of the installed process, unless --force used.
    if not args.force:
        available = [src for src in available if src[1] != hash_installed]
    if not available:
        # the process is already available locally and is up-to-date.
        print 'skipped: is up-to-date'
        return
    # In particular when --force is used,
    # select the process which was uploaded last.
    available = sorted(
        available,
        key=lambda src: time.strptime(src[0], OLToolbox.timeformat))[-1]
    remote_archive = ((repository_url % available[2]) +
                      '/' + process + '.tar.gz')
    local_archive = os.path.join(local_process_dir + '.tar.gz')

    # download the process
    print 'download ...',
    try:
        rf = urllib2.urlopen(remote_archive)
    except urllib2.HTTPError:
        print '*** DOWNLOAD FAILED ***'
        if args.ignore:
            return
        else:
            sys.exit(1)
    lf = open(local_archive, 'wb')
    lf.write(rf.read())
    rf.close()
    lf.close()
    print 'extract ...',
    # remove target directory if it already exists
    try:
        shutil.rmtree(local_process_dir)
    except:
        pass
    # extract the process code from the archive
    untar(local_archive, config['process_src_dir'])
    # remove the local archive
    os.remove(local_archive)
    # store date and hash in the local process version.info
    process_version_file = os.path.join(local_process_dir, 'version.info')
    process_version = OLToolbox.import_dictionary(
        process_version_file, fatal=True)
    process_version['date'] = available[0]
    process_version['hash'] = available[1]
    OLToolbox.export_dictionary(process_version_file, process_version,
                                form = '%-25s %s')
    print 'done'
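
When several repositories (or --force) leave more than one candidate, download keeps the entry with the latest upload date by sorting on time.strptime. An isolated sketch with made-up entries; the date layout used here is an assumption, since OLToolbox.timeformat is defined elsewhere.

import time

timeformat = '%Y-%m-%d %H:%M:%S'   # assumed stand-in for OLToolbox.timeformat
available = [('2023-01-05 10:00:00', 'aaa111', 'public'),    # (date, hash, repo)
             ('2024-02-01 09:30:00', 'bbb222', 'private')]
latest = sorted(available,
                key=lambda src: time.strptime(src[0], timeformat))[-1]
print(latest)                      # the 2024 entry wins
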
Example #6
                                form = '%-25s %s')
    print 'done'


process_dbs = {}
max_latest_api_version = 0

if not os.path.isdir(config['process_lib_dir']):
    os.mkdir(config['process_lib_dir'])

for repo in config['process_repositories']:
    # download the channel database for this repository
    update_channel_db(repo)
    # Latest OpenLoops process API version for which processes
    # are available in the repository
    latest_api_version = int(OLToolbox.import_dictionary(
        latest_api_version_url % repo)['process_api_version'])
    max_latest_api_version = max(max_latest_api_version, latest_api_version)
    # This fails if the repository does not contain processes
    # with the API of the installed OpenLoops version.
    process_dbs[repo] = OLToolbox.ProcessDB(db=(version_db_url % repo))
    # scan all repositories for collections to download
    for coll in collections:
        if coll == 'all':
            process_coll = process_dbs[repo].content.keys()
        else:
            process_coll = OLToolbox.import_list(
                (collection_url % repo) + '/' + coll, fatal = False)
        if process_coll:
            process_list.update(process_coll)
        else:
            if args.ignore:
Example #7
def download(process, dbs, libmaps):
    # download process if one of the following conditions is met:
    # - the process API of the installed OpenLoops version
    #   differs from the one of the installed process;
    # - the version of the installed process (if any)
    #   differs from the version available on the server;
    # - download is forced by '--force' option;
    print('- process:', process, '...', end=' ')
    sys.stdout.flush()
    available = []
    for repo, cont in dbs.items():
        mapped_process = process
        if libmaps[repo] and process in libmaps[repo]:
            mapped_process = libmaps[repo][process]
            if mapped_process in downloaded.values():
                downloaded[process] = mapped_process
                print('mapped to', mapped_process, '(already downloaded)')
                return
            print('does not exist; downloading',
                  mapped_process,
                  'instead ...',
                  end=' ')
            sys.stdout.flush()
        if mapped_process in cont.content:
            # (process, date, hash, repo)
            available.append((mapped_process, cont.content[mapped_process][0],
                              cont.content[mapped_process][1], repo))
        elif mapped_process != process:
            print('\nOops! There seems to be something wrong with the ' +
                  'library mapping table. Please contact the authors.')
    # Order processes by date: latest last (to be used)
    available = sorted(
        available, key=lambda src: time.strptime(src[1], OLToolbox.timeformat))
    mapped_process = process
    if available:
        mapped_process = available[-1][0]
        downloaded[process] = mapped_process
    local_process_dir = os.path.join(config['process_src_dir'], mapped_process)
    version_installed = OLToolbox.import_dictionary(os.path.join(
        local_process_dir, 'version.info'),
                                                    fatal=False)
    if version_installed is None:
        version_installed = {}
    try:
        api_installed = int(version_installed.get('process_api_version', None))
    except (AttributeError, TypeError):
        api_installed = None
    hash_installed = version_installed.get('hash', None)
    date_installed = version_installed.get('date', None)
    if not available:
        if args.ignore:
            print('IGNORED: not available (installed: %s)' % version_installed)
            return
        else:
            print('ERROR: not available (installed: %s)' % version_installed)
            sys.exit(1)
    # Only download, if the hash of the process to download differs
    # from the hash of the installed process, unless --force used.
    if not args.force:
        available = [src for src in available if src[2] != hash_installed]
    if not available:
        # the process is already available locally and is up-to-date.
        print('skipped: is up-to-date')
        return
    # Select the process which was uploaded last.
    available = available[-1]
    remote_archive = ((repository_url % available[3]) + '/' + available[0] +
                      '.tar.gz')
    local_archive = os.path.join(local_process_dir + '.tar.gz')
    # download the process
    print('download from repository: ' + available[3] + '...', end=' ')
    sys.stdout.flush()
    try:
        if remote_archive.startswith('/'):
            rf = open(remote_archive, 'rb')
        else:
            rf = urlopen(remote_archive)
    except (URLError, IOError):
        print('*** DOWNLOAD FAILED ***')
        if args.ignore:
            return
        else:
            sys.exit(1)
    lf = open(local_archive, 'wb')
    lf.write(rf.read())
    rf.close()
    lf.close()
    print('extract ...', end=' ')
    sys.stdout.flush()
    # remove target directory if it already exists
    try:
        shutil.rmtree(local_process_dir)
    except:
        pass
    # extract the process code from the archive
    untar(local_archive, config['process_src_dir'])
    # remove the local archive
    os.remove(local_archive)
    # store date and hash in the local process version.info
    process_version_file = os.path.join(local_process_dir, 'version.info')
    process_version = OLToolbox.import_dictionary(process_version_file,
                                                  fatal=True)
    process_version['date'] = available[1]
    process_version['hash'] = available[2]
    OLToolbox.export_dictionary(process_version_file,
                                process_version,
                                form='%-25s %s')
    print('done')
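
Example #7 adds a per-repository library-name mapping and a "downloaded" cache, so that a process shipped under a different name is fetched once and reused afterwards. The sketch below isolates that lookup; the mapping entry and the resolve helper are hypothetical.

libmap = {'ppll': 'ppll_ew'}   # hypothetical mapping: request 'ppll', fetch 'ppll_ew'
downloaded = {}                # process requested -> process actually fetched

def resolve(process):
    target = libmap.get(process, process)
    already = target in downloaded.values()
    downloaded[process] = target
    return target, already

print(resolve('ppll'))         # ('ppll_ew', False): needs to be downloaded
print(resolve('ppll'))         # ('ppll_ew', True): reuse the earlier download
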
Example #8
                                process_version,
                                form='%-25s %s')
    print('done')


process_dbs = {}
libname_maps = {}
max_latest_api_version = 0

if not os.path.isdir(config['process_lib_dir']):
    os.mkdir(config['process_lib_dir'])

first_repo = True
found_colls = set()
for repo in config['process_repositories']:
    repo_name = OLToolbox.repo_name(repo)
    # download the channel database for this repository
    if not update_channel_db(repo):
        continue
    # Latest OpenLoops process API version for which processes
    # are available in the repository
    latest_api_version = int(
        OLToolbox.import_dictionary(latest_api_version_url %
                                    repo)['process_api_version'])
    max_latest_api_version = max(max_latest_api_version, latest_api_version)
    # This fails if the repository does not contain processes
    # with the API of the installed OpenLoops version.
    process_dbs[repo] = OLToolbox.ProcessDB(db=(version_db_url % repo))
    libname_maps[repo] = OLToolbox.import_dictionary(libmappins_url % repo,
                                                     fatal=False)
    # scan all repositories for collections to download
Example #9
def download(process, dbs):
    # download process if one of the following conditions is met:
    # - the process API of the installed OpenLoops version
    #   differs from the one of the installed process;
    # - the version of the installed process (if any)
    #   differs from the version available on the server;
    # - download is forced by '--force' option;
    print '- process:', process, '...',
    local_process_dir = os.path.join(config['process_src_dir'], process)
    version_installed = OLToolbox.import_dictionary(os.path.join(
        local_process_dir, 'version.info'),
                                                    fatal=False)
    if version_installed is None:
        version_installed = {}
    try:
        api_installed = int(version_installed.get('process_api_version', None))
    except (AttributeError, TypeError):
        api_installed = None
    hash_installed = version_installed.get('hash', None)
    date_installed = version_installed.get('date', None)
    available = [(cont.content[process][0], cont.content[process][1], repo)
                 for repo, cont in dbs.items() if process in cont.content]
    if not available:
        if args.ignore:
            print 'IGNORED: not available (installed: %s)' % version_installed
            return
        else:
            print 'ERROR: not available (installed: %s)' % version_installed
            sys.exit(1)
    # Only download, if the hash of the process to download differs
    # from the hash of the installed process, unless --force used.
    if not args.force:
        available = [src for src in available if src[1] != hash_installed]
    if not available:
        # the process is already available locally and is up-to-date.
        print 'skipped: is up-to-date'
        return
    # In particular when --force is used,
    # select the process which was uploaded last.
    available = sorted(
        available,
        key=lambda src: time.strptime(src[0], OLToolbox.timeformat))[-1]
    remote_archive = ((repository_url % available[2]) + '/' + process +
                      '.tar.gz')
    local_archive = os.path.join(local_process_dir + '.tar.gz')

    # download the process
    print 'download ...',
    try:
        rf = urllib2.urlopen(remote_archive)
    except urllib2.HTTPError:
        print '*** DOWNLOAD FAILED ***'
        if args.ignore:
            return
        else:
            sys.exit(1)
    lf = open(local_archive, 'wb')
    lf.write(rf.read())
    rf.close()
    lf.close()
    print 'extract ...',
    # remove target directory if it already exists
    try:
        shutil.rmtree(local_process_dir)
    except:
        pass
    # extract the process code from the archive
    untar(local_archive, config['process_src_dir'])
    # remove the local archive
    os.remove(local_archive)
    # store date and hash in the local process version.info
    process_version_file = os.path.join(local_process_dir, 'version.info')
    process_version = OLToolbox.import_dictionary(process_version_file,
                                                  fatal=True)
    process_version['date'] = available[0]
    process_version['hash'] = available[1]
    OLToolbox.export_dictionary(process_version_file,
                                process_version,
                                form='%-25s %s')
    print 'done'
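
All download and upload variants keep per-process bookkeeping in a version.info file through OLToolbox.import_dictionary/export_dictionary with the format string '%-25s %s'. The helpers below are stand-ins that only illustrate what such a key/value file plausibly looks like; they are not the real OLToolbox API.

def export_dictionary(path, dct, form='%-25s %s'):
    # one left-padded "key value" pair per line, as the format string suggests
    with open(path, 'w') as fh:
        for key, val in dct.items():
            fh.write((form % (key, val)).rstrip() + '\n')

def import_dictionary(path):
    result = {}
    with open(path) as fh:
        for line in fh:
            if line.strip():
                key, _, val = line.partition(' ')
                result[key] = val.strip()
    return result

export_dictionary('version.info', {'process_api_version': 1, 'hash': 'deadbeef'})
print(import_dictionary('version.info'))
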
Example #10
    print 'done'


process_dbs = {}
max_latest_api_version = 0

if not os.path.isdir(config['process_lib_dir']):
    os.mkdir(config['process_lib_dir'])

for repo in config['process_repositories']:
    # download the channel database for this repository
    update_channel_db(repo)
    # Latest OpenLoops process API version for which processes
    # are available in the repository
    latest_api_version = int(
        OLToolbox.import_dictionary(latest_api_version_url %
                                    repo)['process_api_version'])
    max_latest_api_version = max(max_latest_api_version, latest_api_version)
    # This fails if the repository does not contain processes
    # with the API of the installed OpenLoops version.
    process_dbs[repo] = OLToolbox.ProcessDB(db=(version_db_url % repo))
    # scan all repositories for collections to download
    for coll in collections:
        if coll == 'all':
            process_coll = process_dbs[repo].content.keys()
        else:
            process_coll = OLToolbox.import_list(
                (collection_url % repo) + '/' + coll, fatal=False)
        if process_coll:
            process_list.update(process_coll)
        else:
            if args.ignore:
Example #11
def upload_process(process, db, ch_db, api):
    """Compress a process code folder and upload the archive
    to a repository on the web server."""
    # need: repository_path, deprecated_path, backup_old_processes
    print '- upload process:', process, '...',
    sys.stdout.flush()
    old_date, old_hash, old_descr = db.get(process, (None, None, None))
    process_dir = os.path.join(config['process_src_dir'], process)
    process_version_file = os.path.join(process_dir, 'version.info')
    local_process_archive = process_dir + '.tar.gz'
    process_version = OLToolbox.import_dictionary(process_version_file,
                                                  fatal=False)

    server_process_archive = os.path.join(repository_path, process + '.tar.gz')
    local_process_definition = os.path.join(config['process_def_dir'],
                                            process + '.m')
    server_process_definition = os.path.join(repository_path, process + '.m')
    server_backup_archive = os.path.join(
        deprecated_path,
        str(api) + '_' + process + '_' + str(old_date) + '_' + str(old_hash) +
        '.tar.gz')
    server_backup_definition = os.path.join(
        deprecated_path,
        str(api) + '_' + process + '_' + str(old_date) + '_' + str(old_hash) +
        '.m')

    if not process_version:
        if args.ignore:
            print 'IGNORED: not available'
            return
        else:
            print 'ERROR: not available'
            sys.exit(1)
    to_upload_api = int(process_version['process_api_version'])
    old_local_hash = process_version.get('hash', None)
    old_local_date = process_version.get('date', None)

    if to_upload_api != api:
        if args.ignore:
            print 'IGNORED: process to upload does not match installed',
            print '         OpenLoops version (process: %d, OpenLoops: %d)' % (
                to_upload_api, api)
            return
        else:
            print 'ERROR: process to upload does not match installed',
            print '       OpenLoops version (process: %d, OpenLoops: %d)' % (
                to_upload_api, api)
            sys.exit(1)

    if old_local_hash:
        # the local process was downloaded or uploaded before
        if old_local_hash == old_hash:
            print 'skipped: is up-to-date'
            return
        elif old_date is not None and (time.strptime(
                old_local_date, OLToolbox.timeformat) < time.strptime(
                    old_date, OLToolbox.timeformat)):
            print 'skipped: process on server is newer'
            print '         (local: %s, server: %s)' % (old_local_date,
                                                        old_date)
            return

    if backup_old_processes and old_hash is not None:
        try:
            os.rename(server_process_archive, server_backup_archive)
        except OSError:
            print '[process backup failed]',
            sys.stdout.flush()
        try:
            os.rename(server_process_definition, server_backup_definition)
        except OSError:
            print '[definition backup failed]',
            sys.stdout.flush()

    # create process archive
    archive = tarfile.open(local_process_archive, 'w:gz')
    archive.add(process_dir, arcname=process)
    archive.close()
    # calculate archive hash and get upload time
    with open(local_process_archive, 'rb') as fh:
        archive_hash = hashlib.md5(fh.read()).hexdigest()
    upload_date = time.strftime(OLToolbox.timeformat)
    # store hash and upload time in local process directory
    process_version['date'] = upload_date
    process_version['hash'] = archive_hash
    OLToolbox.export_dictionary(process_version_file,
                                process_version,
                                form='%-25s %s')
    # get process description from process definition file
    with open(local_process_definition, 'r') as fh:
        description = OLToolbox.ProcessDB.no_description
        for line in fh:
            line = line.strip()
            if line.startswith('(*') and line.endswith('*)'):
                line = line[2:-2].strip()
                if line.startswith('description'):
                    description = line.split('=', 1)[1].strip()
                    break
    # update process database
    db.update({process: (upload_date, archive_hash, description)})
    # update channel database
    info_options = OLToolbox.import_list(
        os.path.join(process_dir, 'info_' + process + '.txt'))
    info_options = [opt for opt in info_options if opt.startswith('options ')]
    if info_options:
        info_options = info_options[0].split()[1:]
    else:
        info_options = []
    info_files = OLToolbox.import_list(
        os.path.join(process_dir, "process_definition", "subprocesses.list"))
    info_files = [
        os.path.join(process_dir, "info_" + proc + ".txt")
        for proc in info_files
    ]
    info_files_extra = OLToolbox.import_list(
        os.path.join(process_dir, "process_definition",
                     "subprocesses_extra.list"))
    info_files_extra = [
        os.path.join(process_dir, "info_" + proc + ".txt")
        for proc in info_files_extra
    ]
    channels = []
    for inf in info_files:
        channels.extend([
            line.split() + info_options for line in OLToolbox.import_list(inf)
        ])
    channels.sort(key=lambda el: el[1])
    channels_extra = []
    for inf in info_files_extra:
        channels_extra.extend([
            line.split() + info_options for line in OLToolbox.import_list(inf)
        ])
    channels_extra.sort(key=lambda el: el[1])
    ch_db.update({process: channels + channels_extra})
    # upload process archive and definition, delete temporary local archive
    shutil.copyfile(local_process_archive, server_process_archive)
    os.remove(local_process_archive)
    shutil.copyfile(local_process_definition, server_process_definition)
    print 'done'
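
upload_process pulls the human-readable description from the first Mathematica-style comment of the form (* description = ... *) in the process definition file. The same parsing step, lifted out so it can be tested on sample input; read_description and the default string are stand-ins for the in-script logic and OLToolbox.ProcessDB.no_description.

def read_description(lines, default='<no description>'):
    for line in lines:
        line = line.strip()
        if line.startswith('(*') and line.endswith('*)'):
            line = line[2:-2].strip()
            if line.startswith('description'):
                return line.split('=', 1)[1].strip()
    return default

sample = ['(* description = pp -> l+ l- at NLO QCD *)',
          'SubProcesses = {...};']
print(read_description(sample))   # pp -> l+ l- at NLO QCD
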
Example #12
# OpenLoops process API version
local_api_version = config['process_api_version']

repository_path = os.path.join(config['local_server_path'], repo_key,
                               'processes', str(local_api_version))
deprecated_path = os.path.join(config['local_server_path'], repo_key,
                               'deprecated')
collection_path = os.path.join(config['local_server_path'], repo_key,
                               'collections')
latest_api_version_file = os.path.join(config['local_server_path'], repo_key,
                                       'processes', 'latest_version')
# Latest OpenLoops process API version for which processes are available
# in the repository
latest_api_version = int(
    OLToolbox.import_dictionary(latest_api_version_file)
    ['process_api_version'])
version_db_file = os.path.join(config['local_server_path'], repo_key,
                               'processes', str(local_api_version),
                               'versions.db')
channel_db_file = os.path.join(config['local_server_path'], repo_key,
                               'processes', str(local_api_version),
                               'channels.db')


def upload_process(process, db, ch_db, api):
    """Compress a process code folder and upload the archive
    to a repository on the web server."""
    # need: repository_path, deprecated_path, backup_old_processes
    print '- upload process:', process, '...',
    sys.stdout.flush()
    old_date, old_hash, old_descr = db.get(process, (None, None, None))
Example #13
def upload_process(process, db, ch_db, api):
    """Compress a process code folder and upload the archive
    to a repository on the web server."""
    # need: repository_path, deprecated_path, backup_old_processes
    print '- upload process:', process, '...',
    sys.stdout.flush()
    old_date, old_hash, old_descr = db.get(process, (None, None, None))
    process_dir = os.path.join(config['process_src_dir'], process)
    process_version_file = os.path.join(process_dir, 'version.info')
    local_process_archive = process_dir + '.tar.gz'
    process_version = OLToolbox.import_dictionary(
        process_version_file, fatal=False)

    server_process_archive = os.path.join(repository_path, process + '.tar.gz')
    local_process_definition = os.path.join(config['process_def_dir'],
                                            process + '.m')
    server_process_definition = os.path.join(repository_path, process + '.m')
    server_backup_archive = os.path.join(
        deprecated_path, str(api) + '_' + process +
        '_' + str(old_date) + '_' + str(old_hash) + '.tar.gz')
    server_backup_definition = os.path.join(
        deprecated_path, str(api) + '_' + process +
        '_' + str(old_date) + '_' + str(old_hash) + '.m')

    if not process_version:
        if args.ignore:
            print 'IGNORED: not available'
            return
        else:
            print 'ERROR: not available'
            sys.exit(1)
    to_upload_api = int(process_version['process_api_version'])
    old_local_hash = process_version.get('hash', None)
    old_local_date = process_version.get('date', None)

    if to_upload_api != api:
        if args.ignore:
            print 'IGNORED: process to upload does not match installed',
            print '         OpenLoops version (process: %d, OpenLoops: %d)' % (
                  to_upload_api, api)
            return
        else:
            print 'ERROR: process to upload does not match installed',
            print '       OpenLoops version (process: %d, OpenLoops: %d)' % (
                  to_upload_api, api)
            sys.exit(1)

    if old_local_hash:
        # the local process was downloaded or uploaded before
        if old_local_hash == old_hash:
            print 'skipped: is up-to-date'
            return
        elif old_date is not None and (
              time.strptime(old_local_date, OLToolbox.timeformat) <
              time.strptime(old_date, OLToolbox.timeformat)):
            print 'skipped: process on server is newer'
            print '         (local: %s, server: %s)' % (
                  old_local_date, old_date)
            return

    if backup_old_processes and old_hash is not None:
        try:
            os.rename(server_process_archive, server_backup_archive)
        except OSError:
            print '[process backup failed]',
            sys.stdout.flush()
        try:
            os.rename(server_process_definition, server_backup_definition)
        except OSError:
            print '[definition backup failed]',
            sys.stdout.flush()

    # create process archive
    archive = tarfile.open(local_process_archive, 'w:gz')
    archive.add(process_dir, arcname=process)
    archive.close()
    # calculate archive hash and get upload time
    with open(local_process_archive, 'rb') as fh:
        archive_hash = hashlib.md5(fh.read()).hexdigest()
    upload_date = time.strftime(OLToolbox.timeformat)
    # store hash and upload time in local process directory
    process_version['date'] = upload_date
    process_version['hash'] = archive_hash
    OLToolbox.export_dictionary(process_version_file, process_version,
                                form = '%-25s %s')
    # get process description from process definition file
    with open(local_process_definition, 'r') as fh:
        description = OLToolbox.ProcessDB.no_description
        for line in fh:
            line = line.strip()
            if line.startswith('(*') and line.endswith('*)'):
                line = line[2:-2].strip()
                if line.startswith('description'):
                    description = line.split('=',1)[1].strip()
                    break
    # update process database
    db.update({process: (upload_date, archive_hash, description)})
    # update channel database
    info_options = OLToolbox.import_list(
        os.path.join(process_dir, 'info_' + process + '.txt'))
    info_options = [opt for opt in info_options if opt.startswith('options ')]
    if info_options:
        info_options = info_options[0].split()[1:]
    else:
        info_options = []
    info_files = OLToolbox.import_list(os.path.join(
        process_dir, "process_definition", "subprocesses.list"))
    info_files = [os.path.join(process_dir, "info_" + proc + ".txt")
                  for proc in info_files]
    info_files_extra = OLToolbox.import_list(os.path.join(
        process_dir, "process_definition", "subprocesses_extra.list"))
    info_files_extra = [os.path.join(process_dir, "info_" + proc + ".txt")
                        for proc in info_files_extra]
    channels = []
    for inf in info_files:
        channels.extend([line.split() + info_options
                         for line in OLToolbox.import_list(inf)])
    channels.sort(key=lambda el: el[1])
    channels_extra = []
    for inf in info_files_extra:
        channels_extra.extend([line.split() + info_options
                               for line in OLToolbox.import_list(inf)])
    channels_extra.sort(key=lambda el: el[1])
    ch_db.update({process: channels + channels_extra})
    # upload process archive and definition, delete temporary local archive
    shutil.copyfile(local_process_archive, server_process_archive)
    os.remove(local_process_archive)
    shutil.copyfile(local_process_definition, server_process_definition)
    print 'done'
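
Before upload, the archive is fingerprinted with MD5 and the digest is written back into version.info. A small variation of that step which reads the file in binary mode and in chunks, so large archives do not have to fit in memory; archive_digest is a stand-in name, not part of the original script.

import hashlib

def archive_digest(path, chunk_size=1 << 20):
    digest = hashlib.md5()
    with open(path, 'rb') as fh:   # binary mode: hash the raw archive bytes
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()
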
Example #14
#     processes/<api_version>/version.db

# OpenLoops process API version
local_api_version = config['process_api_version']

repository_path = os.path.join(config['local_server_path'],
                               repo_key, 'processes', str(local_api_version))
deprecated_path = os.path.join(config['local_server_path'],
                               repo_key, 'deprecated')
collection_path = os.path.join(config['local_server_path'],
                               repo_key, 'collections')
latest_api_version_file = os.path.join(config['local_server_path'],
                                       repo_key, 'processes', 'latest_version')
# Latest OpenLoops process API version for which processes are available
# in the repository
latest_api_version = int(OLToolbox.import_dictionary(
    latest_api_version_file)['process_api_version'])
version_db_file = os.path.join(
    config['local_server_path'], repo_key, 'processes',
    str(local_api_version), 'versions.db')
channel_db_file = os.path.join(
    config['local_server_path'], repo_key, 'processes',
    str(local_api_version), 'channels.db')


def upload_process(process, db, ch_db, api):
    """Compress a process code folder and upload the archive
    to a repository on the web server."""
    # need: repository_path, deprecated_path, backup_old_processes
    print '- upload process:', process, '...',
    sys.stdout.flush()
    old_date, old_hash, old_descr = db.get(process, (None, None, None))