def create_repository(repo, secret=False):
    """Create the directory skeleton of a process repository on the server.

    repo -- repository name
    secret -- if True, append a random 16-character suffix to the name
              so the repository URL is not guessable

    Creates <local_server_path>/<repo> with 'processes', 'deprecated' and
    'collections' subdirectories (each only if missing) and initialises
    the process API version marker to 0.  Returns the (possibly suffixed)
    repository name.
    """
    if secret:
        import random
        import string
        # NOTE(review): 'random' is not cryptographically secure; fine for
        # an obscure URL suffix, but use 'secrets' if real secrecy matters.
        seed_chars = string.ascii_lowercase + string.digits
        suffix = ''.join(random.choice(seed_chars) for _ in range(16))
        repo = repo + '_' + suffix
    # compute the repository root once instead of re-joining it four times
    repo_dir = os.path.join(config['local_server_path'], repo)
    process_dir = os.path.join(repo_dir, 'processes')
    # (also fixes the misspelled 'deprectated_dir' local of the original)
    for subdir in (repo_dir,
                   process_dir,
                   os.path.join(repo_dir, 'deprecated'),
                   os.path.join(repo_dir, 'collections')):
        if not os.path.isdir(subdir):
            os.mkdir(subdir)
    # a fresh repository starts at process API version 0
    OLToolbox.export_dictionary(os.path.join(process_dir, 'latest_version'),
                                {'process_api_version': 0})
    return repo
def download(process, dbs): # download process if one of the following conditions is met: # - the process API of the installed OpenLoops version # differs from the one of the installed process; # - the version of the installed process (if any) # differs from the version available on the server; # - download is forced by '--force' option; print '- process:', process, '...', local_process_dir = os.path.join(config['process_src_dir'], process) version_installed = OLToolbox.import_dictionary( os.path.join(local_process_dir, 'version.info'), fatal = False) if version_installed is None: version_installed = {} try: api_installed = int(version_installed.get('process_api_version', None)) except (AttributeError, TypeError): api_installed = None hash_installed = version_installed.get('hash', None) date_installed = version_installed.get('date', None) available = [(cont.content[process][0], cont.content[process][1], repo) for repo, cont in dbs.items() if process in cont.content] if not available: if args.ignore: print 'IGNORED: not available (installed: %s)' % version_installed return else: print 'ERROR: not available (installed: %s)' % version_installed sys.exit(1) # Only download, if the hash of the process to download differs # from the hash of the installed process, unless --force used. if not args.force: available = [src for src in available if src[1] != hash_installed] if not available: # the process is already available locally and is up-to-date. print 'skipped: is up-to-date' return # In particular when --force is used, # select the process which was uploaded last. 
available = sorted( available, key=lambda src: time.strptime(src[0], OLToolbox.timeformat))[-1] remote_archive = ((repository_url % available[2]) + '/' + process + '.tar.gz') local_archive = os.path.join(local_process_dir + '.tar.gz') # download the process print 'download ...', try: rf = urllib2.urlopen(remote_archive) except urllib2.HTTPError: print '*** DOWNLOAD FAILED ***' if args.ignore: return else: sys.exit(1) lf = open(local_archive, 'wb') lf.write(rf.read()) rf.close() lf.close() print 'extract ...', # remove target directory if it already exists try: shutil.rmtree(local_process_dir) except: pass # extract the process code from the archive untar(local_archive, config['process_src_dir']) # remove the local archive os.remove(local_archive) # store date and hash in the local process version.info process_version_file = os.path.join(local_process_dir, 'version.info') process_version = OLToolbox.import_dictionary( process_version_file, fatal=True) process_version['date'] = available[0] process_version['hash'] = available[1] OLToolbox.export_dictionary(process_version_file, process_version, form = '%-25s %s') print 'done'
def download(process, dbs, libmaps):
    """Download and install the process code for 'process'.

    process -- process name requested by the user
    dbs     -- dict {repository_name: ProcessDB} with the server content
    libmaps -- dict {repository_name: name_mapping or None} translating
               process names to the library that actually contains them

    A process is downloaded if one of the following conditions is met:
    - the version (hash) of the installed process (if any)
      differs from the version available on the server;
    - download is forced by the '--force' option.
    Records the installed name in the module-level 'downloaded' dict and
    exits with status 1 on failure unless '--ignore' was given.
    """
    print('- process:', process, '...', end=' ')
    sys.stdout.flush()
    # collect all repositories which can provide the (possibly mapped)
    # process, as tuples (process, date, hash, repo)
    available = []
    for repo, cont in dbs.items():
        mapped_process = process
        if libmaps[repo] and process in libmaps[repo]:
            # this repository provides the process under a different
            # (library) name
            mapped_process = libmaps[repo][process]
            if mapped_process in downloaded.values():
                # the target library was already fetched for another name
                downloaded[process] = mapped_process
                print('mapped to', mapped_process, '(already downloaded)')
                return
            print('does not exist; downloading', mapped_process,
                  'instead ...', end=' ')
            sys.stdout.flush()
        if mapped_process in cont.content:
            # (process, date, hash, repo)
            available.append((mapped_process,
                              cont.content[mapped_process][0],
                              cont.content[mapped_process][1], repo))
        elif mapped_process != process:
            # the mapping table points at a library which the repository
            # does not actually contain
            print('\nOops! There seems to be something wrong with the ' +
                  'library mapping table. Please contact the authors.')
    # Order processes by date: latest last (to be used)
    available = sorted(
        available,
        key=lambda src: time.strptime(src[1], OLToolbox.timeformat))
    mapped_process = process
    if available:
        mapped_process = available[-1][0]
    downloaded[process] = mapped_process
    local_process_dir = os.path.join(config['process_src_dir'],
                                     mapped_process)
    # version.info of the locally installed process copy (may be absent)
    version_installed = OLToolbox.import_dictionary(os.path.join(
        local_process_dir, 'version.info'), fatal=False)
    if version_installed is None:
        version_installed = {}
    try:
        api_installed = int(version_installed.get('process_api_version', None))
    except (AttributeError, TypeError):
        api_installed = None
    # NOTE(review): api_installed and date_installed are never used below.
    hash_installed = version_installed.get('hash', None)
    date_installed = version_installed.get('date', None)
    if not available:
        if args.ignore:
            print('IGNORED: not available (installed: %s)' % version_installed)
            return
        else:
            print('ERROR: not available (installed: %s)' % version_installed)
            sys.exit(1)
    # Only download, if the hash of the process to download differs
    # from the hash of the installed process, unless --force used.
    if not args.force:
        available = [src for src in available if src[2] != hash_installed]
        if not available:
            # the process is already available locally and is up-to-date.
            print('skipped: is up-to-date')
            return
    # Select the process which was uploaded last.
    available = available[-1]
    remote_archive = ((repository_url % available[3]) + '/' +
                      available[0] + '.tar.gz')
    local_archive = os.path.join(local_process_dir + '.tar.gz')
    # download the process
    print('download from repository: ' + available[3] + '...', end=' ')
    sys.stdout.flush()
    try:
        # repositories given as absolute paths are read from disk,
        # everything else is treated as a URL
        if remote_archive.startswith('/'):
            rf = open(remote_archive, 'rb')
        else:
            rf = urlopen(remote_archive)
    except (URLError, IOError):
        print('*** DOWNLOAD FAILED ***')
        if args.ignore:
            return
        else:
            sys.exit(1)
    lf = open(local_archive, 'wb')
    lf.write(rf.read())
    rf.close()
    lf.close()
    print('extract ...', end=' ')
    sys.stdout.flush()
    # remove target directory if it already exists (best effort)
    try:
        shutil.rmtree(local_process_dir)
    except:
        pass
    # extract the process code from the archive
    untar(local_archive, config['process_src_dir'])
    # remove the local archive
    os.remove(local_archive)
    # store date and hash in the local process version.info
    process_version_file = os.path.join(local_process_dir, 'version.info')
    process_version = OLToolbox.import_dictionary(process_version_file,
                                                  fatal=True)
    process_version['date'] = available[1]
    process_version['hash'] = available[2]
    OLToolbox.export_dictionary(process_version_file, process_version,
                                form='%-25s %s')
    print('done')
# Report any requested process collection that no repository provided.
not_found_colls = [coll for coll in collections if coll not in found_colls]
if not_found_colls:
    if args.ignore:
        print('IGNORED: process collection(s) ', not_found_colls,
              ' not found.')
    else:
        print('ERROR: process collection(s) ', not_found_colls,
              ' not found.')
        sys.exit(1)
# Compare the local process API version with the newest one advertised
# by the configured repositories.
if local_api_version == max_latest_api_version:
    print('process API version: %d' % local_api_version)
elif local_api_version < max_latest_api_version:
    # the servers already serve processes for a newer OpenLoops API
    print('local process API version: %d (server: %d)' %
          (local_api_version, max_latest_api_version))
    print('******************************************')
    print('Please update your OpenLoops installation.')
    print('******************************************')
if not os.path.exists(config['process_src_dir']):
    os.mkdir(config['process_src_dir'])
# 'downloaded' maps each requested process name to the name actually
# installed (after library mapping); it is filled by download().
downloaded = {}
for process in process_list:
    download(process, process_dbs, libname_maps)
# persist the request->installed mapping next to the process sources
OLToolbox.export_dictionary(
    os.path.join(config['process_src_dir'], 'downloaded.dat'), downloaded)
print('done\n')
def download(process, dbs):
    """Download and install the process code for 'process'.

    process -- process name
    dbs -- dict {repository_name: ProcessDB} with the server content

    A process is downloaded if one of the following conditions is met:
    - the version (hash) of the installed process (if any)
      differs from the version available on the server;
    - download is forced by the '--force' option.
    Exits with status 1 on failure unless '--ignore' was given.
    """
    print '- process:', process, '...',
    local_process_dir = os.path.join(config['process_src_dir'], process)
    # version.info of the locally installed process copy (may be absent)
    version_installed = OLToolbox.import_dictionary(os.path.join(
        local_process_dir, 'version.info'), fatal=False)
    if version_installed is None:
        version_installed = {}
    try:
        api_installed = int(version_installed.get('process_api_version', None))
    except (AttributeError, TypeError):
        api_installed = None
    # NOTE(review): api_installed and date_installed are never used below.
    hash_installed = version_installed.get('hash', None)
    date_installed = version_installed.get('date', None)
    # every repository that provides the process, as (date, hash, repo)
    available = [(cont.content[process][0], cont.content[process][1], repo)
                 for repo, cont in dbs.items() if process in cont.content]
    if not available:
        if args.ignore:
            print 'IGNORED: not available (installed: %s)' % version_installed
            return
        else:
            print 'ERROR: not available (installed: %s)' % version_installed
            sys.exit(1)
    # Only download, if the hash of the process to download differs
    # from the hash of the installed process, unless --force used.
    if not args.force:
        available = [src for src in available if src[1] != hash_installed]
        if not available:
            # the process is already available locally and is up-to-date.
            print 'skipped: is up-to-date'
            return
    # In particular when --force is used,
    # select the process which was uploaded last.
    available = sorted(
        available,
        key=lambda src: time.strptime(src[0], OLToolbox.timeformat))[-1]
    remote_archive = ((repository_url % available[2]) + '/' +
                      process + '.tar.gz')
    local_archive = os.path.join(local_process_dir + '.tar.gz')
    # download the process
    print 'download ...',
    try:
        rf = urllib2.urlopen(remote_archive)
    # NOTE(review): only HTTPError is caught here; a plain connection
    # failure raises urllib2.URLError and would abort with a traceback.
    except urllib2.HTTPError:
        print '*** DOWNLOAD FAILED ***'
        if args.ignore:
            return
        else:
            sys.exit(1)
    lf = open(local_archive, 'wb')
    lf.write(rf.read())
    rf.close()
    lf.close()
    print 'extract ...',
    # remove target directory if it already exists (best effort)
    try:
        shutil.rmtree(local_process_dir)
    except:
        pass
    # extract the process code from the archive
    untar(local_archive, config['process_src_dir'])
    # remove the local archive
    os.remove(local_archive)
    # store date and hash in the local process version.info
    process_version_file = os.path.join(local_process_dir, 'version.info')
    process_version = OLToolbox.import_dictionary(process_version_file,
                                                  fatal=True)
    process_version['date'] = available[0]
    process_version['hash'] = available[1]
    OLToolbox.export_dictionary(process_version_file, process_version,
                                form='%-25s %s')
    print 'done'
def upload_process(process, db, ch_db, api): """Compress a process code folder and upload the archive to a repository on the web server.""" # need: repository_path, deprecated_path, backup_old_processes print '- upload process:', process, '...', sys.stdout.flush() old_date, old_hash, old_descr = db.get(process, (None, None, None)) process_dir = os.path.join(config['process_src_dir'], process) process_version_file = os.path.join(process_dir, 'version.info') local_process_archive = process_dir + '.tar.gz' process_version = OLToolbox.import_dictionary(process_version_file, fatal=False) server_process_archive = os.path.join(repository_path, process + '.tar.gz') local_process_definition = os.path.join(config['process_def_dir'], process + '.m') server_process_definition = os.path.join(repository_path, process + '.m') server_backup_archive = os.path.join( deprecated_path, str(api) + '_' + process + '_' + str(old_date) + '_' + str(old_hash) + '.tar.gz') server_backup_definition = os.path.join( deprecated_path, str(api) + '_' + process + '_' + str(old_date) + '_' + str(old_hash) + '.m') if not process_version: if args.ignore: print 'IGNORED: not available' return else: print 'ERROR: not available' sys.exit(1) to_upload_api = int(process_version['process_api_version']) old_local_hash = process_version.get('hash', None) old_local_date = process_version.get('date', None) if to_upload_api != api: if args.ignore: print 'IGNORED: process to upload does not match installed', print ' OpenLoops version (process: %d, OpenLoops: %d)' % ( to_upload_api, api) return else: print 'ERROR: process to upload does not match installed', print ' OpenLoops version (process: %d, OpenLoops: %d)' % ( to_upload_api, api) sys.exit(1) if old_local_hash: # the local process was downloaded or uploaded before if old_local_hash == old_hash: print 'skipped: is up-to-date' return elif old_date is not None and (time.strptime( old_local_date, OLToolbox.timeformat) < time.strptime( old_date, 
OLToolbox.timeformat)): print 'skipped: process on server is newer' print ' (local: %s, server: %s)' % (old_local_date, old_date) return if backup_old_processes and old_hash is not None: try: os.rename(server_process_archive, server_backup_archive) except OSError: print '[process backup failed]', sys.stdout.flush() try: os.rename(server_process_definition, server_backup_definition) except OSError: print '[definition backup failed]', sys.stdout.flush() # create process archive archive = tarfile.open(local_process_archive, 'w:gz') archive.add(process_dir, arcname=process) archive.close() # calculate archive hash and get upload time with open(local_process_archive, 'r') as fh: archive_hash = hashlib.md5(fh.read()).hexdigest() upload_date = time.strftime(OLToolbox.timeformat) # store hash and upload time in local process directory process_version['date'] = upload_date process_version['hash'] = archive_hash OLToolbox.export_dictionary(process_version_file, process_version, form='%-25s %s') # get process description from process definition file with open(local_process_definition, 'r') as fh: description = OLToolbox.ProcessDB.no_description for line in fh: line = line.strip() if line.startswith('(*') and line.endswith('*)'): line = line[2:-2].strip() if line.startswith('description'): description = line.split('=', 1)[1].strip() break # update process database db.update({process: (upload_date, archive_hash, description)}) # update channel database info_options = OLToolbox.import_list( os.path.join(process_dir, 'info_' + process + '.txt')) info_options = [opt for opt in info_options if opt.startswith('options ')] if info_options: info_options = info_options[0].split()[1:] else: info_options = [] info_files = OLToolbox.import_list( os.path.join(process_dir, "process_definition", "subprocesses.list")) info_files = [ os.path.join(process_dir, "info_" + proc + ".txt") for proc in info_files ] info_files_extra = OLToolbox.import_list( os.path.join(process_dir, 
"process_definition", "subprocesses_extra.list")) info_files_extra = [ os.path.join(process_dir, "info_" + proc + ".txt") for proc in info_files_extra ] channels = [] for inf in info_files: channels.extend([ line.split() + info_options for line in OLToolbox.import_list(inf) ]) channels.sort(key=lambda el: el[1]) channels_extra = [] for inf in info_files_extra: channels_extra.extend([ line.split() + info_options for line in OLToolbox.import_list(inf) ]) channels_extra.sort(key=lambda el: el[1]) ch_db.update({process: channels + channels_extra}) # upload process archive and definition, delete temporary local archive shutil.copyfile(local_process_archive, server_process_archive) os.remove(local_process_archive) shutil.copyfile(local_process_definition, server_process_definition) print 'done'
# NOTE(review): the next three statements are the indented tail of a
# function whose 'def' line lies before this chunk — presumably it
# removes a process from the server databases; confirm in the full file.
    db.remove(process)
    ch_db.remove(process)
    print 'done'

print 'repository:', repository
# Synchronise the server's process API version marker with the local
# one and open (or create) the process/channel databases.
if local_api_version == latest_api_version:
    print 'process API version: %d' % local_api_version
    process_db = OLToolbox.ProcessDB(db=version_db_file)
    channel_db = OLToolbox.ChannelDB(db=channel_db_file)
else:
    if local_api_version > latest_api_version:
        # local OpenLoops is newer: bump the server-side marker
        print 'new process API version: %d (server: %d)' % (
            local_api_version, latest_api_version)
        OLToolbox.export_dictionary(latest_api_version_file,
                                    {'process_api_version':
                                     local_api_version})
    else:
        print 'WARNING: local process API is outdated'
        print ' (local: %d, server: %d)' % (local_api_version,
                                            latest_api_version)
    if not os.path.isdir(repository_path):
        # first upload for this API version: start with empty databases
        os.mkdir(repository_path)
        process_db = OLToolbox.ProcessDB()
        channel_db = OLToolbox.ChannelDB()
        process_db.export_db(version_db_file)
        channel_db.export_db(channel_db_file)
    else:
        process_db = OLToolbox.ProcessDB(db=version_db_file)
        channel_db = OLToolbox.ChannelDB(db=channel_db_file)

# NOTE(review): loop header is dangling here — its body continues
# beyond this chunk.
for coll in collections:
def upload_process(process, db, ch_db, api): """Compress a process code folder and upload the archive to a repository on the web server.""" # need: repository_path, deprecated_path, backup_old_processes print '- upload process:', process, '...', sys.stdout.flush() old_date, old_hash, old_descr = db.get(process, (None, None, None)) process_dir = os.path.join(config['process_src_dir'], process) process_version_file = os.path.join(process_dir, 'version.info') local_process_archive = process_dir + '.tar.gz' process_version = OLToolbox.import_dictionary( process_version_file, fatal=False) server_process_archive = os.path.join(repository_path, process + '.tar.gz') local_process_definition = os.path.join(config['process_def_dir'], process + '.m') server_process_definition = os.path.join(repository_path, process + '.m') server_backup_archive = os.path.join( deprecated_path, str(api) + '_' + process + '_' + str(old_date) + '_' + str(old_hash) + '.tar.gz') server_backup_definition = os.path.join( deprecated_path, str(api) + '_' + process + '_' + str(old_date) + '_' + str(old_hash) + '.m') if not process_version: if args.ignore: print 'IGNORED: not available' return else: print 'ERROR: not available' sys.exit(1) to_upload_api = int(process_version['process_api_version']) old_local_hash = process_version.get('hash', None) old_local_date = process_version.get('date', None) if to_upload_api != api: if args.ignore: print 'IGNORED: process to upload does not match installed', print ' OpenLoops version (process: %d, OpenLoops: %d)' % ( to_upload_api, api) return else: print 'ERROR: process to upload does not match installed', print ' OpenLoops version (process: %d, OpenLoops: %d)' % ( to_upload_api, api) sys.exit(1) if old_local_hash: # the local process was downloaded or uploaded before if old_local_hash == old_hash: print 'skipped: is up-to-date' return elif old_date is not None and ( time.strptime(old_local_date, OLToolbox.timeformat) < time.strptime(old_date, 
OLToolbox.timeformat)): print 'skipped: process on server is newer' print ' (local: %s, server: %s)' % ( old_local_date, old_date) return if backup_old_processes and old_hash is not None: try: os.rename(server_process_archive, server_backup_archive) except OSError: print '[process backup failed]', sys.stdout.flush() try: os.rename(server_process_definition, server_backup_definition) except OSError: print '[definition backup failed]', sys.stdout.flush() # create process archive archive = tarfile.open(local_process_archive, 'w:gz') archive.add(process_dir, arcname=process) archive.close() # calculate archive hash and get upload time with open(local_process_archive, 'r') as fh: archive_hash = hashlib.md5(fh.read()).hexdigest() upload_date = time.strftime(OLToolbox.timeformat) # store hash and upload time in local process directory process_version['date'] = upload_date process_version['hash'] = archive_hash OLToolbox.export_dictionary(process_version_file, process_version, form = '%-25s %s') # get process description from process definition file with open(local_process_definition, 'r') as fh: description = OLToolbox.ProcessDB.no_description for line in fh: line = line.strip() if line.startswith('(*') and line.endswith('*)'): line = line[2:-2].strip() if line.startswith('description'): description = line.split('=',1)[1].strip() break # update process database db.update({process: (upload_date, archive_hash, description)}) # update channel database info_options = OLToolbox.import_list( os.path.join(process_dir, 'info_' + process + '.txt')) info_options = [opt for opt in info_options if opt.startswith('options ')] if info_options: info_options = info_options[0].split()[1:] else: info_options = [] info_files = OLToolbox.import_list(os.path.join( process_dir, "process_definition", "subprocesses.list")) info_files = [os.path.join(process_dir, "info_" + proc + ".txt") for proc in info_files] info_files_extra = OLToolbox.import_list(os.path.join( process_dir, 
"process_definition", "subprocesses_extra.list")) info_files_extra = [os.path.join(process_dir, "info_" + proc + ".txt") for proc in info_files_extra] channels = [] for inf in info_files: channels.extend([line.split() + info_options for line in OLToolbox.import_list(inf)]) channels.sort(key=lambda el: el[1]) channels_extra = [] for inf in info_files_extra: channels_extra.extend([line.split() + info_options for line in OLToolbox.import_list(inf)]) channels_extra.sort(key=lambda el: el[1]) ch_db.update({process: channels + channels_extra}) # upload process archive and definition, delete temporary local archive shutil.copyfile(local_process_archive, server_process_archive) os.remove(local_process_archive) shutil.copyfile(local_process_definition, server_process_definition) print 'done'
# NOTE(review): the next two statements are the indented tail of a
# function whose 'def' line lies before this chunk — presumably it
# removes a process from the server databases; confirm in the full file.
    ch_db.remove(process)
    print 'done'

print 'repository:', repository
# Synchronise the server's process API version marker with the local
# one and open (or create) the process/channel databases.
if local_api_version == latest_api_version:
    print 'process API version: %d' % local_api_version
    process_db = OLToolbox.ProcessDB(db=version_db_file)
    channel_db = OLToolbox.ChannelDB(db=channel_db_file)
else:
    if local_api_version > latest_api_version:
        # local OpenLoops is newer: bump the server-side marker
        print 'new process API version: %d (server: %d)' % (
            local_api_version, latest_api_version)
        OLToolbox.export_dictionary(
            latest_api_version_file,
            {'process_api_version': local_api_version})
    else:
        print 'WARNING: local process API is outdated'
        print ' (local: %d, server: %d)' % (
            local_api_version, latest_api_version)
    if not os.path.isdir(repository_path):
        # first upload for this API version: start with empty databases
        os.mkdir(repository_path)
        process_db = OLToolbox.ProcessDB()
        channel_db = OLToolbox.ChannelDB()
        process_db.export_db(version_db_file)
        channel_db.export_db(channel_db_file)
    else:
        process_db = OLToolbox.ProcessDB(db=version_db_file)
        channel_db = OLToolbox.ChannelDB(db=channel_db_file)