def main(options):
    """Locate an installed tool shed repository in Galaxy and import the
    workflows it exports.
    """
    api_key = options.api
    galaxy_base = options.local_url.rstrip('/')
    shed_base = options.tool_shed_url.rstrip('/')
    cleaned_shed_url = clean_url(shed_base)
    repositories_url = '%s/api/tool_shed_repositories' % galaxy_base
    repositories = display(api_key, repositories_url, return_formatted=False)
    repository_id = None
    for repository in repositories:
        matches = (
            str(repository['tool_shed']) == cleaned_shed_url
            and str(repository['name']) == options.name
            and str(repository['owner']) == options.owner
            and str(repository['changeset_revision']) == options.changeset_revision
        )
        if matches:
            repository_id = repository['id']
            break
    if not repository_id:
        print("Invalid tool_shed / name / owner / changeset_revision.")
        return
    # Get the list of exported workflows contained in the installed repository.
    workflows_url = '%s%s' % (galaxy_base, '/api/tool_shed_repositories/%s/exported_workflows' % str(repository_id))
    exported_workflows = display(api_key, workflows_url, return_formatted=False)
    if exported_workflows:
        # Import every workflow in the list.  NOTE: to import a single
        # workflow, add an index to the payload (data['index'] = 0) and POST
        # to the singular ~/import_workflow endpoint instead.
        import_url = '%s%s' % (galaxy_base, '/api/tool_shed_repositories/%s/import_workflows' % str(repository_id))
        submit(options.api, import_url, {})
Ejemplo n.º 2
0
def main(options):
    """Find an installed tool shed repository and import its exported workflows."""
    api_key = options.api
    base_galaxy_url = options.local_url.rstrip('/')
    cleaned_tool_shed_url = clean_url(options.tool_shed_url.rstrip('/'))
    listing_url = '{}/api/tool_shed_repositories'.format(base_galaxy_url)
    installed = display(api_key, listing_url, return_formatted=False)

    def is_match(repo):
        # All four identifying fields must agree with the requested repository.
        return (str(repo['tool_shed']) == cleaned_tool_shed_url and
                str(repo['name']) == options.name and
                str(repo['owner']) == options.owner and
                str(repo['changeset_revision']) == options.changeset_revision)

    repo_id = next((repo['id'] for repo in installed if is_match(repo)), None)
    if repo_id:
        # Get the list of exported workflows contained in the installed repository.
        workflows_url = '{}{}'.format(base_galaxy_url, '/api/tool_shed_repositories/%s/exported_workflows' % str(repo_id))
        if display(api_key, workflows_url, return_formatted=False):
            # Import all of the exported workflows.  NOTE: to import a single
            # workflow, add an index to the payload (data['index'] = 0) and
            # change the url to the singular ~/import_workflow endpoint.
            import_url = '{}{}'.format(base_galaxy_url, '/api/tool_shed_repositories/%s/import_workflows' % str(repo_id))
            submit(options.api, import_url, {})
    else:
        print("Invalid tool_shed / name / owner / changeset_revision.")
Ejemplo n.º 3
0
def main(api_key, api_url, in_folder, data_library, uuid_field=None):
    # Find/Create data library with the above name.  Assume we're putting datasets in the root folder '/'
    libs = display(api_key, api_url + 'libraries', return_formatted=False)
    library_id = None
    for library in libs:
        if library['name'] == data_library:
            library_id = library['id']
    if not library_id:
        lib_create_data = {'name':data_library}
        library = submit(api_key, api_url + 'libraries', lib_create_data, return_formatted=False)
        library_id = library['id']
    folders = display(api_key, api_url + "libraries/%s/contents" % library_id, return_formatted = False)
    for f in folders:
        if f['name'] == "/":
            library_folder_id = f['id']
    if not library_id or not library_folder_id:
        print "Failure to configure library destination."
        sys.exit(1)

    if os.path.isfile(in_folder):
        if os.path.exists(in_folder + ".json"):
            fullpath = os.path.abspath(in_folder)
            print "Loading", fullpath
            load_file(fullpath, api_key, api_url, library_id, library_folder_id, uuid_field)
    else:
        for fname in os.listdir(in_folder):
            fullpath = os.path.join(in_folder, fname)
            if os.path.isfile(fullpath) and os.path.exists(fullpath + ".json"):
                print "Loading", fullpath
                load_file(fullpath, api_key, api_url, library_id, library_folder_id, uuid_field)
Ejemplo n.º 4
0
def isFileInGalaxyFolder(folder, file):
    """Return 1 if *file* (a full path) already exists as a dataset in
    *folder*, otherwise 0.
    """
    contents = display(_api_key, _api_url + "/api/folders/%s/contents" % folder['id'], return_formatted=False)
    file_entries = [e for e in contents['folder_contents'] if e['type'] == 'file']
    for entry in file_entries:
        # Each dataset's metadata carries its backing file path.
        metadata = display(_api_key, _api_url + '/api/libraries/datasets/%s' % entry['id'], return_formatted=False)
        if metadata['file_name'] == file:
            return 1
    return 0
Ejemplo n.º 5
0
def main(api_key, api_url, in_folder, out_folder, data_library, workflow):
    # Find/Create data library with the above name.  Assume we're putting datasets in the root folder '/'
    libs = display(api_key, api_url + 'libraries', return_formatted=False)
    library_id = None
    for library in libs:
        if library['name'] == data_library:
            library_id = library['id']
    if not library_id:
        lib_create_data = {'name':data_library}
        library = submit(api_key, api_url + 'libraries', lib_create_data, return_formatted=False)
        library_id = library[0]['id']
    folders = display(api_key, api_url + "libraries/%s/contents" % library_id, return_formatted = False)
    for f in folders:
        if f['name'] == "/":
            library_folder_id = f['id']
    workflow = display(api_key, api_url + 'workflows/%s' % workflow, return_formatted = False)
    if not workflow:
        print "Workflow %s not found, terminating."
        sys.exit(1)
    if not library_id or not library_folder_id:
        print "Failure to configure library destination."
        sys.exit(1)
    while 1:
        # Watch in_folder, upload anything that shows up there to data library and get ldda,
        # invoke workflow, move file to out_folder.
        for fname in os.listdir(in_folder):
            fullpath = os.path.join(in_folder, fname)
            if os.path.isfile(fullpath):
                data = {}
                data['folder_id'] = library_folder_id
                data['file_type'] = 'auto'
                data['dbkey'] = ''
                data['upload_option'] = 'upload_paths'
                data['filesystem_paths'] = fullpath
                data['create_type'] = 'file'
                libset = submit(api_key, api_url + "libraries/%s/contents" % library_id, data, return_formatted = False)
                #TODO Handle this better, but the datatype isn't always
                # set for the followup workflow execution without this
                # pause.
                time.sleep(5)
                for ds in libset:
                    if 'id' in ds:
                        # Successful upload of dataset, we have the ldda now.  Run the workflow.
                        wf_data = {}
                        wf_data['workflow_id'] = workflow['id']
                        wf_data['history'] = "%s - %s" % (fname, workflow['name'])
                        wf_data['ds_map'] = {}
                        for step_id, ds_in in workflow['inputs'].iteritems():
                            wf_data['ds_map'][step_id] = {'src':'ld', 'id':ds['id']}
                        res = submit( api_key, api_url + 'workflows', wf_data, return_formatted=False)
                        if res:
                            print res
                            # Successful workflow execution, safe to move dataset.
                            shutil.move(fullpath, os.path.join(out_folder, fname))
        time.sleep(10)
Ejemplo n.º 6
0
def getLibFolderID(exp_info):
    """Find or create the library named by exp_info['lib_folder_name'].

    Returns (library_id, root_folder_id, lib_exist) where lib_exist is 1 if
    the library already existed and 0 if it was just created; returns
    (-1, -1, -1) on any failure.
    """
    api_key = exp_info['api_key']
    api_url = exp_info['api_url']
    lib_folder_name = exp_info['lib_folder_name']
    library_id = lib_folder_id = -1
    try:
        libs = display(api_key, api_url + 'libraries', return_formatted=False)
    except Exception:
        # BUG FIX: narrowed from a bare except, which also swallowed
        # SystemExit / KeyboardInterrupt.
        logPrint(api_url + 'libraries')
        logPrint("Error:Failure when libs = display")
        return (-1, -1, -1)

    library_id = None
    for library in libs:
        if library['name'] == lib_folder_name:
            # Deleted libraries don't count as existing.
            if library['deleted']:
                continue
            library_id = library['id']
            lib_exist = 1
            logPrint('Library [%s] existed!' % lib_folder_name)

    if not library_id:
        lib_create_data = {'name': lib_folder_name}
        try:
            library = submit(api_key,
                             api_url + 'libraries',
                             lib_create_data,
                             return_formatted=False)
        except Exception:
            logPrint("Error:Failure when library = submit")
            return (-1, -1, -1)
        logPrint('Library [%s] created!' % lib_folder_name)
        library_id = library['id']  #library[0]['id']
        lib_exist = 0

    # Locate the library's root folder '/'.
    folders = display(api_key,
                      api_url + "libraries/%s/contents" % library_id,
                      return_formatted=False)
    for f in folders:
        if f['name'] == "/":
            lib_folder_id = f['id']

    # BUG FIX: the sentinel -1 is truthy, so the old `not lib_folder_id`
    # check could never detect a missing root folder.
    if not library_id or lib_folder_id == -1:
        logPrint("Error:Failure to configure library destination.")
        return (-1, -1, -1)

    return (library_id, lib_folder_id, lib_exist)
Ejemplo n.º 7
0
def main( options ):
    """Collect all user data and export the repository via the Tool Shed API."""
    base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
    repositories_url = '%s/api/repositories' % base_tool_shed_url
    # Request payload describing the repository revision to export.
    data = {}
    data[ 'tool_shed_url' ] = base_tool_shed_url
    data[ 'name' ] = options.name
    data[ 'owner' ] = options.owner
    data[ 'changeset_revision' ] = options.changeset_revision
    data[ 'export_repository_dependencies' ] = options.export_repository_dependencies
    # Resolve the repository id by matching name and owner in the listing.
    repository_id = None
    repositories = display( repositories_url, api_key=None, return_formatted=False )
    for repository in repositories:
        name = str( repository[ 'name' ] )
        owner = str( repository[ 'owner' ] )
        if name == options.name and owner == options.owner:
            repository_id = repository[ 'id' ]
            break
    if repository_id:
        # We'll currently support only gzip-compressed tar archives.
        file_type = 'gz'
        url = '%s%s' % ( base_tool_shed_url, '/api/repository_revisions/%s/export' % str( repository_id ) )
        export_dict = submit( url, data, return_formatted=False )
        error_messages = export_dict[ 'error_messages' ]
        if error_messages:
            print "Error attempting to export revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner, ":\n", error_messages
        else:
            # Build the conventional local filename for the exported archive.
            repositories_archive_filename = \
                export_util.generate_repository_archive_filename( base_tool_shed_url,
                                                                  options.name,
                                                                  options.owner,
                                                                  options.changeset_revision,
                                                                  file_type,
                                                                  export_repository_dependencies=string_as_bool( options.export_repository_dependencies ),
                                                                  use_tmp_archive_dir=False )
            download_url = export_dict[ 'download_url' ]
            download_dir = os.path.abspath( options.download_dir )
            file_path = os.path.join( download_dir, repositories_archive_filename )
            # Stream the archive to disk in CHUNK_SIZE pieces.
            src = None
            dst = None
            try:
                src = urllib2.urlopen( download_url )
                dst = open( file_path, 'wb' )
                while True:
                    chunk = src.read( CHUNK_SIZE )
                    if chunk:
                        dst.write( chunk )
                    else:
                        break
            except:
                raise
            finally:
                # Close both endpoints whether or not the download succeeded.
                if src:
                    src.close()
                if dst:
                    dst.close()
            print "Successfully exported revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner
            print "to location ", file_path
    else:
        print "Invalid tool_shed / name / owner ."
Ejemplo n.º 8
0
def main_debug(displaying):
    """Run the detector over a bundled sample video, optionally showing frames."""
    video_file = os.path.join(os.path.dirname(__file__), "video/staircase.mp4")

    vid_stream = VideoStream(video_file, interval=0.03)
    vid_stream.start()

    if debug:
        # Block until a remote debugger attaches before processing any frames.
        import ptvsd
        ptvsd.enable_attach(('0.0.0.0', 56781))
        ptvsd.wait_for_attach()
        ptvsd.break_into_debugger()

    while True:
        _, frame = vid_stream.get_frame_with_id()
        detections = detector.detect(frame)

        # Headless mode: just log the detections and keep going.
        if not displaying:
            logging.info(detections)
            continue

        # Render the detections and show the annotated frame on screen.
        annotated = display(frame, detections)
        cv2.imshow("Frame", annotated)

        # Quit on 'q' or ESC.
        pressed = cv2.waitKey(1) & 0xFF
        if pressed == ord('q') or pressed == 27:
            break

    cv2.destroyAllWindows()
Ejemplo n.º 9
0
def get_LibDataList(exp_info, library_id):
    """Return (file-name -> [dataset dict] mapping, count of entries with ids)
    for the non-folder contents of the given library.
    """
    api_key = exp_info['api_key']
    api_url = exp_info['api_url']
    lib_folder_name = exp_info['source_lib_folder_name']

    logPrint('Get file list from lib [%s]...' % lib_folder_name)
    libset = display(api_key,
                     api_url + "libraries/%s/contents" % library_id,
                     return_formatted=False)
    dict_file = {}
    for item in libset:
        if item['type'] == 'folder':
            continue
        # Strip the leading '/' the API prepends to dataset names.
        name = item['name']
        if name.startswith('/'):
            name = name[1:]
        dict_file[name] = [item]

    # Count only the entries that actually carry a dataset id.
    fname_count = 0
    for tags in dict_file.values():
        fname_count += sum(1 for tag in tags if 'id' in tag)

    return (dict_file, fname_count)
Ejemplo n.º 10
0
def main():
    if _debug == 1:
      print 'Galaxy API URL: %s' % api_url
      print 'Galaxy API Key: %s' % api_key
      print 'Library to create: %s' % library_to_create
      print ''

    if api_url == None or api_key == None:
        print "Galaxy API Key and/or URL was not specified"
        sys.exit(1)

    libs = display(api_key, api_url + '/api/libraries', return_formatted=False)
    for library in libs:
        if library['name'] == library_to_create and library['deleted'] == False:
            print 'Error: Library %s already exists.' % library['name']
            sys.exit(1)

    data = {}
    data['name'] = library_to_create
    
    result = submit(api_key, api_url + "/api/libraries", data, return_formatted = False)
    if not result['id'] == 0:
        print 'Success: Library created.'
    else:
        print 'Error: Failed to create library (%s).' % result['id']
Ejemplo n.º 11
0
def main(options):
    """Collect all user data and repair the installed repository revision via
    the Galaxy API.
    """
    api_key = options.api
    galaxy_base = options.local_url.rstrip('/')
    shed_url = clean_url(options.tool_shed_url.rstrip('/'))
    listing_url = '%s/api/%s' % (galaxy_base, 'tool_shed_repositories')
    payload = {
        'tool_shed_url': shed_url,
        'name': options.name,
        'owner': options.owner,
        'changeset_revision': options.changeset_revision,
    }
    repo_id = None
    for repo in display(api_key, listing_url, return_formatted=False):
        same = (str(repo['tool_shed']) == shed_url
                and str(repo['name']) == options.name
                and str(repo['owner']) == options.owner
                and str(repo['changeset_revision']) == options.changeset_revision)
        if same:
            repo_id = repo['id']
            break
    if repo_id:
        repair_url = '%s%s' % (galaxy_base, '/api/tool_shed_repositories/%s/repair_repository_revision' % str(repo_id))
        submit(options.api, repair_url, payload)
    else:
        print("Invalid tool_shed / name / owner / changeset_revision.")
def main(options):
    """Collect all user data and repair the installed tool shed repository
    revision via the Galaxy API.
    """
    api_key = options.api
    base_galaxy_url = options.local_url.rstrip("/")
    base_tool_shed_url = options.tool_shed_url.rstrip("/")
    cleaned_tool_shed_url = clean_url(base_tool_shed_url)
    installed_tool_shed_repositories_url = "%s/api/%s" % (base_galaxy_url, "tool_shed_repositories")
    # Payload identifying the repository revision to repair.
    data = {}
    data["tool_shed_url"] = cleaned_tool_shed_url
    data["name"] = options.name
    data["owner"] = options.owner
    data["changeset_revision"] = options.changeset_revision
    tool_shed_repository_id = None
    installed_tool_shed_repositories = display(api_key, installed_tool_shed_repositories_url, return_formatted=False)
    for installed_tool_shed_repository in installed_tool_shed_repositories:
        tool_shed = str(installed_tool_shed_repository["tool_shed"])
        name = str(installed_tool_shed_repository["name"])
        owner = str(installed_tool_shed_repository["owner"])
        changeset_revision = str(installed_tool_shed_repository["changeset_revision"])
        if (
            tool_shed == cleaned_tool_shed_url
            and name == options.name
            and owner == options.owner
            and changeset_revision == options.changeset_revision
        ):
            tool_shed_repository_id = installed_tool_shed_repository["id"]
            break
    if tool_shed_repository_id:
        url = "%s%s" % (
            base_galaxy_url,
            "/api/tool_shed_repositories/%s/repair_repository_revision" % str(tool_shed_repository_id),
        )
        submit(options.api, url, data)
    else:
        # BUG FIX: this was a Python 2 print statement — a SyntaxError in an
        # otherwise Python 3 formatted function.
        print("Invalid tool_shed / name / owner / changeset_revision.")
Ejemplo n.º 13
0
def main(options):
    """Collect all user data and repair the installed repository revision via
    the Galaxy API.
    """
    api_key = options.api
    base_galaxy_url = options.local_url.rstrip('/')
    cleaned_tool_shed_url = clean_url(options.tool_shed_url.rstrip('/'))
    repositories_url = '%s/api/%s' % (base_galaxy_url, 'tool_shed_repositories')
    data = {
        'tool_shed_url': cleaned_tool_shed_url,
        'name': options.name,
        'owner': options.owner,
        'changeset_revision': options.changeset_revision,
    }
    installed = display(api_key, repositories_url, return_formatted=False)
    tool_shed_repository_id = None
    for repo in installed:
        # Match on all four identifying fields.
        if (str(repo['tool_shed']) == cleaned_tool_shed_url
                and str(repo['name']) == options.name
                and str(repo['owner']) == options.owner
                and str(repo['changeset_revision']) == options.changeset_revision):
            tool_shed_repository_id = repo['id']
            break
    if not tool_shed_repository_id:
        print("Invalid tool_shed / name / owner / changeset_revision.")
        return
    url = '%s%s' % (base_galaxy_url,
                    '/api/tool_shed_repositories/%s/repair_repository_revision' %
                    str(tool_shed_repository_id))
    submit(options.api, url, data)
Ejemplo n.º 14
0
def getGalaxyLibrary(libraryName):  
    libs = display(_api_key, _api_url + '/api/libraries', return_formatted=False)
    for library in libs:
        if library['name'] == libraryName:
            return library
        
    print 'Unable to locate library %s in Galaxy' % libraryName
    sys.exit(1)
Ejemplo n.º 15
0
def getGalaxyFolderFromLibrary(library, folderPath):
    library_contents = display(_api_key, _api_url + "/api/libraries/%s/contents" % library['id'], return_formatted = False)
    for entry in library_contents:
        if entry['name'] == folderPath:
            return entry

    print 'Unable to locate folder %s in library %s' % (folderPath, library['name'])
    sys.exit(1)
Ejemplo n.º 16
0
def read_libs():
  global libs
  global projects 
  libs = display(api_key, api_url + 'libraries', return_formatted=False)
  print libs
  print
  print
  projects = [ x[ 'name' ] for x in libs ]
  print "projects " , projects
Ejemplo n.º 17
0
def read_libs():
    global libs
    global projects
    libs = display(api_key, api_url + 'libraries', return_formatted=False)
    print libs
    print
    print
    projects = [x['name'] for x in libs]
    print "projects ", projects
Ejemplo n.º 18
0
def main():
    print 'Galaxy API URL: %s' % _api_url
    print 'Galaxy API Key: %s' % _api_key
    print 'File: %s' % _file
    print 'Library: %s' % _libraryPath
    print ''
    if not os.path.isfile(_file):
        print 'Unable to location file'
        sys.exit(1)

    fields = split(_libraryPath, '/')
    libraryName = fields[0]
    if len(fields) == 1:
        folderPath = '/'
    else:
        sep = '/'
        folderPath = '/' + sep.join(fields[1:])
    
    library = getGalaxyLibrary(libraryName)
    folder = getGalaxyFolderFromLibrary(library, folderPath)
         
    if isFileInGalaxyFolder(folder, _file):
        print 'File already exists in Galaxy library'
        sys.exit(1)
    
    print 'Adding %s to %s' % (_file, _libraryPath)
    data = {}
    data['folder_id'] = folder['id']
    data['create_type'] = 'file'    
    data['file_type'] = 'auto'
    data['dbkey'] = ''
    data['upload_option'] = 'upload_paths'
    data['filesystem_paths'] = _file
    data['link_data_only'] = 'link_to_files'
    
    libset = submit(_api_key, _api_url + "/api/libraries/%s/contents" % library['id'], data, return_formatted = False)
    for lib in libset:
        file_metadata = display(_api_key, _api_url + '/api/libraries/datasets/%s' % lib['id'], return_formatted = False)
        while file_metadata['state'] == 'running' or file_metadata['state'] == 'queued':
            print 'State is %s.  Sleep for 5 seconds.' % file_metadata['state'] 
            time.sleep(5)
            file_metadata = display(_api_key, _api_url + '/api/libraries/datasets/%s' % lib['id'], return_formatted = False)

        print 'State is %s' % file_metadata['state']
Ejemplo n.º 19
0
def main(options):
    """Collect all user data and export the repository via the Tool Shed API."""
    base_tool_shed_url = options.tool_shed_url.rstrip('/')
    repositories_url = '%s/api/repositories' % base_tool_shed_url
    # Request payload describing the repository revision to export.
    data = {}
    data['tool_shed_url'] = base_tool_shed_url
    data['name'] = options.name
    data['owner'] = options.owner
    data['changeset_revision'] = options.changeset_revision
    data[
        'export_repository_dependencies'] = options.export_repository_dependencies
    # Resolve the repository id by matching name and owner in the listing.
    repository_id = None
    repositories = display(repositories_url,
                           api_key=None,
                           return_formatted=False)
    for repository in repositories:
        name = str(repository['name'])
        owner = str(repository['owner'])
        if name == options.name and owner == options.owner:
            repository_id = repository['id']
            break
    if repository_id:
        # We'll currently support only gzip-compressed tar archives.
        file_type = 'gz'
        url = '%s%s' % (base_tool_shed_url,
                        '/api/repository_revisions/%s/export' %
                        str(repository_id))
        export_dict = submit(url, data, return_formatted=False)
        error_messages = export_dict['error_messages']
        if error_messages:
            print "Error attempting to export revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner, ":\n", error_messages
        else:
            export_repository_dependencies = string_as_bool(
                options.export_repository_dependencies)
            # Build the conventional local filename for the exported archive.
            repositories_archive_filename = \
                generate_repository_archive_filename( base_tool_shed_url,
                                                      options.name,
                                                      options.owner,
                                                      options.changeset_revision,
                                                      file_type,
                                                      export_repository_dependencies=export_repository_dependencies,
                                                      use_tmp_archive_dir=False )
            download_url = export_dict['download_url']
            download_dir = os.path.abspath(options.download_dir)
            file_path = os.path.join(download_dir,
                                     repositories_archive_filename)
            # Stream the archive to disk in CHUNK_SIZE pieces.
            src = requests.get(download_url, stream=True)
            with open(file_path, 'wb') as dst:
                for chunk in src.iter_content(chunk_size=CHUNK_SIZE):
                    if chunk:
                        dst.write(chunk)
            print "Successfully exported revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner
            print "to location ", file_path
    else:
        print "Invalid tool_shed / name / owner ."
Ejemplo n.º 20
0
def read_folders():
  """Call the galaxy web server via their API, and get a list of server contents.  Only keep the folders, and make a hash
     that maps folders to their API keys, so we can import simply"""
  for lib in libs:
    print lib
    libname = lib[ 'name' ]
    id = lib[ 'id' ]
    l2 = display(api_key, api_url + 'libraries/%s/contents' % id , return_formatted=False )
    for l in l2:
      if l['type'] == 'folder':
        folders[ libname + l[ 'name' ] ] = l[ 'id' ]
Ejemplo n.º 21
0
def main():
    print 'Galaxy API URL: %s' % api_url
    print 'Galaxy API Key: %s' % api_key
    print ''
    print 'Libraries'
    print '---------'
    i = 1;
    libs = display(api_key, api_url + '/api/libraries', return_formatted=False)
    for library in libs:
        # Print the name/description of the library
        if len(library['description']) != 0:
            print '%d. %s - %s' % (i, library['name'], library['description'])
        else:
            print '%d. %s' % (i, library['name'])
        if _debug:
            print '(%s)' % library

        # Print the Library Contents
        # Galaxy returns a list of files, folders, and files/folders within folders.  There is no tree of elements, just a list.
        library_contents = display(api_key, api_url + "/api/libraries/%s/contents" % library['id'], return_formatted = False) 
        for entry in library_contents:
            #if entry['name'] == '/':
            #    continue
            if entry['type'] == 'folder':
                print '   %s' % entry['name']
                if _debug:
                    print '   (%s)' % entry
            if entry['type'] == 'file':
                fields = split(entry['name'], '/')
                # First entry is always a /, so remove this by shifting off the first element
                fields = fields[1:]
                spaces = ''
                for i in range(len(fields)):
                    spaces += '   '
                    
                print '%s%s' % (spaces, fields[len(fields) - 1])
                if _debug:
                    print '%s(%s)' % (spaces, entry)
        print ''
        i+=1
Ejemplo n.º 22
0
def main(api_key, api_url, in_folder, data_library):
    # Find/Create data library with the above name.  Assume we're putting datasets in the root folder '/'
    libs = display(api_key, api_url + "libraries", return_formatted=False)
    library_id = None
    for library in libs:
        if library["name"] == data_library:
            library_id = library["id"]
    if not library_id:
        lib_create_data = {"name": data_library}
        library = submit(api_key, api_url + "libraries", lib_create_data, return_formatted=False)
        library_id = library[0]["id"]
    folders = display(api_key, api_url + "libraries/%s/contents" % library_id, return_formatted=False)
    for f in folders:
        if f["name"] == "/":
            library_folder_id = f["id"]
    if not library_id or not library_folder_id:
        print "Failure to configure library destination."
        sys.exit(1)

    for fname in os.listdir(in_folder):
        fullpath = os.path.join(in_folder, fname)
        if os.path.isfile(fullpath) and os.path.exists(fullpath + ".json"):
            print "Loading", fullpath
            data = {}
            data["folder_id"] = library_folder_id
            data["file_type"] = "auto"
            data["dbkey"] = ""
            data["upload_option"] = "upload_paths"
            data["filesystem_paths"] = fullpath
            data["create_type"] = "file"

            data["link_data_only"] = "link_to_files"

            handle = open(fullpath + ".json")
            smeta = handle.read()
            handle.close()
            data["extended_metadata"] = json.loads(smeta)
            libset = submit(api_key, api_url + "libraries/%s/contents" % library_id, data, return_formatted=True)
            print libset
Ejemplo n.º 23
0
def read_folders():
    """Call the galaxy web server via their API, and get a list of server contents.  Only keep the folders, and make a hash
     that maps folders to their API keys, so we can import simply"""
    for lib in libs:
        print lib
        libname = lib['name']
        id = lib['id']
        l2 = display(api_key,
                     api_url + 'libraries/%s/contents' % id,
                     return_formatted=False)
        for l in l2:
            if l['type'] == 'folder':
                folders[libname + l['name']] = l['id']
Ejemplo n.º 24
0
def main(api_key, api_url, in_folder, data_library):
    # Find/Create data library with the above name.  Assume we're putting datasets in the root folder '/'
    libs = display(api_key, api_url + 'libraries', return_formatted=False)
    library_id = None
    for library in libs:
        if library['name'] == data_library:
            library_id = library['id']
    if not library_id:
        lib_create_data = {'name':data_library}
        library = submit(api_key, api_url + 'libraries', lib_create_data, return_formatted=False)
        library_id = library[0]['id']
    folders = display(api_key, api_url + "libraries/%s/contents" % library_id, return_formatted = False)
    for f in folders:
        if f['name'] == "/":
            library_folder_id = f['id']
    if not library_id or not library_folder_id:
        print "Failure to configure library destination."
        sys.exit(1)

    for fname in os.listdir(in_folder):
        fullpath = os.path.join(in_folder, fname)
        if os.path.isfile(fullpath) and os.path.exists(fullpath + ".json"):
            print "Loading", fullpath
            data = {}
            data['folder_id'] = library_folder_id
            data['file_type'] = 'auto'
            data['dbkey'] = ''
            data['upload_option'] = 'upload_paths'
            data['filesystem_paths'] = fullpath
            data['create_type'] = 'file'

            data['link_data_only'] = 'link_to_files'

            handle = open( fullpath + ".json" )
            smeta = handle.read()
            handle.close()
            data['extended_metadata'] = json.loads(smeta)
            libset = submit(api_key, api_url + "libraries/%s/contents" % library_id, data, return_formatted = True)
            print libset
Ejemplo n.º 25
0
def main(options):
    """Collect all user data and export the repository via the Tool Shed API.

    Resolves the repository id by name/owner, requests a gzip-compressed
    export of ``options.changeset_revision`` and streams the resulting
    archive into ``options.download_dir``.
    """
    base_tool_shed_url = options.tool_shed_url.rstrip('/')
    repositories_url = '%s/api/repositories' % base_tool_shed_url
    data = {}
    data['tool_shed_url'] = base_tool_shed_url
    data['name'] = options.name
    data['owner'] = options.owner
    data['changeset_revision'] = options.changeset_revision
    data['export_repository_dependencies'] = options.export_repository_dependencies
    repository_id = None
    repositories = display(repositories_url, api_key=None, return_formatted=False)
    for repository in repositories:
        name = str(repository['name'])
        owner = str(repository['owner'])
        if name == options.name and owner == options.owner:
            repository_id = repository['id']
            break
    if repository_id:
        # We'll currently support only gzip-compressed tar archives.
        file_type = 'gz'
        url = '%s%s' % (base_tool_shed_url, '/api/repository_revisions/%s/export' % str(repository_id))
        export_dict = submit(url, data, return_formatted=False)
        error_messages = export_dict['error_messages']
        if error_messages:
            print("Error attempting to export revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner, ":\n", error_messages)
        else:
            export_repository_dependencies = string_as_bool(options.export_repository_dependencies)
            repositories_archive_filename = \
                generate_repository_archive_filename(base_tool_shed_url,
                                                     options.name,
                                                     options.owner,
                                                     options.changeset_revision,
                                                     file_type,
                                                     export_repository_dependencies=export_repository_dependencies,
                                                     use_tmp_archive_dir=False)
            download_url = export_dict['download_url']
            download_dir = os.path.abspath(options.download_dir)
            file_path = os.path.join(download_dir, repositories_archive_filename)
            # Stream the archive to disk; the context manager releases the
            # HTTP connection even if the download fails midway (the original
            # never closed the response).
            with requests.get(download_url, stream=True) as src:
                # Fail loudly rather than saving an HTTP error page as the archive.
                src.raise_for_status()
                with open(file_path, 'wb') as dst:
                    for chunk in src.iter_content(chunk_size=CHUNK_SIZE):
                        if chunk:
                            dst.write(chunk)
            print("Successfully exported revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner)
            print("to location ", file_path)
    else:
        print("Invalid tool_shed / name / owner .")
Ejemplo n.º 26
0
def get_workflows(format, api_key, api_url):
    """Return the id of the workflow appropriate for the data *format*.

    'wiff' data maps to the 'QTOF5600' workflow; every other format maps to
    'General Workflow'.  Returns '' when no workflow with that name exists.

    :param format: Raw-data file format (e.g. 'wiff').
    :param api_key: Galaxy API key.
    :param api_url: Base Galaxy API URL.
    """
    workflow_id = ''
    wname = 'General Workflow'
    if format == 'wiff':
        wname = 'QTOF5600'
    # Removed long-dead commented-out code that built a workflow_list dict.
    workflows = display(api_key, api_url + 'workflows', return_formatted=False)
    for workflow in workflows:
        if wname == workflow['name']:
            workflow_id = workflow['id']
            break
    return workflow_id
Ejemplo n.º 27
0
def main():
    print 'Galaxy API URL: %s' % api_url
    print 'Galaxy API Key: %s' % api_key
    print 'Library to create: %s' % library_to_create
    print ''

    libs = display(api_key, api_url + '/api/libraries', return_formatted=False)
    for library in libs:
        if library['name'] == library_to_create:
            print 'Library already exists.'
            sys.exit(1)

    data = {}
    data['name'] = library_to_create
    
    result = submit(api_key, api_url + "/api/libraries", data, return_formatted = False)
    if not result['id'] == 0:
        print 'Library created.'
Ejemplo n.º 28
0
def reload_tool_data(ctx, tool_id):
    """Ask the Galaxy server to reload the tool-data table for *tool_id*."""
    endpoint = ctx.obj['galaxy_url'] + '/tool_data/{}/reload'.format(tool_id)
    print(endpoint)
    common.display(ctx.obj['api_key'], endpoint)
Ejemplo n.º 29
0
#!/usr/bin/env python

import os
import sys
from urllib.error import URLError

from common import display  # noqa: I100,I202

# Minimal CLI shim: forward the "key url" positional arguments to display().
try:
    display(*sys.argv[1:3])
except TypeError as err:
    # Wrong number of CLI arguments unpacked into display() -> show usage.
    print('usage: %s key url' % os.path.basename(sys.argv[0]))
    print(err)
    sys.exit(1)
except URLError as err:
    print(err)
    sys.exit(1)
Ejemplo n.º 30
0
def display_diagnostics(ctx, tool_id):
    """Fetch and display the Galaxy diagnostics report for *tool_id*."""
    endpoint = ctx.obj['galaxy_url'] + '/tools/{}/diagnostics'.format(tool_id)
    print(endpoint)
    common.display(ctx.obj['api_key'], endpoint)
Ejemplo n.º 31
0
def __main__():
    """Clone the Galaxy library datasets of a hard-coded source experiment
    into a hard-coded target experiment by symlinking the underlying files
    into the target's raw-file folder."""
    # Prevent this script from running again while another instance is active.
    check_self_running()

    options = getCmd()

    api_url = ConfigSectionMap("Firmiana")['address'] + 'api/'
    base = os.path.join(GALAXY_ROOT, 'database/files/raw_files')
    base_bprc = ConfigSectionMap("Galaxy")[
        'bprc_nas']  #'/usr/local/galaxyDATA01/bprc_dingchen'
    base_ftp_tmp = ConfigSectionMap("Galaxy")[
        'ftp']  #'/usr/local/galaxyDATA01/data/ftpdata'

    # Marker file presumably proves the NFS share is mounted; abort otherwise.
    if not os.path.isfile('%s/database/files/NFS_192.168.12.89' % GALAXY_ROOT):
        stop_err('NFS folder error!')
    """ all about this experiment """
    exp_info = {}

    # NOTE(review): experiment names are hard-coded — parameterize if reused.
    exp_info['source_ExpName'] = 'Exp001608'
    exp_info['target_ExpName'] = 'Exp002084'

    experiment = getExp(exp_info['target_ExpName'])
    # Use the first record returned for the target experiment.
    experiment = experiment[0]

    exp_info['api_url'] = api_url
    exp_info['eid'] = experiment[0]
    exp_info['e_name'] = exp_info[
        'target_ExpName']  #'Exp001608' #experiment[1]
    exp_info['priority'] = experiment[2]
    exp_info['instru_full'] = experiment[3]
    exp_info['species'] = experiment[4]
    exp_info['num_fra'] = experiment[5]
    exp_info['num_rep'] = experiment[6]

    exp_info['check_flag'] = 0

    #Prevent exp being searched more than once
    #expStart(conn,meta,eid,1)
    logLine = ''
    dividline = '-' * 30

    # expPrepare fills exp_info with user/email/API-key/workflow details.
    (ok, log1) = expPrepare(base, exp_info)
    if not ok:
        print 'not ok'
        exit(0)

    logLine += '%s\n*** Time: %s ***\n' % (dividline, date_now())
    logLine += 'Exp Name: %s | Email: %s | API_key: %s\n' % (
        exp_info['target_ExpName'], exp_info['email'], exp_info['api_key'])
    logLine += log1
    logPrint(logLine)

    ##################################################################

    (library_id, lib_folder_id, lib_exist) = getLibFolderID(exp_info)
    if library_id == -1 or lib_folder_id == -1:
        print 'library_id == -1 or lib_folder_id == -1:'
        exit(0)

    status = 1
    path_list = []
    dict_file = {}
    #####################  Upload to libraries  #####################
    #===================================================================
    # Be care whether uploading to library is successful
    #===================================================================
    target_Exp_folder, x1, x2 = getFilePath(base, base_bprc, base_ftp_tmp,
                                            exp_info)

    if lib_exist == 1:
        (dict_file, fname_count) = get_LibDataList(exp_info, library_id)

        # For each dataset of the source library: resolve its on-disk path
        # through the Galaxy API, then symlink it into the target folder
        # under the filename rewritten for the target experiment.
        for fname, v in dict_file.iteritems():
            content_id = v[0]['url'].split('/')[-1]
            res = display(exp_info['api_key'],
                          exp_info['api_url'] + "libraries/%s/contents/%s" %
                          (library_id, content_id),
                          return_formatted=False)
            fPath = res['file_name']
            # Swap the experiment number embedded in the filename,
            # e.g. '_E001608_F' -> '_E002084_F'.
            oldExpStr = '_E' + exp_info['source_ExpName'][3:] + '_F'
            newExpStr = '_E' + exp_info['target_ExpName'][3:] + '_F'
            targetLink = os.path.join(target_Exp_folder,
                                      fname.replace(oldExpStr, newExpStr))
            #print fPath
            #print targetLink
            os.symlink(fPath, targetLink)
            #exit(0)

    #################################################################
    # Report how many datasets were processed.
    ii = len(dict_file)
    print ii

    exit(0)
Ejemplo n.º 32
0
def expPrepare(base, exp_info):
    """Resolve user, workflow and bookkeeping fields for an experiment and
    store them into *exp_info* in place.

    :param base: Root directory under which per-user upload folders live.
    :param exp_info: Mutable experiment dict; on success it is augmented with
        user/email/API-key/workflow/format fields.
    :returns: ``(1, log_line)`` on success, ``(0, '')`` on any failure.
    """
    e_name = exp_info['e_name']
    api_url = exp_info['api_url']
    (num_fra, num_rep) = exp_info['num_fra'], exp_info['num_rep']
    # NOTE(review): the user id is hard-coded instead of looked up via
    # getUserID (the lookup existed but was commented out).
    user_id = 2  #[email protected]
    user = '******'
    email = getEmail(conn, meta, user_id)
    email_folder = os.path.join(base, email)
    if not os.path.exists(email_folder):
        os.makedirs(email_folder)
    api_key = getAPIkey(conn, meta, user_id)
    if api_key is None:  # identity check; was "== None"
        return (0, '')
    (instru_folder, format,
     workflow_key) = get_instru_format_wf(conn, meta, exp_info)
    # The workflow key from the instrument lookup is overridden by a fresh
    # lookup against the Galaxy API.
    workflow_key = get_workflows(format, api_key, api_url)
    if workflow_key == '':
        logPrint('Error:No workflow_key found!')
        return (0, '')

    workflow = display(api_key,
                       api_url + 'workflows/%s' % workflow_key,
                       return_formatted=False)
    if not workflow:
        logPrint("Error:Workflow [%s] not found." % workflow_key)
        return (0, '')

    exp_info['e_num'] = e_name[3:]
    exp_info['uid'] = user_id
    exp_info['user'] = user
    exp_info['email'] = email
    exp_info['api_key'] = api_key
    exp_info['instru_folder'] = instru_folder
    exp_info['workflow_key'] = workflow_key
    exp_info['workflow'] = workflow
    exp_info['format'] = format

    exp_info['source_lib_folder_name'] = 'API imported %s' % exp_info[
        'source_ExpName']
    exp_info['target_lib_folder_name'] = 'API imported %s' % exp_info[
        'target_ExpName']

    # 'wiff' runs count double — presumably one .wiff plus one .wiff.scan
    # file per fraction; TODO confirm against the acquisition pipeline.
    exp_info['num_files'] = int(num_rep) * int(
        num_fra) if format != 'wiff' else int(num_rep) * int(num_fra) * 2
    log_line = 'Species: %s | Instrument: %s | Workflow_key: %s' % (
        exp_info['species'], exp_info['instru_full'], workflow_key)

    return (1, log_line)
Ejemplo n.º 33
0
def list_resolvers(ctx):
    """Fetch and display the Galaxy server's dependency resolvers."""
    endpoint = ctx.obj['galaxy_url'] + '/dependency_resolvers'
    print(endpoint)
    common.display(ctx.obj['api_key'], endpoint)
Ejemplo n.º 34
0
#!/usr/bin/env python
from __future__ import print_function

import os
import sys

from six.moves.urllib.error import URLError

from common import display

try:
    display( *sys.argv[1:3] )
except TypeError as e:
    print('usage: %s key url' % os.path.basename( sys.argv[0] ))
    print(e)
    sys.exit( 1 )
except URLError as e:
    print(e)
    sys.exit( 1 )
Ejemplo n.º 35
0
def list_tool_data(ctx):
    """Fetch and display the Galaxy server's tool-data tables."""
    endpoint = ctx.obj['galaxy_url'] + '/tool_data'
    print(endpoint)
    common.display(ctx.obj['api_key'], endpoint)
Ejemplo n.º 36
0
def display_result(*argv):
    """Call ``display`` with the first two positional args and return the
    raw (unformatted) result."""
    key_and_url = argv[0:2]
    return display(*key_and_url, return_formatted=False)