Example #1
    def _own_handle(self, genome_data, handle_property):
        """
        _own_handle: check that handle_property points to a Shock node owned by the calling user
        """

        log('start checking handle {} ownership'.format(handle_property))

        if handle_property in genome_data:
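            # resolve the handle id to its Shock node so node ownership can be checked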
            handle_id = genome_data[handle_property]
            hs = HandleService(self.handle_url, token=self.token)
            handles = hs.hids_to_handles([handle_id])
            shock_id = handles[0]['id']

            # Copy from DataFileUtil.own_shock_node implementation:
            header = {'Authorization': 'Oauth {}'.format(self.token)}
            res = requests.get(self.shock_url + '/node/' + shock_id +
                               '/acl/?verbosity=full',
                               headers=header,
                               allow_redirects=True)
            self._check_shock_response(
                res, 'Error getting ACLs for Shock node {}: '.format(shock_id))
            owner = res.json()['data']['owner']['username']
            user_id = self.auth_client.get_user(self.token)

            if owner != user_id:
                log('start copying node to owner: {}'.format(user_id))
                dfu_shock = self.dfu.copy_shock_node({
                    'shock_id': shock_id,
                    'make_handle': True
                })
                handle_id = dfu_shock['handle']['hid']
                genome_data[handle_property] = handle_id
Example #2
    def own_handle(self, genome, handle_property, ctx):
        if handle_property not in genome:
            return
        token = ctx['token']
        handle_id = genome[handle_property]
        hs = HandleService(self.handle_url, token=token)
        handles = hs.hids_to_handles([handle_id])
        shock_id = handles[0]['id']

        ## Copy from DataFileUtil.own_shock_node implementation:
        header = {'Authorization': 'Oauth {}'.format(token)}
        res = requests.get(self.shock_url + '/node/' + shock_id +
                           '/acl/?verbosity=full',
                           headers=header, allow_redirects=True)
        self.check_shock_response(
            res, 'Error getting ACLs for Shock node {}: '.format(shock_id))
        owner = res.json()['data']['owner']['username']
        if owner != ctx['user_id']:
            shock_id = self.copy_shock_node(ctx, shock_id)
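            # the copied node is owned by the caller; fetch its attributes and register a fresh handle for it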
            r = requests.get(self.shock_url + '/node/' + shock_id,
                             headers=header, allow_redirects=True)
            errtxt = ('Error downloading attributes from shock ' +
                      'node {}: ').format(shock_id)
            self.check_shock_response(r, errtxt)
            shock_data = r.json()['data']
            handle = {'id': shock_data['id'],
                      'type': 'shock',
                      'url': self.shock_url,
                      'file_name': shock_data['file']['name'],
                      'remote_md5': shock_data['file']['checksum']['md5']
                      }
            handle_id = hs.persist_handle(handle)
            genome[handle_property] = handle_id
Example #3
    def __init__(self, url=None, timeout=30 * 60, user_id=None,
                 password=None, token=None, ignore_authrc=False,
                 trust_all_ssl_certificates=False):

        self.url = url
        self.timeout = int(timeout)
        self._headers = dict()
        self.trust_all_ssl_certificates = trust_all_ssl_certificates
        self.token = token
        # token overrides user_id and password
        if token is not None: pass
        elif user_id is not None and password is not None:
            self.token = _get_token(user_id, password)
        elif 'KB_AUTH_TOKEN' in _os.environ:
            self.token = _os.environ.get('KB_AUTH_TOKEN')
        elif not ignore_authrc:
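            # fall back to credentials stored in the user's ini/rc auth files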
            authdata = _read_inifile()
            if authdata is None:
                authdata = _read_rcfile()
            if authdata is not None:
                if authdata.get('token') is not None:
                    self.token = authdata['token']
                elif(authdata.get('user_id') is not None
                     and authdata.get('password') is not None):
                    self.token = _get_token(
                        authdata['user_id'], authdata['password'])
        if self.timeout < 1:
            raise ValueError('Timeout value must be at least 1 second')

        self.dsi = AbstractHandle(url=url, token=self.token, trust_all_ssl_certificates=trust_all_ssl_certificates)
Example #4
def main(argv):
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, prog='trns_transform_KBaseAssembly.FQ-to-KBaseAssembly.SingleEndLibrary', epilog=desc3)
    parser.add_argument('-s', '--shock_url', help='Shock url', action='store', dest='shock_url', default='https://kbase.us/services/shock-api')
    parser.add_argument('-n', '--hndl_svc_url', help='Handle service url', action='store', dest='hndl_url', default='https://kbase.us/services/handle_service')
    parser.add_argument('-i', '--in_id', help='Input Shock node id', action='store', dest='inobj_id', default=None, required=False)
    parser.add_argument('-f', '--file_name', help='File Name', action='store', dest='file_name', default=None, required=False)
    parser.add_argument('-d', '--hid', help='Handle id', action='store', dest='hid', default=None, required=False)
    parser.add_argument('-o', '--out_file_name', help='Output file name', action='store', dest='out_fn', default=None, required=True)
    usage = parser.format_usage()
    parser.description = desc1 + ' ' + usage + desc2
    parser.usage = argparse.SUPPRESS
    args = parser.parse_args()

    if args.inobj_id is None and args.hid is None:
      print >> sys.stderr, parser.description
      print >> sys.stderr, "Need to provide either shock node id or handle id"
      exit(1)
    
    kb_token = os.environ.get('KB_AUTH_TOKEN')
    hs = AbstractHandle(url=args.hndl_url, token = kb_token)
    
    if args.hid is None:
      try:
        args.hid = hs.persist_handle({ "id" : args.inobj_id , "type" : "shock" , "url" : args.shock_url})
      except:
        try:
          args.hid=hs.ids_to_handles([args.inobj_id])[0]["hid"]
        except:
          traceback.print_exc(file=sys.stderr)
          print >> sys.stderr, "Please provide handle id.\nThe input shock node id {} is already registered or could not be registered".format(args.inobj_id)
          exit(3)
    
    hds = hs.hids_to_handles([args.hid])

    if len(hds) <= 0: 
      print >> sys.stderr, 'Could not register a new handle with shock node id {} or wrong input handle id'.format(args.inobj_id)
      exit(2)

    ret = { "handle" : hds[0] }
    
    of = open(args.out_fn, "w")
    of.write(to_JSON(ret))
    of.close()
Example #5
def getHandles(logger,
               shock_url = "https://kbase.us/services/shock-api/",
               handle_url = "https://kbase.us/services/handle_service/",
               shock_ids = None,
               handle_ids = None,
               token = None):
    
    hs = AbstractHandle(url=handle_url, token=token)
    
    handles = list()
    if shock_ids is not None:
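        # Shock ids supplied: look up each node and register (or reuse) a handle for it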
        header = dict()
        header["Authorization"] = "Oauth {0}".format(token)
    
        for sid in shock_ids:
            info = None
            try:
                logger.info("Found shock id {0}, retrieving information about the data.".format(sid))

                response = requests.get("{0}/node/{1}".format(shock_url, sid), headers=header, verify=True)
                info = response.json()["data"]
            except:
                logger.error("There was an error retrieving information about the shock node id {0} from url {1}".format(sid, shock_url))
            try:
                logger.info("Retrieving a handle id for the data.")
                handle_id = hs.persist_handle({"id" : sid, 
                                           "type" : "shock",
                                           "url" : shock_url,
                                           "file_name": info["file"]["name"],
                                           "remote_md5": info["file"]["md5"]})
            except:
                try:
                    handle_id = hs.ids_to_handles([sid])[0]["hid"]
                    single_handle = hs.hids_to_handles([handle_id])
                
                    assert len(single_handle) != 0
                    
                    if info is not None:
                        single_handle[0]["file_name"] = info["file"]["name"]
                        single_handle[0]["remote_md5"] = info["file"]["md5"]
                        print >> sys.stderr, single_handle
                    
                    handles.append(single_handle[0])
                except:
                    logger.error("The input shock node id {} is already registered or could not be registered".format(sid))
                    raise
    elif handle_ids is not None:
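        # only handle ids supplied: resolve each one directly via the handle service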
        for hid in handle_ids:
            try:
                single_handle = hs.hids_to_handles([hid])

                assert len(single_handle) != 0
                
                handles.append(single_handle[0])
            except:
                logger.error("Invalid handle id {0}".format(hid))
                raise
    
    return handles
Example #6
class HandleService(object):
    def __init__(self, url=None, timeout=30 * 60, user_id=None,
                 password=None, token=None, ignore_authrc=False,
                 trust_all_ssl_certificates=False):

        self.url = url
        self.timeout = int(timeout)
        self._headers = dict()
        self.trust_all_ssl_certificates = trust_all_ssl_certificates
        self.token = token
        # token overrides user_id and password
        if token is not None: pass
        elif user_id is not None and password is not None:
            self.token = _get_token(user_id, password)
        elif 'KB_AUTH_TOKEN' in _os.environ:
            self.token = _os.environ.get('KB_AUTH_TOKEN')
        elif not ignore_authrc:
            authdata = _read_inifile()
            if authdata is None:
                authdata = _read_rcfile()
            if authdata is not None:
                if authdata.get('token') is not None:
                    self.token = authdata['token']
                elif(authdata.get('user_id') is not None
                     and authdata.get('password') is not None):
                    self.token = _get_token(
                        authdata['user_id'], authdata['password'])
        if self.timeout < 1:
            raise ValueError('Timeout value must be at least 1 second')

        self.dsi = AbstractHandle(url=url, token=self.token, trust_all_ssl_certificates=trust_all_ssl_certificates)



    def upload(self, infile):

        handle = self.dsi.new_handle()
        url = "{}/node/{}".format(handle["url"], handle["id"])
        ref_data = {}
        try:
            ref_data = _upload_file_to_shock(url, filePath=infile, token=self.token)
        except:
            raise

        remote_md5 = ref_data["file"]["checksum"]["md5"]
        # remote_sha1 = ref_data["file"]["checksum"]["sha1"]  # a Shock PUT does not return remote_sha1

        if remote_md5 is None:
            raise Exception("upload appears to have failed: no md5 returned from the remote server")

        handle["remote_md5"] = remote_md5
        # handle["remote_sha1"] = remote_sha1
        handle["file_name"] = _os.path.basename(infile)

        self.dsi.persist_handle(handle)
        return handle


    def download (self, handle, outfile):

        if not isinstance(handle, dict): raise Exception("handle is not a dictionary")
        if "id" not in handle: raise Exception("no id in handle")
        if "url" not in handle: raise Exception("no url in handle")
        if outfile is None: raise Exception("outfile is not defined")
        raise Exception("Not implemented yet")

    def new_handle(self, *arg):
        return self.dsi.new_handle(*arg)

    def localize_handle(self, *arg):
        return self.dsi.localize_handle(*arg)

    def initialize_handle (self, *arg):
        return self.dsi.initialize_handle (*arg)

    def persist_handle(self, *arg):
        return self.dsi.persist_handle (*arg)

    def upload_metadata(self, handle, infile):
        raise Exception("Not implemented yet")

    def download_metadata (self, handle, outfile):
        raise Exception("Not implemented yet")

    def list_handles (self, *arg):
        return self.dsi.list_handles (*arg)

    def are_readable (self, *arg):
        return self.dsi.are_readable(*arg)

    def is_readable(self, *arg):
        return self.dsi.is_readable(*arg)

    def hids_to_handles(self, *arg):
        return self.dsi.hids_to_handles(*arg)

    def ids_to_handles(self, *arg):
        return self.dsi.ids_to_handles(*arg)
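
A minimal usage sketch of the HandleService class above, assuming a valid KBase token is available in the KB_AUTH_TOKEN environment variable, the class is importable, and the handle service is reachable; the URL, file path, and handle id below are illustrative placeholders rather than values taken from the examples.

import os

hs = HandleService(url='https://kbase.us/services/handle_service',
                   token=os.environ.get('KB_AUTH_TOKEN'))

# upload() creates a new handle, PUTs the file to the corresponding Shock node,
# fills in file_name and remote_md5, persists the handle, and returns it
handle = hs.upload('/tmp/example.fastq')

# handle ids obtained elsewhere can be resolved back to full handle records
records = hs.hids_to_handles(['KBH_12345'])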
Example #7
    def getPairedEndLibInfo(self):
        if hasattr(self.__class__, 'pairedEndLibInfo'):
            return self.__class__.pairedEndLibInfo
        # 1) upload files to shock
        token = self.ctx['token']
        forward_shock_file = self.upload_file_to_shock(
            shock_service_url=self.shockURL,
            filePath='data/small.forward.fq',
            token=token)
        reverse_shock_file = self.upload_file_to_shock(
            shock_service_url=self.shockURL,
            filePath='data/small.reverse.fq',
            token=token)
        #pprint(forward_shock_file)
        #pprint(reverse_shock_file)

        # 2) create handle
        hs = HandleService(url=self.handleURL, token=token)
        handf = {
            'id': forward_shock_file['id'],
            'type': 'shock',
            'url': self.shockURL,
            'file_name': forward_shock_file['file']['name'],
            'remote_md5': forward_shock_file['file']['checksum']['md5']
        }
        forward_handle = hs.persist_handle(handf)
        handr = {
            'id': reverse_shock_file['id'],
            'type': 'shock',
            'url': self.shockURL,
            'file_name': reverse_shock_file['file']['name'],
            'remote_md5': reverse_shock_file['file']['checksum']['md5']
        }

        reverse_handle = hs.persist_handle(handr)

        # 3) save to WS
        paired_end_library = {
            'lib1': {
                'file': {
                    'hid': forward_handle,
                    'file_name': forward_shock_file['file']['name'],
                    'id': forward_shock_file['id'],
                    'url': self.shockURL,
                    'type': 'shock',
                    'remote_md5': forward_shock_file['file']['checksum']['md5']
                },
                'encoding': 'UTF8',
                'type': 'fastq',
                'size': forward_shock_file['file']['size']
            },
            'lib2': {
                'file': {
                    'hid': reverse_handle,
                    'file_name': reverse_shock_file['file']['name'],
                    'id': reverse_shock_file['id'],
                    'url': self.shockURL,
                    'type': 'shock',
                    'remote_md5': reverse_shock_file['file']['checksum']['md5']
                },
                'encoding': 'UTF8',
                'type': 'fastq',
                'size': reverse_shock_file['file']['size']
            },
            'interleaved': 0,
            'sequencing_tech': 'artificial reads',
            'read_length_mean': 100,
            'insert_size_mean': 250,
            'insert_size_std_dev': 10,
            'total_bases': 125000,
            'read_orientation_outward': 1
        }
        ws_obj = {
            'workspace':
            self.getWsName(),
            'objects': [{
                'type':
                'KBaseFile.PairedEndLibrary',
                'data':
                paired_end_library,
                'name':
                'test.pe.reads',
                'meta': {},
                'provenance': [{
                    'service': 'hipmer',
                    'method': 'test_hipmer'
                }]
            }]
        }

        new_obj_info = self.ws.save_objects(ws_obj)
        self.__class__.pairedEndLibInfo = new_obj_info[0]
        return new_obj_info[0]
Example #8
def main(argv):
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, prog='trnf_Convert_fastq', epilog=desc3)
    parser.add_argument('-s', '--shock_url', help='Shock url', action='store', dest='shock_url', default='https://kbase.us/services/shock-api')
    parser.add_argument('-n', '--handle_service_url', help='Handle service url', action='store', dest='hndl_url', default='https://kbase.us/services/handle_service')
    parser.add_argument('-i', '--in_ids', help='Two input Shock node ids (comma separated)', action='store', dest='inobj_id', default=None, required=False)
    parser.add_argument('-f', '--file_names', help='Two optional handle file names (comma separated)', action='store', dest='loc_filepath', default=None, nargs=1, required=False)
    parser.add_argument('-d', '--hids', help='Two handle ids (comma separated)', action='store', dest='hid', default=None, required=False)
    parser.add_argument('-m', '--ins_mean', help='Mean insert size', action='store', dest='ins_mean', type=float, default=None)
    parser.add_argument('-k', '--std_dev', help='Standard deviation', action='store', dest='std_dev', type=float, default=None)
    parser.add_argument('-l', '--inl', help='Interleaved -- true/false', action='store', dest='inl', default=None)
    parser.add_argument('-r', '--r_ori', help='Read Orientation -- true/false', action='store', dest='read_orient', default=None)
    parser.add_argument('-o', '--out_file_name', help='Output file name', action='store', dest='out_fn', default=None, required=True)
    usage = parser.format_usage()
    parser.description = desc1 + ' ' + usage + desc2
    parser.usage = argparse.SUPPRESS
    args = parser.parse_args()

    if args.inobj_id is None and args.hid is None:
      print >> sys.stderr, parser.description
      print >> sys.stderr, "Need to provide either shock node ids or handle ids"
      exit(1)

    kb_token = os.environ.get('KB_AUTH_TOKEN')	
    hs = AbstractHandle(url=args.hndl_url, token = kb_token)

    hids = []
    if args.hid is None:
      snids = args.inobj_id.split(',')
      if len(snids) != 2:
        print >> sys.stderr, "Please provide two shock node ids for pairend library"
        exit(4)
      try:
        hids.append(hs.persist_handle({ "id" : snids[0] , "type" : "shock" , "url" : args.shock_url}))
      except:
        try:
          hids.append(hs.ids_to_handles([snids[0]])[0]["hid"])
        except:
          traceback.print_exc(file=sys.stderr)
          e,v = sys.exc_info()[:2]
          print >> sys.stderr, "Please provide handle id.\nThe input shock node id {} is already registered or could not be registered : {} -- {}".format(snids[0], str(e), str(v))
          exit(3)
         
      try:
        hids.append(hs.persist_handle({ "id" : snids[1] , "type" : "shock" , "url" : args.shock_url}))
      except:
        try:
          hids.append(hs.ids_to_handles([snids[1]])[0]["hid"])
        except:
          traceback.print_exc(file=sys.stderr)
          e,v = sys.exc_info()[:2]
          print >> sys.stderr, "Please provide handle id.\nThe input shock node id {} is already registered or could not be registered : {} -- {}".format(snids[1], str(e), str(v))
          exit(3)
    else:
      hids = args.hid.split(',')
      if len(hids) != 2:
        print >> sys.stderr, "Please provide two handle ids for pairend library"
        exit(5)
    
    hds = hs.hids_to_handles(hids)
    if len(hds) != 2: 
      print >> sys.stderr, 'Could not register a new handle with shock node id {} or wrong input handle id'.format(args.inobj_id)
      exit(2)
    ret = {"handle_1" : hds[0], "handle_2" :  hds[1]}
    if args.ins_mean is not None:
      ret["insert_size_mean"] = args.ins_mean
    if args.std_dev is not None:
      ret["insert_size_std_dev"] = args.std_dev
    if args.inl == 'true':
      ret["interleaved"] = 0
    if args.read_orient == 'true':
      ret["read_orientation_outward"] = 0

    of = open(args.out_fn, "w")
    of.write(to_JSON(ret))
    of.close()
Example #9
    def getPairedEndLibInfo(self):

        if hasattr(self.__class__, 'pairedEndLibInfo'):
            return self.__class__.pairedEndLibInfo

        # try to reuse persist test json file if it exists
        testFile = 'data/testPairedEndLibInfo.json'
        if os.path.exists(testFile):
            logger.info("Reading pairedEndLibInfo from {}".format(testFile))
            with open(testFile) as testInfoFile:
                return json.load(testInfoFile)

        # 1) upload files to shock
        token = self.ctx['token']
        forward_shock_file = self.curl_upload_file_to_shock(
            shock_service_url=self.shockURL,
            filePath='/kb/module/data/small.forward.fq',
            token=token)
        reverse_shock_file = self.curl_upload_file_to_shock(
            shock_service_url=self.shockURL,
            filePath='/kb/module/data/small.reverse.fq',
            token=token)
        #pprint(forward_shock_file)
        #pprint(reverse_shock_file)

        # 2) create handle
        hs = HandleService(url=self.handleURL, token=token)
        forward_handle = hs.persist_handle({
            'id':
            forward_shock_file['id'],
            'type':
            'shock',
            'url':
            self.shockURL,
            'file_name':
            forward_shock_file['file']['name'],
            'remote_md5':
            forward_shock_file['file']['checksum']['md5']
        })

        reverse_handle = hs.persist_handle({
            'id':
            reverse_shock_file['id'],
            'type':
            'shock',
            'url':
            self.shockURL,
            'file_name':
            reverse_shock_file['file']['name'],
            'remote_md5':
            reverse_shock_file['file']['checksum']['md5']
        })

        # 3) save to WS
        paired_end_library = {
            'lib1': {
                'file': {
                    'hid': forward_handle,
                    'file_name': forward_shock_file['file']['name'],
                    'id': forward_shock_file['id'],
                    'url': self.shockURL,
                    'type': 'shock',
                    'remote_md5': forward_shock_file['file']['checksum']['md5']
                },
                'encoding': 'UTF8',
                'type': 'fastq',
                'size': forward_shock_file['file']['size']
            },
            'lib2': {
                'file': {
                    'hid': reverse_handle,
                    'file_name': reverse_shock_file['file']['name'],
                    'id': reverse_shock_file['id'],
                    'url': self.shockURL,
                    'type': 'shock',
                    'remote_md5': reverse_shock_file['file']['checksum']['md5']
                },
                'encoding': 'UTF8',
                'type': 'fastq',
                'size': reverse_shock_file['file']['size']
            },
            'interleaved': 0,
            'sequencing_tech': 'artificial reads'
        }

        new_obj_info = self.ws.save_objects({
            'workspace':
            self.getWsName(),
            'objects': [{
                'type':
                'KBaseFile.PairedEndLibrary',
                'data':
                paired_end_library,
                'name':
                'test.pe.reads',
                'meta': {},
                'provenance': [{
                    'service': 'AssemblyRAST',
                    'method': 'test_kiki'
                }]
            }]
        })
        self.__class__.pairedEndLibInfo = new_obj_info[0]

        logger.info("pairedEndLibInfo='{}'".format(json.dumps(
            new_obj_info[0])))

        return new_obj_info[0]
Example #10
    def getPairedEndInterleavedLibInfo(self, read_lib_basename, lib_i=0):
        if hasattr(self.__class__, 'pairedEndLibInfo_list'):
            try:
                info = self.__class__.pairedEndLibInfo_list[lib_i]
                name = self.__class__.pairedEndLibName_list[lib_i]
                if info is not None:
                    if name != read_lib_basename:
                        self.__class__.pairedEndLibInfo_list[lib_i] = None
                        self.__class__.pairedEndLibName_list[lib_i] = None
                    else:
                        return info
            except:
                pass

        # 1) upload files to shock
        token = self.ctx['token']
        forward_shock_file = self.upload_file_to_shock('data/'+read_lib_basename+'.inter.fq')
        #pprint(forward_shock_file)
        #pprint(reverse_shock_file)

        # 2) create handle
        hs = HandleService(url=self.handleURL, token=token)
        forward_handle = hs.persist_handle({
                                        'id' : forward_shock_file['shock_id'],
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': forward_shock_file['node_file_name'],
                                        'remote_md5': forward_shock_file['handle']['remote_md5']})

        # 3) save to WS
        paired_end_library = {
            'lib1': {
                'file': {
                    'hid':forward_handle,
                    'file_name': forward_shock_file['node_file_name'],
                    'id': forward_shock_file['shock_id'],
                    'url': self.shockURL,
                    'type':'shock',
                    'remote_md5':forward_shock_file['handle']['remote_md5']
                },
                'encoding':'UTF8',
                'type':'fastq',
                'size':forward_shock_file['size']
            },
            'interleaved':1,
            'sequencing_tech':'artificial reads'
        }

        new_obj_info = self.wsClient.save_objects({
                        'workspace':self.getWsName(),
                        'objects':[
                            {
                                'type':'KBaseFile.PairedEndLibrary',
                                'data':paired_end_library,
                                'name':'test-'+str(lib_i)+'.pe.reads',
                                'meta':{},
                                'provenance':[
                                    {
                                        'service':'kb_ea_utils',
                                        'method':'test_run_ea-utils'
                                    }
                                ]
                            }]
                        })[0]

        # store it
        if not hasattr(self.__class__, 'pairedEndLibInfo_list'):
            self.__class__.pairedEndLibInfo_list = []
            self.__class__.pairedEndLibName_list = []
        for i in range(lib_i+1):
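            # grow the cache lists with None placeholders until index lib_i exists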
            try:
                assigned = self.__class__.pairedEndLibInfo_list[i]
            except:
                self.__class__.pairedEndLibInfo_list.append(None)
                self.__class__.pairedEndLibName_list.append(None)

        self.__class__.pairedEndLibInfo_list[lib_i] = new_obj_info
        self.__class__.pairedEndLibName_list[lib_i] = read_lib_basename
        return new_obj_info
Example #11
    def getPairedEndLibInfo(self, read_lib_basename, lib_i=0):
        if hasattr(self.__class__, 'pairedEndLibInfo_list'):
            try:
                info = self.__class__.pairedEndLibInfo_list[lib_i]
                name = self.__class__.pairedEndLibName_list[lib_i]
                if info is not None:
                    if name != read_lib_basename:
                        self.__class__.pairedEndLibInfo_list[lib_i] = None
                        self.__class__.pairedEndLibName_list[lib_i] = None
                    else:
                        return info
            except:
                pass

        # 1) upload files to shock
        token = self.ctx['token']
        forward_shock_file = self.upload_file_to_shock('data/'+read_lib_basename+'.fwd.fq')
        reverse_shock_file = self.upload_file_to_shock('data/'+read_lib_basename+'.rev.fq')
        #pprint(forward_shock_file)
        #pprint(reverse_shock_file)

        # 2) create handle
        hs = HandleService(url=self.handleURL, token=token)
        forward_handle = hs.persist_handle({
                                        'id' : forward_shock_file['id'],
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': forward_shock_file['file']['name'],
                                        'remote_md5': forward_shock_file['file']['checksum']['md5']})

        reverse_handle = hs.persist_handle({
                                        'id' : reverse_shock_file['id'],
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': reverse_shock_file['file']['name'],
                                        'remote_md5': reverse_shock_file['file']['checksum']['md5']})

        # 3) save to WS
        paired_end_library = {
            'lib1': {
                'file': {
                    'hid':forward_handle,
                    'file_name': forward_shock_file['file']['name'],
                    'id': forward_shock_file['id'],
                    'url': self.shockURL,
                    'type':'shock',
                    'remote_md5':forward_shock_file['file']['checksum']['md5']
                },
                'encoding':'UTF8',
                'type':'fastq',
                'size':forward_shock_file['file']['size']
            },
            'lib2': {
                'file': {
                    'hid':reverse_handle,
                    'file_name': reverse_shock_file['file']['name'],
                    'id': reverse_shock_file['id'],
                    'url': self.shockURL,
                    'type':'shock',
                    'remote_md5':reverse_shock_file['file']['checksum']['md5']
                },
                'encoding':'UTF8',
                'type':'fastq',
                'size':reverse_shock_file['file']['size']

            },
            'interleaved':0,
            'sequencing_tech':'artificial reads'
        }

        new_obj_info = self.wsClient.save_objects({
                        'workspace':self.getWsName(),
                        'objects':[
                            {
                                'type':'KBaseFile.PairedEndLibrary',
                                'data':paired_end_library,
                                'name':'test-'+str(lib_i)+'.pe.reads',
                                'meta':{},
                                'provenance':[
                                    {
                                        'service':'kb_trimmomatic',
                                        'method':'test_runTrimmomatic'
                                    }
                                ]
                            }]
                        })[0]

        # store it
        if not hasattr(self.__class__, 'pairedEndLibInfo_list'):
            self.__class__.pairedEndLibInfo_list = []
            self.__class__.pairedEndLibName_list = []
        for i in range(lib_i+1):
            try:
                assigned = self.__class__.pairedEndLibInfo_list[i]
            except:
                self.__class__.pairedEndLibInfo_list.append(None)
                self.__class__.pairedEndLibName_list.append(None)

        self.__class__.pairedEndLibInfo_list[lib_i] = new_obj_info
        self.__class__.pairedEndLibName_list[lib_i] = read_lib_basename
        return new_obj_info
Example #12
    def upload_SingleEndLibrary_to_shock_and_ws(
            self,
            ctx,
            console,  # DEBUG
            workspace_name,
            obj_name,
            file_path,
            provenance,
            sequencing_tech):

        self.log(
            console, 'UPLOADING FILE ' + file_path + ' TO ' + workspace_name +
            '/' + obj_name)

        # 1) upload files to shock
        token = ctx['token']
        forward_shock_file = self.upload_file_to_shock(
            console,  # DEBUG
            shock_service_url=self.shockURL,
            filePath=file_path,
            token=token)
        #pprint(forward_shock_file)
        self.log(console, 'SHOCK UPLOAD DONE')

        # 2) create handle
        self.log(console, 'GETTING HANDLE')
        hs = HandleService(url=self.handleURL, token=token)
        forward_handle = hs.persist_handle({
            'id':
            forward_shock_file['id'],
            'type':
            'shock',
            'url':
            self.shockURL,
            'file_name':
            forward_shock_file['file']['name'],
            'remote_md5':
            forward_shock_file['file']['checksum']['md5']
        })

        # 3) save to WS
        self.log(console, 'SAVING TO WORKSPACE')
        single_end_library = {
            'lib': {
                'file': {
                    'hid': forward_handle,
                    'file_name': forward_shock_file['file']['name'],
                    'id': forward_shock_file['id'],
                    'url': self.shockURL,
                    'type': 'shock',
                    'remote_md5': forward_shock_file['file']['checksum']['md5']
                },
                'encoding': 'UTF8',
                'type': 'fasta',
                'size': forward_shock_file['file']['size']
            },
            'sequencing_tech': sequencing_tech
        }
        self.log(console, 'GETTING WORKSPACE SERVICE OBJECT')
        ws = workspaceService(self.workspaceURL, token=ctx['token'])
        self.log(console, 'SAVE OPERATION...')
        new_obj_info = ws.save_objects({
            'workspace':
            workspace_name,
            'objects': [{
                'type': 'KBaseFile.SingleEndLibrary',
                'data': single_end_library,
                'name': obj_name,
                'meta': {},
                'provenance': provenance
            }]
        })[0]
        self.log(console, 'SAVED TO WORKSPACE')

        return new_obj_info[0]
Example #13
forward_shock_file = upload_file_to_shock(
    shock_service_url = 'https://ci.kbase.us/services/shock-api',
    filePath = 'small.forward.fq',
    token = token
    )
reverse_shock_file = upload_file_to_shock(
    shock_service_url = 'https://ci.kbase.us/services/shock-api',
    filePath = 'small.reverse.fq',
    token = token
    )
pprint(forward_shock_file)
pprint(reverse_shock_file)


# 2) create handle
hs = HandleService(url=HANDLE_URL, token=token)
forward_handle = hs.persist_handle({
                                'id' : forward_shock_file['id'], 
                                'type' : 'shock',
                                'url' : SHOCK_URL,
                                'file_name': forward_shock_file['file']['name'],
                                'remote_md5': forward_shock_file['file']['checksum']['md5']})

reverse_handle = hs.persist_handle({
                                'id' : reverse_shock_file['id'], 
                                'type' : 'shock',
                                'url' : SHOCK_URL,
                                'file_name': reverse_shock_file['file']['name'],
                                'remote_md5': reverse_shock_file['file']['checksum']['md5']})
pprint(forward_handle)
pprint(reverse_handle)
Example #14
    def getSingleEndLibInfo(self,name):
 #       if hasattr(self.__class__, 'SingleEndLibInfo'):
 #           if self.__class__.SingeEndLibInfo[name]:
 #               return self.__class__.SingleEndLibInfo[name]

        # 1) upload files to shock
        token = self.ctx['token']
        forward_shock_file = self.upload_file_to_shock(
            shock_service_url = self.shockURL,
            filePath = 'kb_vsearch_test_data/'+name,
            token = token
            )
        #pprint(forward_shock_file)

        # 2) create handle
        hs = HandleService(url=self.handleURL, token=token)
        forward_handle = hs.persist_handle({
                                        'id' : forward_shock_file['id'], 
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': forward_shock_file['file']['name'],
                                        'remote_md5': forward_shock_file['file']['checksum']['md5']})

        
        # 3) save to WS
        single_end_library = {
            'lib': {
                'file': {
                    'hid':forward_handle,
                    'file_name': forward_shock_file['file']['name'],
                    'id': forward_shock_file['id'],
                    'url': self.shockURL,
                    'type':'shock',
                    'remote_md5':forward_shock_file['file']['checksum']['md5']
                },
                'encoding':'UTF8',
                'type':'fasta',
                'size':forward_shock_file['file']['size']
            },
            'sequencing_tech':'artificial reads'
        }

        new_obj_info = self.ws.save_objects({
                        'workspace':self.getWsName(),
                        'objects':[
                            {
                                'type':'KBaseFile.SingleEndLibrary',
                                'data':single_end_library,
                                'name':name,
                                'meta':{},
                                'provenance':[
                                    {
                                        'service':'kb_vsearch',
                                        'method':'test_kb_vsearch'
                                    }
                                ]
                            }]
                        })
#        if not hasattr(self.__class__, 'SingleEndLibInfo'):
#            self.__class__.SingleEndLibInfo = dict()
#        self.__class__.SingleEndLibInfo[name] = new_obj_info[0]
        return new_obj_info[0]
Example #15
    def getPairedEndLibInfo(self):

        if hasattr(self.__class__, 'pairedEndLibInfo'):
            return self.__class__.pairedEndLibInfo

        # try to reuse persist test json file if it exists
        testFile = 'data/testPairedEndLibInfo.json'
        if os.path.exists(testFile):
            logger.info("Reading pairedEndLibInfo from {}".format(testFile))
            with open(testFile) as testInfoFile:
                return json.load(testInfoFile)

        # 1) upload files to shock
        token = self.ctx['token']
        forward_shock_file = self.curl_upload_file_to_shock(
            shock_service_url = self.shockURL,
            filePath = 'data/small.forward.fq',
            token = token
            )
        reverse_shock_file = self.curl_upload_file_to_shock(
            shock_service_url = self.shockURL,
            filePath = 'data/small.reverse.fq',
            token = token
            )
        #pprint(forward_shock_file)
        #pprint(reverse_shock_file)

        # 2) create handle
        hs = HandleService(url=self.handleURL, token=token)
        forward_handle = hs.persist_handle({
                                        'id' : forward_shock_file['id'],
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': forward_shock_file['file']['name'],
                                        'remote_md5': forward_shock_file['file']['checksum']['md5']})

        reverse_handle = hs.persist_handle({
                                        'id' : reverse_shock_file['id'],
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': reverse_shock_file['file']['name'],
                                        'remote_md5': reverse_shock_file['file']['checksum']['md5']})

        # 3) save to WS
        paired_end_library = {
            'lib1': {
                'file': {
                    'hid':forward_handle,
                    'file_name': forward_shock_file['file']['name'],
                    'id': forward_shock_file['id'],
                    'url': self.shockURL,
                    'type':'shock',
                    'remote_md5':forward_shock_file['file']['checksum']['md5']
                },
                'encoding':'UTF8',
                'type':'fastq',
                'size':forward_shock_file['file']['size']
            },
            'lib2': {
                'file': {
                    'hid':reverse_handle,
                    'file_name': reverse_shock_file['file']['name'],
                    'id': reverse_shock_file['id'],
                    'url': self.shockURL,
                    'type':'shock',
                    'remote_md5':reverse_shock_file['file']['checksum']['md5']
                },
                'encoding':'UTF8',
                'type':'fastq',
                'size':reverse_shock_file['file']['size']

            },
            'interleaved':0,
            'sequencing_tech':'artificial reads'
        }

        new_obj_info = self.ws.save_objects({
                        'workspace':self.getWsName(),
                        'objects':[
                            {
                                'type':'KBaseFile.PairedEndLibrary',
                                'data':paired_end_library,
                                'name':'test.pe.reads',
                                'meta':{},
                                'provenance':[
                                    {
                                        'service':'AssemblyRAST',
                                        'method':'test_kiki'
                                    }
                                ]
                            }]
                        })
        self.__class__.pairedEndLibInfo = new_obj_info[0]

        logger.info("pairedEndLibInfo='{}'".format(json.dumps(new_obj_info[0])))

        return new_obj_info[0]
Example #16
    def pick_closed_reference_otus(self, ctx, params):
        # ctx is the context object
        # return variables are: returnVal
        #BEGIN pick_closed_reference_otus

        print('Running QIIME.pick_closed_reference_otus with params=')
        print(pformat(params))

        #### do some basic checks
        objref = ''
        if 'workspace' not in params:
            raise ValueError('workspace parameter is required')
        if 'post_split_lib' not in params:
            raise ValueError('post_split_lib parameter is required')
        if 'otu_table_name' not in params:
            raise ValueError('otu_table_name parameter is required')

        # setup provenance
        provenance = [{}]
        if 'provenance' in ctx:
            provenance = ctx['provenance']
        # add additional info to provenance here, in this case the input data object reference
        provenance[0]['input_ws_objects']=[params['workspace']+'/'+params['post_split_lib']]


        # get the file
        ws = workspaceService(self.workspaceURL, token=ctx['token'])
        try:
            objects = ws.get_objects([{'ref': params['workspace']+'/'+params['post_split_lib']}])
            data = objects[0]['data']
            info = objects[0]['info']
            # Object Info Contents
            # absolute ref = info[6] + '/' + info[0] + '/' + info[4]
            # 0 - obj_id objid
            # 1 - obj_name name
            # 2 - type_string type
            # 3 - timestamp save_date
            # 4 - int version
            # 5 - username saved_by
            # 6 - ws_id wsid
            # 7 - ws_name workspace
            # 8 - string chsum
            # 9 - int size 
            # 10 - usermeta meta
            type_name = info[2].split('.')[1].split('-')[0]
        except Exception as e:
            raise ValueError('Unable to fetch read library object from workspace: ' + str(e))

        input_file_path = os.path.join(self.scratch,data['fasta']['file_name'])

        self.KBaseDataUtil.download_file_from_shock(
                                 shock_service_url = data['fasta']['url'],
                                 shock_id = data['fasta']['id'],
                                 filePath = input_file_path,
                                 token = ctx['token'])


        # create a UUID for output directory and parameter file name; not generally needed in KBase,
        # but useful for local testing in case directories already exist.
        unique_id = str(hex(uuid.getnode()))
        params_file_path = os.path.join(self.scratch,'parameters_'+unique_id+'.txt')

        # If a parameters file is specified, write it out and save in provenance
        if 'parameters_config' in params:
            if params['parameters_config']:
                try:
                    objects = ws.get_objects([{'ref': params['workspace']+'/'+params['parameters_config']}])
                except Exception as e:
                    raise ValueError('Unable to fetch parameters configuration from workspace: ' + str(e))

                p_file = open(params_file_path, 'w')
                p_lines = objects[0]['data']['lines']
                for l in p_lines:
                    p_file.write(l+'\n')
                p_file.close()

                provenance[0]['input_ws_objects'].append(params['workspace']+'/'+params['parameters_config'])


        # Write any additional parameters to the end of the configuration file
        p_file = open(params_file_path, 'a+')
        if 'rev_strand_match' in params:
            if params['rev_strand_match']:
                p_file.write('pick_otus:enable_rev_strand_match True\n')
        p_file.close()

        # be nice and print the parameters file to the log
        f = open(params_file_path, "r")
        print('\nParameters File: ')
        print(f.read())
        print('END Parameters File.\n')
        f.close()

        out_dir = os.path.join(self.scratch,'out_'+unique_id)
        cmd = ['pick_closed_reference_otus.py', '-i', input_file_path, '-o', out_dir, '-p', params_file_path]

        print('running: '+' '.join(cmd))
        p = subprocess.Popen(cmd,
                        cwd = self.scratch,
                        stdout = subprocess.PIPE, 
                        stderr = subprocess.STDOUT, shell = False)


        # capture the log as it is written
        stopLogStream= threading.Event()
        logThread = DisplayLogFileThread(out_dir,stopLogStream)
        logThread.start()

        console_messages = ''
        while True:
            # Read std out/err and print anything we get
            line = p.stdout.readline()
            if not line: break
            console_messages += line
            print(line.replace('\n', ''))

        p.stdout.close()
        p.wait()
        stopLogStream.set()
        print('command return code: ' + str(p.returncode))
        if p.returncode != 0:
            raise ValueError('Error running pick_closed_reference_otus.py, return code: '+str(p.returncode) + ' - ' + console_messages)


        # analyze stats for the output biom
        biom_file = os.path.join(out_dir,'otu_table.biom')
        print('Collecting summary of output OTU Table ('+biom_file+')')
        cmd = ['biom', 'summarize-table', '-i', biom_file]
        print('running: '+' '.join(cmd))
        p = subprocess.Popen(cmd,
                        cwd = self.scratch,
                        stdout = subprocess.PIPE, 
                        stderr = subprocess.STDOUT, shell = False)
        biom_file_summary = ''
        while True:
            # Read std out/err and print anything we get
            line = p.stdout.readline()
            if not line: break
            biom_file_summary += line
            print('SUMMARY: '+line.replace('\n', ''))

        p.stdout.close()
        p.wait()
        print('command return code: ' + str(p.returncode))
        if p.returncode != 0:
            raise ValueError('Error running biom summarize-table, return code: '+str(p.returncode) + ' - ' + biom_file_summary)




        # collect output and save the result
        print('saving BIOM output: ' + biom_file)
        # tree file: tree_file = os.path.join(out_dir,'otu_table.biom')

        # upload files to shock
        shock_file = self.KBaseDataUtil.upload_file_to_shock(
            shock_service_url = self.shockURL,
            filePath = biom_file,
            token = ctx['token']
            )
        # create handle
        hs = HandleService(url=self.handleURL, token=ctx['token'])
        file_handle = hs.persist_handle({
                                        'id' : shock_file['id'], 
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': shock_file['file']['name'],
                                        'remote_md5': shock_file['file']['checksum']['md5']})
        # save to WS
        otu_tbl = {
            'biom': {
                'hid':file_handle,
                'file_name': shock_file['file']['name'],
                'id': shock_file['id'],
                'url': self.shockURL,
                'remote_md5':shock_file['file']['checksum']['md5'],
                'size':shock_file['file']['size']
            },
            #'n_samples':0,
            #'n_observations':0,
            #'count':0,
            #'density':0,
            #'sample_detail':{},
            'summary':biom_file_summary
        }

        otu_tbl_info = ws.save_objects({
                        'id':info[6],
                        'objects':[
                            {
                                'type':'QIIME.OTUTable',
                                'data':otu_tbl,
                                'name':params['otu_table_name'],
                                'meta':{},
                                'provenance':provenance
                            }]
                        })[0]
        print(pformat(otu_tbl_info))



        # create the report
        report = ''
        report += 'OTU Table saved to: '+otu_tbl_info[7]+'/'+otu_tbl_info[1]+'\n'
        report += 'OTU Table Summary:\n'
        report += biom_file_summary

        reportObj = {
            'objects_created':[{'ref':otu_tbl_info[7]+'/'+otu_tbl_info[1], 'description':'The new OTU Table'}],
            'text_message':report
        }

        reportName = 'QIIME.pick_closed_reference_otus_report_'+str(hex(uuid.getnode()))
        report_obj_info = ws.save_objects({
                'id':info[6],
                'objects':[
                    {
                        'type':'KBaseReport.Report',
                        'data':reportObj,
                        'name':reportName,
                        'meta':{},
                        'hidden':1,
                        'provenance':provenance
                    }
                ]
            })[0]

        # return the result
        returnVal = { 
            'report_name': reportName,
            'report_ref': str(report_obj_info[6]) + '/' + str(report_obj_info[0]) + '/' + str(report_obj_info[4]),
            'otu_table_ref': str(otu_tbl_info[6]) + '/' + str(otu_tbl_info[0]) + '/' + str(otu_tbl_info[4]) }
        
        #END pick_closed_reference_otus

        # At some point might do deeper type checking...
        if not isinstance(returnVal, dict):
            raise ValueError('Method pick_closed_reference_otus return value ' +
                             'returnVal is not type dict as required.')
        # return the results
        return [returnVal]
Example #17
    def getPairedEndLibInfo(self):
        if hasattr(self.__class__, 'pairedEndLibInfo'):
            return self.__class__.pairedEndLibInfo
        # 1) upload files to shock
        token = self.ctx['token']
        forward_shock_file = self.upload_file_to_shock(
            shock_service_url = self.shockURL,
            filePath = 'data/small.forward.fq',
            token = token
            )
        reverse_shock_file = self.upload_file_to_shock(
            shock_service_url = self.shockURL,
            filePath = 'data/small.reverse.fq',
            token = token
            )
        #pprint(forward_shock_file)
        #pprint(reverse_shock_file)

        # 2) create handle
        hs = HandleService(url=self.handleURL, token=token)
        forward_handle = hs.persist_handle({
                                        'id' : forward_shock_file['id'], 
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': forward_shock_file['file']['name'],
                                        'remote_md5': forward_shock_file['file']['checksum']['md5']})

        reverse_handle = hs.persist_handle({
                                        'id' : reverse_shock_file['id'], 
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': reverse_shock_file['file']['name'],
                                        'remote_md5': reverse_shock_file['file']['checksum']['md5']})

        # 3) save to WS
        paired_end_library = {
            'lib1': {
                'file': {
                    'hid':forward_handle,
                    'file_name': forward_shock_file['file']['name'],
                    'id': forward_shock_file['id'],
                    'url': self.shockURL,
                    'type':'shock',
                    'remote_md5':forward_shock_file['file']['checksum']['md5']
                },
                'encoding':'UTF8',
                'type':'fastq',
                'size':forward_shock_file['file']['size']
            },
            'lib2': {
                'file': {
                    'hid':reverse_handle,
                    'file_name': reverse_shock_file['file']['name'],
                    'id': reverse_shock_file['id'],
                    'url': self.shockURL,
                    'type':'shock',
                    'remote_md5':reverse_shock_file['file']['checksum']['md5']
                },
                'encoding':'UTF8',
                'type':'fastq',
                'size':reverse_shock_file['file']['size']

            },
            'interleaved':0,
            'sequencing_tech':'artificial reads'
        }

        new_obj_info = self.ws.save_objects({
                        'workspace':self.getWsName(),
                        'objects':[
                            {
                                'type':'KBaseFile.PairedEndLibrary',
                                'data':paired_end_library,
                                'name':'test.pe.reads',
                                'meta':{},
                                'provenance':[
                                    {
                                        'service':'MegaHit',
                                        'method':'test_megahit'
                                    }
                                ]
                            }]
                        })
        self.__class__.pairedEndLibInfo = new_obj_info[0]
        return new_obj_info[0]
Example #18
    def upload_SingleEndLibrary_to_shock_and_ws (self,
                                                 ctx,
                                                 console,  # DEBUG
                                                 workspace_name,
                                                 obj_name,
                                                 file_path,
                                                 provenance,
                                                 sequencing_tech):

        self.log(console,'UPLOADING FILE '+file_path+' TO '+workspace_name+'/'+obj_name)

        # 1) upload files to shock
        token = ctx['token']
        forward_shock_file = self.upload_file_to_shock(
            console,  # DEBUG
            shock_service_url = self.shockURL,
            filePath = file_path,
            token = token
            )
        #pprint(forward_shock_file)
        self.log(console,'SHOCK UPLOAD DONE')

        # 2) create handle
        self.log(console,'GETTING HANDLE')
        hs = HandleService(url=self.handleURL, token=token)
        forward_handle = hs.persist_handle({
                                        'id' : forward_shock_file['id'], 
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': forward_shock_file['file']['name'],
                                        'remote_md5': forward_shock_file['file']['checksum']['md5']})

        
        # 3) save to WS
        self.log(console,'SAVING TO WORKSPACE')
        single_end_library = {
            'lib': {
                'file': {
                    'hid':forward_handle,
                    'file_name': forward_shock_file['file']['name'],
                    'id': forward_shock_file['id'],
                    'url': self.shockURL,
                    'type':'shock',
                    'remote_md5':forward_shock_file['file']['checksum']['md5']
                },
                'encoding':'UTF8',
                'type':'fasta',
                'size':forward_shock_file['file']['size']
            },
            'sequencing_tech':sequencing_tech
        }
        self.log(console,'GETTING WORKSPACE SERVICE OBJECT')
        ws = workspaceService(self.workspaceURL, token=ctx['token'])
        self.log(console,'SAVE OPERATION...')
        new_obj_info = ws.save_objects({
                        'workspace':workspace_name,
                        'objects':[
                            {
                                'type':'KBaseFile.SingleEndLibrary',
                                'data':single_end_library,
                                'name':obj_name,
                                'meta':{},
                                'provenance':provenance
                            }]
                        })[0]
        self.log(console,'SAVED TO WORKSPACE')

        return new_obj_info[0]
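The single_end_library structure assembled in step 3 above is plain data; as an illustrative sketch (the helper name is not part of the original code), the same structure could be built standalone from the Shock file metadata and the handle id:

def build_single_end_library(handle_id, shock_file, shock_url, sequencing_tech):
    # Mirrors the KBaseFile.SingleEndLibrary dict built in the method above.
    return {
        'lib': {
            'file': {
                'hid': handle_id,
                'file_name': shock_file['file']['name'],
                'id': shock_file['id'],
                'url': shock_url,
                'type': 'shock',
                'remote_md5': shock_file['file']['checksum']['md5']
            },
            'encoding': 'UTF8',
            'type': 'fasta',
            'size': shock_file['file']['size']
        },
        'sequencing_tech': sequencing_tech
    }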
def main(argv):
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        prog='trnf_Convert_fastq',
        epilog=desc3)
    parser.add_argument('-s',
                        '--shock_url',
                        help='Shock url',
                        action='store',
                        dest='shock_url',
                        default='https://kbase.us/services/shock-api')
    parser.add_argument('-n',
                        '--handle_service_url',
                        help='Handle service url',
                        action='store',
                        dest='hndl_url',
                        default='https://kbase.us/services/handle_service')
    parser.add_argument('-i',
                        '--in_ids',
                        help='Two input Shock node ids (comma separated)',
                        action='store',
                        dest='inobj_id',
                        default=None,
                        required=False)
    parser.add_argument(
        '-f',
        '--file_names',
        help='Two optional handle file names (comma separated)',
        action='store',
        dest='loc_filepath',
        default=None,
        nargs=1,
        required=False)
    parser.add_argument('-d',
                        '--hids',
                        help='Two handle ids (comma separated)',
                        action='store',
                        dest='hid',
                        default=None,
                        required=False)
    parser.add_argument('-m',
                        '--ins_mean',
                        help='Mean insert size',
                        action='store',
                        dest='ins_mean',
                        type=float,
                        default=None)
    parser.add_argument('-k',
                        '--std_dev',
                        help='Standard deviation',
                        action='store',
                        dest='std_dev',
                        type=float,
                        default=None)
    parser.add_argument('-l',
                        '--inl',
                        help='Interleaved -- true/false',
                        action='store',
                        dest='inl',
                        default=None)
    parser.add_argument('-r',
                        '--r_ori',
                        help='Read Orientation -- true/false',
                        action='store',
                        dest='read_orient',
                        default=None)
    parser.add_argument('-o',
                        '--out_file_name',
                        help='Output file name',
                        action='store',
                        dest='out_fn',
                        default=None,
                        required=True)
    usage = parser.format_usage()
    parser.description = desc1 + ' ' + usage + desc2
    parser.usage = argparse.SUPPRESS
    args = parser.parse_args()

    if args.inobj_id is None and args.hid is None:
        print >> sys.stderr, parser.description
        print >> sys.stderr, "Need to provide either shock node ids or handle ids"
        exit(1)

    kb_token = os.environ.get('KB_AUTH_TOKEN')
    hs = AbstractHandle(url=args.hndl_url, token=kb_token)

    hids = []
    if args.hid is None:
        snids = args.inobj_id.split(',')
        if len(snids) != 2:
            print >> sys.stderr, "Please provide two shock node ids for pairend library"
            exit(4)
        try:
            hids.append(
                hs.persist_handle({
                    "id": snids[0],
                    "type": "shock",
                    "url": args.shock_url
                }))
        except Exception:
            try:
                hids.append(hs.ids_to_handles([snids[0]])[0]["hid"])
            except Exception:
                traceback.print_exc(file=sys.stderr)
                e, v = sys.exc_info()[:2]
                print >> sys.stderr, "Please provide a handle id.\nThe input shock node id {} is already registered or could not be registered: {} -- {}".format(
                    snids[0], str(e), str(v))
                exit(3)

        try:
            hids.append(
                hs.persist_handle({
                    "id": snids[1],
                    "type": "shock",
                    "url": args.shock_url
                }))
        except Exception:
            try:
                hids.append(hs.ids_to_handles([snids[1]])[0]["hid"])
            except Exception:
                traceback.print_exc(file=sys.stderr)
                e, v = sys.exc_info()[:2]
                print >> sys.stderr, "Please provide a handle id.\nThe input shock node id {} is already registered or could not be registered: {} -- {}".format(
                    snids[1], str(e), str(v))
                exit(3)
    else:
        hids = args.hid.split(',')
        if len(hids) != 2:
            print >> sys.stderr, "Please provide two handle ids for pairend library"
            exit(5)

    hds = hs.hids_to_handles(hids)
    if len(hds) != 2:
        print >> sys.stderr, 'Could not register a new handle for shock node id {}, or the input handle ids are invalid'.format(
            args.inobj_id)
        exit(2)
    ret = {"handle_1": hds[0], "handle_2": hds[1]}
    if args.ins_mean is not None:
        ret["insert_size_mean"] = args.ins_mean
    if args.std_dev is not None:
        ret["insert_size_std_dev"] = args.std_dev
    if args.inl == 'true':
        ret["interleaved"] = 0
    if args.read_orient == 'true':
        ret["read_orientation_outward"] = 0

    of = open(args.out_fn, "w")
    of.write(to_JSON(ret))
    of.close()
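main() relies on a to_JSON() helper that is not shown in this excerpt; a minimal implementation, assuming it simply serializes the result dict written to the output file, could be:

import json

def to_JSON(obj):
    # Assumed behavior: pretty-print the result dict that main() writes out.
    return json.dumps(obj, sort_keys=True, indent=4)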
    def upload(self, ctx, params):
        # ctx is the context object
        # return variables are: output
        #BEGIN upload
        print('Parameters:')
        pprint(params)

        # 0) download file from shock

        ### NOTE: this section is what could be replaced by the transform services
        forward_reads_file_location = os.path.join(self.scratch,'f1.fq')
        # open in binary mode so the raw bytes streamed from Shock are written unmodified
        forward_reads_file = open(forward_reads_file_location, 'wb')
        print('downloading reads file from staging: '+str(forward_reads_file_location))
        headers = {'Authorization': 'OAuth '+ctx['token']}
        r = requests.get(self.shockURL+'/node/'+params['fastqFile1']+'?download', stream=True, headers=headers)
        for chunk in r.iter_content(1024):
            forward_reads_file.write(chunk)
        forward_reads_file.close()
        print('done downloading')


        # 1) upload files to shock
        token = ctx['token']
        forward_shock_file = self.upload_file_to_shock(
            shock_service_url = self.shockURL,
            filePath = forward_reads_file_location,
            token = token
            )
        pprint(forward_shock_file)

        # 2) create handle
        hs = HandleService(url=self.handleURL, token=token)
        forward_handle = hs.persist_handle({
                                        'id' : forward_shock_file['id'], 
                                        'type' : 'shock',
                                        'url' : self.shockURL,
                                        'file_name': forward_shock_file['file']['name'],
                                        'remote_md5': forward_shock_file['file']['checksum']['md5']})

        # 3) save to WS
        paired_end_library = {
            'lib1': {
                'file': {
                    'hid':forward_handle,
                    'file_name': forward_shock_file['file']['name'],
                    'id': forward_shock_file['id'],
                    'url': self.shockURL,
                    'type':'shock',
                    'remote_md5':forward_shock_file['file']['checksum']['md5']
                },
                'encoding':'UTF8',
                'type':'fastq',
                'size':forward_shock_file['file']['size']
            },
            'interleaved':1,
            'sequencing_tech':'artificial reads'
        }


        provenance = [{}]
        if 'provenance' in ctx:
            provenance = ctx['provenance']

        ws = workspaceService(self.workspaceURL, token=ctx['token'])
        new_obj_info = ws.save_objects({
                        'workspace':params['workspace_name'],
                        'objects':[
                            {
                                'type':'KBaseFile.PairedEndLibrary',
                                'data':paired_end_library,
                                'name':params['read_library_name'],
                                'meta':{},
                                'provenance':provenance
                            }]
                        })

        new_obj_info = new_obj_info[0]
        print('saved data to WS:')
        pprint(new_obj_info)


        # create a Report
        report = ''
        report += 'Uploaded read library to: '+params['workspace_name']+'/'+params['read_library_name']+'\n'

        reportObj = {
            'objects_created':[{'ref':params['workspace_name']+'/'+params['read_library_name'], 'description':'Uploaded reads library'}],
            'text_message':report
        }

        reportName = 'pe_uploader_report'+str(hex(uuid.getnode()))
        report_obj_info = ws.save_objects({
                'id':new_obj_info[6],
                'objects':[
                    {
                        'type':'KBaseReport.Report',
                        'data':reportObj,
                        'name':reportName,
                        'meta':{},
                        'hidden':1,
                        'provenance':provenance
                    }
                ]
            })[0]

        output = { 'report_name': reportName, 'report_ref': str(report_obj_info[6]) + '/' + str(report_obj_info[0]) + '/' + str(report_obj_info[4]) }


        print('all done!')
        #END upload

        # At some point might do deeper type checking...
        if not isinstance(output, dict):
            raise ValueError('Method upload return value ' +
                             'output is not type dict as required.')
        # return the results
        return [output]
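For context, a purely illustrative caller of upload() might look like the sketch below; the impl object, workspace name, library name and Shock node id are hypothetical placeholders, not values from the original code.

def run_upload_example(impl, ctx):
    # All parameter values here are hypothetical placeholders.
    params = {
        'workspace_name': 'my_workspace',
        'read_library_name': 'my.pe.reads',
        'fastqFile1': 'example-shock-node-id'
    }
    output = impl.upload(ctx, params)[0]   # upload() returns a single-element list
    print('report name: ' + output['report_name'])
    print('report ref:  ' + output['report_ref'])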