Example #1
    def map_self(self):
        """
        Get's mapping from Mapper.py using Genotype of Herd member

        """

        self.phenotype, self.genotype_int, self.nodes, \
        self.invalid, self.max_depth, self.used_codons = mp.mapper(self)
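A minimal usage sketch, assuming a hypothetical HerdMember constructor and that mp.mapper returns the six values unpacked above; none of these names beyond map_self and the attributes it sets are confirmed by the snippet:

# Hypothetical usage -- HerdMember and its constructor are assumptions;
# only the attribute names set by map_self come from the snippet above.
member = HerdMember(genotype=[12, 7, 3, 42])
member.map_self()
if not member.invalid:
    print(member.phenotype, member.used_codons)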
Example #2
def process(options, pstat, OUT):
    ## process(options, pstat, OUT) - function
    # Starts processing as specified in pstat['tbd'] and
    #  according to the request list given by the options
    #
    # Parameters:
    # -----------
    # 1. options (OptionsParser object)
    # 2. pstat   (process status dict)
    # 3. OUT     (output object passed on to the processing classes)
    #

    # set list of request lists for single or multi mode:
    mode = None
    procOptions = [
        'community', 'source', 'verb', 'mdprefix', 'mdsubset',
        'target_mdschema'
    ]
    if (options.source):
        mode = 'single'
        mandParams = ['community', 'verb',
                      'mdprefix']  # mandatory processing params
        for param in mandParams:
            if not getattr(options, param):
                logger.critical(
                    "Processing parameter %s is required in single mode" %
                    param)
                sys.exit(-1)
        reqlist = [[
            options.community, options.source, options.verb, options.mdprefix,
            options.mdsubset, options.ckan_check, options.handle_check,
            options.target_mdschema
        ]]
    elif (options.list):
        mode = 'multi'
        logger.debug(' |- Joblist:  \t%s' % options.list)
        reqlist = parse_list_file(options)
    else:
        # neither a source nor a joblist given: reqlist would be unbound below
        logger.critical(
            "Either a source (-s SOURCE) or a joblist (-l LIST) is required")
        sys.exit(-1)

    logger.debug(' |- Requestlist:  \t%s' % reqlist)

    ## check job request (processing) options
    logger.debug('|- Command line options')
    for opt in procOptions:
        if hasattr(options, opt):
            logger.debug(' |- %s:\t%s' % (opt.upper(), getattr(options, opt)))

    ## HARVESTING mode:
    if (pstat['status']['h'] == 'tbd'):
        logger.info('\n|- Harvesting started : %s' %
                    time.strftime("%Y-%m-%d %H:%M:%S"))
        HV = Harvester(OUT, pstat, options.outdir, options.fromdate)
        process_harvest(HV, reqlist)

    ## MAPPING mode:
    if (pstat['status']['m'] == 'tbd'):
        logger.info('\n|- Mapping started : %s' %
                    time.strftime("%Y-%m-%d %H:%M:%S"))
        MP = Mapper(OUT, options.outdir, options.fromdate)
        process_map(MP, reqlist)

    ## VALIDATING mode:
    if (pstat['status']['v'] == 'tbd'):
        logger.info('\n|- Validating started : %s' %
                    time.strftime("%Y-%m-%d %H:%M:%S"))
        MP = Mapper(OUT, options.outdir, options.fromdate)
        process_validate(MP, reqlist)

    ## OAI-CONVERTING mode:
    if (pstat['status']['o'] == 'tbd'):
        logger.info('\n|- OAI-Converting started : %s' %
                    time.strftime("%Y-%m-%d %H:%M:%S"))
        MP = Mapper(OUT, options.outdir, options.fromdate)
        process_oaiconvert(MP, reqlist)

    ## UPLOADING mode:
    if (pstat['status']['u'] == 'tbd'):
        logger.info('\n|- Uploading started : %s' %
                    time.strftime("%Y-%m-%d %H:%M:%S"))
        # create CKAN object
        CKAN = CKAN_CLIENT(options.iphost, options.auth)
        # create credentials and handle client if required
        if (options.handle_check):
            try:
                cred = PIDClientCredentials.load_from_JSON('credentials_11098')
            except Exception as err:
                logger.critical(
                    "%s : Could not create credentials from credstore %s" %
                    (err, options.handle_check))
                ##p.print_help()
                sys.exit(-1)
            else:
                logger.debug("Create EUDATHandleClient instance")
                HandleClient = EUDATHandleClient.instantiate_with_credentials(
                    cred)
        else:
            cred = None
            HandleClient = None

        UP = Uploader(CKAN, options.ckan_check, HandleClient, cred, OUT,
                      options.outdir, options.fromdate, options.iphost)
        logger.info(' |- Host:  \t%s' % CKAN.ip_host)
        process_upload(UP, reqlist)

    ## DELETING mode:
    if (pstat['status']['d'] == 'tbd'):
        # start the process deleting:
        logger.info('\n|- Deleting started : %s' %
                    time.strftime("%Y-%m-%d %H:%M:%S"))

        if mode == 'multi':
            delete_dir = options.outdir + '/delete'  # avoid shadowing builtin dir()
            if os.path.exists(delete_dir):
                process_delete(OUT, delete_dir, options)
            else:
                logger.error(
                    'The directory "%s" does not exist! No files to delete were found!'
                    % (delete_dir))
        else:
            logger.critical(
                "Deleting mode is only supported in 'multi' mode with an explicit delete list given!"
            )
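Multi mode delegates to parse_list_file, which the snippet does not show. A minimal sketch under the assumption that the joblist holds one whitespace-separated request per line with '#' comments, and that the field order mirrors the eight-element single-mode reqlist built above:

# Hypothetical sketch of parse_list_file -- the file format is an assumption;
# the eight-field order mirrors the single-mode reqlist built in process().
def parse_list_file(options):
    reqlist = []
    with open(options.list) as joblist:
        for line in joblist:
            line = line.strip()
            if not line or line.startswith('#'):
                continue  # skip blank lines and comments
            fields = line.split()
            fields += [None] * (8 - len(fields))  # pad missing optional fields
            reqlist.append(fields[:8])
    return reqlist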
Example #3
import numpy as np
import open3d as o3d
# LoaderKITTI, OdometryEstimator and Mapper are project-local modules
# (their imports are not shown in the source).


def get_pcd_from_numpy(pcd_np):
    # Build an Open3D point cloud from the first three columns (x, y, z)
    pcd = o3d.geometry.PointCloud()
    pcd.points = o3d.utility.Vector3dVector(pcd_np[:, :3])
    return pcd


if __name__ == '__main__':
    # folder = '../../alignment/numpy/'
    folder = '/home/anastasiya/data/data_odometry_velodyne.zip/'
    loader = LoaderKITTI(folder, '00')

    odometry = OdometryEstimator()
    global_transform = np.eye(4)
    pcds = []
    mapper = Mapper()
    for i in range(loader.length()):
        if i >= 50:
            pcd = loader.get_item(i)
            T, sharp_points, flat_points = odometry.append_pcd(pcd)
            mapper.append_undistorted(pcd[0],
                                      T,
                                      sharp_points,
                                      flat_points,
                                      vis=True)  # original `i % 1 == 0` is always True

    # Visual comparison with point-to-plane ICP
    pcds = []
    global_transform = np.eye(4)
    for i in range(50, 56):
        print(i)
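The comparison loop is truncated in the source. A minimal sketch of what the point-to-plane ICP baseline might look like using Open3D's registration API; the correspondence threshold, the frame range, and the pairwise-odometry accumulation are assumptions:

# Hypothetical completion of the truncated comparison -- the threshold and
# accumulation scheme are assumptions; the Open3D calls are standard API.
import copy

threshold = 1.0  # assumed max correspondence distance (meters)
prev = None
for i in range(50, 56):
    cur = get_pcd_from_numpy(loader.get_item(i)[0])
    cur.estimate_normals()  # point-to-plane ICP needs normals on the target
    if prev is not None:
        reg = o3d.pipelines.registration.registration_icp(
            cur, prev, threshold, np.eye(4),
            o3d.pipelines.registration.TransformationEstimationPointToPlane())
        # chain the pairwise estimate onto the accumulated pose
        global_transform = global_transform @ reg.transformation
    pcds.append(copy.deepcopy(cur).transform(global_transform))
    prev = cur
o3d.visualization.draw_geometries(pcds)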
Example #4
def process_analysis(input_sentence, intents, entities, postags):

    mapping_dict = Mapper(df).mappingI()
    # print ("This is the mapping dict:       {}".format(mapping_dict))
    # print (entities)

    rasa_entities = {}
    for ent in entities:
        if ent["entity"] == ent["value"]:
            rasa_entities[ent["entity"]] = ""
        else:
            rasa_entities[ent["entity"]] = ent["value"]


    # print ("Rasa_entities:{}".format(rasa_entities))
    if "time" in rasa_entities.keys():
        period_count = periodic_counts(input_sentence)
        if period_count != 0:
            rasa_entities["period_count"] = period_count

    ## lookup values
    lookup_entities = {}
    for key, value in lookup_dict.items():
        for word in value:
            if word in input_sentence:
                lookup_entities[key] = word.lower()

    ## compare lookups and rasa_entities
    # merge lookup hits that are not already present among the Rasa values
    # (values1 is a live view, so entries added below are seen by later checks)
    values1 = rasa_entities.values()
    for k, v in lookup_entities.items():
        if v not in values1:
            rasa_entities[k] = v

    rasa_keys = rasa_entities.keys()
    mapper_keys = mapping_dict.keys()
    entities_to_map = list(set(mapper_keys) & set(rasa_keys))

    for entity in entities_to_map:
        new_key = mapping_dict[entity]
        old_key = entity
        if new_key not in rasa_keys:
            rasa_entities[new_key] = rasa_entities.pop(old_key)

    intents_list = intents.split("+")
    rasa_level1 = intents_list[0]
    rasa_level2 = intents_list[1]
    rasa_level3 = intents_list[2]
    rasa_level4 = intents_list[3]
    rasa_level5 = intents_list[4]

    level2_dict = {}
    if rasa_level2 != "nan":  ## map the level-2 intent to its column and store it in a dict
        # print(mapping_dict[rasa_level2])
        level2_dict[mapping_dict[rasa_level2]] = rasa_level2
    ## final intent list where level 2 is a dict of {column: value}
    final_intents = [rasa_level1, level2_dict, rasa_level3, rasa_level4, rasa_level5]

    if rasa_level3 == "top" or rasa_level3 == "bottom" or rasa_level3 == "mid":
        top_count = top_bottom_count(input_sentence)
        if top_count != 0:
            rasa_entities["count"] = top_count


    # print ("Entities b4 interpreter:    {}".format(rasa_entities))
    final_entities = Value_Interpretation().interpreter(rasa_entities, lookup_dict, df, mapping_dict)
    date_extracted = date_checker(input_sentence)
    if date_extracted:
        final_entities[mapping_dict['time']] = date_extracted
    print("INTENTS:    ", final_intents)
    print("ENTITIES:   ", final_entities)

    nlsql = Nl_Sql(cursor, final_entities, final_intents, postags, df_cols, lookup_dict, mapping_dict)
    gen_result = nlsql.nl2sql()
    print(gen_result)
    print("\n\n")

    return gen_result
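process_analysis leans on several module-level objects that are not shown (df, lookup_dict, cursor, df_cols). An illustrative sketch of the shapes they might take, plus a hypothetical call; every concrete name and value here is invented for illustration:

# Illustrative shapes for the module-level objects used above -- all of the
# concrete names and values are assumptions, not the project's real data.
import sqlite3
import pandas as pd

df = pd.read_csv('sales.csv')                  # hypothetical source table
df_cols = list(df.columns)
lookup_dict = {                                # entity -> surface forms to scan
    'region': ['north', 'south', 'east', 'west'],
    'product': ['laptop', 'phone'],
}
cursor = sqlite3.connect('sales.db').cursor()  # hypothetical database

# Intents arrive as '+'-joined levels, with 'nan' marking an empty level.
result = process_analysis(
    'top 5 products in the north last month',
    'aggregation+nan+top+nan+nan',
    [{'entity': 'time', 'value': 'last month'}],
    [])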
Example #5
def process(options, pstat, OUT):
    ## process(options, pstat, OUT) - function
    # Starts processing as specified in pstat['tbd'] and
    #  according to the request list given by the options
    #
    # Parameters:
    # -----------
    # 1. options (OptionsParser object)
    # 2. pstat   (process status dict)
    # 3. OUT     (output object passed on to the processing classes)
    #

    # set single or multi mode:
    mode = None
    procOptions = [
        'community', 'source', 'verb', 'mdprefix', 'mdsubset',
        'target_mdschema'
    ]
    if (options.source):
        mode = 'single'
        ##HEW Not used in training
        options.target_mdschema = None
        mandParams = ['community', 'verb',
                      'mdprefix']  # mandatory processing params
        for param in mandParams:
            if not getattr(options, param):
                logger.critical(
                    "Processing parameter %s is required in single mode" %
                    param)
                sys.exit(-1)
        reqlist = [[
            options.community, options.source, options.verb, options.mdprefix,
            options.mdsubset, options.ckan_check, options.handle_check,
            options.target_mdschema
        ]]
    elif (options.list):
        if (pstat['status']['g'] == 'tbd'):
            logger.critical(
                "  Processing parameter [ --source | -s SOURCE ] is required in generation mode"
            )
            sys.exit(-1)
        mode = 'multi'
        logger.debug(' |- Joblist:  \t%s' % options.list)
        ## HEW set options.target_mdschema to NONE for Training

        ## options.target_mdschema=None
        reqlist = parse_list_file(options)
    else:
        # neither a source nor a joblist given: reqlist would be unbound below
        logger.critical(
            "Either a source (-s SOURCE) or a joblist (-l LIST) is required")
        sys.exit(-1)

    ## check job request (processing) options
    for opt in procOptions:
        if hasattr(options, opt):
            logger.debug(' |- %s:\t%s' % (opt.upper(), getattr(options, opt)))

    ## GENERATION mode:
    if (pstat['status']['g'] == 'tbd'):
        GEN = Generator(pstat, options.outdir)
        process_generate(GEN, reqlist)

    ## HARVESTING mode:
    if (pstat['status']['h'] == 'tbd'):
        ### print('\n|- Harvesting started : %s' % time.strftime("%Y-%m-%d %H:%M:%S"))
        HV = Harvester(pstat, options.outdir, options.fromdate)
        process_harvest(HV, reqlist)

    ## MAPPING mode:
    if (pstat['status']['m'] == 'tbd'):
        print('\n|- Mapping started : %s' % time.strftime("%Y-%m-%d %H:%M:%S"))
        MP = Mapper(OUT, options.outdir, options.fromdate)
        process_map(MP, reqlist)

    ## VALIDATING mode:
    if (pstat['status']['v'] == 'tbd'):
        print('\n|- Validating started : %s' %
              time.strftime("%Y-%m-%d %H:%M:%S"))
        MP = Mapper(OUT, options.outdir, options.fromdate)
        process_validate(MP, reqlist)

    ## UPLOADING mode:
    if (pstat['status']['u'] == 'tbd'):
        # create CKAN object
        CKAN = CKAN_CLIENT(options.iphost, options.auth)

        # create credentials and handle client if required
        if (options.handle_check):
            try:
                cred = PIDClientCredentials.load_from_JSON('credentials_11098')
            except Exception as err:
                logger.critical(
                    "%s : Could not create credentials from credstore %s" %
                    (err, options.handle_check))
                ##p.print_help()
                sys.exit(-1)
            else:
                logger.debug("Create EUDATHandleClient instance")
                HandleClient = EUDATHandleClient.instantiate_with_credentials(
                    cred)
        else:
            cred = None
            HandleClient = None

        UP = Uploader(CKAN, options.ckan_check, HandleClient, cred, OUT,
                      options.outdir, options.fromdate, options.iphost,
                      options.ckan_organization)
        logger.info(' |- Host:  \t%s' % CKAN.ip_host)
        process_upload(UP, reqlist)
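Both variants of process() branch on pstat['status'] flags set elsewhere. A sketch of the assumed shape of this dict; only the keys actually read above are certain, and the 'no' value used for skipped stages is a guess:

# Assumed shape of the process-status dict consumed by process() -- the keys
# tested above ('g', 'h', 'm', 'v', 'o', 'u', 'd') come from the snippets;
# everything else is a guess.
pstat = {
    'status': {
        'g': 'no',    # generation
        'h': 'tbd',   # harvesting
        'm': 'tbd',   # mapping
        'v': 'tbd',   # validating
        'o': 'no',    # OAI converting
        'u': 'no',    # uploading
        'd': 'no',    # deleting
    }
}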