Example #1
def negative_test():
    """
    Read from invalid_queries.txt line by line and test against the parser.
    The parser should reject all the queries.
    """
    dirname = os.path.dirname(os.path.realpath(__file__))
    filename = os.path.join(dirname, "invalid_queries.txt")
    with open(filename) as f:
        parser = Parser()
        success = 0
        times = 0
        for line in f:
            times += 1
            try:
                parse_tree = parser.parse(line.strip())
                print("Failure: " + line.strip())
                print(repr(parse_tree))
            except ParserError:
                print("Success, length: " + str(len(line.strip())))
                success += 1
        print("End of test, " + str(success) + "/" + str(times) + " passed")
Example #3
def random_test():
    """
    Randomly generate positive cases and test against the parser.
    The parser should accept all the generated cases.
    """
    times = int(sys.argv[2])
    print("Randomly test " + str(times) + " times")

    gen = RandomQueryGenerator(10)
    parser = Parser()
    success = 0

    for i in range(times):
        max_depth = random.randint(1, 5)
        random_query = gen.query(max_depth)
        try:
            parse_tree = parser.parse(random_query)
            print("Success, length: " + str(len(random_query)))
            success += 1
        except ParserError:
            print("Failure: " + random_query)

    print("End of test, " + str(success) + "/" + str(times) + " passed")
Example #5
def query_data(file_types,
               query_logical_expressions,
               output_file_names,
               cache_timestamp=None):
    """
          This function is used to query data.
          @param file_types the file types,it is supposed to be a str array,
          each item in the array should be a non-None/empty str. Required.
          @param query_logical_expressions the query logical expressions,
          it is supposed to be a str array,each item in the array should be
          a non-None/empty str. Required.
          @param output_file_names the output file names (including full paths),
          this function will assume the files exist,
          it is supposed to be a non-None/empty dictionary,
          key is file type(non-None/empty str,must be one of
          the strings in file_types),value is the output file
          names (non-None/empty str, including full path). Required.
          @param cache_timestamp the cache timestamp,
          it is supposed to be None or a non-None datetime. Optional, default to None.
          @return True if use cache, False otherwise
          @throws TypeError throws if any argument isn't of right type
          @throws ValueError throws if any argument isn't valid (refer to the argument documentation)
          @throws DataQueryError: if any other error occurred during the operation
       """
    signature = 'hfppnetwork.partner.httpservices.dataappliance.query_data'
    method_enter(
        signature, {
            "file_types": file_types,
            "query_logical_expressions": query_logical_expressions,
            "output_file_names": output_file_names,
            "cache_timestamp": cache_timestamp
        })

    # Parameter checking
    acceptableTypes = [
        'beneficiary', 'carrier', 'inpatient', 'outpatient', 'prescription'
    ]
    check_str_list('file_types', file_types)
    for one_type in file_types:
        if one_type not in acceptableTypes:
            raise ValueError('File type ' + one_type + ' not acceptable.')
    check_str_list('query_logical_expressions', query_logical_expressions)
    if len(query_logical_expressions) != len(file_types):
        raise ValueError(
            'query_logical_expressions and file_types lengths do not match.')
    check_dict('output_file_names', output_file_names)
    if len(output_file_names) != len(file_types):
        raise ValueError('output_file_names and file_types lengths do not match.')
    if cache_timestamp is not None:
        check_datetime('cache_timestamp', cache_timestamp)

    # Check if cache needs to be updated
    try:
        use_cache = check_use_cache(file_types, cache_timestamp)
    except Exception:
        raise DataQueryError('Error occurred while checking cache data.')
    # Load data from the database if the cache is not used
    if not use_cache:
        try:
            for type_index, one_type in enumerate(file_types):
                # Parse and generate query string
                parser = Parser()
                generator = SQLGenerator()
                parse_tree = parser.parse(
                    query_logical_expressions[type_index].strip())
                sql = generator.visit(parse_tree)
                logging.debug("sql %s ", sql)
                # Query data
                # Instantiate relevant classes
                if config.dbconfig["type"] == 'redis':
                    claimPersistence = RedisClaimPersistence()
                elif config.dbconfig["type"] == "mysql":
                    claimPersistence = MySQLClaimPersistence()
                else:
                    raise ValueError("Invalid db type: " +
                                     config.dbconfig["type"])
                claimPersistence.connectionConfig = config.dbconfig
                claimFile = CSVClaimFile()
                processor = ClaimFileProcessor()
                processor.claimPersistence = claimPersistence
                processor.claimFile = claimFile
                processor.queryData(one_type,
                                    sql if (sql and len(sql) > 0) else '1=1',
                                    0, 0,
                                    output_file_names[one_type])
        except ParserError:
            raise DataQueryError('Error occurred while parsing query string.')
        except Exception:
            raise DataQueryError('Error occurred while querying data.')

    method_exit(signature, use_cache)
    return use_cache
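To illustrate the argument shapes that the parameter checks above expect (lists of equal length, a dict keyed by file type), a hypothetical call could look like the following. The file paths, query expressions, and column names are placeholders; the expression syntax actually accepted by Parser is not shown in this example.

from datetime import datetime

# All values below are illustrative only.
file_types = ["beneficiary", "carrier"]
query_logical_expressions = [
    "BENE_BIRTH_DT >= 19400101",   # placeholder expression
    "CLM_PMT_AMT > 100",           # placeholder expression
]
# Per the docstring, these output files are assumed to already exist.
output_file_names = {
    "beneficiary": "/tmp/beneficiary.csv",
    "carrier": "/tmp/carrier.csv",
}

used_cache = query_data(
    file_types,
    query_logical_expressions,
    output_file_names,
    cache_timestamp=datetime(2014, 1, 1),  # optional; may also be None
)
print("Served from cache: " + str(used_cache))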