Code example #1
File: computorv1.py, Project: htkachuk/computorv1
import sys
from copy import deepcopy


def main():
    # Expect the whole equation as a single quoted command-line argument.
    try:
        polinom, err = parse_input(sys.argv[1])
    except Exception:
        print(
            "usage:\npython3 computorv1.py \"5 * X^0 + 4 * X^1 - 9.3 * X^2 = 1 * X^0\""
        )
        return
    if err is not None:
        print(err)
        return
    degree = polinom_degree(polinom)
    reduced_form, err = make_reduced(polinom, degree)
    if err is not None:
        print(err)
        return
    print("Reduced form: " + reduced_string(deepcopy(reduced_form), degree))
    print("Polynomial degree: " + str(degree))
    if degree > 2:
        print("The polynomial degree is strictly greater than 2, I can't solve.")
        return
    solve_polinom(degree, reduced_form)
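Note: the snippet above assumes parse_input returns a (polynomial, error) pair; the actual htkachuk/computorv1 parser is not shown on this page. Purely to illustrate that contract, a minimal stand-in that maps each exponent to its accumulated coefficient could look like the sketch below (parse_input_sketch and its behaviour are assumptions, not the project's code):

import re

def parse_input_sketch(equation):
    """Illustrative stand-in: collect "a * X^n" terms from both sides of '='."""
    try:
        left, right = equation.split('=')
    except ValueError:
        return None, "expected exactly one '='"
    term = re.compile(r'([+-]?\s*\d+(?:\.\d+)?)\s*\*\s*X\^(\d+)')
    coeffs = {}
    for side, sign in ((left, 1.0), (right, -1.0)):
        for coeff, power in term.findall(side):
            coeffs[int(power)] = coeffs.get(int(power), 0.0) + sign * float(coeff.replace(' ', ''))
    if not coeffs:
        return None, "no terms recognised"
    return coeffs, None

# parse_input_sketch("5 * X^0 + 4 * X^1 - 9.3 * X^2 = 1 * X^0")
# -> ({0: 4.0, 1: 4.0, 2: -9.3}, None)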
Code example #2
File: test_parser.py, Project: Tedworthy/ATLAST
 def test_symboltable_generation(self):
   # Read the test input and parse it into a logic AST.
   with open('test/input_file.txt', 'r') as f:
     logic = f.read().decode('utf-8')
   symbolTable = st.SymTable()
   logicAST = p.parse_input(logic)
   logicAST.generateSymbolTable(symbolTable)
   assert_equals(str(symbolTable), 'q,x,p')
Code example #3
File: main.py, Project: 97varun/codac
from copy import deepcopy  # needed for copying parse semantics below


def get_uniq_sems(transcript):
    parses = parse_input(grammar, transcript)
    if len(parses) > 0:
        score_parses(parses)
    else:
        return {
            'input': transcript,
            'output': 'ParseError',
            'error': 'Could not generate parse of the input',
            'id': 'ParseError'
        }
    parses.sort(key=lambda x: (x.semantics['score']), reverse=True)

    uniq_sems = []
    seen_sems = set()
    for parse in parses:
        sem_wo_score = deepcopy(parse.semantics)
        del sem_wo_score['score']
        sem_wo_score = tuple(sorted(sem_wo_score.items()))
        if sem_wo_score not in seen_sems:
            seen_sems.add(sem_wo_score)
            uniq_sems.append(parse.semantics)

    # post process semantics
    for i in range(len(uniq_sems)):
        if 'request' in uniq_sems[i] and\
           uniq_sems[i]['request'] == 'declare' and\
           'construct' not in uniq_sems[i]:
            uniq_sems[i]['construct'] = 'variable'

    return uniq_sems[:5]
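Example #3 deduplicates parses by turning each semantics dict (minus its score) into tuple(sorted(d.items())), which is hashable and insensitive to key order. A standalone illustration of that fingerprint trick, using made-up semantics values rather than anything from 97varun/codac:

from copy import deepcopy

def fingerprint(sem):
    """Order-independent, hashable key for a flat dict, ignoring 'score'."""
    sem = deepcopy(sem)
    del sem['score']
    return tuple(sorted(sem.items()))

sem_a = {'request': 'declare', 'construct': 'variable', 'score': 3}
sem_b = {'construct': 'variable', 'request': 'declare', 'score': 7}
assert fingerprint(sem_a) == fingerprint(sem_b)  # same semantics, different scores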
Code example #4
def overtriggering_experiment():
    domain = TravelDomain()
    grammar = domain.grammar()
    import re
    line_pattern = re.compile(r'^ *[0-9]+ (.*)$')
    queries = []
    query_file = '/Users/wcmac/Desktop/aol-data/AOL-user-ct-collection/all-queries-counted.txt'
    f = open(query_file, 'rU')
    for line in f.readlines():
        match = line_pattern.match(line)
        if not match:
            raise StandardError, 'unexpected line: %s' % line
        query = match.group(1)
        parses = parse_input(grammar, query)
        parses = filter(lambda parse: domain.is_travel_parse(parse), parses)
        if len(parses) > 0:
            print
            print query
            for parse in parses:
                print parse.semantics
        queries.append(query)
        if len(queries) % 1000 == 0:
            print
            print '-' * 80
            print 'Processed %d queries' % len(queries)
    f.close()
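The line_pattern used here (and again in the next example) strips a leading count column, as produced by sort | uniq -c, and keeps the rest of the line as the query. A small self-contained check with a made-up input line:

import re

line_pattern = re.compile(r'^ *[0-9]+ (.*)$')
match = line_pattern.match('   42 cheap flights to boston')  # hypothetical line from the counted-queries file
assert match is not None and match.group(1) == 'cheap flights to boston'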
Code example #5
def filter_queries_containing_locations(start=0, size=10):
    import re
    line_pattern = re.compile(r'^ *[0-9]+ (.*)$')
    queries = []
    query_file = '/Users/wcmac/Desktop/aol-data/AOL-user-ct-collection/20150220/possible-travel-queries.txt'
    f = open(query_file, 'rU')
    for line in f.readlines():
        match = line_pattern.match(line)
        if not match:
            raise StandardError, 'unexpected line: %s' % line
        query = match.group(1)
        queries.append(query)
    f.close()
    print 'Read %d queries' % len(queries)
    selected_queries = queries[start:(start+size)]
    print 'Selected %d queries' % len(selected_queries)

    domain = ContainsLocationDomain()
    grammar = domain.grammar()
    for query in selected_queries:
        print
        print 'Trying to parse', query
        parses = parse_input(grammar, query)
        # parses = filter(lambda parse: domain.is_travel_parse(parse), parses)
        if len(parses) > 0:
            print 'got %d parses' % len(parses)
            for parse in parses:
                print parse.semantics
        else:
            print 'no parse'
Code example #6
  def raises_errors(self, logic, expected_errors):
    print 'Logic Received: ' + logic
    # Create a Logic Tree from the Logic
    logicTree = p.parse_input(logic)

    # Run the semantic analysis
    dbSchema = schema.Schema()
    semanticAnalyser = sa.SemanticAnalyser(logicTree, dbSchema)

    errors = []
    try:
      semanticAnalyser.analyse()
    except Exception, e:
      errors = e.getDict() # Actually returns list??
Code example #7
File: test_codegen.py, Project: Tedworthy/ATLAST
  def translates_to(self, logic, expectedSQL):
    print 'Logic Received: ' + logic
    # Create a Logic Tree from the Logic
    logicTree = p.parse_input(logic)
    print "|*** LOGIC AST ***|\n"
    print str(logicTree)
    # Run the semantic analysis
    dbSchema = schema.Schema()
    semanticAnalyser = sa.SemanticAnalyser(logicTree, dbSchema)
    semanticAnalyser.analyse()

    # Generate the Symbol Table from the Logic Tree
    symbolTable = st.SymTable()
    logicTree.generateSymbolTable(symbolTable)

    # Generate an IR from the Logic Tree (uses Symbol Table)
    irGenerator = irg.IRGenerator(dbSchema)
    logicTree.accept(irGenerator)

    # Pull out the SQL IR
    ir = irGenerator.getIR()

    # Translate the IR to an SQL string
    sqlGenerator = sg.SQLGenerator()
    ir.accept(sqlGenerator)
    translatedSQL = sqlGenerator.getSQL()

    # If the query result does not match the expectation, let the user know.
    if translatedSQL.replace('\n', ' ') != expectedSQL.replace('\n', ' '):
      print "WARNING: Translated SQL does not match the expected result"
      print "Translated SQL: {"
      print translatedSQL
      print "}"
      print "Expected SQL: {"
      print expectedSQL
      print "}"

    # Run translated and expected SQL queries and compare results.
    # Force decode to ASCII as unicode SQL throws a massive wobbly.
    configData = cp.parse_file('dbbackend/db.cfg')
    con = pg.connect(configData)
    translatedResult = pg.query(con, translatedSQL.decode('ascii', 'ignore'))
    expectedResult = pg.query(con, expectedSQL)
    con.close()
    result = translatedResult == expectedResult
    if not result:
      print translatedResult, " != ", expectedResult

    return result
Code example #8
 def test_right_parse_2(self):
     data = '2 0 | 2 0'
     exp = str([[2, 0], [2, 0]])
     res = str(parse_input(data))
     self.assertEqual(exp, res)
Code example #9
 def test_right_parse_1(self):
     data = '2 0 0 0 0 | 2 2 0 0 0 | 0 0 0 0 9 | 4 0 0 4 0 | 0 0 2 0 0'
     exp = str([[2, 0, 0, 0, 0], [2, 2, 0, 0, 0], [0, 0, 0, 0, 9], [4, 0, 0, 4, 0], [0, 0, 2, 0, 0]])
     res = str(parse_input(data))
     self.assertEqual(exp, res)
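Examples #8 and #9 (and #11 below) exercise a parse_input that turns a '|'-separated string of rows into a list of integer lists. The expected values are consistent with a one-line implementation like this sketch (inferred from the tests, not taken from the project):

def parse_input_sketch(data):
    """Split rows on '|' and cells on whitespace, converting each cell to int."""
    return [[int(cell) for cell in row.split()] for row in data.split('|')]

assert parse_input_sketch('2 0 | 2 0') == [[2, 0], [2, 0]]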
Code example #10
File: manual.py, Project: Tedworthy/ATLAST
  print "Usage:\n  python {0} <logic_input_file_name>".format(sys.argv[0])
  sys.exit(1)
elif not os.path.exists(sys.argv[1]):
  print "ERROR: Specified file '{0}' does not exist".format(sys.argv[1])
  sys.exit(2)

# Open and display the contents of the input file
f = open(sys.argv[1], 'r')
print_prefix("Logic input:")
f_contents = f.read().decode('utf8')
for line in f_contents.splitlines():
  print_prefix(line)

# Parse the input file
print_prefix("Parsing...")
result = parsing.parse_input(f_contents)

# Print out the generated AST
print_prefix("AST generated:")
print_prefix(result)

# Set up a symbol table and code generation visitor
symbolTable = SymTable()
codegenVisitor = IRGenerator(schema.Schema())
sqlGeneratorVisitor = SQLGenerator()

# Generate the symbol table
print_prefix("Generating symbol table...")
result.generateSymbolTable(symbolTable)

# Show the generated symbol table
Code example #11
def parsing():
    """Метод, осуществляющий парсинг и выводящий результат"""
    parser = argparse.ArgumentParser(description='Shikaku solver')
    parser.add_argument('-mt',
                        '--matrix',
                        type=str,
                        metavar='',
                        help='Matrix with a field for the game')
    parser.add_argument('-doc',
                        '--document',
                        type=str,
                        metavar='',
                        help='Document with a field for the game')
    parser.add_argument('-save',
                        '--save',
                        type=str,
                        metavar='',
                        help='The path of the file in which to save result '
                        'field')
    parser.add_argument('-gui',
                        '--interface',
                        type=str,
                        metavar='',
                        help='Using the GUI (true or None)')
    parser.add_argument('-len',
                        '--length',
                        type=str,
                        metavar='',
                        help='Length of field')
    parser.add_argument('-wd',
                        '--width',
                        type=str,
                        metavar='',
                        help='Width of field')
    args = parser.parse_args()
    matrix = args.matrix
    doc = args.document
    save = args.save
    gui = args.interface
    ln = args.length
    wd = args.width
    result = None
    if gui is not None and ln is not None and wd is not None:
        return get_gui(int(ln), int(wd))
    elif gui is not None and ln is None and wd is None:
        length = int(input('Enter the length of the game field: '))
        width = int(input('Enter the width of the game field: '))
        return get_gui(length, width)
    else:
        if matrix is None and doc is None:
            mt = input('Enter the game field: ')
            mt = mt[1:-1]
            mt = parse_input(mt)
            length = len(mt)
            width = len(mt[0])
            if length != width:
                new_mat, key = create_square_field(length, width, mt)
                new_mat = np.array(new_mat)
                result = return_result_matrix(new_mat)
                result = process_res_matrix(result)
                result = back_to_orig(result, key)
            else:
                result = return_result_matrix(np.array(mt))
        elif doc is not None and matrix is None:
            result = get_contains(doc)

        elif doc is None and matrix is not None:
            mt = parse_input(matrix)
            length = len(mt)
            width = len(mt[0])
            if length != width:
                new_mat, key = create_square_field(length, width, mt)
                new_mat = np.array(new_mat)
                result = return_result_matrix(new_mat)
                result = process_res_matrix(result)
                result = back_to_orig(result, key)
            else:
                result = return_result_matrix(np.array(mt))

        if len(re.findall(r'\w+', str(result))) == 0:
            result = 'No solutions'

        if save is None:
            print(result)
        else:
            with open(save, 'w') as f:
                f.write(result)