Example #1
0
def insert_into(cursor, table, payload):
    """Insert all rows from a query payload into ``table``.

    Args:
        cursor: DB-API cursor used to execute the INSERT.
        table: name of the target table; interpolated as a quoted
            identifier (identifiers cannot be bound as parameters).
        payload: response payload with ``table.cols`` describing the
            columns and ``table.rows`` holding the raw row data.
    """
    cols = payload['table']['cols']
    # one "?" placeholder per column; row values are bound, never interpolated
    values = ', '.join('?' for _ in cols)
    # double any embedded double quotes so the quoted identifier stays
    # well-formed even for unusual table names
    query = 'INSERT INTO "{table}" VALUES ({values})'.format(
        table=table.replace('"', '""'), values=values)
    # convert raw payload rows to proper Python types before binding
    rows = convert_rows(cols, payload['table']['rows'])
    logger.info(query)
    cursor.executemany(query, rows)
Example #2
0
def execute(query, headers=0, credentials=None):
    """Run a SQL query against a Google spreadsheet and return the results.

    Args:
        query: SQL string whose ``FROM`` clause holds the sheet URL.
        headers: number of header rows in the sheet (default 0).
        credentials: optional Google credentials for private sheets.

    Returns:
        A ``(results, description)`` tuple: rows converted to proper
        Python types, and a cursor description built from the payload.

    Raises:
        ProgrammingError: if the SQL cannot be parsed or the sheet
            backend reports an error.
        InterfaceError: if the FROM clause is not a docs.google.com URL.
    """
    try:
        parsed_query = parse_sql(query)
    except pyparsing.ParseException as e:
        raise ProgrammingError(format_moz_error(query, e))

    # fetch aliases, since they will be removed by the translator
    original_aliases = extract_column_aliases(parsed_query)

    # extract URL from the `FROM` clause
    from_ = extract_url(query)
    baseurl = get_url(from_, headers)

    # verify that URL is actually a Google spreadsheet
    parsed = parse.urlparse(baseurl)
    if parsed.netloc != 'docs.google.com':
        raise InterfaceError('Invalid URL, must be a docs.google.com URL!')

    # map between labels and ids, eg, `{ 'country': 'A' }`
    column_map = get_column_map(baseurl, credentials)

    # preprocess: let every matching processor rewrite the parsed query,
    # remembering the order so post-processing can mirror it
    used_processors = []
    for cls in processors:
        if cls.match(parsed_query):
            processor = cls()
            parsed_query = processor.pre_process(parsed_query, column_map)
            used_processors.append(processor)
    processed_aliases = extract_column_aliases(parsed_query)

    # translate column names to ids and remove aliases
    translated_query = translate(parsed_query, column_map)
    # lazy %-style args avoid formatting when INFO logging is disabled
    logger.info('Original query: %s', query)
    logger.info('Translated query: %s', translated_query)

    # run query
    payload = run_query(baseurl, translated_query, credentials)
    if payload['status'] == 'error':
        raise ProgrammingError(
            format_gsheet_error(query, translated_query, payload['errors']))

    # postprocess in the same order the processors were applied
    for processor in used_processors:
        payload = processor.post_process(payload, processed_aliases)

    # add aliases back (positional match between original SELECT list
    # and the returned columns)
    cols = payload['table']['cols']
    for alias, col in zip(original_aliases, cols):
        if alias is not None:
            col['label'] = alias

    description = get_description_from_payload(payload)

    # convert rows to proper type (datetime, eg)
    rows = payload['table']['rows']
    results = convert_rows(cols, rows)

    return results, description