def main():
    """Brute-force search for the target word in the data blob.

    Parses command-line arguments, runs the linear scan, logs each match
    at DEBUG level, and returns the list of matching byte strings so the
    result count can also be inspected by callers.
    """
    args = parse_argument()
    byte_word = args.word.encode('utf-8')

    # Raise the log level *before* any logging: otherwise the two INFO
    # lines below would still be emitted on every timed run when the
    # script is benchmarked with --measure (see the __main__ block).
    if args.measure:
        log.setLevel(logging.ERROR)

    log.info('検索語: %s' % args.word)
    log.info('検索語のバイト長: %d' % len(byte_word))

    with args.data() as blob:
        results = brute_force_search(blob, byte_word)

    for result in results:
        log.debug(result.decode('utf-8').strip())
    log.info('検索結果: %d 件' % len(results))
    # Returned for consistency with the other search variants in this file.
    return results
def main():
    """Boyer-Moore-Sunday search for the target word in the data blob.

    Builds the quick-search shift table, runs the search, logs each match
    at DEBUG level, and returns the list of matching byte strings.
    """
    args = parse_argument()
    byte_word = args.word.encode('utf-8')

    # Raise the log level *before* any logging: otherwise the two INFO
    # lines below would still be emitted on every timed run when the
    # script is benchmarked with --measure (see the __main__ block).
    if args.measure:
        log.setLevel(logging.ERROR)

    log.info('検索語: %s' % args.word)
    log.info('検索語のバイト長: %d' % len(byte_word))

    with args.data() as blob:
        table = make_qs_table(byte_word)
        results = boyer_moore_sunday_search(blob, byte_word, table)

    for result in results:
        log.debug(result.decode('utf-8').strip())
    log.info('検索結果: %d 件' % len(results))
    # NOTE(review): `cnt` is not defined in this function — presumably a
    # module-level loop counter updated by boyer_moore_sunday_search.
    # Confirm it exists, otherwise this line raises NameError.
    log.info('ループ回数: %d' % cnt)
    return results


def main():
    """Brute-force search for the target word in the data blob.

    Parses command-line arguments, runs the linear scan, logs each match
    at DEBUG level, and returns the list of matching byte strings so the
    result count can also be inspected by callers.
    """
    args = parse_argument()
    byte_word = args.word.encode('utf-8')

    # Raise the log level *before* any logging: otherwise the two INFO
    # lines below would still be emitted on every timed run when the
    # script is benchmarked with --measure (see the __main__ block).
    if args.measure:
        log.setLevel(logging.ERROR)

    log.info('検索語: %s' % args.word)
    log.info('検索語のバイト長: %d' % len(byte_word))

    with args.data() as blob:
        results = brute_force_search(blob, byte_word)

    for result in results:
        log.debug(result.decode('utf-8').strip())
    log.info('検索結果: %d 件' % len(results))
    # Returned for consistency with the other search variants in this file.
    return results


if __name__ == '__main__':
    # With --measure, time repeated main() runs via timeit instead of a
    # single normal invocation.
    args = parse_argument()
    if not args.measure:
        main()
    else:
        sec = timeit.repeat('main()',
                            setup='from __main__ import main',
                            repeat=3, number=5)
        # Restore INFO level so the summary line is actually visible.
        log.setLevel(logging.INFO)
        log.info('平均実行時間: %f sec' % statistics.mean(sec))
# Beispiel #4
# 0
#     return results


def main():
    """Boyer-Moore-Sunday search for the target word in the data blob.

    Builds the quick-search shift table, runs the search, logs each match
    at DEBUG level, and returns the list of matching byte strings.
    """
    args = parse_argument()
    byte_word = args.word.encode('utf-8')

    # Raise the log level *before* any logging: otherwise the two INFO
    # lines below would still be emitted on every timed run when the
    # script is benchmarked with --measure (see the __main__ block).
    if args.measure:
        log.setLevel(logging.ERROR)

    log.info('検索語: %s' % args.word)
    log.info('検索語のバイト長: %d' % len(byte_word))

    with args.data() as blob:
        table = make_qs_table(byte_word)
        results = boyer_moore_sunday_search(blob, byte_word, table)

    for result in results:
        log.debug(result.decode('utf-8').strip())
    log.info('検索結果: %d 件' % len(results))
    # Returned for consistency with the other search variants in this file.
    return results


if __name__ == '__main__':
    # With --measure, time repeated main() runs via timeit instead of a
    # single normal invocation.
    args = parse_argument()
    if not args.measure:
        main()
    else:
        sec = timeit.repeat('main()',
                            setup='from __main__ import main',
                            repeat=3, number=5)
        # Restore INFO level so the summary line is actually visible.
        log.setLevel(logging.INFO)
        log.info('平均実行時間: %f sec' % statistics.mean(sec))
def put_query_refresh():
    """Placeholder for the query-refresh PUT logic (not implemented yet)."""
    pass


def get_csv_dump(query_id):
    """Download the CSV result dump for *query_id* from Redash.

    Writes the response body to ``<query_id>_results.csv`` in the current
    directory and returns that file name.
    """
    with requests.Session() as s:
        csv_url = redash_config['query_url'] + query_id + "/results.csv"
        download = s.get(csv_url,
                         params={'api_key': redash_config['user_api_key']})
        temp_file_name = query_id + '_results.csv'
        # BUG FIX: ``download.content`` is bytes, so the file must be opened
        # in binary mode; ``writelines()`` over a bytes object iterates
        # integers and raises TypeError — use a single write() instead.
        with open(temp_file_name, 'wb') as temp_file:
            temp_file.write(download.content)
        return temp_file_name


def send_email_alert(query_details, query_result, recepient_emails, query_id,
                     file_name):
    """Render the query result as an HTML table and email it out.

    The query's name is used as the subject line; ``file_name`` (a CSV
    dump, or None) is forwarded to send_email as an attachment.
    """
    send_email(recepient_emails,
               query_details['name'],
               get_html_table(query_result, query_id),
               file_name)


# Script entry: fetch the query's metadata and latest results, optionally
# dump them to CSV, then send the alert email.
options = parse_argument()
query_details = get_query_details(options.query_id)
query_result = get_query_results(options.query_id)
# Only produce the CSV attachment when the caller asked for a dump.
temp_file_name = (get_csv_dump(options.query_id)
                  if options.send_dump == 'Y' else None)
send_email_alert(query_details, query_result, options.recepient_emails,
                 options.query_id, temp_file_name)