Example #1
import argparse

# Project modules used below (etl, etl_es, etl_odbc, Configuration) are
# imported elsewhere in the original file; only the entry point is shown.
def main():
    parser = argparse.ArgumentParser(description='ETL(es to odbc)')
    parser.add_argument('--conf', '--conf-dir', required=True)
    parser.add_argument('--profile', required=True)
    parser.add_argument('--body', required=True)
    parser.add_argument('--optimize', action='store_true', default=False)

    from settings import configure_logging
    configure_logging()

    config = Configuration(vars(parser.parse_args()))
    extractor = etl_es.ElasticsearchDataExtractor(config)
    transformer = etl.SimpleDataTransformer(config)
    loader = etl_odbc.ODBCDataLoader(config)
    etl.etl(config, extractor, transformer, loader)
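Every example on this page imports and calls settings.configure_logging(), but the settings module itself is never shown. The following is only a minimal sketch of what such a helper might look like (the dictConfig/basicConfig fallback and the optional JSON config path, used by Example #7, are assumptions):

import json
import logging
import logging.config


def configure_logging(config_path=None):
    """Hypothetical helper: apply a dictConfig-style JSON file if given,
    otherwise fall back to a plain basicConfig."""
    if config_path:
        with open(config_path) as config_file:
            logging.config.dictConfig(json.load(config_file))
    else:
        logging.basicConfig(
            level=logging.INFO,
            format="%(asctime)s %(name)s %(levelname)s %(message)s",
        )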
Example #2
import argparse

# As in Example #1, the project modules (etl, etl_mongo, etl_es, Configuration)
# are imported outside this excerpt.
def main():
    parser = argparse.ArgumentParser(description='ETL(mongo to es)')
    parser.add_argument('--conf', '--conf-dir', required=True)
    parser.add_argument('--query', required=True)
    parser.add_argument('--profile', required=False)
    parser.add_argument('--optimize', action='store_true', default=False)
    parser.add_argument('--index-template-name', required=False)
    parser.add_argument('--index-settings', required=False)
    parser.add_argument('--settings', required=False, default='{}')
        
    from settings import configure_logging
    configure_logging()

    config = Configuration(vars(parser.parse_args()))
    extractor = etl_mongo.MongoDataExtractor(config)
    transformer = etl.SimpleDataTransformer(config)
    loader = etl_es.ElasticsearchDataLoader(config)
    etl.etl(config, extractor, transformer, loader)
Example #3
import argparse

# As above, the project modules (etl, etl_mongo, Configuration) are imported
# outside this excerpt.
def main():
    parser = argparse.ArgumentParser(description='ETL(mongo to mongo)')
    parser.add_argument('--conf', '--conf-dir', required=True)
    parser.add_argument('--query', required=True)
    parser.add_argument('--profile', required=False)
    parser.add_argument('--optimize', action='store_true', default=False)
    parser.add_argument('--update', action='store_true', default=False)
    parser.add_argument('--settings', required=False, default='{}')

    from settings import configure_logging
    configure_logging()

    args = parser.parse_args()
    config = Configuration(vars(args))
    extractor = etl_mongo.MongoDataExtractor(config)
    if args.update:
        transformer = etl_mongo.MongoUpdateDataTransformer(config)
        loader = etl_mongo.MongoUpdateDataLoader(config)
    else:
        transformer = etl.SimpleDataTransformer(config)
        loader = etl_mongo.MongoDataLoader(config)

    etl.etl(config, extractor, transformer, loader)
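Examples #1-#3 all hand the same four objects to etl.etl(); that driver is not part of the excerpts. Below is a hedged sketch of the extract-transform-load loop the naming suggests (the extract/transform/load method names are assumptions):

def etl(config, extractor, transformer, loader):
    """Hypothetical driver: stream batches from the extractor through the
    transformer and into the loader."""
    # config is passed through by the call sites above; a real driver would use it.
    for batch in extractor.extract():
        loader.load(transformer.transform(batch))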
Example #4
import logging
import os
import random

from sqlalchemy.sql import text

import db
import settings
from db.models import SQLAlchemyBase, User, GenereEnum, UserToken, RolEnum, PositionEnum, SmashEnum, TournamentTypeEnum, \
    TournamentPrivacyTypeEnum, Facility, TournamentGenereEnum, AgeCategoriesTypeEnum, Category, Tournament, Couple, \
    Round, Match
from settings import DEFAULT_LANGUAGE

# LOGGING
mylogger = logging.getLogger(__name__)
settings.configure_logging()


def execute_sql_file(sql_file):
    """Run one SQL script from the local ./sql folder against the database.

    db_session is expected to be defined elsewhere in the original module.
    """
    sql_folder_path = os.path.join(os.path.dirname(__file__), "sql")
    with open(os.path.join(sql_folder_path, sql_file), encoding="utf-8") as sql_file_handle:
        sql_command = text(sql_file_handle.read())
        db_session.execute(sql_command)
        db_session.commit()


if __name__ == "__main__":
    settings.configure_logging()
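The __main__ block of Example #4 is cut off after the logging call. A sketch of how the seeding script might continue, using plain SQLAlchemy in place of the project's unshown db helpers (the connection URL and SQL file name are illustrative):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    # Illustrative engine/session setup; the real project presumably builds
    # these through its db module.
    engine = create_engine("sqlite:///example.db")
    SQLAlchemyBase.metadata.create_all(engine)   # create tables from the imported models
    db_session = sessionmaker(bind=engine)()     # module-level session used by execute_sql_file
    execute_sql_file("initial_data.sql")         # hypothetical script under ./sql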
Example #5
import json
import logging
from test import RandomMock, TimeMock

from db import get_db
from settings import configure_logging

from .exceptions import FixtureError

configure_logging()


class DbFixture:
    def __init__(self, app):
        self.app = app
        self.client = app.test_client()

    def __enter__(self):
        self.commit()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.rollback()

    def check_reponse(self, r):
        if r.status_code != 200:
            logging.error('Response: %s', r.data)
            raise FixtureError()

    def commit(self):
        get_db().create_all(app=self.app)
        r = self.client.post('/api/v1/tag/',
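The commit() method above is truncated mid-call in this excerpt, and no test that uses the fixture is shown. A hypothetical usage might look like this (note that __enter__ does not return self, so the fixture is entered without as; the endpoint and assertion are illustrative):

def test_tag_listing(app):
    # Entering the context seeds the database via commit(); leaving it rolls
    # the changes back via rollback().
    with DbFixture(app):
        client = app.test_client()
        r = client.get('/api/v1/tag/')
        assert r.status_code == 200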
Example #6
__author__ = 'caninemwenja'

from twisted.protocols.basic import LineReceiver
from twisted.internet.protocol import Factory
from twisted.internet import reactor

from siafu import Siafu, SiafuError, SiafuSyntaxError

import logging

import settings

settings.configure_logging()


class SiafuProtocol(LineReceiver):

    def __init__(self, addr):
        self.addr = addr
        self.siafu = Siafu(settings.DATABASE)

    def connectionMade(self):
        logging.info("new connection: {0}".format(self.addr))
        self.sendLine("Connected, Send it")

    def connectionLost(self, reason):
        logging.info("connection lost: {0}".format(self.addr))

    def lineReceived(self, line):
        logging.info("Received line: {0} from {1}".format(
            line, self.addr))
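Example #6 imports Factory and reactor, but the wiring code falls outside the excerpt. The usual Twisted setup would look roughly like this (the factory class name and port are assumptions):

class SiafuFactory(Factory):
    def buildProtocol(self, addr):
        # Hand each new connection its own SiafuProtocol instance.
        return SiafuProtocol(addr)


if __name__ == '__main__':
    reactor.listenTCP(4040, SiafuFactory())
    reactor.run()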
Example #7
            result[key] = value
            if isinstance(value, list):
                _TOKEN_COUNTS[key].update(set(value))

        yield result


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('config', help='JSON processing config')
    parser.add_argument('infile', help='raw JSON records, one per line')
    parser.add_argument('outfile')
    parser.add_argument('--vocabulary',
                        help='target JSON file of token-count mappings')
    parser.add_argument('--logging', help='JSON logging config')
    args = parser.parse_args()
    settings.configure_logging(args.logging)

    config = load_json(args.config)
    preprocessors = list(
        filter(None, [get_preprocessor(item) for item in config]))

    with open(args.infile) as f_in:
        with open(args.outfile, 'w') as f_out:
            LOGGER.info('writing %s', args.outfile)
            for record in preprocess(preprocessors, f_in):
                f_out.write('%s\n' % json.dumps(record))

    if args.vocabulary:
        dump_json(args.vocabulary, _TOKEN_COUNTS)
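The load_json and dump_json helpers called above are not part of the excerpt; minimal stand-ins inferred from their call sites might look like this (the default=list argument is an assumption to cope with set-valued token counts):

import json


def load_json(path):
    with open(path) as f:
        return json.load(f)


def dump_json(path, obj):
    with open(path, 'w') as f:
        json.dump(obj, f, indent=2, default=list)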