Exemplo n.º 1
0
def mock_client():
    """Yield a mocked ES client registered under the 'mock' alias.

    The private connection registry is wiped after the test so the mock
    does not leak into other test cases.
    """
    fake = Mock()
    fake.search.return_value = dummy_response()
    connections.add_connection('mock', fake)
    yield fake
    # teardown: restore a pristine registry
    connections._conn = {}
    connections._kwargs = {}
Exemplo n.º 2
0
    def setUp(self):
        """Connect to ES, create the Image index, and seed test fixtures."""
        super().setUp()
        self.es = search.init_es()
        connections.add_connection('default', self.es)
        self.s = Search(index=settings.ELASTICSEARCH_INDEX)
        search.Image.init()

        # Wait until the cluster is at least 'yellow' so the fixture
        # writes below do not race cluster start-up.
        self.es.cluster.health(wait_for_status='yellow', request_timeout=2000)
        self.img1 = models.Image(title='greyhounds are fast',
                                 creator="Rashid",
                                 url='http://example.com/1',
                                 license='CC0',
                                 provider="flickr",
                                 source="openimages",
                                 tags_list=['greyhound', 'dog', 'object'])
        self.img2 = models.Image(title='pumpkins are orange',
                                 creator='諸葛亮',
                                 url='http://example.com/2',
                                 license='CC-BY',
                                 provider="rijksmuseum",
                                 source="rijksmuseum",
                                 tags_list=['gourds', 'fruit', 'object'])
        self.img1.save()
        self.img2.save()
        self.url = reverse('index')
        # Image whose remote file no longer exists — presumably used by
        # dead-link tests; note it is created directly in the DB, not
        # saved through the same path as img1/img2 (verify if relevant).
        self.removed = models.Image.objects.create(title='removed',
                                                   url=FOREIGN_URL +
                                                   TEST_IMAGE_REMOVED,
                                                   license="cc0")
Exemplo n.º 3
0
def mock_client(dummy_response):
    """Yield a Mock ES client (alias 'mock') whose search() returns the
    injected dummy_response; the registry is cleared on teardown."""
    es_mock = Mock()
    es_mock.search.return_value = dummy_response
    connections.add_connection('mock', es_mock)
    yield es_mock
    # teardown: reset the connection registry
    connections._conn = {}
    connections._kwargs = {}
Exemplo n.º 4
0
def client():
    """Return a live test ES connection registered as 'default', or skip."""
    try:
        conn = get_test_client(nowait='WAIT_FOR_ES' not in os.environ)
    except SkipTest:
        skip()
    else:
        connections.add_connection('default', conn)
        return conn
Exemplo n.º 5
0
def get_top_k(text_query):
    """Run a BM25 multi-match query and return [{'article', 'score'}, ...].

    The query text is normalized via pre_process_text first; the number
    of hits requested is the module-level ``top_k``.
    """
    normalized = ' '.join(pre_process_text(text_query))

    es = Elasticsearch()
    connections.add_connection('CivilArticle', es)

    body = {
        "from": 0,
        "size": top_k,
        "query": {
            "multi_match": {
                "query": normalized,
                "fields": ["title", "content"]
            }
        },
    }
    hits = es.search(index='coliee_bm25_index', body=body)['hits']['hits']

    return [
        {'article': hit['_source']["code"].lower(), 'score': hit['_score']}
        for hit in hits
    ]
Exemplo n.º 6
0
def client():
    """Obtain the shared test ES connection; skip the tests if unavailable."""
    try:
        live = get_test_client(nowait='WAIT_FOR_ES' not in os.environ)
    except SkipTest:
        skip()
    else:
        connections.add_connection('default', live)
        return live
Exemplo n.º 7
0
def init(timeout=TIMEOUT):
    """Initialize all search objects and return the 'default' ES client."""
    connection = init_es(timeout=timeout)
    connections.add_connection('default', connection)
    log.debug("Initializing search objects for connection %s:%s",
              settings.ELASTICSEARCH_URL, settings.ELASTICSEARCH_PORT)
    return connection
Exemplo n.º 8
0
def configure_es_from_config(settings):
    """Build the ES client from `settings` and record its coordinates in
    the module-level elasticsearch_config mapping."""
    global elasticsearch_config
    conn = client_from_config(settings)
    connections.add_connection('default', conn)
    elasticsearch_config['client'] = conn
    elasticsearch_config['host'] = settings['elasticsearch.host']
    elasticsearch_config['port'] = int(settings['elasticsearch.port'])
    elasticsearch_config['index'] = settings['elasticsearch.index']
Exemplo n.º 9
0
def es_client():
    # Mocking strategy borrowed from elasticsearch-dsl's own conftest:
    # https://github.com/elastic/elasticsearch-dsl-py/blob/master/test_elasticsearch_dsl/conftest.py
    fake = mock.Mock()
    fake.search.return_value = {}
    connections.add_connection("default", fake)
    yield fake
    # drop the registered mock so later tests start from a clean registry
    connections._conn = {}
    connections._kwargs = {}
Exemplo n.º 10
0
def mock_client(request):
    # local import so coverage numbers are not skewed by module import
    from elasticsearch_dsl.connections import connections
    stub = Mock()
    stub.search.return_value = dummy_response()
    connections.add_connection('mock', stub)
    yield stub
    # teardown: empty the private registry
    connections._conn = {}
    connections._kwargs = {}
Exemplo n.º 11
0
def mock_es_client(request):
    """
    Provide a mocked ElasticSearch client registered as 'default'.

    Tests are expected to set client.search.return_value themselves.
    Adapted from the elasticsearch_dsl test fixtures.
    """
    fake = mock.Mock()
    connections.add_connection('default', fake)
    yield fake
    connections._conn = {}
    connections._kwargs = {}
Exemplo n.º 12
0
def mock_es_client(request):
    """
    Yield a mock.Mock standing in for the 'default' ES connection.

    Callers override client.search.return_value as needed; the
    connection registry is reset when the fixture finalizes.
    (Pattern taken from elasticsearch_dsl's test suite.)
    """
    stand_in = mock.Mock()
    connections.add_connection('default', stand_in)
    yield stand_in
    connections._conn = {}
    connections._kwargs = {}
Exemplo n.º 13
0
def mock_client(request):
    # inner import to avoid throwing off coverage
    from elasticsearch_dsl.connections import connections

    def _restore():
        # wipe the private registry once the requesting test finishes
        connections._conn = {}
        connections._kwargs = {}
    request.addfinalizer(_restore)

    fake = Mock()
    fake.search.return_value = dummy_response()
    connections.add_connection('mock', fake)
    return fake
Exemplo n.º 14
0
def _es_client(worker_id):
    """
    Yield the ES test helper client.

    settings.ES_INDEX_PREFIX is simply overwritten (pytest's monkeypatch
    does not work in session fixtures, and there is no need to restore
    the value) using the xdist worker ID, so each `pytest -n` process
    gets its own set of indices.
    """
    settings.ES_INDEX_PREFIX = f'test_{worker_id}'

    from elasticsearch_dsl.connections import connections
    test_client = get_test_client(nowait=False)
    connections.add_connection('default', test_client)
    yield test_client
Exemplo n.º 15
0
def client(request):
    # inner import to avoid throwing off coverage
    from elasticsearch_dsl.connections import connections
    # hack to workaround pytest not caching skip on fixtures (#467)
    global _client_loaded
    if _client_loaded:
        skip()
    _client_loaded = True

    try:
        conn = get_test_client(nowait='WAIT_FOR_ES' not in os.environ)
    except SkipTest:
        skip()
    else:
        connections.add_connection('default', conn)
        return conn
Exemplo n.º 16
0
def mock_client(request):
    # import inside the fixture so coverage is not thrown off
    from elasticsearch_dsl.connections import connections

    def _cleanup():
        connections._conn = {}
        connections._kwargs = {}

    # registry is emptied when the requesting test completes
    request.addfinalizer(_cleanup)

    stub = Mock()
    stub.search.return_value = dummy_response()
    connections.add_connection('mock', stub)
    return stub
Exemplo n.º 17
0
    def run(self) -> None:
        """Extract metadata records and publish them to a fresh ES index.

        Steps: extract records, register the ES connection, health-check
        the cluster, create the new index with the document mapping,
        bulk-index the generated documents, then delete the old index.
        The extractor's closer always runs, even on failure.

        Raises:
            Exception: if the cluster health is neither green nor yellow.
        """
        LOGGER.info('Running search metadata to Elasticsearch task')
        try:
            # extract records from metadata store
            record = self.extractor.extract()

            # create connection
            connections.add_connection('default', self.elasticsearch_client)
            connection = connections.get_connection()

            # health check ES
            health = connection.cluster.health()
            status = health["status"]
            if status not in ("green", "yellow"):
                msg = f"Elasticsearch healthcheck failed: {status}"
                LOGGER.error(msg)
                raise Exception(msg)

            # create index
            LOGGER.info(f"Creating ES index {self.elasticsearch_new_index}")
            index = Index(name=self.elasticsearch_new_index, using=self.elasticsearch_client)
            index.document(self.document_mapping)
            index.create()

            # publish search metadata to ES
            cnt = 0
            for success, info in parallel_bulk(connection,
                                               self.generate_documents(record=record),
                                               raise_on_error=False,
                                               chunk_size=self.elasticsearch_batch_size,
                                               request_timeout=self.elasticsearch_timeout_sec):
                if not success:
                    # Logger.warn is a deprecated alias; use warning().
                    LOGGER.warning(f"There was an error while indexing a document to ES: {info}")
                else:
                    cnt += 1
                # Log progress once per completed batch.  The previous
                # check (`cnt == batch_size`) fired at most once, and the
                # message multiplied cnt by the batch size, over-reporting
                # the number of published records.
                if cnt and cnt % self.elasticsearch_batch_size == 0:
                    LOGGER.info(f'Published {cnt} records to ES')

            # delete old index
            self._delete_old_index(connection=connection,
                                   document_index=index)

            LOGGER.info("Elasticsearch Indexing completed")
        finally:
            # always release extractor resources
            self._closer.close()
Exemplo n.º 18
0
    def ready(self):
        """Django AppConfig hook: build the global ES client and ensure the
        Location/Shout indices exist.

        Uses AWS SigV4 authentication when ES_URL points at an AWS-hosted
        cluster; otherwise a plain default connection.  Index-creation
        RequestErrors are treated as "already initialized"; connection
        timeouts are logged as the server being down.
        """
        from elasticsearch import Elasticsearch, RequestsHttpConnection, RequestError, ConnectionTimeout
        from elasticsearch_dsl.connections import connections
        from requests_aws4auth import AWS4Auth
        from django.conf import settings
        from shoutit.models import LocationIndex, ShoutIndex
        from shoutit.utils import error_logger

        import shoutit
        # Todo (Nour): Cleanup!
        # Define a default global Elasticsearch client
        if 'es.amazonaws.com' in settings.ES_URL:
            # Connect using IAM  based authentication on AWS
            # NOTE(review): region is hard-coded to 'eu-west-1' — confirm
            # this matches the deployed cluster.
            awsauth = AWS4Auth(settings.AWS_ACCESS_KEY_ID,
                               settings.AWS_SECRET_ACCESS_KEY, 'eu-west-1',
                               'es')
            ES = Elasticsearch(hosts=[settings.ES_URL],
                               http_auth=awsauth,
                               use_ssl=True,
                               verify_certs=True,
                               connection_class=RequestsHttpConnection)
            connections.add_connection(alias='default', conn=ES)
        else:
            ES = connections.create_connection(hosts=[settings.ES_URL])

        # expose the client package-wide
        shoutit.ES = ES

        # Initiate the index if not initiated
        try:
            LocationIndex.init()
        except RequestError:
            pass
        except ConnectionTimeout:
            error_logger.warn("ES Server is down", exc_info=True)

        # Initiate the index if not initiated
        try:
            ShoutIndex.init()
        except RequestError:
            pass
        except ConnectionTimeout:
            error_logger.warn("ES Server is down", exc_info=True)
Exemplo n.º 19
0
def get_top_k_scores(text_query):
    """Query the BM25 index and return the top-k article codes and scores.

    Parameters
    ----------
    text_query : str
        Raw query text; normalized via pre_process_text before searching.

    Returns
    -------
    tuple[list[str], list[str]]
        Parallel lists: lower-cased article codes and their scores
        (stringified), at most ``k`` entries.
    """
    text_query = ' '.join(pre_process_text(text_query))

    es = Elasticsearch()
    connections.add_connection('CivilArticle', es)
    es_index = 'coliee_bm25_index'

    es_query = {
        "multi_match": {
            "query": text_query,
            "fields": ["title", "content"]
        }
    }
    res = es.search(index=es_index,
                    body={
                        "from": 0,
                        "size": k,
                        "query": es_query
                    })
    hits = res['hits']['hits']

    # Bugfix: the previous while-loop `continue`d on IndexError without
    # advancing `i` (infinite loop) and never incremented `count`, so its
    # `count < k` guard was dead.  A bounded iteration is equivalent.
    articles = []
    scores = []
    for hit in hits[:k]:
        if not hit:
            break
        articles.append(hit['_source']["code"].lower())
        scores.append(str(hit['_score']))

    return articles, scores
Exemplo n.º 20
0
from datetime import datetime

from elasticsearch import Elasticsearch
from elasticsearch_dsl import Document, Date, Integer, Keyword, Text
from elasticsearch_dsl.connections import connections


# Register a dedicated client under the 'dev' alias so documents can
# select it explicitly (via Index.using) instead of the implicit default.
HOSTS = ['localhost:9200']
CONNECTION_ALIAS = 'dev'
es_client = Elasticsearch(HOSTS, timeout=5)
connections.add_connection(CONNECTION_ALIAS, es_client)


class Article(Document):
    """Blog article document stored in the 'blog' index."""

    # Full-text fields use the snowball stemmer; `title` also keeps an
    # unanalyzed 'raw' keyword sub-field for exact matching/sorting.
    title = Text(analyzer='snowball', fields={'raw': Keyword()})
    body = Text(analyzer='snowball')
    tags = Keyword()
    published_from = Date()
    lines = Integer()

    class Index:
        # `using` pins this document to the module's registered
        # connection alias rather than the implicit 'default'.
        name = 'blog'
        settings = {
          "number_of_shards": 2,
        }
        using = CONNECTION_ALIAS

    def save(self, ** kwargs):
        # Derive `lines` (word count of the body) just before persisting.
        self.lines = len(self.body.split())
        return super(Article, self).save(** kwargs)
Exemplo n.º 21
0
def client(elasticsearch_instance):
    """Register and return a (non-waiting) connection to the test ES server."""
    conn = get_test_client(nowait=True)
    connections.add_connection("default", conn)
    return conn
Exemplo n.º 22
0
def create_app():
    """Application factory: configure the Flask app, DB, cache, JWT,
    celery, CORS, blueprints, sentry, redis and (optionally) ES.

    Returns a tuple of (app, manager, db, jwt).  The module-global
    `app_created` flag prevents blueprints/sentry from being registered
    twice on repeated calls.
    """
    global app_created
    if not app_created:
        BlueprintsManager.register(app)
    Migrate(app, db)

    app.config.from_object(env('APP_CONFIG',
                               default='config.ProductionConfig'))
    db.init_app(app)
    _manager = Manager(app)
    _manager.add_command('db', MigrateCommand)

    if app.config['CACHING']:
        cache.init_app(app, config={'CACHE_TYPE': 'simple'})
    else:
        cache.init_app(app, config={'CACHE_TYPE': 'null'})

    # NOTE(review): placeholder stripe key / secret key — presumably
    # overridden elsewhere in real deployments; verify.
    stripe.api_key = 'SomeStripeKey'
    app.secret_key = 'super secret key'
    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
    app.config['FILE_SYSTEM_STORAGE_FILE_VIEW'] = 'static'

    app.logger.addHandler(logging.StreamHandler(sys.stdout))
    app.logger.setLevel(logging.ERROR)

    # set up jwt
    app.config['JWT_HEADER_TYPE'] = 'JWT'
    app.config['JWT_ACCESS_TOKEN_EXPIRES'] = timedelta(days=1)
    app.config['JWT_REFRESH_TOKEN_EXPIRES'] = timedelta(days=365)
    app.config['JWT_ERROR_MESSAGE_KEY'] = 'error'
    app.config['JWT_TOKEN_LOCATION'] = ['cookies', 'headers']
    app.config['JWT_REFRESH_COOKIE_PATH'] = '/v1/auth/token/refresh'
    app.config['JWT_SESSION_COOKIE'] = False
    app.config['JWT_BLACKLIST_ENABLED'] = True
    app.config['JWT_BLACKLIST_TOKEN_CHECKS'] = ['refresh']
    _jwt = JWTManager(app)
    _jwt.user_loader_callback_loader(jwt_user_loader)
    _jwt.token_in_blacklist_loader(is_token_blacklisted)

    # setup celery
    app.config['CELERY_BROKER_URL'] = app.config['REDIS_URL']
    app.config['CELERY_RESULT_BACKEND'] = app.config['CELERY_BROKER_URL']
    app.config['CELERY_ACCEPT_CONTENT'] = ['json', 'application/text']

    CORS(app, resources={r"/*": {"origins": "*"}})
    AuthManager.init_login(app)

    if app.config['TESTING'] and app.config['PROFILE']:
        # Profiling
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    # development api
    with app.app_context():
        from app.api.admin_statistics_api.events import event_statistics
        from app.api.auth import auth_routes
        from app.api.attendees import attendee_misc_routes
        from app.api.bootstrap import api_v1
        from app.api.celery_tasks import celery_routes
        from app.api.event_copy import event_copy
        from app.api.exports import export_routes
        from app.api.imports import import_routes
        from app.api.uploads import upload_routes
        from app.api.users import user_misc_routes
        from app.api.orders import order_misc_routes
        from app.api.role_invites import role_invites_misc_routes
        from app.api.auth import ticket_blueprint, authorised_blueprint
        from app.api.admin_translations import admin_blueprint
        from app.api.orders import alipay_blueprint
        from app.api.settings import admin_misc_routes
        from app.api.server_version import info_route

        app.register_blueprint(api_v1)
        app.register_blueprint(event_copy)
        app.register_blueprint(upload_routes)
        app.register_blueprint(export_routes)
        app.register_blueprint(import_routes)
        app.register_blueprint(celery_routes)
        app.register_blueprint(auth_routes)
        app.register_blueprint(event_statistics)
        app.register_blueprint(user_misc_routes)
        app.register_blueprint(attendee_misc_routes)
        app.register_blueprint(order_misc_routes)
        app.register_blueprint(role_invites_misc_routes)
        app.register_blueprint(ticket_blueprint)
        app.register_blueprint(authorised_blueprint)
        app.register_blueprint(admin_blueprint)
        app.register_blueprint(alipay_blueprint)
        app.register_blueprint(admin_misc_routes)
        app.register_blueprint(info_route)

        add_engine_pidguard(db.engine)

        if app.config['SQLALCHEMY_DATABASE_URI'].startswith("sqlite://"):
            sqlite_datetime_fix()

    sa.orm.configure_mappers()

    if app.config['SERVE_STATIC']:
        app.add_url_rule('/static/<path:filename>',
                         endpoint='static',
                         view_func=app.send_static_file)

    # sentry
    if not app_created and 'SENTRY_DSN' in app.config:
        sentry_sdk.init(app.config['SENTRY_DSN'],
                        integrations=[
                            FlaskIntegration(),
                            RedisIntegration(),
                            CeleryIntegration(),
                            SqlalchemyIntegration()
                        ])

    # redis
    redis_store.init_app(app)

    # elasticsearch
    if app.config['ENABLE_ELASTICSEARCH']:
        client.init_app(app)
        connections.add_connection('default', client.elasticsearch)
        with app.app_context():
            try:
                # best-effort kick-off of the index rebuild task
                cron_rebuild_events_elasticsearch.delay()
            except Exception:
                pass

    app_created = True
    return app, _manager, db, _jwt
Exemplo n.º 23
0
def make_app(project_root):
    """
    Construct the rubberband app.

    This method is being called from server.py, also from bin/rubberband-ctl.

    Parameters
    ----------
    project_root : str
        Root path of rubberband source code.

    Returns
    -------
    app
        the rubberband app
    """
    # init logger
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s%(msecs)03d %(levelname)-5s %(name)-15s %(message)s',
        datefmt='%d-%m-%Y %H:%M:%S - ')
    loggr = logging.getLogger()
    # 20 == logging.INFO; raises the root level above the DEBUG set in
    # basicConfig just above.
    loggr.setLevel(level=20)
    # Load options from environment
    config = "/etc/rubberband/app.cfg"
    if os.path.isfile(config):
        logging.info(
            "Loading additional configuration from /etc/rubberband/app.cfg")
        options.parse_config_file(config)
    else:
        logging.info("Using default config.")

    # settings for tornado
    settings = {
        "debug": True if options.num_processes == 1 else False,
        "static_path": os.path.join(project_root, "static"),
        "template_path": os.path.join(project_root, "templates"),
        "cookie_secret": options.cookie_secret,
        "xsrf_cookies": True,
        "default_handler_class": ErrorView,
        "logger": loggr,
    }

    # set up tornado application
    # From the doc: "A Tornado web application maps URLs or URL patterns to subclasses
    # of tornado.web.RequestHandler. Those classes define get() or post() methods
    # to handle HTTP GET or POST requests to that URL."
    # these patterns are defined in routes.py
    # default:  app = Application(routes, **settings)
    app = Application([(HostMatches(r'(localhost|127\.0\.0\.1|{}|)'.format(
        options.prod_url.replace("https://", ""))), routes)], **settings)

    logging.info("Setting up Elasticsearch connection.")
    # set up elasticsearch
    # create connection instance
    # the timeout argument is needed when you upload big files
    conn = Elasticsearch([options.elasticsearch_url],
                         use_ssl=options.elasticsearch_use_ssl,
                         verify_certs=options.elasticsearch_verify_certs,
                         ca_certs=options.elasticsearch_ca_certs,
                         timeout=100)
    # connect connection to pool
    connections.add_connection("default", conn)

    # settings of tornado app
    if app.settings["debug"]:
        app.base_url = "http://127.0.0.1:{}".format(options.port)
    else:
        app.base_url = options.prod_url

    return app
Exemplo n.º 24
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from django.core.management.base import BaseCommand

from elasticsearch import Elasticsearch
from elasticsearch_dsl.connections import connections

from recipe.models import Recipe
from recipe.search_indexes import RecipeIndex

# Module-level ES client (local cluster) used by the management command
# below; registered as the library-wide 'default' connection.
es = Elasticsearch([
    {'host': 'localhost', 'port': 9200},
])

connections.add_connection(alias='default', conn=es)

class Command(BaseCommand):
    args = 'No arguments needed'
    help = 'Command to index recipe to elasticsearch'

    def handle(self, *args, **options):
        if not es.indices.exists('recipe'):
            RecipeIndex.init()
        recipes = Recipe.objects.only(
            'id',
            'name',
            'preparation_time',
            'ingredients',
            'servings',
            'likes',
Exemplo n.º 25
0
Result object used to push to ElasticSearch
"""
from datetime import datetime
from elasticsearch_dsl import DocType, String, Date, Integer
from elasticsearch_dsl.connections import connections
from elasticsearch import Elasticsearch, RequestsHttpConnection
"""
Future work: certs, TLS, or any kind of security/auth.
"""
#http_auth=('user', 'secret'),
#verify_certs=True,
#ca_certs='/path/to/cacert.pem',
#client_cert='/path/to/client_cert.pem',
#client_key='/path/to/client_key.pem',
# Plain local ES client; the TLS/auth options listed above are
# intentionally left disabled here.
es = Elasticsearch(['localhost'], port=9200, use_ssl=False)
connections.add_connection('default', es)
"""
es_result
Define a result class to be passed to ElasticSearch directly
"""


class es_result(DocType):
    source = String()
    referrer = String()
    data = String()
    dataHash = String()
    dataBytes = Integer()
    regex_hit = Integer()
    regex_hits = String()
    searchterm_hit = Integer()
Exemplo n.º 26
0
def create_app():
    """Application factory: configure Flask, DB, cache, JWT, celery,
    CORS, blueprints, sentry, redis and (optionally) ES.

    Returns (app, manager, db, jwt).  The module-global `app_created`
    flag guards one-time work (blueprint registration, sentry init).
    """
    global app_created
    if not app_created:
        BlueprintsManager.register(app)
    Migrate(app, db)

    app.config.from_object(env('APP_CONFIG', default='config.ProductionConfig'))
    db.init_app(app)
    _manager = Manager(app)
    _manager.add_command('db', MigrateCommand)

    if app.config['CACHING']:
        cache.init_app(app, config={'CACHE_TYPE': 'simple'})
    else:
        cache.init_app(app, config={'CACHE_TYPE': 'null'})

    # NOTE(review): placeholder secrets — presumably overridden in real
    # deployments; verify.
    stripe.api_key = 'SomeStripeKey'
    app.secret_key = 'super secret key'
    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
    app.config['FILE_SYSTEM_STORAGE_FILE_VIEW'] = 'static'

    app.logger.addHandler(logging.StreamHandler(sys.stdout))
    app.logger.setLevel(logging.ERROR)

    # set up jwt
    app.config['JWT_AUTH_USERNAME_KEY'] = 'email'
    app.config['JWT_EXPIRATION_DELTA'] = timedelta(seconds=24 * 60 * 60)
    app.config['JWT_AUTH_URL_RULE'] = '/auth/session'
    _jwt = JWT(app, jwt_authenticate, jwt_identity)

    # setup celery
    app.config['CELERY_BROKER_URL'] = app.config['REDIS_URL']
    app.config['CELERY_RESULT_BACKEND'] = app.config['CELERY_BROKER_URL']

    CORS(app, resources={r"/*": {"origins": "*"}})
    AuthManager.init_login(app)

    if app.config['TESTING'] and app.config['PROFILE']:
        # Profiling
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    # development api
    with app.app_context():
        from app.api.admin_statistics_api.events import event_statistics
        from app.api.auth import auth_routes
        from app.api.attendees import attendee_misc_routes
        from app.api.bootstrap import api_v1
        from app.api.celery_tasks import celery_routes
        from app.api.event_copy import event_copy
        from app.api.exports import export_routes
        from app.api.imports import import_routes
        from app.api.uploads import upload_routes
        from app.api.users import user_misc_routes
        from app.api.orders import order_misc_routes

        app.register_blueprint(api_v1)
        app.register_blueprint(event_copy)
        app.register_blueprint(upload_routes)
        app.register_blueprint(export_routes)
        app.register_blueprint(import_routes)
        app.register_blueprint(celery_routes)
        app.register_blueprint(auth_routes)
        app.register_blueprint(event_statistics)
        app.register_blueprint(user_misc_routes)
        app.register_blueprint(attendee_misc_routes)
        app.register_blueprint(order_misc_routes)

    sa.orm.configure_mappers()

    if app.config['SERVE_STATIC']:
        app.add_url_rule('/static/<path:filename>',
                         endpoint='static',
                         view_func=app.send_static_file)

    # sentry
    if not app_created and 'SENTRY_DSN' in app.config:
        sentry.init_app(app, dsn=app.config['SENTRY_DSN'])

    # redis
    redis_store.init_app(app)

    # elasticsearch
    if app.config['ENABLE_ELASTICSEARCH']:
        client.init_app(app)
        connections.add_connection('default', client.elasticsearch)
        with app.app_context():
            try:
                # best-effort kick-off of the index rebuild task
                cron_rebuild_events_elasticsearch.delay()
            except Exception:
                pass

    app_created = True
    return app, _manager, db, _jwt
Exemplo n.º 27
0
def create_app():
    """Application factory (variant with old-style flask-jwt session auth).

    Configures Flask, DB, cache, JWT, celery, CORS, blueprints, sentry,
    redis and (optionally) ES; returns (app, manager, db, jwt).  The
    module-global `app_created` flag guards one-time registration work.
    """
    global app_created
    if not app_created:
        BlueprintsManager.register(app)
    Migrate(app, db)

    app.config.from_object(env('APP_CONFIG', default='config.ProductionConfig'))
    db.init_app(app)
    _manager = Manager(app)
    _manager.add_command('db', MigrateCommand)

    if app.config['CACHING']:
        cache.init_app(app, config={'CACHE_TYPE': 'simple'})
    else:
        cache.init_app(app, config={'CACHE_TYPE': 'null'})

    # NOTE(review): placeholder secrets — presumably overridden in real
    # deployments; verify.
    stripe.api_key = 'SomeStripeKey'
    app.secret_key = 'super secret key'
    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
    app.config['FILE_SYSTEM_STORAGE_FILE_VIEW'] = 'static'

    app.logger.addHandler(logging.StreamHandler(sys.stdout))
    app.logger.setLevel(logging.ERROR)

    # set up jwt
    app.config['JWT_AUTH_USERNAME_KEY'] = 'email'
    app.config['JWT_EXPIRATION_DELTA'] = timedelta(seconds=24 * 60 * 60)
    app.config['JWT_AUTH_URL_RULE'] = '/auth/session'
    _jwt = JWT(app, jwt_authenticate, jwt_identity)

    # setup celery
    app.config['CELERY_BROKER_URL'] = app.config['REDIS_URL']
    app.config['CELERY_RESULT_BACKEND'] = app.config['CELERY_BROKER_URL']
    app.config['CELERY_ACCEPT_CONTENT'] = ['json', 'application/text']

    CORS(app, resources={r"/*": {"origins": "*"}})
    AuthManager.init_login(app)

    if app.config['TESTING'] and app.config['PROFILE']:
        # Profiling
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    # development api
    with app.app_context():
        from app.api.admin_statistics_api.events import event_statistics
        from app.api.auth import auth_routes
        from app.api.attendees import attendee_misc_routes
        from app.api.bootstrap import api_v1
        from app.api.celery_tasks import celery_routes
        from app.api.event_copy import event_copy
        from app.api.exports import export_routes
        from app.api.imports import import_routes
        from app.api.uploads import upload_routes
        from app.api.users import user_misc_routes
        from app.api.orders import order_misc_routes
        from app.api.role_invites import role_invites_misc_routes
        from app.api.auth import ticket_blueprint, authorised_blueprint
        from app.api.admin_translations import admin_blueprint
        from app.api.orders import alipay_blueprint
        from app.api.settings import admin_misc_routes

        app.register_blueprint(api_v1)
        app.register_blueprint(event_copy)
        app.register_blueprint(upload_routes)
        app.register_blueprint(export_routes)
        app.register_blueprint(import_routes)
        app.register_blueprint(celery_routes)
        app.register_blueprint(auth_routes)
        app.register_blueprint(event_statistics)
        app.register_blueprint(user_misc_routes)
        app.register_blueprint(attendee_misc_routes)
        app.register_blueprint(order_misc_routes)
        app.register_blueprint(role_invites_misc_routes)
        app.register_blueprint(ticket_blueprint)
        app.register_blueprint(authorised_blueprint)
        app.register_blueprint(admin_blueprint)
        app.register_blueprint(alipay_blueprint)
        app.register_blueprint(admin_misc_routes)

    sa.orm.configure_mappers()

    if app.config['SERVE_STATIC']:
        app.add_url_rule('/static/<path:filename>',
                         endpoint='static',
                         view_func=app.send_static_file)

    # sentry
    if not app_created and 'SENTRY_DSN' in app.config:
        sentry_sdk.init(app.config['SENTRY_DSN'], integrations=[FlaskIntegration()])

    # redis
    redis_store.init_app(app)

    # elasticsearch
    if app.config['ENABLE_ELASTICSEARCH']:
        client.init_app(app)
        connections.add_connection('default', client.elasticsearch)
        with app.app_context():
            try:
                # best-effort kick-off of the index rebuild task
                cron_rebuild_events_elasticsearch.delay()
            except Exception:
                pass

    app_created = True
    return app, _manager, db, _jwt
connections.create_connection(hosts=['localhost'], timeout=20)

###############################################
# Multiple clusters
connections.configure(
        default={'hosts': 'localhost'},
        dev={
            'hosts': ['esdev1.example.com:9200'],
            'sniff_on_start': True
            }
        )

# We can also add them later using aliases...
connections.create_connection('qa', hosts=['esqa1.example.com'], sniff_on_start=True)

connections.add_connection('qa', my_client)

# When searching we can refer to a specific connection by using aliases
s = Search(using='qa')

###############################################
# The api is chainable
s = Search().using(client).query('match', title='python')
# Send the request
response = s.execute()

# Requests are cached by default by the python client,
# subsequent calls to execute will not trigger additional
# requests being sent to Elasticsearch
# To force a request specify `ignore_cache=True` when sending
# a request
def es_client():
    """Create and return elasticsearch connection"""
    # host comes from TEST_ES_SERVER when set; otherwise the empty-dict
    # host config falls back to the client defaults
    target = os.environ.get('TEST_ES_SERVER', {})
    conn = Elasticsearch([target])
    connections.add_connection('default', conn)
    return conn
Exemplo n.º 30
0
        Phải khai báo lại class Meta với doc_type và tên index đúng như dưới
        """
        doc_type = 'CivilArticle'
        index = index_name

    class Index:
        """
        Từ bản 0.10.2 trở đi phải khai báo thêm cả class Index có thuộc tính name là tên index trong elastic search như dưới đây
        """
        name = index_name


# Create connection
# Registers both the implicit 'default' connection (create_connection)
# and a named 'CivilArticle' alias on the same local cluster, then makes
# sure the index/mapping exists.
es = Elasticsearch()
connections.create_connection(hosts=['localhost'], timeout=20)
connections.add_connection('CivilArticle', es)
CivilArticle.init(index_name)


def preprocess_content(content):
    """Strip numbering from each line, tokenize it, and re-join the whole
    document into one space-separated string."""
    processed = [
        ' '.join(pre_process_text(remove_numbering(line)))
        for line in content.split('\n')
    ]
    return ' '.join(processed)


def insert(doc):
Exemplo n.º 31
0
from app.models import AWSKey
from elasticsearch_dsl.connections import connections
from app import app
import elasticsearch_dsl as dsl

# Prefer AWS SigV4 auth when ES_AUTH is configured; otherwise fall back
# to an unauthenticated client.  `except Exception` (instead of the old
# bare `except:`) still covers a missing/partial ES_AUTH config and
# AWS4Auth being unavailable, but no longer swallows KeyboardInterrupt
# and SystemExit.
try:
    es_aws_auth = app.config['ES_AUTH']
    es_auth = AWS4Auth(es_aws_auth['key'], es_aws_auth['secret'],
                       es_aws_auth['region'], 'es')
except Exception:
    es_auth = None
client = Elasticsearch(hosts=app.config['ES_HOSTS'],
                       http_auth=es_auth,
                       connection_class=RequestsHttpConnection,
                       sniff_timeout=60)
connections.add_connection('default', client)

SHORT_NAMES = {
    "Amazon Elastic Compute Cloud": "EC2",
    "Amazon EC2 Container Registry (ECR)": "ECR",
    "Amazon Simple Storage Service": "S3",
    "Amazon RDS Service": "RDS",
    "Amazon ElastiCache": "ElastiCache",
    "Amazon Elastic File System": "EFS",
    "Amazon Elastic MapReduce": "EMR",
    "Amazon Route 53": "Route 53",
    "AWS Key Management Service": "KMS",
    "Amazon DynamoDB": "DynamoDB",
    "Amazon Simple Email Service": "SES",
    "Amazon Simple Notification Service": "SNS",
    "Amazon Simple Queue Service": "SQS",
Exemplo n.º 32
0
from elasticsearch import Elasticsearch, RequestsHttpConnection

"""
Future work: certs, TLS, or any kind of security/auth.
"""
    #http_auth=('user', 'secret'),
    #verify_certs=True,
    #ca_certs='/path/to/cacert.pem',
    #client_cert='/path/to/client_cert.pem',
    #client_key='/path/to/client_key.pem',
# Unauthenticated local ES client; see the commented-out options above
# for enabling TLS / auth.
es = Elasticsearch(
    ['localhost'],
    port=9200,
    use_ssl=False
)
connections.add_connection('default', es)

"""
es_result
Define a result class to be passed to ElasticSearch directly
"""
class es_result(DocType):
    source = String()
    referrer = String()
    data = String()
    dataHash = String()
    dataBytes = Integer()
    regex_hit = Integer()
    regex_hits = String()
    searchterm_hit = Integer()
    searchterm_hits = String()