Code Example #1
import logging

import click
from elasticsearch import helpers, exceptions, logger as elastic_logger
from lxml import etree
from parse import parse

from msc_pygeoapi.env import (
    MSC_PYGEOAPI_BASEPATH,
    MSC_PYGEOAPI_ES_TIMEOUT,
    MSC_PYGEOAPI_ES_URL,
    MSC_PYGEOAPI_ES_AUTH,
)
from msc_pygeoapi.loader.base import BaseLoader
from msc_pygeoapi.util import get_es, json_pretty_print, strftime_rfc3339

LOGGER = logging.getLogger(__name__)
elastic_logger.setLevel(logging.WARNING)

# index settings
INDEX_NAME = 'marine_weather_{}'

MAPPINGS = {
    'regular-forecasts': {
        'issued_datetime_utc': {
            'type': 'date',
            'format': 'date_time_no_millis',
            'ignore_malformed': False,
        },
        'issued_datetime_local': {
            'type': 'date',
            'format': 'date_time_no_millis',
            'ignore_malformed': False,
        },
        # ... (remaining field mappings truncated in the original example)
    }
}
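
The excerpt stops partway through MAPPINGS. For context, a minimal sketch of how a mapping like this is typically applied when creating the index with elasticsearch-py; the client construction and the 'mappings'/'properties' wrapping are assumptions, not shown in the original (which obtains its client via get_es()):

# Hypothetical sketch: create the 'regular-forecasts' index with the date
# mappings defined above (Elasticsearch 7.x style request body).
from elasticsearch import Elasticsearch

es = Elasticsearch(MSC_PYGEOAPI_ES_URL)
es.indices.create(
    index=INDEX_NAME.format('regular-forecasts'),
    body={'mappings': {'properties': MAPPINGS['regular-forecasts']}},
)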
Code Example #2
import logging

import click
from elasticsearch import logger as elastic_logger
from parse import parse

from msc_pygeoapi import cli_options
from msc_pygeoapi.connector.elasticsearch_ import ElasticsearchConnector
from msc_pygeoapi.env import MSC_PYGEOAPI_LOGGING_LOGLEVEL
from msc_pygeoapi.loader.base import BaseLoader
from msc_pygeoapi.util import (
    configure_es_connection,
    check_es_indexes_to_delete,
)

LOGGER = logging.getLogger(__name__)
elastic_logger.setLevel(getattr(logging, MSC_PYGEOAPI_LOGGING_LOGLEVEL))

# cleanup settings
DAYS_TO_KEEP = 3

# index settings
INDEX_BASENAME = 'aqhi-{}-realtime.'

MAPPINGS = {
    'forecasts': {
        'properties': {
            'geometry': {
                'type': 'geo_shape'
            },
            'properties': {
                'properties': {
                    # ... (field mappings truncated in the original example)
                }
            }
        }
    }
}
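
DAYS_TO_KEEP and the dated INDEX_BASENAME suggest one index per day, pruned on a schedule; the original delegates this to check_es_indexes_to_delete. A hedged sketch of that cleanup pattern using plain elasticsearch-py calls; the date-suffixed naming scheme and the es client are assumptions, not confirmed by the excerpt:

# Hypothetical sketch: delete realtime indexes older than DAYS_TO_KEEP,
# assuming names like 'aqhi-forecasts-realtime.2023-01-31'.
from datetime import datetime, timedelta

cutoff = datetime.utcnow() - timedelta(days=DAYS_TO_KEEP)
pattern = INDEX_BASENAME.format('forecasts') + '*'
for name in es.indices.get(index=pattern):
    date_suffix = name.rsplit('.', 1)[-1]
    if datetime.strptime(date_suffix, '%Y-%m-%d') < cutoff:
        es.indices.delete(index=name)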
Code Example #3
File: segmentfault.py  Project: sichenjin/crawler
import os

from string import Template
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import sessionmaker
from dateutil.parser import parse as dateparse


# settings.py
from dotenv import load_dotenv
from pathlib import Path

from elasticsearch import Elasticsearch
from elasticsearch import logger as es_logger

es_logger.setLevel(50)  # 50 == logging.CRITICAL: silence all but critical messages

# Load the .env file before reading its variables with os.getenv()
env_path = Path('..') / '.env'
load_dotenv(dotenv_path=env_path)

es_user = os.getenv("ES_USER")
es_pwd = os.getenv("ES_PWD")
es = Elasticsearch(http_auth=(es_user, es_pwd))

DB_DATABASE = os.getenv("DB_DATABASE")
DB_USERNAME = os.getenv("DB_USERNAME")
DB_PASSWORD = os.getenv("DB_PASSWORD")

engine = create_engine(
    f"mysql+pymysql://{DB_USERNAME}:{DB_PASSWORD}@localhost/{DB_DATABASE}?charset=utf8",
    encoding='utf-8', echo=False)
Base = declarative_base()
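
The excerpt imports sessionmaker and defines Base but ends before either is used. A minimal sketch of the usual continuation; the model name and columns are illustrative, not taken from the project:

# Hypothetical model and session setup on top of the engine above.
class Question(Base):
    __tablename__ = 'questions'
    id = Column(Integer, primary_key=True)
    title = Column(String(255))

Base.metadata.create_all(engine)  # create missing tables
Session = sessionmaker(bind=engine)
session = Session()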
Code Example #4
import logging
from copy import deepcopy
from typing import Optional, Dict, Union, Any, Iterable

from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk
from elasticsearch import logger as es_logger

__all__ = ["ElasticSearchIndexer"]

from forte.common.configuration import Config

# The Elasticsearch client logs a lot of debug messages, which can
# sometimes overwhelm Travis CI. Raise the logger level to suppress them.
es_logger.setLevel(logging.INFO)


class ElasticSearchIndexer:
    r"""Indexer class for `Elasticsearch`."""
    def __init__(self, config: Optional[Union[Dict, Config]] = None):
        super().__init__()
        self._config = Config(config, self.default_configs())
        self.elasticsearch = Elasticsearch(hosts=self._config.hosts)

    def index(
        self,
        document: Dict[str, Any],
        index_name: Optional[str] = None,
        refresh: Optional[Union[bool, str]] = False,
    ) -> None:
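        # The method body is truncated in the original example. A minimal
        # sketch of what likely follows, assuming the standard
        # Elasticsearch.index() call and an 'index_name' key in the default
        # configs (both assumptions, not confirmed by the excerpt):
        self.elasticsearch.index(
            index=index_name or self._config.index_name,
            body=document,
            refresh=refresh,
        )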