Example #1
    def test_missing_api_key(self, conf, monkeypatch, capfd):
        loggers.config(level=10, logger="ext.metrics.metrics")
        monkeypatch.setattr(conf, "DATADOG_API_KEY", None, raising=True)
        monkeypatch.setattr(conf, "DATADOG_APP_KEY", None, raising=True)
        monkeypatch.setattr(conf, "DATADOG_ENABLED", True, raising=True)

        load()
Example #2
        async def sync_all():
            if not db.is_bound():
                await db.startup()

            loggers.config(level=20)

            for hole_direction in [HoleDirection.V]:
                if hole_direction == HoleDirection.H:
                    ids_path = IHSPath.well_h_ids
                else:
                    ids_path = IHSPath.well_v_ids

                areas = await IHSClient.get_areas(path=ids_path)
                # NOTE: hardcoded override narrows the run to a few areas,
                # shadowing the list fetched above
                areas = ["tx-upton", "tx-reagan", "tx-midland"]

                counts = []
                datasets = []

                batch_size = 100
                for area in areas:
                    logger.warning(f"running area: {area}")
                    api14s = await IHSClient.get_ids_by_area(path=ids_path,
                                                             area=area)

                    for chunk in util.chunks(api14s, n=batch_size):
                        for executor in [
                                WellExecutor, GeomExecutor, ProdExecutor
                        ]:
                            try:
                                count, dataset = executor(hole_direction).run(
                                    api14s=chunk)
                                counts.append(count)
                                datasets.append(dataset)
                            except Exception as e:
                                print(e)
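
Example #2 batches the API-14 lists with util.chunks(api14s, n=batch_size) before handing each batch to the executors. The helper itself isn't shown in these snippets; below is a minimal sketch of such a chunking utility, assuming it simply yields successive fixed-size batches (the name and behavior are assumptions, not the project's code):

from typing import Iterable, Iterator, List, TypeVar

T = TypeVar("T")


def chunks(items: Iterable[T], n: int) -> Iterator[List[T]]:
    """Yield successive lists of at most n items (assumed behavior of util.chunks)."""
    batch: List[T] = []
    for item in items:
        batch.append(item)
        if len(batch) == n:
            yield batch
            batch = []
    if batch:
        yield batch
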
Example #3
    def test_load_datadog_ext(self, conf, monkeypatch, capfd):  # TODO: needs assertion
        loggers.config(level=10, logger="ext.metrics.metrics")
        monkeypatch.setattr(conf, "DATADOG_API_KEY", "pretend_api_key", raising=True)
        monkeypatch.setattr(conf, "DATADOG_APP_KEY", "pretend_app_key", raising=True)
        monkeypatch.setattr(conf, "DATADOG_ENABLED", True, raising=True)

        load()
Example #4
    def test_post_failed(self, conf, monkeypatch, capfd, requests_mock):
        loggers.config(level=10, logger="ext.metrics.metrics")
        requests_mock.register_uri(ANY, ANY, json={"status": "ok"})

        monkeypatch.delattr(metrics, "to_tags", raising=True)

        load()
        post("test", 10)
Example #5
    def test_post_success(self, conf, monkeypatch, capfd, requests_mock):
        requests_mock.register_uri(ANY, ANY, json={"status": "ok"})
        loggers.config(level=10, logger="ext.metrics.metrics")
        monkeypatch.setattr(conf, "DATADOG_API_KEY", None, raising=True)
        monkeypatch.setattr(conf, "DATADOG_APP_KEY", None, raising=True)
        monkeypatch.setattr(conf, "DATADOG_ENABLED", True, raising=True)
        monkeypatch.setattr(metrics, "api_endpoint", api_endpoint, raising=True)

        load()
        post("test", 10)
Example #6
    def test_configure_existing_logger_by_name(self, test_logger):
        expected = test_logger.level
        config(logger=test_logger.name)
        actual = test_logger.level
        assert expected == actual
Example #7
    def test_configure_non_existing_logger(self):
        with pytest.raises(ValueError):
            config(logger="not.real")
Example #8
def setup_loggers(logger, *args, **kwargs):  # pylint: disable=unused-argument
    loggers.config(logger=logger)
Example #9
        return super().run(api14s=api14s,
                           api10s=api10s,
                           return_data=return_data,
                           **kwargs)


if __name__ == "__main__":
    import loggers
    import calc.prod  # noqa
    from db import db  # noqa
    from collector import IHSClient

    # import itertools
    # import multiprocessing as mp

    loggers.config(level=10, formatter="funcname")

    ranges = ProdStatRange.PEAKNORM
    months = [3, 6]
    include_zeroes = [True, False]
    calc.prodstat_option_matrix(ranges=ranges,
                                months=months,
                                include_zeroes=include_zeroes)

    # def get_id_sets(area: str) -> Tuple[List[str], List[str]]:
    #     loop = asyncio.get_event_loop()
    #     coroh = IHSClient.get_ids_by_area(path=IHSPath.well_h_ids, area=area)
    #     corov = IHSClient.get_ids_by_area(path=IHSPath.well_v_ids, area=area)
    #     return loop.run_until_complete(asyncio.gather(coroh, corov))

    # area = "tx-upton"
Example #10
from api.models import (
    County,
    WellMasterHorizontal,
    WellMasterVertical,
    ProductionMasterHorizontal,
    ProductionMasterVertical,
)
from collector import Collector
from util import load_json


from ihs import create_app
import loggers

loggers.config(10)

app = create_app()
app.app_context().push()

counties = load_json("data/counties.json")

# * seed counties model
coll = Collector(County)
coll.save(counties, replace=False)

# * replicate county definitions to well/prod master lists where missing
county_name_only = [{"name": d["name"]} for d in counties]
for model in [
    WellMasterHorizontal,
    WellMasterVertical,
    ProductionMasterHorizontal,
Example #11
sys.path.extend(["./"])


# To include a model in migrations, add a line here.

###############################################################################


config = context.config
config.set_main_option("sqlalchemy.url", str(ALEMBIC_CONFIG.url))
exclude_tables = config.get_section("alembic:exclude").get("tables", "").split(",")

fileConfig(config.config_file_name)
target_metadata = db

loggers.config(20, formatter="simple")
logger = logging.getLogger(__name__)


class CustomRewriter(rewriter.Rewriter):  # nocover
    """ Extends self.process_revision_directives since a standalone
        process_revision_directives function and a rewriter can't both
        be passed to the MigrationContext at the same time."""

    def process_revision_directives(self, context, revision, directives):
        if config.cmd_opts.autogenerate:
            script = directives[0]

            # Don't generate a new migration file if there are no pending operations
            if script.upgrade_ops.is_empty():
                directives[:] = []
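
The docstring above explains why the subclass exists: the migration context accepts a single process_revision_directives callback, so the skip-empty-migration logic has to live on the rewriter itself. In a typical Alembic env.py the instance is then handed to context.configure; a minimal sketch of that wiring, assuming the standard online-migration boilerplate (config, context, and target_metadata come from the example above):

from sqlalchemy import engine_from_config, pool

writer = CustomRewriter()


def run_migrations_online():
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=writer,  # Rewriter instances are callable
        )
        with context.begin_transaction():
            context.run_migrations()


run_migrations_online()  # offline mode omitted in this sketch
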
Example #12
        while len(self.requests) > 0:
            yield self.requests.pop()

        logger.info(f"Model sync complete: {self}")


if __name__ == "__main__":
    from iwell import create_app, db

    from collector.endpoint import load_from_config
    from config import get_active_config
    from collector.collector import Collector, IWellCollector
    import loggers
    from celery_queue.task import Task

    loggers.config(level=10, formatter="layman")

    app = create_app()
    app.app_context().push()

    conf = get_active_config()
    endpoints = load_from_config(conf)
    functions = conf.functions
    url = conf.API_BASE_URL
    # dt = datetime(year=1970, month=1, day=1)
    # ts = int(dt.timestamp())

    tasks = Task.from_config(conf)

    task = tasks[15]
    dir(task)
Example #13
"""
Entrypoint module. This will hijack the script entrypoint to prevent circular
import errors if the top-level module is run from the command line
(i.e. python -m sunstruck)
"""
import sys

import loggers
from manage import main

loggers.config()  # Setup default logging configuration

if __name__ == "__main__":
    sys.exit(main())
Example #14
sys.path.extend(["./"])

# To include a model in migrations, add a line here.

###############################################################################

config = context.config
config.set_main_option("sqlalchemy.url", str(ALEMBIC_CONFIG.url))
exclude_tables = config.get_section("alembic:exclude").get("tables",
                                                           "").split(",")

fileConfig(config.config_file_name)
target_metadata = db

loggers.config(20, formatter="layman")
logger = logging.getLogger(__name__)


class CustomRewriter(rewriter.Rewriter):
    """ Extends self.process_revision_directives since a standalone
        process_revision_directives function and a rewriter can't both
        be passed to the MigrationContext at the same time."""
    def process_revision_directives(self, context, revision, directives):
        if config.cmd_opts.autogenerate:
            script = directives[0]

            # Don't generate a new migration file if there are no pending operations
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.warning(
Example #15
def setup_task_logger(logger, *args, **kwargs):
    """ Configure loggers on worker/beat startup """
    loggers.config(logger=logger,
                   level=conf.CELERY_LOG_LEVEL,
                   formatter=conf.CELERY_LOG_FORMAT)
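
The docstring says this hook configures logging on worker/beat startup, which in Celery is normally done by connecting it to the logging setup signals. A minimal sketch of that registration, assuming the standard celery.signals API (the project's actual wiring is not shown in these snippets):

from celery.signals import after_setup_logger, after_setup_task_logger

# Re-apply the project logging config whenever Celery (re)initializes its loggers.
after_setup_logger.connect(setup_task_logger)
after_setup_task_logger.connect(setup_task_logger)
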
Example #16
import loggers
import util
from cq.tasks import sync_area_manifest
from db import db

loggers.config(level=10)

util.aio.async_to_sync(db.startup())

sync_area_manifest.apply()
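
Example #16 uses util.aio.async_to_sync to drive the async db.startup() coroutine from a synchronous script before firing the Celery task. That helper isn't included in these snippets; a rough sketch of the idea, assuming it simply runs the coroutine to completion (not the project's actual implementation):

import asyncio
from typing import Any, Coroutine


def async_to_sync(coro: Coroutine[Any, Any, Any]) -> Any:
    """Run a coroutine to completion on a fresh event loop and return its result (assumed behavior)."""
    return asyncio.run(coro)
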
Example #17
from flask import Flask, request, g
from flask_sqlalchemy import SQLAlchemy
from flask_debugtoolbar import DebugToolbarExtension
from flask_migrate import Migrate


from config import get_active_config, APP_SETTINGS
import loggers
from util import ensure_list
from util.dt import utcnow
import shortuuid

conf = get_active_config()

loggers.config()

logger = logging.getLogger("app.access")


# instantiate the extensions
db = SQLAlchemy()
toolbar = DebugToolbarExtension()
migrate = Migrate()


def create_app(script_info=None):
    app = Flask(__name__)
    app.config.from_object(APP_SETTINGS)

    # set up extensions
Example #18
    else:
        raise ValueError(f"Invalid hole direction: {hole_dir=}")

    run_executors(hole_dir, api14s=api14s, executors=executors, **kwargs)


if __name__ == "__main__":
    from db import db

    util.aio.async_to_sync(db.startup())
    import db.models
    import loggers
    import cq.tasks
    from const import HoleDirection

    loggers.config()
    hole_dir = HoleDirection.H
    # cq.tasks.sync_area_manifest.apply_async()
    # cq.tasks.run_next_available(HoleDirection.H, log_vs=10, log_hs=None)

    api14s = [
        "42475014800000",
        "42475014810000",
        "42475014810001",
        "42475014820000",
        "42475014820001",
        "42475014830000",
        "42475014840000",
        "42475014850000",
        "42475014860000",
        "42475014860001",