Example #1
def write_env_file(env_long, env_short, exports, db_name, username, password):
    env_dir = os.path.join(os.path.dirname(__file__), '..', 'env')
    env_file_path = os.path.join(env_dir, env_long + '.env')
    db_hostname = get_export_val(exports, env_short, 'DatabaseHostname')
    db_url = 'postgresql://%s:%s@%s/%s' % (username, password, db_hostname,
                                           db_name)
    # Map each environment variable to its CloudFormation export name
    export_names = {
        'CASE_DETAILS_BUCKET': 'CaseDetailsBucketName',
        'SCRAPER_QUEUE_NAME': 'ScraperQueueName',
        'SCRAPER_FAILED_QUEUE_NAME': 'ScraperFailedQueueName',
        'SCRAPER_DYNAMODB_TABLE_NAME': 'ScraperDynamoDBTableName',
        'SCRAPER_QUEUE_ALARM_NAME': 'ScraperQueueAlarmName',
        'PARSER_FAILED_QUEUE_NAME': 'ParserFailedQueueName',
        'PARSER_TRIGGER_ARN': 'ParserTriggerArn',
    }
    print("Writing env file", env_file_path)
    with open(env_file_path, 'w') as f:
        f.write('%s=%s\n' % ('MJCS_DATABASE_URL', db_url))
        for var_name, export_name in export_names.items():
            f.write('%s=%s\n' %
                    (var_name, get_export_val(exports, env_short, export_name)))
    # re-load config
    config.initialize_from_environment(env_long)
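
Both versions of write_env_file lean on a get_export_val helper that the excerpt never shows. A minimal sketch of what such a lookup could be, assuming exports is the 'Exports' list returned by boto3's CloudFormation list_exports call and that export names carry the short environment name as a prefix (both are guesses, not confirmed by the source):

def get_export_val(exports, env_short, export_name):
    # exports: assumed list of {'Name': ..., 'Value': ...} dicts from
    # boto3.client('cloudformation').list_exports()['Exports'].
    # The env_short prefix is a guess at the naming convention.
    target = f'{env_short}-{export_name}'
    for export in exports:
        if export['Name'] == target:
            return export['Value']
    raise KeyError(f'No CloudFormation export named {target}')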
Example #2
def write_env_file(env_long, env_short, exports, db_name, username, password):
    env_dir = os.path.join(os.path.dirname(__file__), '..', 'env')
    env_file_path = os.path.join(env_dir, env_long + '.env')
    db_hostname = get_export_val(exports, env_short, 'DatabaseHostname')
    db_url = f'postgresql://{username}:{password}@{db_hostname}/{db_name}'
    # Map each environment variable to its CloudFormation export name
    export_names = {
        'CASE_DETAILS_BUCKET': 'CaseDetailsBucketName',
        'SPIDER_DYNAMODB_TABLE_NAME': 'SpiderDynamoDBTableName',
        'SPIDER_RUNS_BUCKET_NAME': 'SpiderRunsBucketName',
        'SPIDER_TASK_DEFINITION_ARN': 'SpiderTaskDefinitionArn',
        'SCRAPER_QUEUE_NAME': 'ScraperQueueName',
        'PARSER_FAILED_QUEUE_NAME': 'ParserFailedQueueName',
        'PARSER_QUEUE_NAME': 'ParserQueueName',
        'PARSER_TRIGGER_ARN': 'ParserTriggerArn',
        'VPC_SUBNET_1_ID': 'VPCPublicSubnet1Id',
        'VPC_SUBNET_2_ID': 'VPCPublicSubnet2Id',
        'ECS_CLUSTER_ARN': 'ECSClusterArn',
    }
    print("Writing env file", env_file_path)
    with open(env_file_path, 'w') as f:
        f.write(f'MJCS_DATABASE_URL={db_url}\n')
        for var_name, export_name in export_names.items():
            f.write(f'{var_name}={get_export_val(exports, env_short, export_name)}\n')
    # re-load config
    config.initialize_from_environment(env_long)
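
These files are later consumed by config.initialize_from_environment, which the excerpt does not show. A minimal sketch of reading such a KEY=VALUE file back into the process environment; the loader name and its behavior are assumptions, mirroring write_env_file's path convention:

import os

def load_env_file(env_long):
    # Hypothetical loader, not from the source
    env_dir = os.path.join(os.path.dirname(__file__), '..', 'env')
    with open(os.path.join(env_dir, env_long + '.env')) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            key, _, value = line.partition('=')
            os.environ[key] = value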
Example #3
def get_model_names():
    from mjcs.config import config
    from mjcs import models
    if os.getenv('CASEHARVESTER_ENV') == 'production':
        config.initialize_from_environment('production')
    else:
        config.initialize_from_environment('development')
    model_exports = models.__dict__.keys()
    # Secondary models are exported under all-uppercase names
    secondary_model_names = [name for name in model_exports if name.isupper()]
    secondary_models = [models.__dict__[name] for name in secondary_model_names]
    # Tertiary models start with two uppercase characters but are not secondary
    tertiary_models = [
        models.__dict__[name] for name in model_exports
        if name[:2].isupper() and name not in secondary_model_names
    ]
    return secondary_models, tertiary_models
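
A short usage sketch: both return values hold SQLAlchemy model classes, so callers can enumerate their mapped tables. This assumes the uppercase exports in mjcs.models are declarative model classes, which the filters above imply but the excerpt does not prove:

secondary_models, tertiary_models = get_model_names()
for model in secondary_models + tertiary_models:
    # each entry is assumed to be a declarative class with a mapped table
    print(model.__name__, '->', model.__table__.name)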
Example #4
    parser.add_argument(
        '--parallel',
        '-p',
        action='store_true',
        default=False,
        help=f"Search for expunged charges in parallel with {os.cpu_count()} worker processes")
    parser.add_argument('--verbose',
                        '-v',
                        action='store_true',
                        help="Print debug information")
    args = parser.parse_args()
    if (hasattr(args, 'verbose') and args.verbose) or os.getenv('VERBOSE'):
        logger.setLevel(logging.DEBUG)
    config.initialize_from_environment(args.environment)

    if args.case:
        detail_loc = get_detail_loc(args.case)
        check_case(args.case, parsers[detail_loc][0], parsers[detail_loc][1])
    elif args.load_queue:
        if args.load_queue == 'all':
            load_queue()
        else:
            load_queue(args.load_queue)
    elif args.parallel:
        cpus = os.cpu_count()
        # multiprocessing logging won't work with the 'spawn' start method
        set_start_method('fork')
        # start worker processes according to available CPU resources
        with Pool() as worker_pool:
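
The example breaks off at the Pool context manager. For reference, here is a self-contained sketch of the fork-then-Pool pattern it sets up; the work function below is a stand-in, not the project's actual per-case check:

from multiprocessing import Pool, set_start_method

def work(n):
    return n * n  # stand-in for the real per-case worker

if __name__ == '__main__':
    # 'fork' preserves inherited logging handlers, but it is not
    # available on Windows and may only be set once per process
    set_start_method('fork')
    with Pool() as worker_pool:  # defaults to os.cpu_count() processes
        print(worker_pool.map(work, range(8)))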
Example #5
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import sys
import os

# Add our source path to the search paths for modules
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))

# Import our models and database objects
from mjcs.config import config as my_config
from mjcs.db import TableBase
from mjcs.models import *
if os.getenv('PRODUCTION_ENV'):
    my_config.initialize_from_environment('production')
else:
    my_config.initialize_from_environment('development')

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This sets up the loggers.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
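
Given that TableBase is imported above, the file presumably goes on to point Alembic's autogenerate at the project metadata, along these lines (an assumption based on the import, not shown in the excerpt):

target_metadata = TableBase.metadata  # assumed: TableBase is the declarative base imported above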
Example #6
        parser_db.add_argument('--secrets-file',
                               required=True,
                               type=argparse.FileType('r'),
                               help="Secrets file (in JSON format)")
        parser_db.add_argument('--create-tables-only',
                               action='store_true',
                               help="Only create tables in the database")
        parser_db.add_argument(
            '--write-env-only',
            action='store_true',
            help="Only write environment files from CloudFormation stack exports")
        parser_db.set_defaults(func=run_db_init)

    args = parser.parse_args()

    if (hasattr(args, 'verbose') and args.verbose) or os.getenv('VERBOSE'):
        logger.setLevel(logging.DEBUG)

    config.initialize_from_environment(args.environment, args.profile)
    if args.environment.startswith('dev'):
        args.environment = 'development'
        args.environment_short = 'dev'
    elif args.environment.startswith('prod'):
        args.environment = 'production'
        args.environment_short = 'prod'

    if hasattr(args, 'func'):
        args.func(args)
    print("Goodbye!")