Example no. 1
from airflow import DAG
from airflow.operators import BashOperator
from airflow.models import Variable
from datetime import datetime, timedelta, time

from common.configs import get_config
from common.log import get_log_path
from common.tasks import alert_task

try:
    default_config = Variable.get("DEFAULT_CONFIG")
except Exception:
    default_config = None

if default_config:
    common_config = get_config('common.yaml')
    task_config = get_config('cron_clickstream_load.yaml')
else:
    raise Exception("Can't use default config, no other configs available")

LOG_PATH = get_log_path(
    common_config['LOGS'],
    task_config['LOG_PREFIX'])

today = datetime.today()
hour = 9
minute = 3
d_delta = 1
h_delta = 0
m_delta = 0
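
The constants above (hour, minute, and the *_delta values) stop short of where they are used; judging by Example no. 2 below, they presumably feed the DAG's start_date. A minimal sketch of that continuation, with the default_args keys assumed from the later examples:

# Hypothetical continuation: build start_date from the constants above
# (everything below this comment is an assumption, not from the source).
start_date = datetime.combine(today, time(hour, minute, 0)) \
    - timedelta(days=d_delta, hours=h_delta, minutes=m_delta)

default_args = {
    'owner': task_config['user'],   # mirrors Example no. 2
    'depends_on_past': False,
    'start_date': start_date,
}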
Example no. 2
from airflow import DAG
from airflow.operators import BashOperator
from airflow.models import AirflowException, Variable
from datetime import datetime, timedelta, time

from common.configs import get_config
from common.log import get_log_path

RUN_AS = 'sudo env PATH=$PATH su -c "{{ params.path }}/{{ params.cmd }}" {{ params.user }}'

try:
    default_config = Variable.get("DEFAULT_CONFIG")
    common_config = get_config('common.yaml')
    task_config = get_config('monitoring_etl.yaml')
except AirflowException as error:
    raise RuntimeError("Can't use default config, no other configs available")

LOG_PATH = get_log_path(common_config['LOGS'], task_config['LOG_PREFIX'])

today = datetime.today()
hour = 4
minute = 30
d_delta = 1
h_delta = 0
m_delta = 0

default_args = {
    'owner': task_config['user'],
    'depends_on_past': False,
    'start_date': datetime.combine(today, time(4, 00, 0)) - timedelta(days=1),
    'email': ['*****@*****.**'],
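
Example no. 2 is cut off inside its default_args dict. Assuming that dict is completed, the templated RUN_AS string defined above would typically be handed to a BashOperator through its params; a sketch under that assumption (the dag_id, task_id, config keys, and script name are illustrative, not recovered from the source):

# Hypothetical usage of RUN_AS; every literal below is an assumption.
dag = DAG('monitoring_etl',
          default_args=default_args,
          schedule_interval=timedelta(days=1))

run_etl = BashOperator(
    task_id='run_monitoring_etl',
    bash_command=RUN_AS,
    params={
        'path': task_config['BIN_PATH'],  # assumed config key
        'cmd': 'run_etl.sh',              # assumed script name
        'user': task_config['user'],
    },
    dag=dag)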
Example no. 3
from airflow import DAG
from airflow.operators import BashOperator
from airflow.models import Variable
from datetime import datetime, timedelta, time

from common.configs import get_config
from common.log import get_log_path
from common.tasks import alert_task

try:
    default_config = Variable.get("DEFAULT_CONFIG")
except Exception:
    default_config = None

if default_config:
    common_config = get_config('common.yaml')
    task_config = get_config('cron_mml.yaml')
else:
    raise Exception("Can't use default config, no other configs available")

LOG_PATH = get_log_path(
    common_config['LOGS'],
    task_config['LOG_PREFIX'])

today = datetime.today()
hour = 2
minute = 30
d_delta = 1
h_delta = 0
m_delta = 0
Example no. 4
from airflow import DAG
from airflow.operators import BashOperator
from airflow.models import Variable
from datetime import datetime, timedelta, time

from common.configs import get_config
from common.log import get_log_path
from common.tasks import alert_task

try:
    default_config = Variable.get("DEFAULT_CONFIG")
except Exception:
    default_config = None

if default_config:
    common_config = get_config('common.yaml')
    task_config = get_config('cron_stacktach_load.yaml')
else:
    raise Exception("Can't use default config, no other configs available")

LOG_PATH = get_log_path(common_config['LOGS'], task_config['LOG_PREFIX'])

today = datetime.today()
hour = 1
minute = 45
d_delta = 1
h_delta = 0
m_delta = 0

default_args = {
    'owner':
Example no. 5
from airflow import DAG
from airflow.operators import BashOperator
from airflow.models import Variable
from datetime import datetime, timedelta, time

from common.configs import get_config
from common.log import get_log_path
from common.tasks import alert_task

try:
    default_config = Variable.get("DEFAULT_CONFIG")
except Exception:
    default_config = None

if default_config:
    common_config = get_config('common.yaml')
    task_config = get_config('cron_hdfs_uploader_stacktach_prod.yaml')
else:
    raise Exception("Can't use default config, no other configs available")

LOG_PATH = get_log_path(common_config['LOGS'], task_config['LOG_PREFIX'])

today = datetime.today()
hour = 1
minute = 5
d_delta = 1
h_delta = 0
m_delta = 0

default_args = {
    'owner':
Example no. 6
from airflow.models import Variable

from common.configs import get_config
# get_safepass is assumed to come from a project helper module;
# its import is not part of this excerpt.

try:
    default_config = Variable.get("DEFAULT_CONFIG")
except Exception:
    default_config = None

try:
    quotes_conf_path = Variable.get("quotes_conf_path")
except Exception:
    if default_config:
        print('quotes_conf_path not specified in Variables, using defaults.')
        quotes_conf_path = 'quote_management/staging.yaml'
    else:
        raise

print('quotes_conf_path', quotes_conf_path)
quotes_copy_config = get_config(quotes_conf_path)
mongossl = quotes_copy_config['MONGO_SSL']
mongodb_creds = get_safepass(quotes_copy_config['SAFEPASS_MONGO_PROJECT_ID'],
                             quotes_copy_config['SAFEPASS_MONGO_CRED_ID'])
postgres_creds = get_safepass(
    quotes_copy_config['SAFEPASS_POSTGRES_ODS_PROJECT_ID'],
    quotes_copy_config['SAFEPASS_POSTGRES_ODS_CRED_ID'])
jenkins = quotes_copy_config['JENKINS']
jenkins_token = quotes_copy_config['JENKINS_TOKEN']
jenkins_mongo_hadoop_proj = quotes_copy_config['JENKINS_MONGO_HADOOP_PROJ']
jenkins_mongo_hive_mapping_proj = quotes_copy_config[
    'JENKINS_MONGO_HIVE_MAPPING_PROJ']
jenkins_caspian_data_access_proj = quotes_copy_config[
    'JENKINS_CASPIAN_DATA_ACCESS_PROJ']
mongo_hadoop_ver = quotes_copy_config['CUR_VER_MONGO_HADOOP']
hive_mapping_ver = quotes_copy_config['CUR_VER_MONGO_HIVE_MAPPING']
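
The try/except fallback at the top of this example can also be written with the default_var argument of Variable.get; a sketch, noting that it drops the distinction the original makes between a missing variable and a missing default config:

# Alternative fallback using default_var; this is a simplification of the
# original logic, which only falls back when DEFAULT_CONFIG is set.
quotes_conf_path = Variable.get("quotes_conf_path",
                                default_var='quote_management/staging.yaml')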
Example no. 7
from airflow import DAG
from airflow.operators import BashOperator
from airflow.models import Variable
from datetime import datetime, timedelta, time

from common.configs import get_config
from common.log import get_log_path
from common.tasks import alert_task

try:
    default_config = Variable.get("DEFAULT_CONFIG")
except Exception:
    default_config = None

if default_config:
    common_config = get_config('common.yaml')
    task_config = get_config('cron_hdfs_uploader_clickstream_hits.yaml')
else:
    raise Exception("Can't use default config, no other configs available")

LOG_PATH = get_log_path(common_config['LOGS'], task_config['LOG_PREFIX'])

today = datetime.today()
hour = 1
minute = 17
d_delta = 1
h_delta = 0
m_delta = 0

default_args = {
    'owner':
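
Example no. 7 (like nos. 4 and 5) breaks off at the first default_args key. A sketch of how such a definition typically continues, modelled on Example no. 2; the dag_id, schedule, and bash_command are illustrative assumptions, not recovered from the source:

# Hypothetical completion of the truncated snippet above.
default_args = {
    'owner': task_config['user'],
    'depends_on_past': False,
    'start_date': datetime.combine(today, time(hour, minute, 0))
                  - timedelta(days=d_delta),
}

dag = DAG('cron_hdfs_uploader_clickstream_hits',
          default_args=default_args,
          schedule_interval=timedelta(days=d_delta))

upload = BashOperator(
    task_id='hdfs_upload',
    bash_command='echo "upload placeholder" >> ' + LOG_PATH,  # assumed command
    dag=dag)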