# Example #1 (score: 0)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from pipeline_manager import PipelineManager

if __name__ == '__main__':
    # Usage: <script> <project_id> <batch_id> <batch_run_id>
    # Fail with a readable message instead of an IndexError traceback.
    if len(sys.argv) < 4:
        sys.exit("usage: %s <project_id> <batch_id> <batch_run_id>" % sys.argv[0])
    project_id = sys.argv[1]
    batch_id = sys.argv[2]
    batch_run_id = sys.argv[3]
    target_table = "DM_BCM_CIRCUIT_MONTHLY_CAGE_AGG"
    # Both steps write the same target table; order is preserved from the
    # original sequential calls (aggregate first, then the merge/query step).
    steps = (
        ("etl_bcm_circuit_monthly_cage_agg", "aggregate_etl"),
        ("etl_bcm_circuit_monthly_cage_agg_merge1", "query_etl"),
    )
    for pipeline_name, etl_type in steps:
        PipelineManager(project_id, batch_id, batch_run_id,
                        pipeline_name, target_table).execute_pipeline(etl_type)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from pipeline_manager import PipelineManager

if __name__ == '__main__':
    # Positional CLI arguments identify the run context.
    args = sys.argv
    project_id = args[1]
    batch_id = args[2]
    batch_run_id = args[3]
    # Run the aggregate ETL for the cage -> monthly IBX aggregation table.
    manager = PipelineManager(
        project_id,
        batch_id,
        batch_run_id,
        "etl_bcm_cage_agg",
        "DM_BCM_CAGE_MONTHLY_IBX_AGG",
    )
    manager.execute_pipeline("aggregate_etl")
# Example #3 (score: 0)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from pipeline_manager import PipelineManager

if __name__ == '__main__':
    # Three required positional args: project, batch, batch-run identifiers.
    argv = sys.argv
    project_id, batch_id, batch_run_id = argv[1], argv[2], argv[3]
    # Aggregate ETL feeding the large-deals history cage aggregation table.
    PipelineManager(
        project_id,
        batch_id,
        batch_run_id,
        "etl_large_deals_hist_cage_agg",
        "DM_PLP_LARGE_DEALS_HIST_CAGE_AGG",
    ).execute_pipeline("aggregate_etl")
# Example #4 (score: 0)
import click

# Resolve PipelineManager both when this file runs as a loose script
# (no package context) and when it is imported as part of the src package.
if __package__ is None or __package__ == '':
    from pipeline_manager import PipelineManager
else:
    from src.data_mining.pipeline_manager import PipelineManager

# Module-level instance — presumably shared by the CLI commands defined
# below; confirm nothing here performs heavy work at import time.
pipeline_manager = PipelineManager()

@click.group()
def main():
    """Root click command group; subcommands register via @main.command()."""
    pass


@main.command()
# NOTE(review): click groups/commands expose no `.option` attribute — these
# decorators should almost certainly be `@click.option(...)`. Also,
# `param_decls` should carry *all* flag spellings positionally (e.g.
# `'-p', '--pipe-line'`), while `show_default` is a help-output toggle,
# not a place for the long flag. As written this raises AttributeError at
# import time. The decorated function itself is missing here — confirm
# against the full file before fixing.
@main.option(param_decls='-p',
             show_default='--pipe-line',
             help='predefined pipeline to be trained',
             required=True)
@main.option(
    param_decls='-d',
    show_default='--dev-mode',
    help=
    'Development mode. If True then only small sample of data will be used',
    is_flag=True,
    required=False)
@main.option(param_decls='-t',
             show_default='--tag',
             help='Tagging',
             required=False)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from pipeline_manager import PipelineManager

if __name__ == '__main__':
    # Usage: <script> <project_id> <batch_id> <batch_run_id>
    # Fail with a readable message instead of an IndexError traceback.
    if len(sys.argv) < 4:
        sys.exit("usage: %s <project_id> <batch_id> <batch_run_id>" % sys.argv[0])
    project_id = sys.argv[1]
    batch_id = sys.argv[2]
    batch_run_id = sys.argv[3]
    target_table = "DM_PLP_POWER_CAGE_AGG"
    # Both steps target the same table; order preserved from the original
    # sequential calls (aggregate first, then the merge/query step).
    steps = (
        ("etl_power_cage_agg", "aggregate_etl"),
        ("etl_power_cage_agg_merge1", "query_etl"),
    )
    for pipeline_name, etl_type in steps:
        PipelineManager(project_id, batch_id, batch_run_id,
                        pipeline_name, target_table).execute_pipeline(etl_type)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from pipeline_manager import PipelineManager

if __name__ == '__main__':
    # CLI arguments: project, batch, and batch-run identifiers.
    project_id = sys.argv[1]
    batch_id = sys.argv[2]
    batch_run_id = sys.argv[3]
    # Monthly aggregation over the refined DCIM asset trend data.
    mgr = PipelineManager(project_id, batch_id, batch_run_id,
                          "etl_dcim_asset_trend_agg",
                          "DM_DCIM_REFINED_ASSET_TREND_MONTHLY_AGG")
    mgr.execute_pipeline("aggregate_etl")
# Example #7 (score: 0)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from pipeline_manager import PipelineManager

if __name__ == '__main__':
    # Usage: <script> <project_id> <batch_id> <batch_run_id>
    # Fail with a readable message instead of an IndexError traceback.
    if len(sys.argv) < 4:
        sys.exit("usage: %s <project_id> <batch_id> <batch_run_id>" % sys.argv[0])
    project_id = sys.argv[1]
    batch_id = sys.argv[2]
    batch_run_id = sys.argv[3]
    target_table = "DM_PLP_SPACE_CABE_CAGE_AGG"
    # Both steps target the same table; order preserved from the original
    # sequential calls (aggregate first, then the merge/query step).
    steps = (
        ("etl_space_cabe_cage_agg", "aggregate_etl"),
        ("etl_space_cabe_cage_agg_merge1", "query_etl"),
    )
    for pipeline_name, etl_type in steps:
        PipelineManager(project_id, batch_id, batch_run_id,
                        pipeline_name, target_table).execute_pipeline(etl_type)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from pipeline_manager import PipelineManager

if __name__ == '__main__':
    # Pull the run identifiers from the command line.
    args = sys.argv
    project_id = args[1]
    batch_id = args[2]
    batch_run_id = args[3]
    # Hive extraction feeding the refined asset-map table.
    extractor = PipelineManager(project_id, batch_id, batch_run_id,
                                "etl_dcim_asset_map_hive_extract",
                                "DM_DCIM_REFINED_ASSET_MAP")
    extractor.execute_pipeline("hive_etl")
from pipeline.preprocessing.csv_parser import CSVParser
from pipeline.data.reference_tests import TimeseriesTest, TestTypes
from view.visualization import Visualization
from pipeline.preprocessing.csv_parser import CSVParser


def henon_map_test():
    """Run the Henon-map reference test with its fixed hyperparameters."""
    config = {
        "type": TestTypes.HENON,
        "dimension": 2,
        "level": 7,
        "training_length": 5000,
        "lambda_parameter": 2 ** -22,
        "training_accuracy": 10 ** -20,
    }
    TimeseriesTest(**config)


def jumpmap_test():
    """Run the jump-map reference test (adaptivity enabled)."""
    config = {
        "type": TestTypes.JUMP_MAP,
        "dimension": 5,
        "level": 5,
        "training_length": 5000,
        "lambda_parameter": 10 ** -4,
        "training_accuracy": 10 ** -13,
        "with_adaptivity": True,
    }
    TimeseriesTest(**config)


if __name__ == "__main__":
    #henon_map_test()
    #jumpmap_test()
    PipelineManager()
    #Visualization().plot_rmse_evolution()
    #CSVParser().get_mean_rmse()
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from pipeline_manager import PipelineManager

if __name__ == '__main__':
    # Usage: <script> <project_id> <batch_id> <batch_run_id>
    # Fail with a readable message instead of an IndexError traceback.
    if len(sys.argv) < 4:
        sys.exit("usage: %s <project_id> <batch_id> <batch_run_id>" % sys.argv[0])
    project_id = sys.argv[1]
    batch_id = sys.argv[2]
    batch_run_id = sys.argv[3]
    target_table = "PLP_SBL_ORDER_CHURN_ASSET"
    # All three steps target the same table; order preserved from the original
    # sequential calls (query/load first, then the two merge steps).
    steps = (
        ("etl_sbl_order_churn_asset", "query_load_etl"),
        ("etl_sbl_order_churn_asset_merge1", "query_etl"),
        ("etl_sbl_order_churn_asset_merge2", "query_etl"),
    )
    for pipeline_name, etl_type in steps:
        PipelineManager(project_id, batch_id, batch_run_id,
                        pipeline_name, target_table).execute_pipeline(etl_type)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from pipeline_manager import PipelineManager

if __name__ == '__main__':
    # Run-context identifiers supplied as positional CLI arguments.
    project_id, batch_id, batch_run_id = (sys.argv[1], sys.argv[2],
                                          sys.argv[3])
    # Aggregate ETL for the pipeline aggregation data-mart table.
    PipelineManager(project_id, batch_id, batch_run_id,
                    "etl_pipeline_agg",
                    "DM_PLP_PIPELINE_AGG").execute_pipeline("aggregate_etl")
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from pipeline_manager import PipelineManager

if __name__ == '__main__':
    # Required positional arguments for the batch run.
    project_id = sys.argv[1]
    batch_id = sys.argv[2]
    batch_run_id = sys.argv[3]
    # Hive extraction into the daily BCM cage data table.
    job = PipelineManager(
        project_id, batch_id, batch_run_id,
        "etl_bcm_cage_hive_extract",
        "DM_BCM_CAGE_DAILY_DATA",
    )
    job.execute_pipeline("hive_etl")