Example #1
import traceback
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try: 
	PipelineNotification.PipelineNotification().started_notification('5e85f6229723fb68fd4e52c5','5df78f4be2f2eff24740bbd7','http://13.68.212.36:3200/pipeline/notify')
	BostonHousingPricesRegression_DBFS = DBFSConnector.DBFSConnector.fetch([], {}, "5e85f6229723fb68fd4e52c5", spark, "{'url': '/Demo/BostonTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi0ef076722999cf4cd8859e9aafdb7b76', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}")

	PipelineNotification.PipelineNotification().completed_notification('5e85f6229723fb68fd4e52c5','5df78f4be2f2eff24740bbd7','http://13.68.212.36:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e85f6229723fb68fd4e52c5','5df78f4be2f2eff24740bbd7','http://13.68.212.36:3200/pipeline/notify','http://13.68.212.36:3200/logs/getProductLogs')
	sys.exit(1)
try: 
	PipelineNotification.PipelineNotification().started_notification('5e85f6229723fb68fd4e52c6','5df78f4be2f2eff24740bbd7','http://13.68.212.36:3200/pipeline/notify')
	BostonHousingPricesRegression_AutoFE = TranformationsMainFlow.TramformationMain.run(["5e85f6229723fb68fd4e52c5"],{"5e85f6229723fb68fd4e52c5": BostonHousingPricesRegression_DBFS}, "5e85f6229723fb68fd4e52c6", spark,json.dumps( {"FE": [{"feature": "CRIM", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "4.71", "stddev": "11.7", "min": "0.01301", "max": "88.9762", "missing": "0"}, "transformation": ""}, {"feature": "ZN", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "9.08", "stddev": "18.96", "min": "0.0", "max": "100.0", "missing": "0"}, "transformation": ""}, {"feature": "INDUS", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "11.24", "stddev": "6.62", "min": "0.46", "max": "27.74", "missing": "0"}, "transformation": ""}, {"feature": "CHAS", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "0.06", "stddev": "0.25", "min": "0.0", "max": "1.0", "missing": "0"}, "transformation": ""}, {"feature": "NOX", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "0.57", "stddev": "0.12", "min": "0.392", "max": "0.871", "missing": "0"}, "transformation": ""}, {"feature": "RM", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "6.26", "stddev": "0.73", "min": "3.561", "max": "8.725", "missing": "0"}, "transformation": ""}, {"feature": "AGE", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "71.58", "stddev": "26.56", "min": "6.5", "max": "100.0", "missing": "0"}, "transformation": ""}, {"feature": "DIS", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "3.71", "stddev": "2.07", "min": "1.3459", "max": "9.2229", "missing": "0"}, "transformation": ""}, {"feature": "RAD", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "10.08", "stddev": "8.95", "min": "1.0", "max": "24.0", "missing": "0"}, "transformation": ""}, {"feature": "TAX", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "415.13", "stddev": "172.51", "min": "188.0", "max": "711.0", "missing": "0"}, "transformation": ""}, {"feature": "PTRATIO", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "18.31", "stddev": "2.11", "min": "13.0", "max": "21.0", "missing": "0"}, "transformation": ""}, {"feature": "B", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "349.07", "stddev": "103.57", "min": "0.32", "max": "396.9", "missing": "0"}, "transformation": ""}, {"feature": "LSTAT", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "13.36", "stddev": "7.29", "min": "2.87", "max": "34.02", "missing": "0"}, "transformation": ""}, {"feature": "MEDV", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "140", "mean": "21.56", "stddev": "9.39", "min": "5.0", "max": "50.0", "missing": "0"}, "transformation": ""}]}))

	PipelineNotification.PipelineNotification().completed_notification('5e85f6229723fb68fd4e52c6','5df78f4be2f2eff24740bbd7','http://13.68.212.36:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e85f6229723fb68fd4e52c6','5df78f4be2f2eff24740bbd7','http://13.68.212.36:3200/pipeline/notify','http://13.68.212.36:3200/logs/getProductLogs')
	sys.exit(1)
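Every stage in these examples repeats the same started_notification / completed_notification / failed_notification calls around a try/except. The sketch below shows one way that boilerplate could be factored into a context manager; notify_stage is a hypothetical helper built only from the PipelineNotification methods visible above, and the IDs and URLs in the usage comment are placeholders, not values from the generated code.

import sys
from contextlib import contextmanager

from core import PipelineNotification


@contextmanager
def notify_stage(stage_id, project_id, notify_url, logs_url):
    # Hypothetical wrapper: sends the same started/completed/failed
    # notifications the generated code emits around each pipeline stage.
    notifier = PipelineNotification.PipelineNotification()
    notifier.started_notification(stage_id, project_id, notify_url)
    try:
        yield notifier
        notifier.completed_notification(stage_id, project_id, notify_url)
    except Exception as ex:
        notifier.failed_notification(ex, stage_id, project_id, notify_url, logs_url)
        sys.exit(1)

# Usage sketch (placeholder IDs/URLs):
# with notify_stage('<stage_id>', '<project_id>',
#                   'http://<host>:3200/pipeline/notify',
#                   'http://<host>:3200/logs/getProductLogs'):
#     df = DBFSConnector.DBFSConnector.fetch([], {}, '<stage_id>', spark, config)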
Example #2
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
from clustering.ClusteringMain import Clustering
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e7c888d8e2f083e6bf22b1a', '5e1eb97a7d1a8956f654a15f',
        'http://137.116.116.173:3200/pipeline/notify')
    AppClustering_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e7c888d8e2f083e6bf22b1a", spark,
        "{'url': '/Demo/MovieRatingsTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapid40af94a6c7d8d818acf548df4c773f8', 'dbfs_domain': 'eastus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e7c888d8e2f083e6bf22b1a', '5e1eb97a7d1a8956f654a15f',
        'http://137.116.116.173:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e7c888d8e2f083e6bf22b1a', '5e1eb97a7d1a8956f654a15f',
        'http://137.116.116.173:3200/pipeline/notify',
        'http://137.116.116.173:3200/logs/getProductLogs')
    sys.exit(1)
Example #3
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e1c50fdd35748bc2faef776', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify')
    PredictiveChurnSystem_DBFSs = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e1c50fdd35748bc2faef776", spark,
        "{'url': '/Demo/PredictiveChurnTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi743e2d3cc92a32916f8c2fa9bd7d0606', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e1c50fdd35748bc2faef776', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e1c50fdd35748bc2faef776', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify',
        'http://104.40.91.74:3200/logs/getProductLogs')
    sys.exit(1)
Example #4
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e68b4f85e784b82eb28e555', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify')
    TestAPP_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e68b4f85e784b82eb28e555", spark,
        "{'url': '/Demo/MovieRatingsTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi0ef076722999cf4cd8859e9aafdb7b76', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e68b4f85e784b82eb28e555', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e68b4f85e784b82eb28e555', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify',
        'http://13.68.212.36:3200/logs/getProductLogs')
    sys.exit(1)
Example #5
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e87618e1e049f046a1d8a0e', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify')
    CustomerAcquisitionClassification_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e87618e1e049f046a1d8a0e", spark,
        "{'url': '/Demo/CustomerAcquisitionTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi0ef076722999cf4cd8859e9aafdb7b76', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e87618e1e049f046a1d8a0e', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e87618e1e049f046a1d8a0e', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify',
        'http://13.68.212.36:3200/logs/getProductLogs')
    sys.exit(1)
Example #6
from azureml.core.authentication import ServicePrincipalAuthentication
from core.CustomExceptions import HandleExceptions
from azureml.core.webservice import AciWebservice, Webservice
from azureml.core import Workspace
import traceback
import sys
from connectors import DBFSConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e6b2684be588d7717182f32', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify')
    PredictHighestIncome_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e6b2684be588d7717182f32", spark,
        "{'url': '/Demo/PredictHighestIncomeTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi0ef076722999cf4cd8859e9aafdb7b76', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e6b2684be588d7717182f32', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e6b2684be588d7717182f32', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify',
        'http://13.68.212.36:3200/logs/getProductLogs')
    sys.exit(1)
Example #7
import traceback
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

PipelineNotification.PipelineNotification().started_notification(
    '5da5c65e3cba417ba39880f5', '5df78f4be2f2eff24740bbd7',
    'http://13.68.212.36:3200/pipeline/notify')
source = DBFSConnector.DBFSConnector.fetch(
    [], {}, "5da5c65e3cba417ba39880f5", spark,
    "{'url': '/Demo/PredictiveChurnTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi0ef076722999cf4cd8859e9aafdb7b76', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
)
PipelineNotification.PipelineNotification().completed_notification(
    '5da5c65e3cba417ba39880f5', '5df78f4be2f2eff24740bbd7',
    'http://13.68.212.36:3200/pipeline/notify')
PipelineNotification.PipelineNotification().started_notification(
    '5da5c65e3cba417ba39880f6', '5df78f4be2f2eff24740bbd7',
    'http://13.68.212.36:3200/pipeline/notify')
transformations = TranformationsMainFlow.TramformationMain.run(
    ["5da5c65e3cba417ba39880f5"], {"5da5c65e3cba417ba39880f5": source},
    "5da5c65e3cba417ba39880f6", spark,
    json.dumps({
        "FE": [{
Example #8
import traceback
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try: 
	PipelineNotification.PipelineNotification().started_notification('5e7b4b9140bf64e99a8d5dd5','5e39734e0204cd465d4d2e10','http://40.83.140.93:3200/pipeline/notify')
	Movie_recommendation_DBFS = DBFSConnector.DBFSConnector.fetch([], {}, "5e7b4b9140bf64e99a8d5dd5", spark, "{'url': '/Demo/MovieRatingsTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapib8073bbfa952efa9d363b234ce06e2c6', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}")

	PipelineNotification.PipelineNotification().completed_notification('5e7b4b9140bf64e99a8d5dd5','5e39734e0204cd465d4d2e10','http://40.83.140.93:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e7b4b9140bf64e99a8d5dd5','5e39734e0204cd465d4d2e10','http://40.83.140.93:3200/pipeline/notify','http://40.83.140.93:3200/logs/getProductLogs')
	sys.exit(1)
try: 
	PipelineNotification.PipelineNotification().started_notification('5e7b4b9140bf64e99a8d5dd6','5e39734e0204cd465d4d2e10','http://40.83.140.93:3200/pipeline/notify')
	Movie_recommendation_AutoFE = TranformationsMainFlow.TramformationMain.run(["5e7b4b9140bf64e99a8d5dd5"],{"5e7b4b9140bf64e99a8d5dd5": Movie_recommendation_DBFS}, "5e7b4b9140bf64e99a8d5dd6", spark,json.dumps( {"FE": [{"transformationsData": {}, "feature": "UserId", "transformation": "", "type": "numeric", "replaceby": "mean", "selected": "True", "stats": {"count": "2587", "mean": "465.06", "stddev": "264.69", "min": "1", "max": "943", "missing": "0"}}, {"transformationsData": {}, "feature": "MovieId", "transformation": "", "type": "numeric", "replaceby": "mean", "selected": "True", "stats": {"count": "2587", "mean": "432.85", "stddev": "337.75", "min": "1", "max": "1656", "missing": "0"}}, {"transformationsData": {}, "feature": "Rating", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "2587", "mean": "3.52", "stddev": "1.15", "min": "1.0", "max": "5.0", "missing": "0"}, "transformation": ""}, {"transformationsData": {"feature_label": "Timestamp"}, "feature": "Timestamp", "type": "date", "selected": "True", "replaceby": "random", "stats": {"count": "", "mean": "", "stddev": "", "min": "", "max": "", "missing": "0"}, "transformation": "Extract Date"}, {"transformationsData": {}, "feature": "AvgRating", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "2587", "mean": "3.53", "stddev": "0.44", "min": "1.51", "max": "4.67", "missing": "0"}, "transformation": ""}, {"feature": "Timestamp_dayofmonth", "transformation": "", "transformationsData": {}, "type": "numeric", "generated": "True", "selected": "True", "stats": {"count": "2587", "mean": "16.05", "stddev": "9.06", "min": "1", "max": "31", "missing": "0"}}, {"feature": "Timestamp_month", "transformation": "", "transformationsData": {}, "type": "numeric", "generated": "True", "selected": "True", "stats": {"count": "2587", "mean": "7.0", "stddev": "4.34", "min": "1", "max": "12", "missing": "0"}}, {"feature": "Timestamp_year", "transformation": "", "transformationsData": {}, "type": "numeric", "generated": "True", "selected": "True", "stats": {"count": "2587", "mean": "1997.45", "stddev": "0.5", "min": "1997", "max": "1998", "missing": "0"}}]}))

	PipelineNotification.PipelineNotification().completed_notification('5e7b4b9140bf64e99a8d5dd6','5e39734e0204cd465d4d2e10','http://40.83.140.93:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e7b4b9140bf64e99a8d5dd6','5e39734e0204cd465d4d2e10','http://40.83.140.93:3200/pipeline/notify','http://40.83.140.93:3200/logs/getProductLogs')
	sys.exit(1)
Example #9
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e37c1eb88bcadd8bc9b7c13', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify')
    MovieAvgRecom_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e37c1eb88bcadd8bc9b7c13", spark,
        "{'url': '/Demo/Marketing/MovieRatings (2).csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi743e2d3cc92a32916f8c2fa9bd7d0606', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e37c1eb88bcadd8bc9b7c13', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e37c1eb88bcadd8bc9b7c13', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify',
        'http://104.40.91.74:3200/logs/getProductLogs')
    sys.exit(1)
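The last argument to DBFSConnector.fetch in these snippets is a Python-dict-style string (single quotes), not JSON, so json.loads would reject it. The connector's internals are not shown here, so the following is only an assumption of how such a config string could be parsed and applied with Spark's CSV reader; spark is assumed to be the active SparkSession, as in the examples.

import ast

# Config string in the same shape the snippets pass to fetch()
# (token and domain left blank here on purpose).
config_str = ("{'url': '/Demo/Marketing/MovieRatings (2).csv', 'file_type': 'Delimeted', "
              "'dbfs_token': '', 'dbfs_domain': '', 'delimiter': ',', "
              "'is_header': 'Use Header Line'}")

# ast.literal_eval accepts the single-quoted dict literal that json.loads cannot parse.
config = ast.literal_eval(config_str)

df = (spark.read
      .option("header", config.get("is_header") == "Use Header Line")
      .option("delimiter", config.get("delimiter", ","))
      .csv(config["url"]))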
Example #10
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
from clustering.ClusteringMain import Clustering
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e7e07507ef19e176f7ab146', '5e1eb97a7d1a8956f654a15f',
        'http://137.116.116.173:3200/pipeline/notify')
    Test_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e7e07507ef19e176f7ab146", spark,
        "{'url': '/clustering/IrisTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapid40af94a6c7d8d818acf548df4c773f8', 'dbfs_domain': 'eastus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e7e07507ef19e176f7ab146', '5e1eb97a7d1a8956f654a15f',
        'http://137.116.116.173:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e7e07507ef19e176f7ab146', '5e1eb97a7d1a8956f654a15f',
        'http://137.116.116.173:3200/pipeline/notify',
        'http://137.116.116.173:3200/logs/getProductLogs')
    sys.exit(1)
Example #11
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e7b4eda7f9f3826a4b96214', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
    testappcommitview_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e7b4eda7f9f3826a4b96214", spark,
        "{'url': '', 'file_type': 'Delimeted', 'dbfs_token': '', 'dbfs_domain': '', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e7b4eda7f9f3826a4b96214', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e7b4eda7f9f3826a4b96214', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify',
        'http://40.83.140.93:3200/logs/getProductLogs')
    sys.exit(1)
Example #12
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e68d88680cd15a2cbbe4ad9', '5e68d53980cd15a2cbbe4a18',
        'http://40.83.140.93:3200/pipeline/notify')
    QA2_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e68d88680cd15a2cbbe4ad9", spark,
        "{'url': '/Demo/MovieRatingsTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapib8073bbfa952efa9d363b234ce06e2c6', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e68d88680cd15a2cbbe4ad9', '5e68d53980cd15a2cbbe4a18',
        'http://40.83.140.93:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e68d88680cd15a2cbbe4ad9', '5e68d53980cd15a2cbbe4a18',
        'http://40.83.140.93:3200/pipeline/notify',
        'http://40.83.140.93:3200/logs/getProductLogs')
    sys.exit(1)
Example #13
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5d80e907b276bfcc57d2e600', '5e26b8e42fb16412176c3a5d',
        'http://104.42.172.209:3200/pipeline/notify')
    retail_source_01 = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5d80e907b276bfcc57d2e600", spark,
        "{'url': '/Demo/RetailCreditScoringTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi2936395fb3cbcb995e4fe803cc653542', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5d80e907b276bfcc57d2e600', '5e26b8e42fb16412176c3a5d',
        'http://104.42.172.209:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5d80e907b276bfcc57d2e600', '5e26b8e42fb16412176c3a5d',
        'http://104.42.172.209:3200/pipeline/notify',
        'http://104.42.172.209:3200/logs/getProductLogs')
    sys.exit(1)
Example #14
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
from clustering.ClusteringMain import Clustering
import json

try: 
	PipelineNotification.PipelineNotification().started_notification('5e7ca8098f00eb66322e0112','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
	newc_DBFS = DBFSConnector.DBFSConnector.fetch([], {}, "5e7ca8098f00eb66322e0112", spark, "{'url': '/Demo/MovieRatingsTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapid40af94a6c7d8d818acf548df4c773f8', 'dbfs_domain': 'eastus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}")

	PipelineNotification.PipelineNotification().completed_notification('5e7ca8098f00eb66322e0112','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e7ca8098f00eb66322e0112','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify','http://137.116.116.173:3200/logs/getProductLogs')
	sys.exit(1)
try: 
	PipelineNotification.PipelineNotification().started_notification('5e7ca8098f00eb66322e0113','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
	newc_AutoFE = TranformationsMainFlow.TramformationMain.run(["5e7ca8098f00eb66322e0112"],{"5e7ca8098f00eb66322e0112": newc_DBFS}, "5e7ca8098f00eb66322e0113", spark,json.dumps( {"FE": [{"transformationsData": {}, "feature": "UserId", "transformation": "", "type": "numeric", "replaceby": "mean", "selected": "True", "stats": {"count": "2587", "mean": "465.06", "stddev": "264.69", "min": "1", "max": "943", "missing": "0"}}, {"transformationsData": {}, "feature": "MovieId", "transformation": "", "type": "numeric", "replaceby": "mean", "selected": "True", "stats": {"count": "2587", "mean": "432.85", "stddev": "337.75", "min": "1", "max": "1656", "missing": "0"}}, {"transformationsData": {}, "feature": "Rating", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "2587", "mean": "3.52", "stddev": "1.15", "min": "1.0", "max": "5.0", "missing": "0"}, "transformation": ""}, {"transformationsData": {"feature_label": "Timestamp"}, "feature": "Timestamp", "type": "date", "selected": "True", "replaceby": "random", "stats": {"count": "", "mean": "", "stddev": "", "min": "", "max": "", "missing": "0"}, "transformation": "Extract Date"}, {"transformationsData": {}, "feature": "AvgRating", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "2587", "mean": "3.53", "stddev": "0.44", "min": "1.51", "max": "4.67", "missing": "0"}, "transformation": ""}, {"feature": "Timestamp_dayofmonth", "transformation": "", "transformationsData": {}, "type": "numeric", "generated": "True", "selected": "True", "stats": {"count": "2587", "mean": "16.05", "stddev": "9.06", "min": "1", "max": "31", "missing": "0"}}, {"feature": "Timestamp_month", "transformation": "", "transformationsData": {}, "type": "numeric", "generated": "True", "selected": "True", "stats": {"count": "2587", "mean": "7.0", "stddev": "4.34", "min": "1", "max": "12", "missing": "0"}}, {"feature": "Timestamp_year", "transformation": "", "transformationsData": {}, "type": "numeric", "generated": "True", "selected": "True", "stats": {"count": "2587", "mean": "1997.45", "stddev": "0.5", "min": "1997", "max": "1998", "missing": "0"}}]}))

	PipelineNotification.PipelineNotification().completed_notification('5e7ca8098f00eb66322e0113','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e7ca8098f00eb66322e0113','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify','http://137.116.116.173:3200/logs/getProductLogs')
	sys.exit(1)
Example #15
import traceback
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try: 
	PipelineNotification.PipelineNotification().started_notification('5e283774b97f2a7ad4154c57','567a95c8ca676c1d07d5e3e7','http://104.40.91.74:3200/pipeline/notify')
	CustomerChurn_DBFS = DBFSConnector.DBFSConnector.fetch([], {}, "5e283774b97f2a7ad4154c57", spark, "{'url': '/Demo/CustomerChurnTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi743e2d3cc92a32916f8c2fa9bd7d0606', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}")

	PipelineNotification.PipelineNotification().completed_notification('5e283774b97f2a7ad4154c57','567a95c8ca676c1d07d5e3e7','http://104.40.91.74:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e283774b97f2a7ad4154c57','567a95c8ca676c1d07d5e3e7','http://104.40.91.74:3200/pipeline/notify','http://104.40.91.74:3200/logs/getProductLogs')
	sys.exit(1)
try: 
	PipelineNotification.PipelineNotification().started_notification('5e283774b97f2a7ad4154c58','567a95c8ca676c1d07d5e3e7','http://104.40.91.74:3200/pipeline/notify')
	CustomerChurn_AutoFE = TranformationsMainFlow.TramformationMain.run(["5e283774b97f2a7ad4154c57"],{"5e283774b97f2a7ad4154c57": CustomerChurn_DBFS}, "5e283774b97f2a7ad4154c58", spark,json.dumps( {"FE": [{"transformationsData": {"feature_label": "customerID"}, "feature": "customerID", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "0002-ORFBO", "max": "9995-HOTOH", "missing": "0", "distinct": "730"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "gender"}, "feature": "gender", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "Female", "max": "Male", "missing": "0", "distinct": "2"}, "transformation": "String Indexer"}, {"transformationsData": {}, "feature": "SeniorCitizen", "transformation": "", "type": "numeric", "replaceby": "mean", "selected": "True", "stats": {"count": "7043", "mean": "0.16", "stddev": "0.37", "min": "0", "max": "1", "missing": "0"}}, {"transformationsData": {"feature_label": "Partner"}, "feature": "Partner", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "2"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "Dependents"}, "feature": "Dependents", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "2"}, "transformation": "String Indexer"}, {"transformationsData": {}, "feature": "tenure", "transformation": "", "type": "numeric", "replaceby": "mean", "selected": "True", "stats": {"count": "7043", "mean": "32.37", "stddev": "24.56", "min": "0", "max": "72", "missing": "0"}}, {"transformationsData": {"feature_label": "PhoneService"}, "feature": "PhoneService", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "2"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "MultipleLines"}, "feature": "MultipleLines", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "3"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "InternetService"}, "feature": "InternetService", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "DSL", "max": "No", "missing": "0", "distinct": "3"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "OnlineSecurity"}, "feature": "OnlineSecurity", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "3"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "OnlineBackup"}, "feature": "OnlineBackup", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "3"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "DeviceProtection"}, "feature": "DeviceProtection", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": 
"No", "max": "Yes", "missing": "0", "distinct": "3"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "TechSupport"}, "feature": "TechSupport", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "3"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "StreamingTV"}, "feature": "StreamingTV", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "3"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "StreamingMovies"}, "feature": "StreamingMovies", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "3"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "Contract"}, "feature": "Contract", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "Month-to-month", "max": "Two year", "missing": "0", "distinct": "3"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "PaperlessBilling"}, "feature": "PaperlessBilling", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "2"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "PaymentMethod"}, "feature": "PaymentMethod", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "Bank transfer (automatic)", "max": "Mailed check", "missing": "0", "distinct": "4"}, "transformation": "String Indexer"}, {"transformationsData": {}, "feature": "MonthlyCharges", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "7043", "mean": "64.76", "stddev": "30.09", "min": "18.25", "max": "118.75", "missing": "0"}, "transformation": ""}, {"transformationsData": {"feature_label": "TotalCharges"}, "feature": "TotalCharges", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "2283.3", "stddev": "2266.77", "min": " ", "max": "999.9", "missing": "0", "distinct": "727"}, "transformation": "String Indexer"}, {"transformationsData": {"feature_label": "Churn"}, "feature": "Churn", "type": "string", "selected": "True", "replaceby": "max", "stats": {"count": "7043", "mean": "", "stddev": "", "min": "No", "max": "Yes", "missing": "0", "distinct": "2"}, "transformation": "String Indexer"}, {"feature": "customerID_transform", "transformation": "", "transformationsData": {}, "type": "real", "selected": "True", "stats": {"count": "730", "mean": "364.5", "stddev": "210.88", "min": "0.0", "max": "729.0", "missing": "0"}}, {"feature": "gender_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.5", "stddev": "0.5", "min": "0", "max": "1", "missing": "0"}}, {"feature": "Partner_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.48", "stddev": "0.5", "min": "0", "max": "1", "missing": "0"}}, {"feature": "Dependents_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", 
"stats": {"count": "730", "mean": "0.32", "stddev": "0.46", "min": "0", "max": "1", "missing": "0"}}, {"feature": "PhoneService_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.1", "stddev": "0.3", "min": "0", "max": "1", "missing": "0"}}, {"feature": "MultipleLines_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.62", "stddev": "0.66", "min": "0", "max": "2", "missing": "0"}}, {"feature": "InternetService_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.79", "stddev": "0.79", "min": "0", "max": "2", "missing": "0"}}, {"feature": "OnlineSecurity_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.76", "stddev": "0.8", "min": "0", "max": "2", "missing": "0"}}, {"feature": "OnlineBackup_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.81", "stddev": "0.78", "min": "0", "max": "2", "missing": "0"}}, {"feature": "DeviceProtection_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.79", "stddev": "0.79", "min": "0", "max": "2", "missing": "0"}}, {"feature": "TechSupport_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.74", "stddev": "0.81", "min": "0", "max": "2", "missing": "0"}}, {"feature": "StreamingTV_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.84", "stddev": "0.77", "min": "0", "max": "2", "missing": "0"}}, {"feature": "StreamingMovies_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.83", "stddev": "0.77", "min": "0", "max": "2", "missing": "0"}}, {"feature": "Contract_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.68", "stddev": "0.8", "min": "0", "max": "2", "missing": "0"}}, {"feature": "PaperlessBilling_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.4", "stddev": "0.49", "min": "0", "max": "1", "missing": "0"}}, {"feature": "PaymentMethod_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "1.32", "stddev": "1.15", "min": "0", "max": "3", "missing": "0"}}, {"feature": "TotalCharges_transform", "transformation": "", "transformationsData": {}, "type": "real", "selected": "True", "stats": {"count": "730", "mean": "361.51", "stddev": "210.86", "min": "0.0", "max": "726.0", "missing": "0"}}, {"feature": "Churn_transform", "transformation": "", "transformationsData": {}, "type": "numeric", "selected": "True", "stats": {"count": "730", "mean": "0.23", "stddev": "0.42", "min": "0", "max": "1", "missing": "0"}}]}))

	PipelineNotification.PipelineNotification().completed_notification('5e283774b97f2a7ad4154c58','567a95c8ca676c1d07d5e3e7','http://104.40.91.74:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e283774b97f2a7ad4154c58','567a95c8ca676c1d07d5e3e7','http://104.40.91.74:3200/pipeline/notify','http://104.40.91.74:3200/logs/getProductLogs')
	sys.exit(1)
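The "FE" payload passed to TranformationMain.run hard-codes per-feature statistics (count, mean, stddev, min, max, missing). As a sketch, a comparable feature list could be derived from the DataFrame itself instead of being written by hand; build_fe_config below is a hypothetical helper, not part of TranformationsMainFlow, and covers numeric columns only.

import json
from pyspark.sql import functions as F


def build_fe_config(df):
    # Hypothetical helper: builds an "FE" list in the shape used above from a
    # DataFrame's own summary statistics. String/date columns would still need
    # their String Indexer / Extract Date entries added by hand.
    numeric_types = ("int", "bigint", "double", "float", "decimal")
    summary = {row["summary"]: row.asDict()
               for row in df.summary("count", "mean", "stddev", "min", "max").collect()}
    features = []
    for name, dtype in df.dtypes:
        if not dtype.startswith(numeric_types):
            continue
        missing = df.filter(F.col(name).isNull()).count()
        features.append({
            "feature": name,
            "type": "real" if dtype in ("double", "float") else "numeric",
            "selected": "True",
            "replaceby": "mean",
            "transformation": "",
            "transformationsData": {},
            "stats": {
                "count": summary["count"].get(name) or "",
                "mean": summary["mean"].get(name) or "",
                "stddev": summary["stddev"].get(name) or "",
                "min": summary["min"].get(name) or "",
                "max": summary["max"].get(name) or "",
                "missing": str(missing),
            },
        })
    return json.dumps({"FE": features})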
Example #16
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e1c162b5b457a9e66200985', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify')
    PredictChurn_DBFSS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e1c162b5b457a9e66200985", spark,
        "{'url': '/Demo/PredictiveChurnTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi743e2d3cc92a32916f8c2fa9bd7d0606', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e1c162b5b457a9e66200985', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e1c162b5b457a9e66200985', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify',
        'http://104.40.91.74:3200/logs/getProductLogs')
    sys.exit(1)
Example #17
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e5f80c22a4014e42a781ec1', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
    pipe1_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e5f80c22a4014e42a781ec1", spark,
        "{'url': '/Demo/MovieRatingsTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapib8073bbfa952efa9d363b234ce06e2c6', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e5f80c22a4014e42a781ec1', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e5f80c22a4014e42a781ec1', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify',
        'http://40.83.140.93:3200/logs/getProductLogs')
    sys.exit(1)
Example #18
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e1c54c548c393f8cb6a6dbf', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify')
    AvgMovieRecommendation_DBFSs = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e1c54c548c393f8cb6a6dbf", spark,
        "{'url': '/Demo/Marketing/MovieRatings (2).csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapib3c8e0614707f7e6d2addea6ce7c33d0', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e1c54c548c393f8cb6a6dbf', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e1c54c548c393f8cb6a6dbf', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify',
        'http://104.40.91.74:3200/logs/getProductLogs')
    sys.exit(1)
Example #19
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e7b51f42d4b80de5155fc25', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
    MovieReco_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e7b51f42d4b80de5155fc25", spark,
        "{'url': '/Demo/MovieRatingsTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapib8073bbfa952efa9d363b234ce06e2c6', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e7b51f42d4b80de5155fc25', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e7b51f42d4b80de5155fc25', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify',
        'http://40.83.140.93:3200/logs/getProductLogs')
    sys.exit(1)
Example #20
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
from clustering.ClusteringMain import Clustering
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e7c5f4b64c6ede468699f1d', '5e1eb97a7d1a8956f654a15f',
        'http://137.116.116.173:3200/pipeline/notify')
    visualTest_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e7c5f4b64c6ede468699f1d", spark,
        "{'url': '/clustering/IrisTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapid40af94a6c7d8d818acf548df4c773f8', 'dbfs_domain': 'eastus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e7c5f4b64c6ede468699f1d', '5e1eb97a7d1a8956f654a15f',
        'http://137.116.116.173:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e7c5f4b64c6ede468699f1d', '5e1eb97a7d1a8956f654a15f',
        'http://137.116.116.173:3200/pipeline/notify',
        'http://137.116.116.173:3200/logs/getProductLogs')
    sys.exit(1)
Example #21
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
from clustering.ClusteringMain import Clustering
import json

try: 
	PipelineNotification.PipelineNotification().started_notification('5e7dddb99ce151def126612a','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
	FinalTest_DBFS = DBFSConnector.DBFSConnector.fetch([], {}, "5e7dddb99ce151def126612a", spark, "{'url': '/clustering/IrisTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapid40af94a6c7d8d818acf548df4c773f8', 'dbfs_domain': 'eastus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}")

	PipelineNotification.PipelineNotification().completed_notification('5e7dddb99ce151def126612a','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e7dddb99ce151def126612a','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify','http://137.116.116.173:3200/logs/getProductLogs')
	sys.exit(1)
try: 
	PipelineNotification.PipelineNotification().started_notification('5e7dddb99ce151def126612b','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
	FinalTest_AutoFE = TranformationsMainFlow.TramformationMain.run(["5e7dddb99ce151def126612a"],{"5e7dddb99ce151def126612a": FinalTest_DBFS}, "5e7dddb99ce151def126612b", spark,json.dumps( {"FE": [{"transformationsData": {}, "feature": "SepalLengthCm", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "16", "mean": "5.72", "stddev": "0.79", "min": "4.6", "max": "7.1", "missing": "0"}, "transformation": ""}, {"transformationsData": {}, "feature": "SepalWidthCm", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "16", "mean": "3.15", "stddev": "0.39", "min": "2.4", "max": "3.8", "missing": "0"}, "transformation": ""}, {"transformationsData": {}, "feature": "PetalLengthCm", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "16", "mean": "3.43", "stddev": "1.87", "min": "1.3", "max": "5.9", "missing": "0"}, "transformation": ""}, {"transformationsData": {}, "feature": "PetalWidthCm", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "16", "mean": "1.09", "stddev": "0.8", "min": "0.2", "max": "2.3", "missing": "0"}, "transformation": ""}]}))

	PipelineNotification.PipelineNotification().completed_notification('5e7dddb99ce151def126612b','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e7dddb99ce151def126612b','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify','http://137.116.116.173:3200/logs/getProductLogs')
	sys.exit(1)
Example #22
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e609d1cefb51b481c0dc16b', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
    CustomerResponse_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e609d1cefb51b481c0dc16b", spark,
        "{'url': '/Demo/CustomerChurnTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapib8073bbfa952efa9d363b234ce06e2c6', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e609d1cefb51b481c0dc16b', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e609d1cefb51b481c0dc16b', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify',
        'http://40.83.140.93:3200/logs/getProductLogs')
    sys.exit(1)
Example #23
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e7b599dd90568e9c50f2fb7', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
    newmltestcommitapp_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e7b599dd90568e9c50f2fb7", spark,
        "{'url': '', 'file_type': 'Delimeted', 'dbfs_token': '', 'dbfs_domain': '', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e7b599dd90568e9c50f2fb7', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e7b599dd90568e9c50f2fb7', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify',
        'http://40.83.140.93:3200/logs/getProductLogs')
    sys.exit(1)
Example #24
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
from clustering.ClusteringMain import Clustering
import json

try: 
	PipelineNotification.PipelineNotification().started_notification('5e7a182edd2cef6591905210','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
	abcxyzzzNEwClusterApplication_DBFS = DBFSConnector.DBFSConnector.fetch([], {}, "5e7a182edd2cef6591905210", spark, "{'url': '/Demo/MovieRatingsTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapid40af94a6c7d8d818acf548df4c773f8', 'dbfs_domain': 'eastus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}")

	PipelineNotification.PipelineNotification().completed_notification('5e7a182edd2cef6591905210','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e7a182edd2cef6591905210','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify','http://137.116.116.173:3200/logs/getProductLogs')
	sys.exit(1)
try: 
	PipelineNotification.PipelineNotification().started_notification('5e7a182edd2cef6591905211','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
	abcxyzzzNEwClusterApplication_AutoFE = TranformationsMainFlow.TramformationMain.run(["5e7a182edd2cef6591905210"],{"5e7a182edd2cef6591905210": abcxyzzzNEwClusterApplication_DBFS}, "5e7a182edd2cef6591905211", spark,json.dumps( {"FE": [{"transformationsData": {}, "feature": "UserId", "transformation": "", "type": "numeric", "replaceby": "mean", "selected": "True", "stats": {"count": "2587", "mean": "465.06", "stddev": "264.69", "min": "1", "max": "943", "missing": "0"}}, {"transformationsData": {}, "feature": "MovieId", "transformation": "", "type": "numeric", "replaceby": "mean", "selected": "True", "stats": {"count": "2587", "mean": "432.85", "stddev": "337.75", "min": "1", "max": "1656", "missing": "0"}}, {"transformationsData": {}, "feature": "Rating", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "2587", "mean": "3.52", "stddev": "1.15", "min": "1.0", "max": "5.0", "missing": "0"}, "transformation": ""}, {"transformationsData": {"feature_label": "Timestamp"}, "feature": "Timestamp", "type": "date", "selected": "True", "replaceby": "random", "stats": {"count": "", "mean": "", "stddev": "", "min": "", "max": "", "missing": "0"}, "transformation": "Extract Date"}, {"transformationsData": {}, "feature": "AvgRating", "type": "real", "selected": "True", "replaceby": "mean", "stats": {"count": "2587", "mean": "3.53", "stddev": "0.44", "min": "1.51", "max": "4.67", "missing": "0"}, "transformation": ""}, {"feature": "Timestamp_dayofmonth", "transformation": "", "transformationsData": {}, "type": "numeric", "generated": "True", "selected": "True", "stats": {"count": "2587", "mean": "16.05", "stddev": "9.06", "min": "1", "max": "31", "missing": "0"}}, {"feature": "Timestamp_month", "transformation": "", "transformationsData": {}, "type": "numeric", "generated": "True", "selected": "True", "stats": {"count": "2587", "mean": "7.0", "stddev": "4.34", "min": "1", "max": "12", "missing": "0"}}, {"feature": "Timestamp_year", "transformation": "", "transformationsData": {}, "type": "numeric", "generated": "True", "selected": "True", "stats": {"count": "2587", "mean": "1997.45", "stddev": "0.5", "min": "1997", "max": "1998", "missing": "0"}}]}))

	PipelineNotification.PipelineNotification().completed_notification('5e7a182edd2cef6591905211','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify')
except Exception as ex: 
	PipelineNotification.PipelineNotification().failed_notification(ex,'5e7a182edd2cef6591905211','5e1eb97a7d1a8956f654a15f','http://137.116.116.173:3200/pipeline/notify','http://137.116.116.173:3200/logs/getProductLogs')
	sys.exit(1)
Example #25
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e37c36288430f5a3614bb0b', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify')
    TestCommitAPP_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e37c36288430f5a3614bb0b", spark,
        "{'url': '', 'file_type': 'Delimeted', 'dbfs_token': '', 'dbfs_domain': '', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e37c36288430f5a3614bb0b', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e37c36288430f5a3614bb0b', '567a95c8ca676c1d07d5e3e7',
        'http://104.40.91.74:3200/pipeline/notify',
        'http://104.40.91.74:3200/logs/getProductLogs')
    sys.exit(1)
Example #26
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e58b21c1386312df04498e1', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
    pchurntestapp_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e58b21c1386312df04498e1", spark,
        "{'url': '/Demo/PredictiveChurnTrain.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapib8073bbfa952efa9d363b234ce06e2c6', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e58b21c1386312df04498e1', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e58b21c1386312df04498e1', '5e39734e0204cd465d4d2e10',
        'http://40.83.140.93:3200/pipeline/notify',
        'http://40.83.140.93:3200/logs/getProductLogs')
    sys.exit(1)
Example #27
import sys
from operations import TopOperation
from operations import JoinOperation
from operations import AggregationOperation
from operations import FormulaOperation
from operations import FilterOperation
from connectors import DBFSConnector
from connectors import CosmosDBConnector
from datatransformations import TranformationsMainFlow
from automl import tpot_execution
from core import PipelineNotification
import json

try:
    PipelineNotification.PipelineNotification().started_notification(
        '5e69ff73e80b442bf284dbc9', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify')
    PredictiveChurn_DBFS = DBFSConnector.DBFSConnector.fetch(
        [], {}, "5e69ff73e80b442bf284dbc9", spark,
        "{'url': '/Demo/PredictiveChurnTraining.csv', 'file_type': 'Delimeted', 'dbfs_token': 'dapi0ef076722999cf4cd8859e9aafdb7b76', 'dbfs_domain': 'westus.azuredatabricks.net', 'delimiter': ',', 'is_header': 'Use Header Line'}"
    )

    PipelineNotification.PipelineNotification().completed_notification(
        '5e69ff73e80b442bf284dbc9', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify')
except Exception as ex:
    PipelineNotification.PipelineNotification().failed_notification(
        ex, '5e69ff73e80b442bf284dbc9', '5df78f4be2f2eff24740bbd7',
        'http://13.68.212.36:3200/pipeline/notify',
        'http://13.68.212.36:3200/logs/getProductLogs')
    sys.exit(1)