Code Example #1
File: deployment.py  Project: radon-h2020/radon-ctt
    @classmethod
    def delete_by_uuid(cls, del_uuid):
        deployment = Deployment.query.filter_by(uuid=del_uuid)
        if deployment:
            from models.execution import Execution
            linked_executions = Execution.query.filter_by(deployment_uuid=del_uuid)
            for result in linked_executions:
                Execution.delete_by_uuid(result.uuid)

            deployment.delete()
            # rmtree(self.fq_storage_path)
            db_session.commit()
Code Example #2
    def __init__(self, execution):
        self.uuid = str(uuid.uuid4())
        self.execution_uuid = execution.uuid
        self.storage_path = os.path.join(self.__tablename__, self.uuid)

        if execution and os.path.isfile(execution.fq_result_storage_path):
            if not os.path.exists(self.fq_storage_path):
                os.makedirs(self.fq_storage_path)

            # Potentially post-process the results before storing them.
            shutil.copy2(execution.fq_result_storage_path,
                         self.fq_result_storage_path)
            db_session.add(self)
            db_session.commit()
            current_app.logger.info(f'AutoUndeploy is set to {AutoUndeploy}.')
            if AutoUndeploy:
                current_app.logger.info(f'Initiating undeployment.')
                linked_execution: Execution = Execution.get_by_uuid(
                    self.execution_uuid)
                linked_execution.undeploy()
            else:
                current_app.logger.info(
                    f'NOT initiating undeployment due to disabled AutoUndeploy.'
                )
        else:
            raise Exception(f'Linked entities do not exist.')
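
The constructor above depends on fully-qualified path helpers (fq_storage_path, fq_result_storage_path) defined elsewhere in the model. As a minimal sketch, assuming a configured base directory (the STORAGE_BASE name and value below are illustrative, not taken from the project), such a property could look like this:

import os

STORAGE_BASE = '/tmp/ctt-storage'  # assumed base directory, purely illustrative

class StoragePathMixin:
    storage_path: str  # relative path such as '<tablename>/<uuid>'

    @property
    def fq_storage_path(self) -> str:
        # Fully-qualified directory for this object's stored artifacts.
        return os.path.join(STORAGE_BASE, self.storage_path)
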
Code Example #3
def test_data_source_not_found(mocker, eraser, caplog):
    service = mocker.MagicMock()

    mocker.patch.object(eraser, '_get_analytics_service')
    eraser._get_analytics_service.return_value = service

    mocker.patch.object(eraser, '_is_table_empty')
    eraser._is_table_empty.return_value = False

    service.management().customDataSources().list().execute.return_value = {
        'items': [{
            'id': 1,
            'name': 'wrong_name'
        }]
    }

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))
    # Act
    try:
        next(eraser.process(Batch(execution, [])))
    except StopIteration:
        pass

    assert 'data_import_name - data import not found, please configure it in Google Analytics' in caplog.text
Code Example #4
def test_bigquery_write(mocker, uploader):
  bq_client = mocker.MagicMock()

  mocker.patch.object(uploader, "_get_bq_client")
  uploader._get_bq_client.return_value = bq_client

  table = mocker.MagicMock()
  bq_client.get_table.return_value = table

  now = datetime.datetime.now().timestamp()

  account_config = AccountConfig("account_id", False, "ga_account_id", "", "")
  destination = Destination(
      "dest1",
      DestinationType.GA_MEASUREMENT_PROTOCOL,
      ["web_property", "view", "c", "list", "d", "buyers_custom_dim"])
  source = Source("orig1", SourceType.BIG_QUERY, ["dt1", "buyers"])
  execution = Execution(account_config, source, destination)

  uploader._do_process(Batch(execution, [{"uuid": "uuid-1"}, {"uuid": "uuid-2"}]), now)

  bq_client.insert_rows.assert_called_once_with(
      table,
      [{"uuid": "uuid-1", "timestamp": now},
       {"uuid": "uuid-2", "timestamp": now}],
      (SchemaField("uuid", "string"),
       SchemaField("timestamp", "timestamp")))
Code Example #5
def test_conversion_upload_match_id(mocker, uploader):
    mocker.patch.object(uploader, '_get_dcm_service')

    floodlight_activity_id = 'floodlight_activity_id'
    floodlight_configuration_id = 'floodlight_configuration_id'

    source = Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers'))
    destination = Destination(
        'dest1', DestinationType.CM_OFFLINE_CONVERSION,
        (floodlight_activity_id, floodlight_configuration_id))
    execution = Execution(_account_config, source, destination)
    current_time = time.time()

    mocker.patch.object(time, 'time')
    time.time.return_value = current_time

    uploader._do_process(Batch(execution, [{'matchId': 'abc'}]), current_time)

    expected_body = {
        'conversions': [{
            'matchId': 'abc',
            'floodlightActivityId': floodlight_activity_id,
            'floodlightConfigurationId': floodlight_configuration_id,
            'ordinal': math.floor(current_time * 10e5),
            'timestampMicros': math.floor(current_time * 10e5)
        }],
    }

    uploader._get_dcm_service().conversions().batchinsert.assert_any_call(
        profileId='dcm_profile_id', body=expected_body)
Code Example #6
def test_error_on_api_call(mocker, uploader, caplog):
    caplog.set_level(logging.INFO,
                     'megalista.CampaignManagerConversionsUploader')
    mocker.patch.object(uploader, '_get_dcm_service')
    service = mocker.MagicMock()
    uploader._get_dcm_service.return_value = service

    service.conversions().batchinsert().execute.return_value = {
        'hasFailures': True,
        'status': [{
            'errors': [{
                'code': '123',
                'message': 'error_returned'
            }]
        }]
    }

    source = Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers'))
    destination = Destination('dest1', DestinationType.CM_OFFLINE_CONVERSION,
                              ['a', 'b'])
    execution = Execution(_account_config, source, destination)

    uploader._do_process(Batch(execution, [{'gclid': '123'}]), time.time())

    assert 'Error(s) inserting conversions:' in caplog.text
    assert '[123]: error_returned' in caplog.text
Code Example #7
def test_fail_missing_destination_metadata(uploader, mocker):
    mocker.patch.object(uploader, '_get_ssd_service')
    source = Source('orig1', SourceType.BIG_QUERY, ('dt1', 'buyers'))
    destination = Destination('dest1', DestinationType.ADS_SSD_UPLOAD, ['1'])
    execution = Execution(_account_config, source, destination)
    batch = Batch(execution, [])
    uploader.process(batch)
    uploader._get_ssd_service.assert_not_called()
Code Example #8
    def create_execution(self, test_id, deployment_uuid):
        if not deployment_uuid:
            self.__outputs[test_id] += f'\nNo Deployment UUID provided.'
        execution = Execution.create(deployment_uuid)
        if execution.uuid:
            self.__outputs[test_id] += f'\nCreated execution: {execution}'
            return execution.uuid
        return None
Code Example #9
def get_executions():  # noqa: E501
    """Get all executions

     # noqa: E501


    :rtype: List[Execution]
    """
    executions = ExecutionImpl.get_all()
    return execution_schema_many.dump(executions)
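
This controller, like the ones in the later examples, serializes ORM objects through execution_schema and execution_schema_many. A minimal sketch of how such Marshmallow schemas might be declared is shown below; the field list is inferred from the surrounding examples and is an assumption, not the project's actual schema:

from marshmallow import Schema, fields

# Hedged sketch: field names are inferred from the surrounding examples.
class ExecutionSchema(Schema):
    uuid = fields.String()
    deployment_uuid = fields.String()

execution_schema = ExecutionSchema()
execution_schema_many = ExecutionSchema(many=True)
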
Code Example #10
    def read(self, range_tracker):
        def document_to_dict(doc):
            if not doc.exists:
                return None
            doc_dict = doc.to_dict()
            doc_dict['id'] = doc.id
            return doc_dict

        firestore_collection = self._setup_firestore_collection.get()
        logging.getLogger("megalista.FirestoreExecutionSource").info(
            f"Loading Firestore collection {firestore_collection}...")
        db = firestore.Client()
        entries = db.collection(self._setup_firestore_collection.get()).where(
            'active', '==', 'yes').stream()
        entries = [document_to_dict(doc) for doc in entries]

        account_data = document_to_dict(
            db.collection(self._setup_firestore_collection.get()).document(
                'account_config').get())

        if not account_data:
            raise Exception('Firestore collection is absent')
        google_ads_id = account_data.get('google_ads_id', 'empty')
        mcc_trix = account_data.get('mcc_trix', 'FALSE')
        mcc = False if mcc_trix is None else bool(
            distutils.util.strtobool(mcc_trix))
        app_id = account_data.get('app_id', 'empty')
        google_analytics_account_id = account_data.get(
            'google_analytics_account_id', 'empty')
        campaign_manager_account_id = account_data.get(
            'campaign_manager_account_id', 'empty')

        account_config = AccountConfig(google_ads_id, mcc,
                                       google_analytics_account_id,
                                       campaign_manager_account_id, app_id)
        logging.getLogger("megalista.FirestoreExecutionSource").info(
            f"Loaded: {account_config}")

        sources = self._read_sources(entries)
        destinations = self._read_destination(entries)
        if entries:
            for entry in entries:
                if entry['active'].upper() == 'YES':
                    logging.getLogger(
                        "megalista.FirestoreExecutionSource"
                    ).info(
                        f"Executing step Source:{sources[entry['id'] + '_source'].source_name} -> Destination:{destinations[entry['id'] + '_destination'].destination_name}"
                    )
                    yield Execution(account_config,
                                    sources[entry['id'] + '_source'],
                                    destinations[entry['id'] + '_destination'])
        else:
            logging.getLogger("megalista.FirestoreExecutionSource").warn(
                "No schedules found!")
Code Example #11
def get_execution_by_uuid(execution_uuid):  # noqa: E501
    """Retrieve an execution

     # noqa: E501

    :param execution_uuid: UUID of the execution to return
    :type execution_uuid: str

    :rtype: Execution
    """
    execution = ExecutionImpl.get_by_uuid(execution_uuid)
    return execution_schema.dump(execution)
Code Example #12
def delete_execution_by_uuid(execution_uuid):  # noqa: E501
    """Delete an execution

    Deletes the execution with the given UUID and all elements depending on it # noqa: E501

    :param execution_uuid: UUID of the execution to delete
    :type execution_uuid: str

    :rtype: Execution
    """
    execution = ExecutionImpl.delete_by_uuid(execution_uuid)
    return execution_schema.dump(execution)
Code Example #13
def test_elements_uploading(mocker, uploader):
    service = mocker.MagicMock()

    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service

    service.management().customDataSources().list().execute.return_value = {
        'items': [{
            'id': 1,
            'name': 'data_import_name'
        }]
    }

    execution = Execution(
        _account_config,
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))

    # Add mock to side effect of uploadData()
    my_mock = mocker.MagicMock()
    service.management().uploads().uploadData.side_effect = my_mock

    # Act
    uploader.process(
        Batch(execution, [{
            'user_id': '12',
            'cd1': 'value1a',
            'cd2': 'value2a'
        }, {
            'user_id': '34',
            'cd1': 'value1b',
            'cd2': 'value2b'
        }, {
            'user_id': '56',
            'cd1': None,
            'cd2': ''
        }]))

    # Called once
    my_mock.assert_called_once()

    # Intercept args called
    _, kwargs = my_mock.call_args

    # Check if really sent values from custom field
    media_bytes = kwargs['media_body'].getbytes(0, -1)

    print(media_bytes)
    assert media_bytes == b'ga:user_id,ga:cd1,ga:cd2\n' \
                          b'12,value1a,value2a\n' \
                          b'34,value1b,value2b\n' \
                          b'56,,'
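
The assertion above implies that the uploader flattens each batch into a CSV payload whose header prefixes every column with 'ga:' and whose missing values become empty strings. A hedged sketch of that assembly using googleapiclient's MediaInMemoryUpload follows; the helper name and the column-ordering logic are assumptions, not the project's actual code:

from googleapiclient.http import MediaInMemoryUpload

def build_media_body(rows, columns):
    # Header uses the ga:-prefixed column names expected by GA Data Import.
    header = ','.join(f'ga:{c}' for c in columns)
    # None and missing values are rendered as empty strings, matching the test.
    lines = [','.join(str(row.get(c) or '') for c in columns) for row in rows]
    payload = '\n'.join([header] + lines)
    return MediaInMemoryUpload(payload.encode('utf-8'),
                               mimetype='application/octet-stream')
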
Code Example #14
def test_exception_no_event_nor_user_property(uploader, caplog):
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        destination = Destination(
            'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
            ['api_secret', 'False', 'False', '', 'some_id', ''])
        source = Source('orig1', SourceType.BIG_QUERY, [])
        execution = Execution(_account_config, source, destination)
        with pytest.raises(
                ValueError,
                match='GA4 MP should be called either for sending events'):
            next(uploader.process(Batch(execution, [])))
Code Example #15
def test_files_deleted(mocker, eraser):
    service = mocker.MagicMock()

    mocker.patch.object(eraser, '_get_analytics_service')
    eraser._get_analytics_service.return_value = service

    mocker.patch.object(eraser, '_is_table_empty')
    eraser._is_table_empty.return_value = False

    service.management().customDataSources().list().execute.return_value = {
        'items': [{
            'id': 1,
            'name': 'data_import_name'
        }, {
            'id': 2,
            'name': 'data_import_name2'
        }]
    }

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_DATA_IMPORT,
                    ['web_property', 'data_import_name']))

    # Add mock to side effect of list uploads
    service.management().uploads().list().execute.return_value = {
        'items': [{
            'id': 'ab'
        }, {
            'id': 'cd'
        }]
    }

    # Add mock to side effect of deleteUploadData
    delete_call_mock = mocker.MagicMock()
    service.management().uploads(
    ).deleteUploadData.side_effect = delete_call_mock

    # Act
    next(eraser.process(Batch(execution, [])))

    # Called once
    delete_call_mock.assert_called_once()

    # Intercept args called
    _, kwargs = delete_call_mock.call_args

    # Check if really sent values from custom field
    ids = kwargs['body']
Code Example #16
def test_exception_app_and_web(uploader, caplog):
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        destination = Destination(
            'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
            ['api_secret', 'False', 'True', '', 'some_app_id', 'some_web_id'])
        source = Source('orig1', SourceType.BIG_QUERY, [])
        execution = Execution(_account_config, source, destination)
        with pytest.raises(
                ValueError,
                match='GA4 MP should be called either with a firebase_app_id'):
            next(uploader.process(Batch(execution, [{
                'name': 'event_name',
            }])))
Code Example #17
def create_execution(post_execution=None):  # noqa: E501
    """Creates an execution

     # noqa: E501

    :param post_execution:
    :type post_execution: dict | bytes

    :rtype: None
    """
    if connexion.request.is_json:
        post_execution = POSTExecution.from_dict(connexion.request.get_json())  # noqa: E501

    created_execution = ExecutionImpl.create(post_execution.deployment_uuid)
    return execution_schema.dump(created_execution)
Code Example #18
def handleResults(node, input, output):
    results = input.get("results", [])

    if results:
        for commandId, result in results.items():
            (exitCode, stdout, stderr) = result
            execution = Execution(commandId, node.id, exitCode, stdout, stderr)
            # Insert or Update if already exists
            db.session.merge(execution)
        db.session.commit()

    lastNodeExecution = node.executions.order_by(
        Execution.commandId.desc()).first()

    if lastNodeExecution:
        output["resultAck"] = lastNodeExecution.commandId
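
The handler above relies on db.session.merge to insert or update, which requires the Execution model's primary key to identify one command result per node. A minimal Flask-SQLAlchemy sketch of such a model follows; the column types and the composite primary key are assumptions:

from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

class Execution(db.Model):
    # Composite key lets merge() upsert one row per (commandId, node) pair.
    commandId = db.Column(db.Integer, primary_key=True)
    node_id = db.Column(db.Integer, db.ForeignKey('node.id'), primary_key=True)
    exitCode = db.Column(db.Integer)
    stdout = db.Column(db.Text)
    stderr = db.Column(db.Text)

    def __init__(self, commandId, node_id, exitCode, stdout, stderr):
        self.commandId = commandId
        self.node_id = node_id
        self.exitCode = exitCode
        self.stdout = stdout
        self.stderr = stderr
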
Code Example #19
def test_list_creation_not_mcc(mocker, uploader):
    ads_account_id = 'xxx-yyy-zzzz'
    ga_account_id = 'acc'

    service = mocker.MagicMock()

    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service

    service.management().remarketingAudience().insert().execute.return_value = {
        'id': 1
    }

    execution = Execution(
        AccountConfig(ads_account_id, False, ga_account_id, '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination(
            'dest1', DestinationType.GA_USER_LIST_UPLOAD,
            ['web_property', 'view', 'c', 'list', 'd', 'buyers_custom_dim']))
    uploader.process(Batch(execution, []))

    service.management().remarketingAudience().insert.assert_any_call(
        accountId=ga_account_id,
        webPropertyId='web_property',
        body={
            'name': 'list',
            'linkedViews': ['view'],
            'linkedAdAccounts': [{
                'type': 'ADWORDS_LINKS',
                'linkedAccountId': ads_account_id
            }],
            'audienceType': 'SIMPLE',
            'audienceDefinition': {
                'includeConditions': {
                    'kind':
                        'analytics#includeConditions',
                    'isSmartList':
                        False,
                    'segment':
                        'users::condition::%s==buyer' % 'buyers_custom_dim',
                    'membershipDurationDays':
                        365
                }
            }
        })
Code Example #20
def test_succesful_web_user_property_call_with_user_id(uploader, caplog):
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        destination = Destination(
            'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
            ['api_secret', 'False', 'True', '', '', 'some_id'])
        source = Source('orig1', SourceType.BIG_QUERY, [])
        execution = Execution(_account_config, source, destination)
        next(
            uploader.process(
                Batch(execution, [{
                    'user_ltv': '42',
                    'user_id': 'Id42',
                    'client_id': 'someId'
                }])))

        assert m.call_count == 1
        assert m.last_request.json()['user_id'] == 'Id42'
Code Example #21
def test_avoid_list_creation_when_name_blank(mocker, uploader):
    ads_account_id = 'xxx-yyy-zzzz'
    ga_account_id = 'acc'

    service = mocker.MagicMock()

    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service

    execution = Execution(
        AccountConfig(ads_account_id, True, ga_account_id, '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD,
                    ['web_property', 'view', 'c', '', 'd', 'buyers_custom_dim']))

    uploader.process(Batch(execution, []))

    service.management().remarketingAudience().insert.assert_not_called()
Code Example #22
def test_elements_uploading_custom_field(mocker, uploader):
    service = mocker.MagicMock()

    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service

    service.management().customDataSources().list().execute.return_value = {
        'items': [{
            'id': 1,
            'name': 'data_import_name'
        }]
    }

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD, [
            'web_property', 'b', 'data_import_name', 'd', 'user_id_custom_dim',
            'buyer_custom_dim', 'my_field'
        ]))

    # Add mock to side effect of uploadData()
    my_mock = mocker.MagicMock()
    service.management().uploads().uploadData.side_effect = my_mock

    # Act
    uploader.process(Batch(execution, [{
        'user_id': '12',
        'my_field': '11'
    },  {
        'user_id': '34',
        'my_field': '22'
    }]))

    # Called once
    my_mock.assert_called_once()

    # Intercept args called
    _, kwargs = my_mock.call_args

    # Check if really sent values from custom field
    media_bytes = kwargs['media_body'].getbytes(0, -1)

    assert media_bytes == b'user_id_custom_dim,buyer_custom_dim\n12,11\n34,22'
Code Example #23
def test_succesful_app_event_call(uploader, caplog):
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        destination = Destination(
            'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
            ['api_secret', 'True', 'False', '', 'some_id', ''])
        source = Source('orig1', SourceType.BIG_QUERY, [])
        execution = Execution(_account_config, source, destination)
        next(
            uploader.process(
                Batch(execution, [{
                    'app_instance_id': '123',
                    'name': 'event_name',
                    'value': '42',
                    'important_event': 'False'
                }])))

        assert m.call_count == 1
        assert m.last_request.json()['events'][0]['params']['value'] == '42'
Code Example #24
def test_conversion_upload(mocker, uploader):
    mocker.patch.object(uploader, '_get_oc_service')
    conversion_name = 'user_list'
    destination = Destination('dest1', DestinationType.ADS_OFFLINE_CONVERSION,
                              ['user_list'])
    source = Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers'])
    execution = Execution(_account_config, source, destination)

    time1 = '2020-04-09T14:13:55.0005'
    time1_result = '20200409 141355 America/Sao_Paulo'

    time2 = '2020-04-09T13:13:55.0005'
    time2_result = '20200409 131355 America/Sao_Paulo'

    batch = Batch(execution, [{
        'time': time1,
        'amount': '123',
        'gclid': '456'
    }, {
        'time': time2,
        'amount': '234',
        'gclid': '567'
    }])

    uploader.process(batch)

    uploader._get_oc_service.return_value.mutate.assert_any_call([{
        'operator': 'ADD',
        'operand': {
            'conversionName': conversion_name,
            'conversionTime': time1_result,
            'conversionValue': '123',
            'googleClickId': '456'
        }
    }, {
        'operator': 'ADD',
        'operand': {
            'conversionName': conversion_name,
            'conversionTime': time2_result,
            'conversionValue': '234',
            'googleClickId': '567'
        }
    }])
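
The expected values above imply that the uploader reformats each ISO timestamp into 'YYYYMMDD HHMMSS' plus a timezone label. A hedged sketch of that conversion follows; the function name is an assumption, and the hard-coded America/Sao_Paulo suffix simply mirrors the test (the real uploader may make it configurable):

from datetime import datetime

def format_conversion_time(value: str, timezone: str = 'America/Sao_Paulo') -> str:
    # '2020-04-09T14:13:55.0005' -> '20200409 141355 America/Sao_Paulo'
    parsed = datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')
    return f"{parsed.strftime('%Y%m%d %H%M%S')} {timezone}"

assert format_conversion_time('2020-04-09T14:13:55.0005') == '20200409 141355 America/Sao_Paulo'
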
Code Example #25
def test_bigquery_write_failure(mocker, uploader, caplog):
  bq_client = mocker.MagicMock()

  mocker.patch.object(uploader, "_get_bq_client")
  uploader._get_bq_client.return_value = bq_client

  error_message = "This is an error message"
  bq_client.insert_rows.return_value = [{"errors": error_message}]

  account_config = AccountConfig("account_id", False, "ga_account_id", "", "")
  source = Source("orig1", SourceType.BIG_QUERY, ["dt1", "buyers"])
  destination = Destination(
      "dest1",
      DestinationType.GA_MEASUREMENT_PROTOCOL,
      ["web_property", "view", "c", "list", "d", "buyers_custom_dim"])

  execution = Execution(account_config, source, destination)

  uploader.process(Batch(execution, [{"uuid": "uuid-1"}]))

  assert error_message in caplog.text
Code Example #26
def test_exception_app_event_without_app_instance_id(uploader, caplog):
    with requests_mock.Mocker() as m:
        m.post(requests_mock.ANY, status_code=204)
        destination = Destination(
            'dest1', DestinationType.GA_4_MEASUREMENT_PROTOCOL,
            ['api_secret', 'True', 'False', '', 'some_id', ''])
        source = Source('orig1', SourceType.BIG_QUERY, [])
        execution = Execution(_account_config, source, destination)
        with pytest.raises(
                ValueError,
                match=
                'GA4 MP needs an app_instance_id parameter when used for an App Stream.'
        ):
            next(
                uploader.process(
                    Batch(execution, [{
                        'client_id': '123',
                        'name': 'event_name',
                        'value': '42',
                        'important_event': 'False'
                    }])))
Code Example #27
def test_list_already_exists(mocker, uploader):
    service = mocker.MagicMock()
    service.management().remarketingAudience().list().execute = mocker.Mock(
        return_value={'items': [{
            'id': 1,
            'name': 'list'
        }]})

    mocker.patch.object(uploader, '_get_analytics_service')
    uploader._get_analytics_service.return_value = service

    execution = Execution(
        AccountConfig('', False, '', '', ''),
        Source('orig1', SourceType.BIG_QUERY, ['dt1', 'buyers']),
        Destination('dest1', DestinationType.GA_USER_LIST_UPLOAD,
                    ['a', 'b', 'c', 'list', 'd', 'e']))

    uploader.process(Batch(execution, []))

    uploader._get_analytics_service().management().remarketingAudience(
    ).insert.assert_not_called()
Code Example #28
    def read(self, range_tracker):
        sheet_id = self._setup_sheet_id.get()
        logging.getLogger("megalista.SpreadsheetExecutionSource").info(
            f"Loading configuration sheet {sheet_id}...")
        google_ads_id = self._sheets_config.get_value(sheet_id,
                                                      "GoogleAdsAccountId")
        mcc_trix = self._sheets_config.get_value(sheet_id, "GoogleAdsMCC")
        mcc = False if mcc_trix is None else bool(
            distutils.util.strtobool(mcc_trix))
        app_id = self._sheets_config.get_value(sheet_id, "AppId")
        google_analytics_account_id = self._sheets_config.get_value(
            sheet_id, "GoogleAnalyticsAccountId")
        campaign_manager_account_id = self._sheets_config.get_value(
            sheet_id, "CampaignManagerAccountId")
        account_config = AccountConfig(google_ads_id, mcc,
                                       google_analytics_account_id,
                                       campaign_manager_account_id, app_id)
        logging.getLogger("megalista.SpreadsheetExecutionSource").info(
            f"Loaded: {account_config}")

        sources = self._read_sources(self._sheets_config, sheet_id)
        destinations = self._read_destination(self._sheets_config, sheet_id)

        schedules_range = self._sheets_config.get_range(
            sheet_id, 'SchedulesRange')
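        # Illustrative shape of the range returned above (placeholder values,
        # not taken from any real sheet):
        #   {'values': [['YES', 'source_a', 'destination_a'],
        #               ['NO',  'source_b', 'destination_b']]}
        # Only rows whose first cell is 'YES' are turned into Executions below.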
        if 'values' in schedules_range:
            for schedule in schedules_range['values']:
                if schedule[0] == 'YES':
                    logging.getLogger(
                        "megalista.SpreadsheetExecutionSource"
                    ).info(
                        f"Executing step Source:{sources[schedule[1]].source_name} -> Destination:{destinations[schedule[2]].destination_name}"
                    )
                    yield Execution(account_config, sources[schedule[1]],
                                    destinations[schedule[2]])
        else:
            logging.getLogger("megalista.SpreadsheetExecutionSource").warn(
                "No schedules found!")
Code Example #29
    def fake_exec(self, commandId, node=None, exitCode=0, stdout="OK", stderr=""):
        if not node:
            node = Node.query.one()
        # Record the result with the provided exit code.
        execution = Execution(commandId, node.id, exitCode, stdout, stderr)
        db.session.add(execution)
        db.session.commit()
Code Example #30
    @classmethod
    def create(cls, execution_uuid):
        linked_execution = Execution.get_by_uuid(execution_uuid)
        return Result(linked_execution)