Beispiel #1
0
    def execute(self, connector, process_action):
        """Propagate a state onto the payload of the process' chunk.

        Looks up the process referenced by ``process_action['pcs_id']``,
        resolves its chunk, opens a connector to the chunk's own datasource
        and updates the payload state there with the 'state' action property.

        :param connector: open connector to the paprika datasource
        :param process_action: row-like dict with at least 'job_name' and 'pcs_id'
        """
        job_name = process_action['job_name']
        logger = Logger(connector, self)

        chunk_repository = ChunkRepository(connector)
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(connector)

        # retrieve the chunk properties
        # NOTE: the 'message' and 'backtrace' properties were fetched here but
        # never used, so those lookups have been dropped.
        process = process_repository.find_by_id(process_action['pcs_id'])
        chunk_id = process_property_repository.get_property(process, 'chunk_id')

        chunk = chunk_repository.find_by_id(chunk_id)

        state = process_action_property_repository.get_property(process_action, 'state')

        # retrieve the datasource of the payload and update the payload state
        datasource = chunk['datasource']
        payload_ds = DatasourceBuilder.find(connector, datasource)
        payload_c = ConnectorFactory.create_connector(payload_ds)
        try:
            payload_repository = PayloadRepository(payload_c)
            payload = json.loads(chunk['payload'])
            payload_repository.state(chunk, payload, state)
        finally:
            # release the payload connector even when the state update fails
            payload_c.close()
        logger.info(job_name, 'job_name: ' + job_name + " state: " + chunk['state'])
Beispiel #2
0
    def test_payload(self):
        """Fetch the next payload for the first active hook and mark it READY."""

        paprika_ds = DatasourceBuilder.build('paprika-ds.json')
        connector = ConnectorFactory.create_connector(paprika_ds)

        hook_repository = HookRepository(connector)
        hooks = hook_repository.list_active()

        # retrieve the first hook, not the best method but for now it works
        # expected to retrieve the hook to send an offer (nuon).
        hook = hooks[0]
        # parenthesized print works on both Python 2 and 3 for a single value
        print(hook)

        payload_ds = DatasourceBuilder.find(connector, hook['datasource'])
        payload_c = ConnectorFactory.create_connector(payload_ds)
        try:
            payload_repository = PayloadRepository(payload_c)
            payload = payload_repository.next(hook)
            print(payload)
            print(payload['proces_bestelling_id'])

            payload_repository.state(hook, payload, 'READY')
        finally:
            # release the payload connector even when an assertion/call fails
            payload_c.close()
Beispiel #3
0
    def execute(self, connector, process_action):
        """Poll an Oracle scheduler job and record its run result.

        While the job identified by the process property 'identifier' is still
        running, returns *process_action* untouched. Once finished, stores the
        run result's 'message', 'state' and 'backtrace' as process properties.

        :param connector: open connector to the paprika datasource
        :param process_action: row-like dict with at least 'job_name' and 'pcs_id'
        """
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        # retrieve the file properties
        process = process_repository.find_by_id(process_action['pcs_id'])
        identifier = process_property_repository.get_property(
            process, 'identifier')
        datasource = process_action_property_repository.get_property(
            process_action, 'datasource')

        oracle_ds = DatasourceBuilder.find(connector, datasource)
        oracle_c = ConnectorFactory.create_connector(oracle_ds)
        try:
            scheduler = OracleScheduler(oracle_c)

            # job still running: nothing to record yet
            if scheduler.is_running(identifier):
                return process_action

            # the job finished; collect its result
            created_at = process['created_at']
            run_result = scheduler.run_result(identifier, job_name, created_at)
        finally:
            # always release the oracle connector, also on errors
            oracle_c.close()

        process_property_repository.set_property(process, 'message',
                                                 run_result['message'])
        process_property_repository.set_property(process, 'state',
                                                 run_result['state'])
        process_property_repository.set_property(process, 'backtrace',
                                                 run_result['backtrace'])
Beispiel #4
0
    def execute(self, connector, process_action):
        """Create an Oracle scheduler job for this process action.

        Builds a job message from the action properties ('method_name',
        'params', 'test_result_params'), submits it to the Oracle scheduler of
        the configured datasource and stores the generated job identifier on
        the process so a later poll step can find it.

        :param connector: open connector to the paprika datasource
        :param process_action: row-like dict with at least 'job_name' and 'pcs_id'
        """
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        # NOTE: an unused FileRepository instance was removed here.
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        # retrieve the file properties
        process = process_repository.find_by_id(process_action['pcs_id'])

        # the payload, when present, must be bound locally: it is exposed to
        # ExpressionParser.parse via locals() below
        payload = process_property_repository.get_property(process, 'payload')
        if payload:
            payload = json.loads(payload)

        datasource = process_action_property_repository.get_property(
            process_action, 'datasource')
        method_name = process_action_property_repository.get_property(
            process_action, 'method_name')
        params = process_action_property_repository.get_property(
            process_action, 'params')
        test_result_params = process_action_property_repository.get_property(
            process_action, 'test_result_params')
        if params:
            params = json.loads(params)
            params = ExpressionParser.parse(params, locals())

        oracle_ds = DatasourceBuilder.find(connector, datasource)
        oracle_c = ConnectorFactory.create_connector(oracle_ds)
        try:
            scheduler = OracleScheduler(oracle_c)

            # unique identifier so concurrent runs of the same job do not clash
            identifier = job_name + '_' + Strings.identifier(10)

            message = dict()
            message['method_name'] = method_name
            message['identifier'] = identifier
            message['params'] = params
            if test_result_params:
                message['test_result_params'] = json.loads(test_result_params)
            scheduler.create_job(message)

            logger.info(job_name, json.dumps(message))

            process_property_repository.set_property(process, 'identifier',
                                                     identifier)
        finally:
            # always release the oracle connector, also on errors
            oracle_c.close()
Beispiel #5
0
    def execute(self, connector, process_action):
        """Execute an Oracle call described by the action properties.

        Resolves 'method_name' and 'params' from the action, runs the call on
        the configured Oracle datasource and stores the serialized call as the
        'call' property of the process.

        :param connector: open connector to the paprika datasource
        :param process_action: row-like dict with at least 'job_name' and 'pcs_id'
        """
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        # retrieve the file properties
        process = process_repository.find_by_id(process_action['pcs_id'])

        # retrieve the payload if present; it must be bound locally because it
        # is exposed to ExpressionParser.parse via locals() below
        payload = process_property_repository.get_property(process, 'payload')
        if payload:
            payload = json.loads(payload)

        datasource = process_action_property_repository.get_property(
            process_action, 'datasource')
        method_name = process_action_property_repository.get_property(
            process_action, 'method_name')
        params = process_action_property_repository.get_property(
            process_action, 'params')
        if params:
            params = json.loads(params)
            params = ExpressionParser.parse(params, locals())

        oracle_ds = DatasourceBuilder.find(connector, datasource)
        oracle_c = ConnectorFactory.create_connector(oracle_ds)
        try:
            oracle_call = OracleCall(oracle_c)

            call = dict()
            call['method_name'] = method_name
            call['params'] = params
            oracle_call.execute(call)

            logger.info(job_name, json.dumps(call))
        finally:
            # always release the oracle connector, also on errors
            oracle_c.close()

        process_property_repository.set_property(process, 'call',
                                                 json.dumps(call))
Beispiel #6
0
    def execute(self, process_action):
        """Normalize a dropped CSV file and stage it into 'tripolis_mailings'.

        Resolves the dropped file from the process properties, normalizes and
        re-encodes it to UTF-8, registers a new file record on the target
        datasource and stages the rows via the Stager.

        :param process_action: row-like dict with at least 'job_name' and 'pcs_id'
        """
        job_name = process_action['job_name']

        paprika_ds = DatasourceBuilder.build('paprika-ds.json')
        logger = Logger(self)
        file_repository = FileRepository(paprika_ds)
        process_repository = ProcessRepository(paprika_ds)
        process_property_repository = ProcessPropertyRepository(paprika_ds)
        process_action_property_repository = ProcessActionPropertyRepository(
            paprika_ds)

        # retrieve the file properties
        process = process_repository.find_by_id(process_action['pcs_id'])
        file_id = process_property_repository.get_property(process, 'file_id')
        file = file_repository.find_by_id(file_id)

        datasource = process_action_property_repository.get_property(
            process_action, 'datasource')
        drop_location = process_action_property_repository.get_property(
            process_action, 'drop_location')
        filename = drop_location + '/' + file['filename']

        # normalize line endings, then recode to utf8 before sniffing the
        # delimiter
        CsvFile.normalize(filename, filename + '.norm')
        source_encoding = CsvFile.guess_encoding(filename + '.norm')
        CsvFile.iconv(filename + '.norm', source_encoding, filename + '.utf8',
                      'utf8')
        delimiter = CsvFile.guess_delimiter(filename + '.utf8')

        # BUG FIX: 'header' was read below without ever being assigned, which
        # raised a NameError. No header is configured for this action, so it
        # is always taken from the file itself.
        header = None
        skip_header = False
        if not header:
            header = CsvFile.read_header(filename + '.utf8', delimiter)
            skip_header = True

        ds = DatasourceBuilder.find(datasource)
        connector = ConnectorFactory.create_connector(ds)

        # register the staged file on the target datasource
        file_repository = FileRepository(ds)
        file = dict()
        file['pcs_id'] = 0
        file['job_name'] = job_name
        file['filename'] = filename
        file['state'] = 'READY'
        file['rule'] = ''
        file['hashcode'] = ''
        file['pickup_location'] = ''
        file['path'] = ''
        file['filesize'] = 0
        file['pattern'] = ''
        file = file_repository.insert(file)

        # constants injected into every staged row
        statics = dict()
        statics['job_name'] = job_name
        statics['fle_id'] = file['id']

        mapping = 'id.eeid;notification.action;job_name.job_name;fle_id.fle_id'

        stager = Stager(ds)
        stager.stage(filename + '.utf8', header, delimiter,
                     'tripolis_mailings', mapping, skip_header, statics)
        stager.close()

        # BUG FIX: missing space between the job name and 'file:'
        logger.info(job_name,
                    'job_name: ' + job_name + ' file: ' + filename + " staged")
Beispiel #7
0
    def execute(self, connector, process_action):
        """Scrape Clang quickmail summaries and refresh the summary table.

        Requests the quickmail set from Clang, polls until the remote resource
        is READY, collects the mailing ids one by one, builds a summary dict
        per mailing (with computed cor/cto/ctr ratios) and replaces the whole
        ClangMailSummary table on the configured datasource with the result.
        """

        job_name = process_action['job_name']

        logger = Logger(connector, self)

        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        # retrieve the properties
        uuid = process_action_property_repository.get_property(
            process_action, 'uuid')
        # NOTE(review): page_size is fetched but never used; the hard-coded
        # page size 2 in mailing_set_get_mailing_ids below looks like it was
        # meant to be this value — confirm before changing.
        page_size = process_action_property_repository.get_property(
            process_action, 'page_size')
        datasource = process_action_property_repository.get_property(
            process_action, 'datasource')

        # create the scraper
        scraper = Clang()

        # kick off the quickmail export; the response carries a resource id
        response = scraper.mailing_get_quickmails(uuid)
        resource_id = response.msg

        # poll until the remote resource is READY
        # NOTE(review): no timeout — this loops forever if the resource never
        # becomes READY.
        resource_status = 'BUSY'
        while resource_status != "READY":
            response = scraper.resource_get_by_id(uuid, resource_id)
            resource_status = response.msg.status
            time.sleep(1)

        # get mailings
        resource_size = response.msg.size

        # fetch the mailing ids one index at a time (Python 2 xrange)
        mailing_ids = []
        for i in xrange(0, resource_size):
            response = scraper.mailing_set_get_mailing_ids(
                uuid, resource_id, i, 2)
            mailing_ids.append(response.msg.integer[0])

        # release the server-side resource once all ids are collected
        scraper.resource_free(uuid, resource_id)

        # get summaries
        mail_summaries = []
        for mailing_id in mailing_ids:
            response = scraper.mailing_get_by_id(uuid, mailing_id)

            campaign_name = response.msg.campaignName
            content_name = response.msg.contentName
            started_at = response.msg.startedAt
            ended_at = response.msg.endedAt
            description = response.msg.description
            received = response.msg.received
            unique_clicks = response.msg.uniqueClicks
            unique_opens = response.msg.uniqueOpens
            bounces = response.msg.bounces

            # flatten the response into a row dict; text fields are utf-8
            # encoded via Strings.encode
            message = dict()
            message['mailing_id'] = mailing_id
            message['campaign_name'] = Strings.encode(campaign_name, 'utf-8')
            message['started_at'] = Strings.encode(started_at, 'utf-8')
            message['ended_at'] = Strings.encode(ended_at, 'utf-8')
            message['content_name'] = Strings.encode(content_name, 'utf-8')
            message['description'] = Strings.encode(description, 'utf-8')
            message['received'] = received
            message['unique_clicks'] = unique_clicks
            message['unique_opens'] = unique_opens
            message['bounces'] = bounces
            # derived percentages; MathHelper.divide presumably guards the
            # division by zero — TODO confirm
            message['cor'] = MathHelper.divide(unique_opens, received) * 100.0
            message['cto'] = MathHelper.divide(unique_clicks,
                                               unique_opens) * 100.0
            message['ctr'] = MathHelper.divide(unique_clicks, received) * 100.0

            mail_summaries.append(message)

        # delete all the mailings and insert the new ones.
        mi_ds = DatasourceBuilder.find(datasource)
        clang_mail_summary_repository = ClangMailSummaryRepository(mi_ds)
        clang_mail_summary_repository.clean()
        for summary in mail_summaries:
            clang_mail_summary_repository.insert(summary)

        logger.info(job_name, 'job_name: ' + job_name)