Example #1
    def execute(self, connector, process_action):
        job_name = process_action['job_name']
        logger = Logger(connector, self)

        chunk_repository = ChunkRepository(connector)
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(connector)

        # retrieve the process and its chunk-related properties
        process = process_repository.find_by_id(process_action['pcs_id'])
        chunk_id = process_property_repository.get_property(process, 'chunk_id')
        message = process_property_repository.get_property(process, 'message')
        backtrace = process_property_repository.get_property(process, 'backtrace')

        chunk = chunk_repository.find_by_id(chunk_id)

        state = process_action_property_repository.get_property(process_action, 'state')

        # retrieve the datasource of the payload
        datasource = chunk['datasource']
        payload_ds = DatasourceBuilder.find(connector, datasource)
        payload_c = ConnectorFactory.create_connector(payload_ds)
        payload_repository = PayloadRepository(payload_c)

        payload = json.loads(chunk['payload'])
        payload_repository.state(chunk, payload, state)
        payload_c.close()
        logger.info(job_name, 'job_name: ' + job_name + " state: " + chunk['state'])
Example #2
    def execute(self, connector, process_action):

        job_name = process_action['job_name']
        logger = Logger(connector, self)

        event_repository = EventRepository(connector)
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        # retrieve the event properties
        process = process_repository.find_by_id(process_action['pcs_id'])
        event_id = process_property_repository.get_property(
            process, 'event_id')
        message = process_property_repository.get_property(process, 'message')
        backtrace = process_property_repository.get_property(
            process, 'backtrace')

        event = event_repository.find_by_id(event_id)

        state = process_action_property_repository.get_property(
            process_action, 'state')
        event['state'] = state
        event['message'] = message
        event['backtrace'] = backtrace
        event_repository.state(event)

        logger.info(job_name,
                    'job_name: ' + job_name + " state: " + event['state'])
Example #3
    def execute(self, connector, process_action):
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        # create instances of classes
        file_property_repository = FilePropertyRepository(connector)
        file_repository = FileRepository(connector)
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        # retrieve the required properties
        process = process_repository.find_by_id(process_action['pcs_id'])
        file_id = process_property_repository.get_property(process, 'file_id')
        name = process_action_property_repository.get_property(
            process_action, 'name')
        value = process_action_property_repository.get_property(
            process_action, 'value')

        # get the file using the file_id we collected
        file = file_repository.find_by_id(file_id)
        filename = file['filename']

        file_property_repository.set_property(file, name, value)

        logger.info(
            job_name,
            "filename: " + filename + ", name: " + name + ", value: " + value)
Example #4
    def execute(self, connector, process_action):
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        file_repository = FileRepository(connector)
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(connector)

        # retrieve the process properties
        process = process_repository.find_by_id(process_action['pcs_id'])
        file_id = process_property_repository.get_property(process, 'file_id')
        message = process_property_repository.get_property(process, 'message')
        backtrace = process_property_repository.get_property(process, 'backtrace')

        file = file_repository.find_by_id(file_id)
        filename = file['filename']

        state = process_action_property_repository.get_property(process_action, 'state')
        if not state:
            state = process_property_repository.get_property(process, 'state')
            if not state:
                state = 'ERROR_NO_STATE'

        file['state'] = state
        file['message'] = message
        file['backtrace'] = backtrace
        file_repository.state(file)

        logger.info(job_name, "filename: " + filename + ", state: " + file['state'])
Example #5
    def execute(self, connector, process_action):
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)
        # retrieve the process
        process = process_repository.find_by_id(process_action['pcs_id'])

        # retrieve the payload if present
        payload = JsonExt.loads(
            process_property_repository.get_property(process, 'payload'))

        twitter_token = process_action_property_repository.get_property(
            process_action, 'twitter_token')
        twitter_token = JsonExt.loads(twitter_token)

        tweet_content = process_action_property_repository.get_property(
            process_action, 'tweet_content')
        tweet_content = ExpressionParser.parse(tweet_content, locals())

        tweet_hashtags = process_action_property_repository.get_property(
            process_action, 'tweet_hashtags')
        tweet_hashtags = ExpressionParser.parse(tweet_hashtags, locals())

        headers = json.loads(
            process_action_property_repository.get_property(
                process_action, 'headers'))
        certificate = JsonExt.loads(
            process_action_property_repository.get_property(
                process_action, 'certificate'))
        proxies = JsonExt.loads(
            process_action_property_repository.get_property(
                process_action, 'proxies'))
        url = process_action_property_repository.get_property(
            process_action, 'url')

        message = {
            'twitter_token': twitter_token,
            'tweet_content': tweet_content,
            'tweet_hashtags': tweet_hashtags
        }

        logger.info(job_name, json.dumps(message))
        response = RestRequest.post(headers, url, message, certificate,
                                    proxies)
        logger.info(
            job_name,
            "status_code : " + str(response.status_code) + ", reason : " +
            response.reason + ", content : " + response.content)

        if response.status_code != 200:
            raise ProcessException("status_code : " +
                                   str(response.status_code) + ", reason : " +
                                   response.reason + ", content : " +
                                   response.content)
Example #6
    def execute(self, connector, process_action):
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)

        file_repository = FileRepository(connector)

        process = process_repository.find_by_id(process_action['pcs_id'])
        file_id = process_property_repository.get_property(process, 'file_id')

        file = file_repository.find_by_id(file_id)

        pickup_location = process_property_repository.get_property(
            process, 'pickup_location')
        pickup_filename = file['filename']
        pickup_path = process_property_repository.get_property(process, 'path')

        logger.info(
            job_name, 'filename: ' + pickup_filename + ", pickup_location: " +
            pickup_location)

        # delete the file from the pickup folder.
        pickup_client = VfsFactory.create_client(pickup_location)
        pickup_client.connect()
        pickup_path = pickup_path + os.sep + pickup_filename
        pickup_client.delete(pickup_path)
        pickup_client.close()

        # clear the pickup properties so the next process_action does not reuse them
        process_property_repository.set_property(process, 'pickup_location',
                                                 '')
        process_property_repository.set_property(process, 'path', '')
Example #7
    def execute(self, connector, process_action):
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        # retrieve the identifier and the datasource to poll
        process = process_repository.find_by_id(process_action['pcs_id'])
        identifier = process_property_repository.get_property(
            process, 'identifier')
        datasource = process_action_property_repository.get_property(
            process_action, 'datasource')

        oracle_ds = DatasourceBuilder.find(connector, datasource)
        oracle_c = ConnectorFactory.create_connector(oracle_ds)
        scheduler = OracleScheduler(oracle_c)

        # the job is still running: return the process_action so the consumer
        # re-enqueues this action and polls again later
        if scheduler.is_running(identifier):
            oracle_c.close()
            return process_action

        # the job has finished, so collect its run result
        created_at = process['created_at']
        run_result = scheduler.run_result(identifier, job_name, created_at)
        oracle_c.close()

        process_property_repository.set_property(process, 'message',
                                                 run_result['message'])
        process_property_repository.set_property(process, 'state',
                                                 run_result['state'])
        process_property_repository.set_property(process, 'backtrace',
                                                 run_result['backtrace'])
Example #8
    def test_call(self):
        paprika_ds = DatasourceBuilder.build('paprika-ds.json')
        connector = ConnectorFactory.create_connector(paprika_ds)

        job_repository = JobRepository(connector)
        job_name = job_repository.job()

        process_repository = ProcessRepository(connector)
        process = dict()
        process['job_name'] = job_name['job_name']
        process['pdn_id'] = None
        process['state'] = None
        process['e_pdn_id'] = None
        process['name'] = None
        process['queue'] = None
        process_repository.insert(process)

        event_repository = EventRepository(connector)
        event = dict()
        event['job_name'] = job_name['job_name']
        event['state'] = None
        event['repetition'] = 'DAYS'
        event['intermission'] = '1'
        event['pcs_id'] = process['id']
        event_repository.insert(event)

        process_property_repository = ProcessPropertyRepository(connector)
        process_property_repository.set_property(process, 'event_id',
                                                 event['id'])

        process_action_repository = ProcessActionRepository(connector)
        process_action = dict()
        process_action['job_name'] = job_name['job_name']
        process_action['pcs_id'] = process['id']
        process_action['dan_id'] = None
        process_action['name'] = 'copy'
        process_action['state'] = 'processed'
        process_action_repository.insert(process_action)

        process_action_property_repository = ProcessActionPropertyRepository(
            connector)
        process_action_property = dict()
        process_action_property['name'] = 'file_id'
        process_action_property['value'] = 1
        process_action_property['pan_id'] = process_action['id']
        process_action_property_repository.insert(process_action_property)

        copy = Copy()
        copy.execute(connector, process_action)

        connector.close()
Example #9
    def execute(self, connector, process_action):

        job_name = process_action['job_name']
        logger = Logger(connector, self)

        process_action_property_repository = ProcessActionPropertyRepository(connector)

        # retrieve the properties
        days = process_action_property_repository.get_property(process_action, 'days')

        process_property_repository = ProcessPropertyRepository(connector)
        count = process_property_repository.clean(days)

        logger.info(job_name, str(count) + ' processes_properties record(s) purged.')
Example #10
    def execute(self, connector, process_action):
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        file_repository = FileRepository(connector)
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        # retrieve the process and its payload
        process = process_repository.find_by_id(process_action['pcs_id'])

        payload = process_property_repository.get_property(process, 'payload')
        if payload:
            payload = json.loads(payload)

        datasource = process_action_property_repository.get_property(
            process_action, 'datasource')
        method_name = process_action_property_repository.get_property(
            process_action, 'method_name')
        params = process_action_property_repository.get_property(
            process_action, 'params')
        test_result_params = process_action_property_repository.get_property(
            process_action, 'test_result_params')
        if params:
            params = json.loads(params)
            params = ExpressionParser.parse(params, locals())

        oracle_ds = DatasourceBuilder.find(connector, datasource)
        oracle_c = ConnectorFactory.create_connector(oracle_ds)
        scheduler = OracleScheduler(oracle_c)

        identifier = job_name + '_' + Strings.identifier(10)

        message = dict()
        message['method_name'] = method_name
        message['identifier'] = identifier
        message['params'] = params
        if test_result_params:
            message['test_result_params'] = json.loads(test_result_params)
        scheduler.create_job(message)

        logger.info(job_name, json.dumps(message))

        process_property_repository.set_property(process, 'identifier',
                                                 identifier)
        oracle_c.close()
Example #11
    def execute(self, connector, process_action):
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        properties = PropertyRepository(connector)
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        file_repository = FileRepository(connector)

        process = process_repository.find_by_id(process_action['pcs_id'])
        file_id = process_property_repository.get_property(process, 'file_id')

        file = file_repository.find_by_id(file_id)

        tmp = properties.get_property('scanner.tmp')

        # retrieve the file properties
        pickup_location = process_property_repository.get_property(
            process, 'pickup_location')
        pickup_filename = file['filename']
        pickup_path = process_property_repository.get_property(process, 'path')
        drop_location = process_action_property_repository.get_property(
            process_action, 'drop_location')

        logger.info(
            job_name, 'filename: ' + pickup_filename + ", pickup_location: " +
            pickup_location + ", drop_location: " + drop_location)

        # copy the file from pickup folder to the tmp folder.
        pickup_client = VfsFactory.create_client(pickup_location)
        pickup_client.connect()
        pickup_path = pickup_path + os.sep + pickup_filename
        tmp_path = tmp + os.sep + pickup_filename
        pickup_client.get(pickup_path, tmp_path)
        pickup_client.close()

        # copy the file from the tmp folder to the drop folder.
        drop_client = VfsFactory.create_client(drop_location)
        drop_client.connect()
        property_path = drop_client.get_path()
        drop_path = drop_client.get_path() + os.sep + pickup_filename
        drop_client.put(tmp_path, drop_path)
        drop_client.close()

        # set the process properties for the next process_action
        process_property_repository.set_property(process, 'pickup_location',
                                                 drop_location)
        process_property_repository.set_property(process, 'path',
                                                 property_path)
Example #12
    def action(self, connector, message):
        stream_repository = StreamRepository(connector)
        payload = json.loads(message['payload'])
        stream = stream_repository.find_by_hashcode(payload['oxyma.stream.hashcode'])

        rule_repository = RuleRepository(connector)
        chunk_repository = ChunkRepository(connector)

        # ask for a new job_name
        job_repository = JobRepository(connector)
        job = job_repository.job()
        job_name = job['job_name']

        # find the rule
        found_rule = None
        rules = rule_repository.find_by_stream(stream)
        for rule in rules:
            if Matcher.match(DictionaryMethod, json.loads(rule['pattern']), payload):
                found_rule = rule

        # only start a process when a rule is found,
        # if the rule is not found, the given payload is simply ignored
        if found_rule:

            # store the payload as chunk in paprika
            process = ProcessService.create_process(connector, found_rule['pdn_id'], job['job_name'], found_rule['e_pdn_id'])

            chunk = dict()
            chunk['job_name'] = job_name
            chunk['pcs_id'] = process['id']
            chunk['state'] = 'READY'
            chunk['datasource'] = ''
            chunk['tablename'] = ''
            chunk['selector'] = ''
            chunk['options'] = ''
            chunk['payload'] = json.dumps(payload)
            chunk['rle_id'] = found_rule['id']
            chunk['rule'] = found_rule['rule']
            chunk['pattern'] = found_rule['pattern']
            chunk['updater'] = ''
            chunk = chunk_repository.insert(chunk)

            process_property_repository = ProcessPropertyRepository(connector)
            process_property_repository.set_property(process, 'chunk_id', chunk['id'])

            ProcessService.execute_process(connector, process)
Example #13
    def execute(self, connector, process_action):
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        # retrieve the process
        process = process_repository.find_by_id(process_action['pcs_id'])

        # retrieve the payload if present
        payload = process_property_repository.get_property(process, 'payload')
        if payload:
            payload = json.loads(payload)

        datasource = process_action_property_repository.get_property(
            process_action, 'datasource')
        method_name = process_action_property_repository.get_property(
            process_action, 'method_name')
        params = process_action_property_repository.get_property(
            process_action, 'params')
        if params:
            params = json.loads(params)
            params = ExpressionParser.parse(params, locals())

        oracle_ds = DatasourceBuilder.find(connector, datasource)
        oracle_c = ConnectorFactory.create_connector(oracle_ds)
        oracle_call = OracleCall(oracle_c)

        call = dict()
        call['method_name'] = method_name
        call['params'] = params
        oracle_call.execute(call)

        logger.info(job_name, json.dumps(call))
        oracle_c.close()

        process_property_repository.set_property(process, 'call',
                                                 json.dumps(call))
Example #14
    def execute(self, connector, process_action):

        job_name = process_action['job_name']
        logger = Logger(connector, self)
        file_repository = FileRepository(connector)
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)

        # retrieve the file properties
        process = process_repository.find_by_id(process_action['pcs_id'])
        file_id = process_property_repository.get_property(process, 'file_id')
        file = file_repository.find_by_id(file_id)
        filename = file['filename']

        locked = file_repository.locked(file)

        if locked:
            logger.info(job_name, 'file: ' + filename + " locked ")
            return process_action
        else:
            logger.info(job_name, 'file: ' + filename + " not locked ")

        logger.info(job_name, filename + " state: " + file['state'])
Example #15
    def action(self, connector, message):
        try:
            process_action = json.loads(message['payload'])
            process_action_repository = ProcessActionRepository(connector)

            # set the state of the process action
            process_action['state'] = 'PROCESSING'
            process_action['message'] = ''
            process_action['backtrace'] = ''
            process_action_repository.state(process_action)

            process_action_property_repository = ProcessActionPropertyRepository(
                connector)
            action = ClassLoader.find(
                process_action_property_repository.get_property(
                    process_action, 'action'))
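            # run the resolved action; a truthy return value asks for a
            # delayed retry of the same action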
            payload = action.execute(connector, process_action)
            if payload:
                process_repository = ProcessRepository(connector)
                process_action_property_repository = ProcessActionPropertyRepository(
                    connector)
                process = process_repository.find_by_id(payload['pcs_id'])
                sleep = float(
                    process_action_property_repository.get_property(
                        process_action, 'sleep'))
                now = datetime.now()
                delay = now + timedelta(seconds=int(sleep))
                delay = delay.strftime('%Y-%m-%d %H:%M:%S')
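                # re-enqueue the same action on the process queue after the
                # configured sleep delay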
                Message.enqueue_wait(
                    connector, process['queue'], delay, payload, 'message',
                    'paprika.consumers.ProcessAction.ProcessAction')
            else:
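                # the action completed: move on to the next process definition
                # action, or finish the process when none remains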
                process_repository = ProcessRepository(connector)
                process = process_repository.find_by_id(
                    process_action['pcs_id'])
                process_definition_action_repository = ProcessDefinitionActionRepository(
                    connector)
                process_definition_action = process_definition_action_repository.find_next_by_process_action(
                    process_action, process)
                if process_definition_action:
                    next_process_action = dict()
                    next_process_action['job_name'] = process_action[
                        'job_name']
                    next_process_action['pcs_id'] = process_action['pcs_id']
                    next_process_action['dan_id'] = process_definition_action[
                        'id']
                    next_process_action['name'] = process_definition_action[
                        'name']
                    next_process_action['state'] = 'READY'
                    next_process_action = process_action_repository.insert(
                        next_process_action)

                    process_definition_action_property_repository = ProcessDefinitionActionPropertyRepository(
                        connector)
                    process_definition_action_properties = process_definition_action_property_repository.list_by_process_definition_action(
                        process_definition_action)
                    for process_definition_action_property in process_definition_action_properties:
                        process_action_property_repository.set_property(
                            next_process_action,
                            process_definition_action_property['name'],
                            process_definition_action_property['value'])

                    payload = next_process_action
                    Message.enqueue(
                        connector, process['queue'], payload, 'message',
                        'paprika.consumers.ProcessAction.ProcessAction')
                else:
                    payload = process_repository.find_by_id(
                        process_action['pcs_id'])
                    Message.enqueue(
                        connector, process['queue'], payload, 'message',
                        'paprika.consumers.ProcessFinish.ProcessFinish')

                process_action['state'] = 'PROCESSED'
                process_action['message'] = ''
                process_action['backtrace'] = ''
                process_action_repository.state(process_action)
        except:
            # set the process_action to failed
            process_action = json.loads(message['payload'])
            process_action_repository = ProcessActionRepository(connector)
            result = Traceback.build()
            result['id'] = process_action['id']
            result['state'] = 'FAILED'
            process_action_repository.state(result)

            # set the process to failed
            process_repository = ProcessRepository(connector)
            process = process_repository.find_by_id(process_action['pcs_id'])
            result['id'] = process['id']
            result['state'] = 'FAILED'
            process_repository.state(result)

            # log a fatal of the process
            logger = Logger(connector, self)
            logger.fatal(process['job_name'], result['message'],
                         result['backtrace'])

            # start the exception process if present
            if process['e_pdn_id']:
                e_process = ProcessService.create_process(
                    connector, process['e_pdn_id'], process['job_name'])

                process_property_repository = ProcessPropertyRepository(
                    connector)
                process_property_repository.copy(process, e_process)
                process_property_repository.set_property(
                    e_process, 'message', result['message'])
                process_property_repository.set_property(
                    e_process, 'backtrace', result['backtrace'])

                ProcessService.execute_process(connector, e_process)
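The consumer above resolves an action class by its 'action' property and invokes execute(connector, process_action): a truthy return value re-enqueues the same action after the configured sleep, while None advances to the next process definition action. A minimal polling action written against that contract might look as follows (the class name is illustrative, not from the source):

    class WaitForFile:
        def execute(self, connector, process_action):
            logger = Logger(connector, self)
            job_name = process_action['job_name']

            process_repository = ProcessRepository(connector)
            process_property_repository = ProcessPropertyRepository(connector)
            file_repository = FileRepository(connector)

            # look up the file attached to this process
            process = process_repository.find_by_id(process_action['pcs_id'])
            file_id = process_property_repository.get_property(process, 'file_id')
            file = file_repository.find_by_id(file_id)

            # still locked: ask the consumer to retry this action later
            if file_repository.locked(file):
                return process_action

            # done: returning None lets the consumer schedule the next action
            logger.info(job_name, 'file: ' + file['filename'] + ' released')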
Example #16
    def run(self, location):
        abort = self.get_abort()
        stop = self.get_stop()
        paprika_ds = DatasourceBuilder.build('paprika-ds.json')
        connector = ConnectorFactory.create_connector(paprika_ds)
        logger = Logger(connector, self)

        job_repository = JobRepository(connector)
        job = job_repository.job()
        job_name = job['job_name']

        settings = self.get_settings()
        while self.is_running():
            try:
                properties = PropertyRepository(connector)
                registry = FileRepository(connector)
                rule_repository = RuleRepository(connector)

                excluded_extensions = properties.get_property(
                    'scanner.excluded_extensions')
                stable_check_delay = properties.get_property(
                    'scanner.stable_check_delay')

                url = location['url']
                patterns = location['patterns']
                client = VfsFactory.create_client(url)
                client.set_excluded_extensions(excluded_extensions)
                client.set_stable_check_delay(int(stable_check_delay))
                client.set_regular_expressions(patterns)
                path = client.get_path()
                recursive = int(location['recursive'])
                depth = int(location['depth'])

                client.connect()
                files = client.list_stable(path,
                                           recursive=recursive,
                                           depth=depth)
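                # register and kick off a process for every stable file that
                # is not yet known by its hashcode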
                for file in files:
                    registered_file = registry.get_by_hashcode(
                        file['hashcode'])
                    if not registered_file:

                        # find the rule
                        found_rule = None
                        rules = rule_repository.find_by_location(location)
                        for rule in rules:
                            if Matcher.match(ReMethod, rule['pattern'],
                                             file['filename']):
                                found_rule = rule
                        if not found_rule:
                            found_rule = rule_repository.find_failsafe()

                        job = job_repository.job()
                        file_job_name = job['job_name']

                        logger.info(
                            file_job_name, "file: " + file['url'] + '/' +
                            file['filename'] + " rule: " + found_rule['rule'] +
                            " hascode:" + file['hashcode'])

                        process = ProcessService.create_process(
                            connector, found_rule['pdn_id'], file_job_name,
                            found_rule['e_pdn_id'])

                        message = dict()
                        message['job_name'] = file_job_name
                        message['filename'] = file['filename']
                        message['path'] = file['path']
                        message['pattern'] = found_rule['pattern']
                        message['rle_id'] = found_rule['id']
                        message['rule'] = found_rule['rule']
                        message['pickup_location'] = file['url']
                        message['filesize'] = file['size']
                        message['hashcode'] = file['hashcode']
                        message['pcs_id'] = process['id']
                        message['state'] = 'READY'
                        registered_file = registry.insert(message)

                        process_property_repository = ProcessPropertyRepository(
                            connector)
                        process_property_repository.set_property(
                            process, 'file_id', registered_file['id'])
                        process_property_repository.set_property(
                            process, 'pickup_location', file['url'])
                        process_property_repository.set_property(
                            process, 'path', file['path'])
                        process_property_repository.set_property(
                            process, 'payload',
                            json.dumps({
                                'filename': file['filename'],
                                'job_name': file_job_name
                            }))

                        ProcessService.execute_process(connector, process)

                client.close()

                # check if we need to abort, can be called from the main thread or other thread
                aborted = abort.is_aborted()
                self.running(not aborted)

                # check if we need to stop, will be set by the agent's WatchWorker thread
                if not aborted:
                    stopped = stop.is_stopped()
                    self.running(not stopped)

                connector.close()
                time.sleep(settings['worker_idle_delay'])
                logger.trace(job_name,
                             'worker #' + str(self.get_id()) + " executed.")
            except:
                aborted = abort.is_aborted()
                self.running(not aborted)

                if not aborted:
                    stopped = stop.is_stopped()
                    self.running(not stopped)

                result = Traceback.build()
                logger.fatal(job_name, result['message'], result['backtrace'])
                connector.close()
                time.sleep(settings['worker_exception_delay'])
Example #17
    def execute(self, connector, process_action):
        logger = Logger(connector, self)
        job_name = process_action['job_name']

        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        print(json.dumps(process_action))
        # retrieve the file properties
        process = process_repository.find_by_id(process_action['pcs_id'])
        print(json.dumps(process))

        # retrieve the payload if present
        payload = JsonExt.loads(
            process_property_repository.get_property(process, 'payload'))
        mailjob_id = process_property_repository.get_property(
            process, 'mailjob_id')

        auth_info = json.loads(
            process_action_property_repository.get_property(
                process_action, 'auth_info'))
        headers = json.loads(
            process_action_property_repository.get_property(
                process_action, 'headers'))
        certificate = JsonExt.loads(
            process_action_property_repository.get_property(
                process_action, 'certificate'))
        proxies = JsonExt.loads(
            process_action_property_repository.get_property(
                process_action, 'proxies'))
        url = process_action_property_repository.get_property(
            process_action, 'url')

        message = {"auth_info": auth_info, "mailjob_id": mailjob_id}

        logger.info(job_name, json.dumps(message))
        response = RestRequest.post(headers, url, message, certificate,
                                    proxies)
        logger.info(
            job_name,
            "status_code : " + str(response.status_code) + ", reason : " +
            response.reason + ", content : " + response.content)

        if response.status_code != 200:
            message = "status_code : " + str(
                response.status_code
            ) + ", reason : " + response.reason + ", content : " + response.content
            raise ProcessException(message)

        content = json.loads(response.content)
        status = content['status']
        numberOfSkipped = content['numberOfSkipped']

        if status in ['ENDED_WITH_ERRORS', 'FAILED']:
            message = "status : " + status + ", reason : " + content['error']
            raise ProcessException(message)

        if numberOfSkipped != 0:
            message = "numberOfSkipped : " + str(
                numberOfSkipped
            ) + ", reason : the email is not send but skipped. Possibly emailadress empty?"
            raise ProcessException(message)

        if status != 'ENDED':
            return process_action
Example #18
    def execute(self, process_action):

        job_name = process_action['job_name']

        paprika_ds = DatasourceBuilder.build('paprika-ds.json')
        logger = Logger(self)
        file_repository = FileRepository(paprika_ds)
        process_repository = ProcessRepository(paprika_ds)
        process_property_repository = ProcessPropertyRepository(paprika_ds)
        process_action_property_repository = ProcessActionPropertyRepository(
            paprika_ds)

        # retrieve the file properties
        process = process_repository.find_by_id(process_action['pcs_id'])
        file_id = process_property_repository.get_property(process, 'file_id')
        file = file_repository.find_by_id(file_id)

        datasource = process_action_property_repository.get_property(
            process_action, 'datasource')
        drop_location = process_action_property_repository.get_property(
            process_action, 'drop_location')
        filename = drop_location + '/' + file['filename']

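        # normalize the CSV, convert it to utf8 via the detected source
        # encoding, then guess the delimiter before staging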
        CsvFile.normalize(filename, filename + '.norm')
        source_encoding = CsvFile.guess_encoding(filename + '.norm')
        CsvFile.iconv(filename + '.norm', source_encoding, filename + '.utf8',
                      'utf8')
        delimiter = CsvFile.guess_delimiter(filename + '.utf8')

        # 'header' is not defined earlier in this snippet, so assume no
        # explicit header was supplied: read it from the file and skip it
        # while staging
        header = None
        skip_header = False
        if not header:
            header = CsvFile.read_header(filename + '.utf8', delimiter)
            skip_header = True

        ds = DatasourceBuilder.find(datasource)
        connector = ConnectorFactory.create_connector(ds)

        file_repository = FileRepository(ds)
        file = dict()
        file['pcs_id'] = 0
        file['job_name'] = job_name
        file['filename'] = filename
        file['state'] = 'READY'
        file['rule'] = ''
        file['hashcode'] = ''
        file['pickup_location'] = ''
        file['path'] = ''
        file['filesize'] = 0
        file['pattern'] = ''
        file = file_repository.insert(file)

        statics = dict()
        statics['job_name'] = job_name
        statics['fle_id'] = file['id']

        mapping = 'id.eeid;notification.action;job_name.job_name;fle_id.fle_id'

        stager = Stager(ds)
        stager.stage(filename + '.utf8', header, delimiter,
                     'tripolis_mailings', mapping, skip_header, statics)
        stager.close()

        logger.info(job_name,
                    'job_name: ' + job_name + ', file: ' + filename + " staged")
Example #19
    def run(self, scheduled_event, test_mode=False):
        abort = self.get_abort()
        stop = self.get_stop()
        paprika_ds = DatasourceBuilder.build('paprika-ds.json')
        connector = ConnectorFactory.create_connector(paprika_ds)
        logger = Logger(connector, self)

        job_repository = JobRepository(connector)
        job = job_repository.job()
        job_name = job['job_name']

        settings = self.get_settings()
        while self.is_running():
            try:
                now = datetime.now()
                scheduled_event_repository = ScheduledEventRepository(
                    connector)
                scheduled_event = scheduled_event_repository.find_by_hashcode(
                    scheduled_event['hashcode'])
                job_repository = JobRepository(connector)
                event_repository = EventRepository(connector)

                repetition = scheduled_event['repetition']
                intermission = int(scheduled_event['intermission'])
                expected = datetime.strptime(scheduled_event['expected'],
                                             "%Y-%m-%d %H:%M:%S")
                next_expected = None
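                # the event is overdue: fire it and compute the next
                # occurrence, skipping any missed while the worker was down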
                if expected < now:
                    if repetition == 'HOURS':
                        next_expected = expected + timedelta(
                            hours=int(intermission))
                        while next_expected < now:
                            next_expected += timedelta(hours=int(intermission))
                    if repetition == 'DAYS':
                        next_expected = expected + timedelta(
                            days=int(intermission))
                        while next_expected < now:
                            next_expected += timedelta(days=int(intermission))
                    if repetition == 'MINUTES':
                        next_expected = expected + timedelta(
                            minutes=int(intermission))
                        while next_expected < now:
                            next_expected += timedelta(
                                minutes=int(intermission))

                    if next_expected:
                        message = dict()
                        message['id'] = scheduled_event['id']
                        message['expected'] = str(next_expected)
                        scheduled_event_repository.expected(message)

                    job = job_repository.job()
                    event_job_name = job['job_name']

                    process = ProcessService.create_process(
                        connector, scheduled_event['pdn_id'], event_job_name,
                        scheduled_event['e_pdn_id'])

                    event = dict()
                    event['state'] = 'READY'
                    event['repetition'] = repetition
                    event['intermission'] = intermission
                    event['expected'] = scheduled_event['expected']
                    event['job_name'] = event_job_name
                    event['pcs_id'] = process['id']
                    event = event_repository.insert(event)

                    process_property_repository = ProcessPropertyRepository(
                        connector)
                    process_property_repository.set_property(
                        process, 'event_id', event['id'])

                    ProcessService.execute_process(connector, process)

                # check if we need to abort, can be called from the main thread or other thread
                aborted = abort.is_aborted()
                self.running(not aborted)

                # check if we need to stop, will be set by the agent's WatchWorker thread
                if not aborted:
                    stopped = stop.is_stopped()
                    self.running(not stopped)

                # check for test_mode, break the loop
                if test_mode:
                    self.running(False)

                connector.close()
                time.sleep(settings['worker_idle_delay'])
                logger.trace(job_name,
                             'worker #' + str(self.get_id()) + " executed.")
            except:
                aborted = abort.is_aborted()
                self.running(not aborted)

                if not aborted:
                    stopped = stop.is_stopped()
                    self.running(not stopped)

                result = Traceback.build()
                logger.fatal(job_name, result['message'], result['backtrace'])
                connector.close()
                time.sleep(settings['worker_exception_delay'])
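The three repetition branches above share one catch-up pattern: add the intermission once, then keep adding it until the result passes now. A standalone sketch of that computation (the function name is illustrative, not from the source):

    from datetime import timedelta

    def catch_up(expected, repetition, intermission, now):
        # map the repetition unit onto a timedelta step
        units = {'HOURS': 'hours', 'DAYS': 'days', 'MINUTES': 'minutes'}
        step = timedelta(**{units[repetition]: int(intermission)})
        next_expected = expected + step
        # skip occurrences that were missed while the worker was down
        while next_expected < now:
            next_expected += step
        return next_expected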
Example #20
    def execute(self, connector, process_action):
        job_name = process_action['job_name']
        logger = Logger(connector, self)

        properties = PropertyRepository(connector)
        process_repository = ProcessRepository(connector)
        process_property_repository = ProcessPropertyRepository(connector)
        process_action_property_repository = ProcessActionPropertyRepository(
            connector)

        file_repository = FileRepository(connector)

        process = process_repository.find_by_id(process_action['pcs_id'])
        file_id = process_property_repository.get_property(process, 'file_id')

        file = file_repository.find_by_id(file_id)

        tmp = properties.get_property('scanner.tmp')

        # retrieve the file properties
        pickup_location = process_property_repository.get_property(
            process, 'pickup_location')
        pickup_filename = file['filename']
        pickup_path = process_property_repository.get_property(process, 'path')
        drop_location = process_action_property_repository.get_property(
            process_action, 'drop_location')

        # copy the file from pickup folder to the tmp folder.
        logger.info(job_name, pickup_location + os.sep + pickup_filename)
        pickup_client = VfsFactory.create_client(pickup_location)
        pickup_client.connect()
        pickup_path = pickup_path + os.sep + pickup_filename
        tmp_path = tmp + os.sep + pickup_filename
        pickup_client.get(pickup_path, tmp_path)
        pickup_client.close()

        # unzip the file and copy the unzipped files to the unzip folder
        logger.info(job_name,
                    'unzip ' + pickup_location + os.sep + pickup_filename)
        zip_client = VfsFactory.create_client("zip://" + tmp_path)
        zip_client.connect()
        zip_files = zip_client.list('')
        for zip_file in zip_files:

            logger.info(job_name,
                        'unzipping ' + zip_file['path'] + zip_file['filename'])
            zip_client.get(
                zip_file['path'] + zip_file['filename'],
                tmp + os.sep + zip_file['path'] + zip_file['filename'])

            # copy the file from the tmp folder to the drop folder
            logger.info(
                job_name, 'copying ' + tmp + os.sep + zip_file['path'] +
                zip_file['filename'] + ' to ' + drop_location)
            drop_client = VfsFactory.create_client(drop_location)
            drop_client.connect()

            # put must read from where the entry was extracted, inside tmp
            tmp_path = tmp + os.sep + zip_file['path'] + zip_file['filename']
            drop_path = drop_client.get_path() + os.sep + zip_file['filename']
            drop_client.put(tmp_path, drop_path)
            drop_client.close()

        logger.info(job_name,
                    pickup_location + os.sep + pickup_filename + " unzipped")