Example #1
class TalendTimDocumentIntegrator(IntegratorCallbacks, ThingRunner):
    """ TalendTimDocumentIntegrator initializing """

    def __init__(self, config, agent_config):

        super().__init__(config=agent_config)

        if not (isinstance(config, dict) and all(section in config for section in ('integrator', 'config'))):
            raise ValueError('Configuration invalid / missing required section')

        # Whilst the integrator core requires particular configuration, top-level sections could be defined to provide
        # parameters specific to this integrator.
        self.__integrator = Integrator(config['integrator'], self.client, self)
        self.__assets = set()
        self.__config = config
        # data cache used to check whether the asset's data has changed before publishing an event
        self.__data_cache = get_cache(config, config_path='integrator.asset.cache.method')
        self.__use_mock_data = NestedConfig.get(self.__config, 'config.use_mock_data', required=False, default=False)
        self.__workers = NestedConfig.get(self.__config,
                                          'config.workers', required=False, default=1, check=non_negative_int)
        self.__loop_time = NestedConfig.get(self.__config,
                                            'config.loop_time', required=False, default=5, check=non_negative_int)

        self.__req_pool = ThreadPoolExecutor(max_workers=self.__workers)

        self.__talend_config_info = TalendConfig(
            endpoint=NestedConfig.get(self.__config,
                                      'config.talend.endpoint', required=True, check=non_empty_str),
            endpoint_single=NestedConfig.get(self.__config,
                                             'config.talend.endpoint_single', required=True, check=non_empty_str),
            usr=NestedConfig.get(self.__config, 'config.talend.usr', required=True, check=non_empty_str),
            pwd=NestedConfig.get(self.__config, 'config.talend.pwd', required=True, check=non_empty_str),
            timeout=int(NestedConfig.get(self.__config,
                                         'config.talend.timeout', required=False, default=10, check=non_negative_int))
        )

    def on_startup(self):
        """ Starts the integrator. Other public methods herein must not be called beforehand.
        Should be called after starting Iotic agent.
        Alternatively use with keyword on the instance. """
        log.info('Talend Tim Document Integrator Startup')
        self.__data_cache.start()
        self.__integrator.start()

    def main(self):
        """ Talend Tim Document Integrator Started Running """
        log.info('Talend Tim Document Integrator Running')
        self.__process_data()
        loop_time = self.__loop_time
        while not self.wait_for_shutdown(loop_time):
            self.__process_data()

    def on_shutdown(self, exc_info):
        """ Stops the integrator. Should be called before shutting down the Iotic agent. """
        log.info('Talend Tim Document Integrator Shutdown')
        self.__integrator.stop()
        self.__data_cache.stop()

    # for IntegratorCallbacks
    def on_asset_created(self, asset_id):
        """ A new asset has been created.
        Called once for each known asset on startup as well as whenever
        a new asset appears whilst the integrator is running. """
        log.info('Asset created: %s', asset_id)
        self.__assets.add(asset_id)

    # for IntegratorCallbacks
    def on_asset_deleted(self, asset_id):
        """ An asset has been deleted.
        Called whenever an asset has been removed and should no longer be considered by the integrator.
        Note: This is NOT called if an asset has been deleted whilst the integrator is not running. """
        log.info('Asset deleted: %s', asset_id)
        self.__assets.discard(asset_id)

    # for IntegratorCallbacks
    def on_t2_request(self, request):
        """ A new type2 request has been made for a particular asset.
        request - instance of T2Request """
        self.__req_pool.submit(self.__process_t2, request)

    # Wrapped in log_exceptions since the thread pool discards return values and exceptions
    @log_exceptions(log)
    def __process_t2(self, request):
        """ processing t2 request """
        log.info('New T2 req for %s - %s(%r)', request.asset_id, request.type_, request.data)

        if request.type_ != T2_REQUEST_TALEND_DOCUMENT:
            log.warning('Ignoring unknown request type %s', request.type_)
            return
        self.__t2_do_tlddoc(request)

    def __t2_do_tlddoc(self, request):
        decoded = json.loads(request.data.decode('utf-8'))
        data = self.__get_tim_doc(decoded["serialNumber"], decoded["documentLabel"], decoded["documentName"])
        if data:
            try:
                self.__integrator.t2_respond(request, "application/pdf", b64decode(data))
            except binascii_Error:
                log.error("Failed to b64decode data")
                self.__integrator.t2_respond_error(request, T2ProviderFailureReason.REQ_UNHANDLED)
        else:
            self.__integrator.t2_respond_error(request, T2ProviderFailureReason.REQ_UNHANDLED)

    def __get_tim_doc(self, serial_no, document_label, document_name):
        log.info("Get Talend doc for: %s", serial_no)

        data = None
        if self.__use_mock_data == 1:
            return mockpdf.data

        endpoint = self.__talend_config_info.endpoint_single.format(
            asset_id=serial_no, doc_label=document_label, doc_name=document_name)
        usr = self.__talend_config_info.usr
        pwd = self.__talend_config_info.pwd
        timeout = self.__talend_config_info.timeout

        log.debug("Calling: %s", endpoint)

        try:
            resp = requests.get(endpoint, auth=(usr, pwd), verify=False, timeout=timeout)
            log.debug("Response status: %s", resp.status_code)
            if resp.text and resp.status_code == requests.codes['ok']:
                try:
                    data = resp.json()['document']
                except Exception as ex:  # pylint: disable=broad-except
                    log.error("Could not parse JSON from response: %s", resp.text)
                    log.error(ex)
        except requests.exceptions.RequestException as ex:
            log.error(ex)

        return data

    def __get_data_for_asset(self, asset_id):
        """ returns Talend data for asset_id """
        log.info("Get Talend data for: %s", asset_id)

        data = None

        if self.__use_mock_data == 1:
            log.debug("Using mock data")
            with open(MOCK_DATA_FILE, mode="r", encoding="utf-8") as f:
                data = json.load(f)

        else:
            # asset_id hack for the RR dev environment: its TalendTimDocument API expects a
            # specific asset_id, so two of our test IDs are swapped for it.
            if asset_id in ('1000021', '1000015'):
                asset_id = '16701003340'

            usr = self.__talend_config_info.usr
            pwd = self.__talend_config_info.pwd
            # Use the list endpoint here; endpoint_single also requires doc label/name
            endpoint = self.__talend_config_info.endpoint.format(asset_id=asset_id)
            timeout = self.__talend_config_info.timeout

            log.debug("Calling: %s", endpoint)

            try:
                resp = requests.get(endpoint, auth=(usr, pwd), verify=False, timeout=timeout)
                log.debug("Response status: %s", resp.status_code)
                if resp.text and resp.status_code == requests.codes['ok']:
                    try:
                        data = resp.json()
                    except Exception:  # pylint: disable=broad-except
                        log.error("Could not parse JSON from response: %s", resp.text)
                        raise
            except requests.exceptions.RequestException as ex:
                log.error(ex)

        return data

    def __process_data(self):
        """ Processing Talend Tim Documents """
        log.info("Processing Talend Tim Documents")
        for asset_id in list(self.__assets):
            log.debug("Processing asset: %s", asset_id)
            data = self.__get_data_for_asset(asset_id)
            if data and self.__has_asset_data_changed_for(asset_id, data):
                event = TalendTimDocumentSet(asset_id, data=data["documentList"])
                log.debug("Publishing event: %s", event)

                try:
                    self.__integrator.publish_event(event)
                    self.__cache_asset_data_for(asset_id, data)

                # These failures are safe to skip here; the next loop iteration will retry
                except EventPublishFailure as ex:
                    log.error("Event Publish Failure: %s", ex)
                except AssetUnknown:
                    pass

    # Checks to see if the given data for the asset has changed
    # since it was last processed.
    def __has_asset_data_changed_for(self, asset_id, data):
        """ Checking wheather asset cache data has changed for asset_id or not """
        log.info("Checking asset cache for: %s", asset_id)
        try:
            asset_id_hash = self.__data_cache.get_attr(asset_id, 'hash')
        except KeyError:
            # No cache so this is new data
            return True

        data_hash = self.__compute_data_hash(data)

        if asset_id_hash['hash'] != data_hash:
            # data has changed
            return True
        # Nothing has changed for this data
        return False

    @classmethod
    def __compute_data_hash(cls, data):
        """ computing data"""
        jdata = json.dumps(data, sort_keys=True, separators=(',', ':'))
        return hashlib_md5(jdata.encode('utf8')).hexdigest()

    # After publishing the event, update the cache
    def __cache_asset_data_for(self, asset_id, data):
        """ updating Cache asset data for asset_id """
        log.info("Cache asset for: %s", asset_id)
        data_hash = self.__compute_data_hash(data)
        self.__data_cache.mark_as_known(asset_id, hash=data_hash)
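
For reference, a configuration shaped like the following would satisfy the paths Example #1 reads via NestedConfig. This is a minimal sketch: the URLs are placeholders, and the contents of the 'integrator' section (including the 'integrator.asset.cache.method' path used by get_cache) belong to the integrator core and are only hinted at here.

config = {
    'integrator': {
        # populated per the integrator core's own requirements; must resolve
        # 'integrator.asset.cache.method' for get_cache()
    },
    'config': {
        'use_mock_data': False,
        'workers': 2,
        'loop_time': 5,
        'talend': {
            'endpoint': 'https://tim.example.com/docs/{asset_id}',
            'endpoint_single': ('https://tim.example.com/docs/{asset_id}/'
                                '{doc_label}/{doc_name}'),
            'usr': 'svc-user',
            'pwd': 'secret',
            'timeout': 10,
        },
    },
}
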
Example #2
class SAPEquipmentHistoryIntegrator(IntegratorCallbacks, RetryingThingRunner):
    def __init__(self, config, agent_config):
        super().__init__(config=agent_config)

        if not (isinstance(config, Mapping)
                and all(section in config
                        for section in ('integrator', 'config'))):
            raise ValueError(
                'Configuration invalid / missing required section')

        # Whilst the integrator core requires particular configuration, top-level sections could be defined to provide
        # parameters specific to this integrator.
        self.__integrator = Integrator(config['integrator'], self.client, self)
        self.__assets = set()
        self.__config = config
        # data cache used to check whether the asset's data has changed before publishing an event
        self.__data_cache = get_cache(
            self.__config, config_path='integrator.asset.cache.method')
        # Pool of workers to execute type2 requests
        workers = NestedConfig.get(self.__config,
                                   'config.workers',
                                   required=False,
                                   default=1,
                                   check=non_negative_int)
        self.__req_pool = ThreadPoolExecutor(max_workers=workers)

        # Validate config
        self.__sap_config_info = SapConfig(
            eq_hist_endp=NestedConfig.get(
                self.__config,
                'config.sap.equipment_history_endpoint',
                required=True,
                check=non_empty_str),
            eq_doc_endp=NestedConfig.get(
                self.__config,
                'config.sap.equipment_document_endpoint',
                required=True,
                check=non_empty_str),
            eq_doc_single=NestedConfig.get(
                self.__config,
                'config.sap.equipment_document_single',
                required=True,
                check=non_empty_str),
            eq_doc_test=NestedConfig.get(self.__config,
                                         'config.sap.equipment_document_test',
                                         required=True,
                                         check=non_empty_str),
            usr=NestedConfig.get(self.__config,
                                 'config.sap.usr',
                                 required=True,
                                 check=non_empty_str),
            pwd=NestedConfig.get(self.__config,
                                 'config.sap.pwd',
                                 required=True,
                                 check=non_empty_str),
            timeout=int(
                NestedConfig.get(self.__config,
                                 'config.sap.timeout',
                                 required=False,
                                 default=10,
                                 check=non_negative_int)))

    def on_startup(self):
        log.info('Startup')
        self.__data_cache.start()
        self.__integrator.start()

    def main(self):
        log.info('Running')
        loop_time = NestedConfig.get(self.__config,
                                     'config.loop_time',
                                     required=False,
                                     default=5,
                                     check=non_negative_int)
        while not self.wait_for_shutdown(loop_time):
            self.__process_data()

    def on_shutdown(self, exc_info):
        log.info('Shutdown')
        self.__integrator.stop()
        self.__data_cache.stop()

    # for IntegratorCallbacks
    def on_asset_created(self, asset_id):
        log.info('Asset created: %s', asset_id)
        self.__assets.add(asset_id)

    # for IntegratorCallbacks
    def on_asset_deleted(self, asset_id):
        log.info('Asset deleted: %s', asset_id)
        self.__assets.discard(asset_id)

    # for IntegratorCallbacks
    def on_t2_request(self, request):
        self.__req_pool.submit(self.__process_t2, request)

    # Wrapped in log_exceptions since the thread pool discards return values and exceptions
    @log_exceptions(log)
    def __process_t2(self, request):
        log.info('New T2 req for %s - %s(%r)', request.asset_id, request.type_,
                 request.data)

        if request.type_ == T2_REQUEST_SAP_DOCUMENTSINGLE:
            self.__t2_do_document_req(request)
        elif request.type_ == T2_REQUEST_SAP_SDOCK:
            self.__t2_do_test_document_req(request)
        else:
            log.warning('Ignoring unknown request type %s', request.type_)
            return

    def __integrator_t2_respond_error(
            self, request, reason=T2ProviderFailureReason.REQ_UNHANDLED):
        try:
            self.__integrator.t2_respond_error(request, reason)
        except ShutdownRequested:
            pass
        except TypeError:
            log.error(
                'Could not send T2 error response, invalid request or reason',
                exc_info=DEBUG_ENABLED)
        except T2ResponseFailure:
            log.error('Could not send T2 error response',
                      exc_info=DEBUG_ENABLED)

    def __integrator_t2_test_respond(self, result, request):
        try:
            value = result['d']['results'][0]['FileName']
        except KeyError:
            log.error("Error in finding mime type")
            self.__integrator_t2_respond_error(request)
            return

        mime = guess_type(value)[0]
        if not mime:
            log.error('Unknown file type')
            self.__integrator_t2_respond_error(request)
            return

        try:
            self.__integrator.t2_respond(
                request, mime,
                a2b_base64(result['d']['results'][0]['Xstring']))
        except binascii_Error:
            log.error("Failed to a2b_base64 data")
            self.__integrator_t2_respond_error(request)
        except ShutdownRequested:
            pass
        except AssetUnknown:
            log.error('Could not send T2 response, asset unknown',
                      exc_info=DEBUG_ENABLED)
        except T2ResponseFailure:
            log.error('Could not send T2 response', exc_info=DEBUG_ENABLED)

    def __integrator_t2_respond(self, result, request):
        try:
            value = result['d']['results'][0]['FileName']
        except KeyError:
            log.error("Error in finding mime type")
            self.__integrator_t2_respond_error(request)
            return

        mime = guess_type(value)[0]
        if not mime:
            log.error('Unknown file type')
            self.__integrator_t2_respond_error(request)
            return

        try:
            self.__integrator.t2_respond(
                request, mime, a2b_base64(result['d']['results'][0]['String']))
        except binascii_Error:
            log.error("Failed to a2b_base64 data")
            self.__integrator_t2_respond_error(request)
        except ShutdownRequested:
            pass
        except AssetUnknown:
            log.error('Could not send T2 response, asset unknown',
                      exc_info=DEBUG_ENABLED)
        except T2ResponseFailure:
            log.error('Could not send T2 response', exc_info=DEBUG_ENABLED)

    def __t2_do_test_document_req(self, request):
        decoded = json.loads(request.data.decode('utf-8'))
        try:
            equnr = decoded['equnr']
            dokar = decoded['document_type']
        except KeyError:
            log.warning('Equnr or Dokar not in request')
            self.__integrator_t2_respond_error(request)
            return

        log.debug("Getting document by Equnr: %s", equnr)

        equipment_document_test = self.__sap_config_info.eq_doc_test.format(
            Equnr=equnr, Dokar=dokar)
        log.debug("Calling Document Test endpoint: %s",
                  equipment_document_test)
        try:
            resp = requests.get(equipment_document_test,
                                auth=(self.__sap_config_info.usr,
                                      self.__sap_config_info.pwd),
                                verify=False,
                                timeout=self.__sap_config_info.timeout)
            if resp.status_code == requests.codes.not_found:  # pylint: disable=no-member
                self.__integrator_t2_respond_error(
                    request, reason=T2ProviderFailureReason.RESOURCE_UNKNOWN)
                return
            log.debug("Response status: %s", resp.status_code)
            resp.raise_for_status()
            if resp.ok:
                try:
                    result = resp.json()
                except Exception:  # pylint: disable=broad-except
                    log.error(
                        "Could not parse JSON from response for Equnr %s",
                        equnr,
                        exc_info=DEBUG_ENABLED)
                else:
                    self.__integrator_t2_test_respond(result, request)
                    return

        except requests.exceptions.HTTPError as ex:
            log.error("__get_document_by_type %s with Equnr: %s", ex, equnr)
        except requests.exceptions.RequestException as ex:
            log.error(ex, exc_info=DEBUG_ENABLED)

        self.__integrator_t2_respond_error(request)

    def __t2_do_document_req(self, request):
        decoded = json.loads(request.data.decode('utf-8'))
        try:
            instid = decoded['instid']
        except KeyError:
            log.warning('instid not in request')
            self.__integrator_t2_respond_error(request)
            return

        log.debug("Getting document by instid: %s", instid)

        equipment_document_single = self.__sap_config_info.eq_doc_single.format(
            instid=instid)
        log.debug("Calling Document Single endpoint: %s",
                  equipment_document_single)
        try:
            resp = requests.get(equipment_document_single,
                                auth=(self.__sap_config_info.usr,
                                      self.__sap_config_info.pwd),
                                verify=False,
                                timeout=self.__sap_config_info.timeout)
            if resp.status_code == requests.codes.not_found:  # pylint: disable=no-member
                self.__integrator_t2_respond_error(
                    request, reason=T2ProviderFailureReason.RESOURCE_UNKNOWN)
                return
            log.debug("Response status: %s", resp.status_code)
            resp.raise_for_status()
            if resp.ok:
                try:
                    result = resp.json()
                except Exception:  # pylint: disable=broad-except
                    log.error(
                        "Could not parse JSON from response for instid %s",
                        instid,
                        exc_info=DEBUG_ENABLED)
                else:
                    self.__integrator_t2_respond(result, request)
                    return

        except requests.exceptions.HTTPError as ex:
            log.error("__get_document_by_instid %s with instid: %s", ex,
                      instid)
        except requests.exceptions.RequestException as ex:
            log.error(ex, exc_info=DEBUG_ENABLED)

        self.__integrator_t2_respond_error(request)

    def __get_values(self, results, enable_document_hack=False, mock=False):
        """Get values from the dictionary and update it in a formatted way"""
        for item in results:
            item['Datum'] = int(search(r'\d+', item['Datum']).group())

            # If there is an equipment number, fetch the document details
            # and append them to this item
            if enable_document_hack:
                equnr = '10000018'
                enable_document_hack = False
            elif not mock:
                try:
                    equnr = item['Equnr']
                    log.debug("Getting documents for Equnr: %s", equnr)
                except KeyError:
                    log.error("\'Equnr\' key not found for this item")
                    item['Documents'] = []
                    continue

            # Use a separate name so the 'results' parameter being iterated
            # is not shadowed by the per-item document lookup
            if mock:
                with open(MOCK_DOCUMENT_FILE, mode='r', encoding='utf-8') as dfp:
                    doc_results = json.load(dfp)
            else:
                doc_results = self.__get_document_by_equnr(equnr)

            if doc_results is None:
                item['Documents'] = []
                continue

            try:
                documents = doc_results['d']['results']
            except KeyError:
                log.error("KeyError exception in __get_values()")
                item['Documents'] = []
                continue

            # Strip date string values and convert to epoch longs
            for document in documents:
                document['Crdat'] = int(
                    search(r'\d+', document['Crdat']).group())
                document['Chdat'] = int(
                    search(r'\d+', document['Chdat']).group())

            item['Documents'] = documents

    def __get_data_for_asset(self, asset_id):
        log.debug("Get Equipment History Data for: %s", asset_id)

        enable_document_hack = False
        if asset_id in ('1000021', '1000015'):
            enable_document_hack = True
            asset_id = 'WEBER-TEST-01'

        equipment_history_endpoint = self.__sap_config_info.eq_hist_endp.format(
            asset_id=asset_id)
        log.debug("Calling Equipment History endpoint: %s",
                  equipment_history_endpoint)
        try:
            resp = requests.get(equipment_history_endpoint,
                                auth=(self.__sap_config_info.usr,
                                      self.__sap_config_info.pwd),
                                verify=False,
                                timeout=self.__sap_config_info.timeout)
        except requests.exceptions.RequestException:
            log.error("RequestException in __get_data_for_asset()")
            return None

        results = None
        log.debug("Response status: %s", resp.status_code)
        if resp.status_code == requests.codes['ok']:
            try:
                data = resp.json()
            except Exception:  # pylint: disable=broad-except
                log.error("Could not parse JSON from response for asset %s",
                          asset_id,
                          exc_info=DEBUG_ENABLED)
                return None

            try:
                results = data['d']['results']
            except KeyError:
                log.error("Could not find results in response for asset %s",
                          asset_id)
                return None

            self.__get_values(results,
                              enable_document_hack=enable_document_hack)
        else:
            log.error("Endpoint response failed: %s", resp.status_code)

        return results

    def __get_document_by_equnr(self, equnr):
        if not equnr:
            return None

        log.debug("Getting documents for Equnr: %s", equnr)
        equipment_document_endpoint = self.__sap_config_info.eq_doc_endp.format(
            equnr=equnr)
        log.debug("Calling Equipment Document endpoint: %s",
                  equipment_document_endpoint)
        try:
            resp = requests.get(equipment_document_endpoint,
                                auth=(self.__sap_config_info.usr,
                                      self.__sap_config_info.pwd),
                                verify=False,
                                timeout=self.__sap_config_info.timeout)
            log.debug("Response status: %s", resp.status_code)

            if resp.status_code == requests.codes.not_found:  # pylint: disable=no-member
                return None
            resp.raise_for_status()
            if resp.ok:
                try:
                    return resp.json()
                except Exception:  # pylint: disable=broad-except
                    log.error(
                        "Could not parse JSON from response for equnr %s",
                        equnr,
                        exc_info=DEBUG_ENABLED)
            else:
                log.error("Endpoint response failed: %s", resp.status_code)

        except requests.exceptions.HTTPError as ex:
            log.error("__get_document_by_equnr %s with equnr: %s", ex, equnr)

        except requests.exceptions.RequestException as ex:
            log.error(ex, exc_info=DEBUG_ENABLED)

        return None

    def __create_event_and_publish(self, asset_id, data):
        log.debug("Publish event for %s", asset_id)
        # Publish the event based on the document type
        for item in data:
            if not item.get('Documents'):
                log.error(
                    "No documents found in equipment history for asset %s",
                    asset_id)
                continue
            try:
                doctype = item['Doctype']
            except KeyError:
                log.error("KeyError exception in __process_data")
                continue

            event = self.__create_document_event(asset_id, doctype, item)
            if not event:
                log.error("Could not create document event for this asset")
                continue

            log.debug("Event: %s", event)

            self.__integrator.publish_event(event, retry=True)

        self.__cache_asset_data_for(asset_id, data)

    def __process_data(self):
        log.debug("Processing Equipment History")
        for asset_id in list(self.__assets):
            log.debug("Processing asset: %s", asset_id)
            data = self.__get_data_for_asset(asset_id)
            if data and self.__has_asset_data_changed_for(asset_id, data):
                try:
                    self.__create_event_and_publish(asset_id, data)
                except ShutdownRequested:
                    log.debug("Shutdown requested while publishing event")
                    return
                except AssetUnknown:
                    pass

    @classmethod
    def __create_document_event(cls, asset_id, doctype, item):
        try:
            event_time = item['Datum']
        except KeyError:
            log.error("Datum KeyError for asset_id %s", asset_id)
            return None

        try:
            event_time = datetime.datetime.utcfromtimestamp(event_time // 1000)
        except OverflowError:
            log.error("Could not create a valid datetime from %s", event_time)
            return None

        log.info("Creating document event for: %s", doctype)
        doctypes = {
            'DELI': SapEquipmentHistoryDeliverySet,
            'MAIN': SapEquipmentHistoryMaintenanceContractSet,
            'MOVE': SapEquipmentHistoryMaterialMovementSet,
            'INLO': SapEquipmentHistoryInspectionLotSet,
            'PROD': SapEquipmentHistoryProductionOrderSet,
            'INVE': SapEquipmentHistoryPhysicalInventorySet,
            'PURO': SapEquipmentHistoryPurchaseOrderSet,
            'PMOD': SapEquipmentHistoryPmOrderSet,
            'NOTI': SapEquipmentHistoryNotificationSet,
            'HIST': SapEquipmentHistoryInstallationHistorySet
        }

        try:
            return doctypes[doctype](asset_id, data=item, time=event_time)
        except KeyError:
            log.error("Unknown document type: %s", doctype)
            return None

    @classmethod
    def __compute_data_hash(cls, data):
        jdata = json.dumps(data, sort_keys=True, separators=(',', ':'))
        return hashlib_md5(jdata.encode('utf8')).hexdigest()

    # Checks to see if the given data for the asset has changed
    # since it was last processed.
    def __has_asset_data_changed_for(self, asset_id, data):

        log.info("Checking asset cache for: %s", asset_id)
        try:
            asset_id_hash = self.__data_cache.get_attr(asset_id, 'hash')
        except KeyError:
            # No cache so this is new data
            return True

        data_hash = self.__compute_data_hash(data)

        if asset_id_hash['hash'] != data_hash:
            # data has changed
            return True
        # Nothing has changed for this data
        return False

    # After publishing the event, update the cache
    def __cache_asset_data_for(self, asset_id, data):

        log.info("Cache asset for: %s", asset_id)
        data_hash = self.__compute_data_hash(data)
        self.__data_cache.mark_as_known(asset_id, hash=data_hash)
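
The two T2 request types in Example #2 each expect a small JSON payload. A sketch of what a requester would send, with field names taken from the handlers above (the values are invented, and the T2 transport itself is outside these snippets):

import json

# Payload for T2_REQUEST_SAP_DOCUMENTSINGLE, decoded by __t2_do_document_req
document_single_payload = json.dumps({'instid': '0000123456'}).encode('utf-8')

# Payload for T2_REQUEST_SAP_SDOCK, decoded by __t2_do_test_document_req
document_test_payload = json.dumps({
    'equnr': '10000018',       # equipment number (this value appears in the hack above)
    'document_type': 'DRW',    # SAP Dokar document type; example value assumed
}).encode('utf-8')
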
Example #3
class SAPEquipmentHistoryIntegrator(IntegratorCallbacks, ThingRunner):

    def __init__(self, config, agent_config):
        super().__init__(config=agent_config)

        if not (isinstance(config, Mapping) and all(section in config for section in ('integrator', 'config'))):
            raise ValueError(
                'Configuration invalid / missing required section')

        # Whilst the integrator core requires particular configuration, top-level sections could be defined to provide
        # parameters specific to this integrator.
        self.__integrator = Integrator(config['integrator'], self.client, self)
        self.__assets = set()
        self.__config = config
        self.__data_cache = NestedConfig.get(self.__config, 'config.data-cache', required=True, check=non_empty_str)

        # Pool of workers to execute type2 requests
        workers = NestedConfig.get(self.__config, 'config.workers', required=False, default=1, check=non_negative_int)
        self.__req_pool = ThreadPoolExecutor(max_workers=workers)

        # Validate config
        self.__use_mock_data = NestedConfig.get(self.__config, 'config.use_mock_data', required=False, default=False)

        self.__sap_config_info = SapConfig(
            eq_hist_endp=NestedConfig.get(
                self.__config, 'config.sap.equipment_history_endpoint', required=True, check=non_empty_str
            ),
            eq_doc_endp=NestedConfig.get(
                self.__config, 'config.sap.equipment_document_endpoint', required=True, check=non_empty_str
            ),
            eq_doc_single=NestedConfig.get(
                self.__config, 'config.sap.equipment_document_single', required=True, check=non_empty_str
            ),
            usr=NestedConfig.get(
                self.__config, 'config.sap.usr', required=True, check=non_empty_str
            ),
            pwd=NestedConfig.get(
                self.__config, 'config.sap.pwd', required=True, check=non_empty_str
            ),
            timeout=int(NestedConfig.get(
                self.__config, 'config.sap.timeout', required=False, default=10, check=non_negative_int
            ))
        )

    def on_startup(self):
        log.info('Startup')
        self.__integrator.start()

    def main(self):
        log.info('Running')
        loop_time = NestedConfig.get(
            self.__config, 'config.loop_time', required=False, default=5, check=non_negative_int
        )
        while not self.wait_for_shutdown(loop_time):
            self.__process_data()

    def on_shutdown(self, exc_info):
        log.info('Shutdown')
        self.__integrator.stop()

    # for IntegratorCallbacks
    def on_asset_created(self, asset_id):
        log.info('Asset created: %s', asset_id)
        self.__assets.add(asset_id)

    # for IntegratorCallbacks
    def on_asset_deleted(self, asset_id):
        log.info('Asset deleted: %s', asset_id)
        self.__assets.discard(asset_id)

    # for IntegratorCallbacks
    def on_t2_request(self, request):
        self.__req_pool.submit(self.__process_t2, request)

    # Wrapped in log_exceptions since the thread pool discards return values and exceptions
    @log_exceptions(log)
    def __process_t2(self, request):
        log.info('New T2 req for %s - %s(%r)', request.asset_id, request.type_, request.data)

        if request.type_ != T2_REQUEST_SAP_DOCUMENTSINGLE:
            log.warning('Ignoring unknown request type %s', request.type_)
            return
        self.__t2_do_document_req(request)

    def __integrator_respond_error(self, request):
        try:
            self.__integrator.t2_respond_error(request, T2ProviderFailureReason.REQ_UNHANDLED)
        except AssetUnknown:
            pass

    def __t2_do_document_req(self, request):
        decoded = json.loads(request.data.decode('utf-8'))
        try:
            instid = decoded['instid']
        except KeyError:
            log.warning('instid not in request')
            self.__integrator_respond_error(request)
            return

        result = self.__get_document_by_instid(instid)
        if result:
            try:
                # Probe for the base64 document payload up front
                result['d']['results'][0]['String']
            except KeyError:
                log.error('Document payload missing from response')
                self.__integrator_respond_error(request)
                return
            try:
                value = result['d']['results'][0]['FileName']
            except KeyError:
                log.error("Error in finding mime type")
                self.__integrator_respond_error(request)
                return

            mime = guess_type(value)[0]
            if not mime:
                log.error('Unknown file type')
                self.__integrator_respond_error(request)
                return

            try:
                self.__integrator.t2_respond(
                    request, mime, a2b_base64(result['d']['results'][0]['String'], validate=True)
                )
            except binascii_Error:
                log.error("Failed to a2b_base64 data")
                self.__integrator_respond_error(request)
        else:
            log.error("Failed to get Document Single for: %s. %s", instid, result)
            self.__integrator_respond_error(request)

    def __get_document_by_instid(self, instid):
        log.debug("Getting document by instid: %s", instid)

        if self.__use_mock_data:
            return {"d": {"results": [
                {"Instid": "123456", "FileName": "text.txt", "FileType": "ASC",
                 "String": "VGltIHdhcyBoZXJlISAyMDE4LTExLTMwIDE4OjA5IQ=="}
            ]}}  # noqa

        equipment_document_single = self.__sap_config_info.eq_doc_single.format(instid=instid)
        log.debug("Calling Document Single endpoint: %s", equipment_document_single)
        try:
            resp = requests.get(
                equipment_document_single,
                auth=(self.__sap_config_info.usr, self.__sap_config_info.pwd),
                verify=False,
                timeout=self.__sap_config_info.timeout
            )
            log.debug("Response status: %s", resp.status_code)
        except requests.exceptions.RequestException:
            log.error("__get_document_by_instid() error")
            return None

        results = None
        if resp.status_code == requests.codes['ok']:
            try:
                results = resp.json()
            except ValueError:
                log.error('Decoding JSON has failed')
                return None

        return results

    def __get_values(self, results, enable_document_hack=False, mock=False):
        """Get values from the dictionary and update it in a formatted way"""
        for item in results:
            item['Datum'] = int(search(r'\d+', item['Datum']).group())

            # If there is an equipment number, fetch the document details
            # and append them to this item
            if enable_document_hack:
                equnr = '10000018'
                enable_document_hack = False
            elif not mock:
                try:
                    equnr = item['Equnr']
                    log.debug("Getting documents for Equnr: %s", equnr)
                except KeyError:
                    log.error("\'Equnr\' key not found for this item")
                    item['Documents'] = []
                    continue

            # Use a separate name so the 'results' parameter being iterated
            # is not shadowed by the per-item document lookup
            if mock:
                with open(MOCK_DOCUMENT_FILE, mode='r', encoding='utf-8') as dfp:
                    doc_results = json.load(dfp)
            else:
                doc_results = self.__get_document_by_equnr(equnr)

            if doc_results is None:
                item['Documents'] = []
                continue

            try:
                documents = doc_results['d']['results']
            except KeyError:
                log.error("KeyError exception in __get_values()")
                item['Documents'] = []
                continue

            # Strip date string values and convert to epoch longs
            for document in documents:
                document['Crdat'] = int(search(r'\d+', document['Crdat']).group())
                document['Chdat'] = int(search(r'\d+', document['Chdat']).group())

            item['Documents'] = documents

    def __get_mock_data(self):
        log.debug("Using mock Equipment History Data data")
        with open(MOCK_DATA_FILE, mode='r', encoding='utf-8') as f:
            data = json.load(f)

        try:
            results = data['d']['results']
        except KeyError:
            log.error("KeyError exception in __get_data_for_asset")
            return None

        self.__get_values(results, mock=True)

        return results

    def __get_data_for_asset(self, asset_id):
        log.debug("Get Equipment History Data for: %s", asset_id)
        if self.__use_mock_data:
            return self.__get_mock_data()

        enable_document_hack = False
        if asset_id in ('1000021', '1000015'):
            enable_document_hack = True
            asset_id = 'WEBER-TEST-01'

        equipment_history_endpoint = self.__sap_config_info.eq_hist_endp.format(asset_id=asset_id)
        log.debug("Calling Equipment History endpoint: %s", equipment_history_endpoint)
        try:
            resp = requests.get(
                equipment_history_endpoint,
                auth=(self.__sap_config_info.usr, self.__sap_config_info.pwd),
                verify=False,
                timeout=self.__sap_config_info.timeout
            )
        except requests.exceptions.RequestException:
            log.error("RequestException in __get_data_for_asset()")
            return None

        results = None
        log.debug("Response status: %s", resp.status_code)
        if resp.status_code == requests.codes['ok']:
            try:
                data = resp.json()
            except ValueError:
                log.error('Decoding JSON has failed')
                return None
            try:
                results = data['d']['results']
            except KeyError:
                log.error("KeyError exception in __get_data_for_asset()")
                return None

            self.__get_values(results, enable_document_hack=enable_document_hack)

        return results

    def __get_document_by_equnr(self, equnr):
        if not equnr:
            return None

        log.debug("Getting documents for Equnr: %s", equnr)
        equipment_document_endpoint = self.__sap_config_info.eq_doc_endp.format(equnr=equnr)
        log.debug("Calling Equipment Document endpoint: %s", equipment_document_endpoint)
        try:
            resp = requests.get(
                equipment_document_endpoint,
                auth=(self.__sap_config_info.usr, self.__sap_config_info.pwd),
                verify=False,
                timeout=self.__sap_config_info.timeout
            )
            log.debug("Response status: %s", resp.status_code)
        except requests.exceptions.RequestException:
            log.error("__get_document_by_equnr error")
            return None

        results = None
        if resp.status_code == requests.codes['ok']:
            try:
                results = resp.json()
            except ValueError:
                log.error('Decoding JSON has failed')
                return None

        return results

    def __process_data(self):
        log.debug("Processing Equipment History")
        for asset_id in list(self.__assets):
            log.debug("Processing asset: %s", asset_id)
            data = self.__get_data_for_asset(asset_id)
            if data and self.__has_asset_data_changed_for(asset_id, data):
                log.debug("Publish event for %s", asset_id)
                # Publish the event based on the document type
                for item in data:
                    if not item.get('Documents'):
                        log.error("No documents found in equipment history for asset %s", asset_id)
                        continue
                    try:
                        doctype = item['Doctype']
                    except KeyError:
                        log.error("KeyError exception in __process_data")
                        continue

                    event = self.__create_document_event(asset_id, doctype, item)
                    if not event:
                        log.error("Could not create document event for this asset")
                        continue

                    log.debug("Event: %s", event)

                    try:
                        self.__integrator.publish_event(event)
                    # These failures are safe to skip here; the next loop iteration will retry
                    except EventPublishFailure as ex:
                        log.error("Event Publish Failure: %s", ex)
                    except AssetUnknown:
                        pass

                self.__cache_asset_data_for(asset_id, data)

    @classmethod
    def __create_document_event(cls, asset_id, doctype, item):
        try:
            event_time = item['Datum']
        except KeyError:
            log.error("Datum KeyError for asset_id %s", asset_id)
            return None

        try:
            event_time = datetime.datetime.utcfromtimestamp(event_time // 1000)
        except OverflowError:
            log.error("Could not create a valid datetime from %s", event_time)
            return None

        log.info("Creating document event for: %s", doctype)
        doctypes = {
            'DELI': SapEquipmentHistoryDeliverySet,
            'MAIN': SapEquipmentHistoryMaintenanceContractSet,
            'MOVE': SapEquipmentHistoryMaterialMovementSet,
            'INLO': SapEquipmentHistoryInspectionLotSet,
            'PROD': SapEquipmentHistoryProductionOrderSet,
            'INVE': SapEquipmentHistoryPhysicalInventorySet,
            'PURO': SapEquipmentHistoryPurchaseOrderSet,
            'PMOD': SapEquipmentHistoryPmOrderSet,
            'NOTI': SapEquipmentHistoryNotificationSet,
            'HIST': SapEquipmentHistoryInstallationHistorySet
        }

        try:
            return doctypes[doctype](asset_id, data=item, time=event_time)
        except KeyError:
            log.error("Unknown document type: %s", doctype)
            return None

    def __has_asset_data_changed_for(self, asset_id, data):
        """Checks whether the given data for the asset has changed since it was last processed"""
        log.debug("Checking asset cache for: %s", asset_id)
        if not os.path.exists(self.__data_cache):
            # No cache so this is new data
            return True

        file_path = os.path.join(self.__data_cache, asset_id + '.json')

        if not os.path.isfile(file_path):
            # No file exists so this is new data
            return True

        with open(file_path, mode="r", encoding="utf-8") as f:
            cached_data_hash = f.read()

        data_hash = self.__compute_data_hash(data)
        if cached_data_hash != data_hash:
            os.remove(file_path)
            # The data has changed so flush the cache
            return True

        # Nothing has changed for this data
        log.debug("Asset %s already in cache", asset_id)
        return False

    @classmethod
    def __compute_data_hash(cls, data):
        jdata = json.dumps(data, sort_keys=True, separators=(',', ':'))
        return hashlib_md5(jdata.encode('utf8')).hexdigest()

    def __cache_asset_data_for(self, asset_id, data):
        log.debug("Cache asset for: %s", asset_id)
        if not os.path.exists(self.__data_cache):
            log.debug("Creating data cache")
            os.makedirs(self.__data_cache, exist_ok=True)

        file_path = os.path.join(self.__data_cache, asset_id + '.json')

        if not os.path.isfile(file_path):
            with open(file_path, mode="w", encoding="utf-8") as f:
                f.write(self.__compute_data_hash(data))
            log.debug("Caching data for asset %s", asset_id)
Example #4
class SAPSupersessionIntegrator(IntegratorCallbacks, ThingRunner):
    def __init__(self, config, agent_config):
        super().__init__(config=agent_config)

        if not (isinstance(config, dict)
                and all(section in config
                        for section in ('integrator', 'config'))):
            raise ValueError(
                'Configuration invalid / missing required section')

        # Whilst the integrator core requires particular configuration, top-level sections could be defined to provide
        # parameters specific to this integrator.
        self.__integrator = Integrator(config['integrator'], self.client, self)
        self.__config = config
        self.__data_cache = self.__config['config']['data-cache']
        self.__req_pool = ThreadPoolExecutor(
            max_workers=self.__config['config']['workers'])

    def on_startup(self):
        log.debug('SAP Supersession Integrator Startup')
        self.__integrator.start()

    def main(self):
        log.debug('SAP Supersession Integrator Running')
        loop_time = self.__config['config']['loop_time']
        while not self.wait_for_shutdown(loop_time):
            pass

    def on_shutdown(self, exc_info):
        log.debug('SAP Supersession Integrator Shutdown')
        self.__integrator.stop()

    # for IntegratorCallbacks
    def on_asset_created(self, asset_id):
        log.debug('Asset created: %s', asset_id)

    # for IntegratorCallbacks
    def on_asset_deleted(self, asset_id):
        log.debug('Asset deleted: %s', asset_id)

    # for IntegratorCallbacks
    def on_t2_request(self, request):
        self.__req_pool.submit(self.__process_t2, request)

    # Wrapped in log_exceptions since the thread pool discards return values and exceptions
    @log_exceptions(log)
    def __process_t2(self, request):
        log.info('New T2 req for %s - %s(%r)', request.asset_id, request.type_,
                 request.data)

        if request.type_ != T2_REQUEST_SAP_SUPERSESSION:
            log.warning('Ignoring unknown request type %s', request.type_)
            return
        self.__t2_do_sapspc(request)

    def __t2_do_sapspc(self, request):
        decoded = json.loads(request.data.decode('utf-8'))
        data = self._get_data_for_material(decoded["Matnr"])
        if data:
            self.__integrator.t2_respond(request, "application/json",
                                         json.dumps(data).encode('utf8'))
        else:
            self.__integrator.t2_respond_error(
                request, T2ProviderFailureReason.REQ_UNHANDLED)

    def _get_data_for_material(self, material_no):
        log.info("Get Supersession Data for: %s", material_no)

        data = None

        if self.__config['config']['use_mock_data'] == 1:
            log.debug("Using mock data")
            with open(self.MOCK_DATA_FILE, mode="r", encoding="utf-8") as f:
                results = json.load(f)

            data = self._normalise_items(results['d']['results'])

        else:
            endpoint = self.__config['config']['sap']['endpoint']
            usr = self.__config['config']['sap']['usr']
            pwd = self.__config['config']['sap']['pwd']

            endpoint = endpoint.replace('XXX_MATERIAL_NO_XXX', material_no)
            timeout = int(self.__config['config']['sap']['timeout'])

            log.debug("Calling: %s", endpoint)

            try:
                resp = requests.get(endpoint,
                                    auth=(usr, pwd),
                                    verify=False,
                                    timeout=timeout)
                log.debug("Response status: %s", resp.status_code)
                if resp.status_code == requests.codes['ok']:
                    results = resp.json()
                    data = self._normalise_items(results['d']['results'])

            except requests.exceptions.RequestException as ex:
                log.error(ex)

        return data

    @classmethod
    def _normalise_items(cls, data):
        """Strip the unnecessary '__metadata' entries and convert the
        dimension/weight strings to floats"""
        for item in data:
            item.pop("__metadata", {})
            for key in ('Brgew', 'Ntgew', 'Laeng', 'Breit', 'Hoehe'):
                item[key] = cls._convert_to_double(item[key])
        return data

    @classmethod
    def _convert_to_double(cls, str_val):
        try:
            return float(str_val)
        except (TypeError, ValueError):
            log.error("Error converting str %s to float", str_val)
            return None

    MOCK_DATA_FILE = os.path.join('cfg', 'mock-data.json')
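
Unlike the other examples, Example #4 templates its endpoint by replacing a literal XXX_MATERIAL_NO_XXX marker instead of using str.format() placeholders. Both styles produce the same URL; a short comparison with an invented endpoint:

# Marker replacement, as in _get_data_for_material above
endpoint = 'https://sap.example.com/supersession/XXX_MATERIAL_NO_XXX'
url = endpoint.replace('XXX_MATERIAL_NO_XXX', '12345678')

# str.format() placeholders, as in the SapConfig/TalendConfig examples
template = 'https://sap.example.com/supersession/{material_no}'
url = template.format(material_no='12345678')

Note that format() fails fast with a KeyError when a named placeholder is left unfilled, whereas a missed marker would be sent to the server verbatim.
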
Example #5
class TalendTimDocumentIntegrator(IntegratorCallbacks, ThingRunner):
    def __init__(self, config, agent_config):

        super().__init__(config=agent_config)

        if not (isinstance(config, dict)
                and all(section in config
                        for section in ('integrator', 'config'))):
            raise ValueError(
                'Configuration invalid / missing required section')

        # Whilst the integrator core requires particular configuration, top-level sections could be defined to provide
        # parameters specific to this integrator.
        self.__integrator = Integrator(config['integrator'], self.client, self)
        self.__assets = set()
        self.__config = config
        self.__data_cache = self.__config['config']['data-cache']
        self.__req_pool = ThreadPoolExecutor(
            max_workers=self.__config['config']['workers'])

    def on_startup(self):
        log.info('Talend Tim Document Integrator Startup')

        self.__integrator.start()

    def main(self):
        log.info('Talend Tim Document Integrator Running')
        self._process_data()
        loop_time = self.__config['config']['loop_time']
        while not self.wait_for_shutdown(loop_time):
            self._process_data()

    def on_shutdown(self, exc_info):
        log.info('Talend Tim Document Integrator Shutdown')
        self.__integrator.stop()

    # for IntegratorCallbacks
    def on_asset_created(self, asset_id):
        log.info('Asset created: %s', asset_id)
        self.__assets.add(asset_id)

    # for IntegratorCallbacks
    def on_asset_deleted(self, asset_id):
        log.info('Asset deleted: %s', asset_id)
        self.__assets.discard(asset_id)

    # for IntegratorCallbacks
    def on_t2_request(self, request):
        self.__req_pool.submit(self.__process_t2, request)

    # Wrapped in log_exceptions since the thread pool discards return values and exceptions
    @log_exceptions(log)
    def __process_t2(self, request):
        log.info('New T2 req for %s - %s(%r)', request.asset_id, request.type_,
                 request.data)

        if request.type_ != T2_REQUEST_TALEND_DOCUMENT:
            log.warning('Ignoring unknown request type %s', request.type_)
            return
        self.__t2_do_tlddoc(request)

    def __t2_do_tlddoc(self, request):
        decoded = json.loads(request.data.decode('utf-8'))
        data = self._get_tim_doc(decoded["serialNumber"],
                                 decoded["documentLabel"],
                                 decoded["documentName"])
        if data:
            try:
                self.__integrator.t2_respond(request, "application/pdf",
                                             b64decode(data))
            except binascii_Error:
                log.error("Failed to b64decode data")
                self.__integrator.t2_respond_error(
                    request, T2ProviderFailureReason.REQ_UNHANDLED)
        else:
            self.__integrator.t2_respond_error(
                request, T2ProviderFailureReason.REQ_UNHANDLED)

    def _get_tim_doc(self, serial_no, document_label, document_name):
        log.info("Get Talend doc for: %s", serial_no)

        data = None

        if self.__config['config']['use_mock_data'] == 1:
            return mockpdf.data

        endpoint = self.__config['config']['talend']['endpoint_single']
        usr = self.__config['config']['talend']['usr']
        pwd = self.__config['config']['talend']['pwd']
        endpoint = endpoint.replace('XXX_ASSET_ID_XXX', serial_no)
        endpoint = endpoint.replace('XXX_DOC_LABEL_XXX', document_label)
        endpoint = endpoint.replace('XXX_DOC_NAME_XXX', document_name)
        timeout = int(self.__config['config']['talend']['timeout'])

        log.debug("Calling: %s", endpoint)

        try:
            resp = requests.get(endpoint,
                                auth=(usr, pwd),
                                verify=False,
                                timeout=timeout)
            log.debug("Response status: %s", resp.status_code)
            if resp.text and resp.status_code == requests.codes['ok']:
                try:
                    data = resp.json()['document']
                except Exception as ex:  # pylint: disable=broad-except
                    log.error("Could not parse JSON from response: %s",
                              resp.text)
                    log.error(ex)
        except requests.exceptions.RequestException as ex:
            log.error(ex)

        return data

    def _get_data_for_asset(self, asset_id):
        log.info("Get Talend data for: %s", asset_id)

        data = None

        if self.__config['config']['use_mock_data'] == 1:
            log.debug("Using mock data")
            with open(self.MOCK_DATA_FILE, mode="r", encoding="utf-8") as f:
                data = json.load(f)

        else:
            endpoint = self.__config['config']['talend']['endpoint']
            usr = self.__config['config']['talend']['usr']
            pwd = self.__config['config']['talend']['pwd']

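            # Optional test shim: when enabled, known sample asset IDs are
            # remapped to a serial number that exists upstream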
            key = 'config.enable_sap_sample_serial_hack'
            if NestedConfig.get(self.__config,
                                key,
                                required=False,
                                default=False,
                                check=bool):
                if asset_id in ('1000021', '1000015'):
                    asset_id = '16701003340'

            endpoint = endpoint.replace('XXX_ASSET_ID_XXX', asset_id)
            timeout = int(self.__config['config']['talend']['timeout'])

            log.debug("Calling: %s", endpoint)

            try:
                resp = requests.get(endpoint,
                                    auth=(usr, pwd),
                                    verify=False,
                                    timeout=timeout)
                log.debug("Response status: %s", resp.status_code)
                if resp.text and resp.status_code == requests.codes['ok']:
                    try:
                        data = resp.json()
                    except Exception as ex:  # pylint: disable=broad-except
                        log.error("Could not parse JSON from response %s: %s",
                                  resp.text, ex)
                        raise
            except requests.exceptions.RequestException as ex:
                log.error(ex)

        return data

    def _process_data(self):
        log.info("Processing Talend Tim Documents")
        for asset_id in list(self.__assets):
            log.debug("Processing asset: %s", asset_id)
            data = self._get_data_for_asset(asset_id)
            if data is not None:
                if self._has_asset_data_changed_for(asset_id, data):
                    event = TalendTimDocumentSet(asset_id,
                                                 data=data["documentList"])
                    log.debug("Publishing event: %s", event)

                    try:
                        self.__integrator.publish_event(event)
                        self._cache_asset_data_for(asset_id, data)

                    # These will all retry
                    except EventPublishFailure as ex:
                        log.error("Event Publish Failure: %s", ex)
                    except AssetUnknown:
                        # Asset disappeared between listing and publish;
                        # it will be retried on the next loop
                        pass

    # Checks to see if the given data for the asset has changed
    # since it was last processed.
    def _has_asset_data_changed_for(self, asset_id, data):
        log.info("Checking asset cache for: %s", asset_id)
        if not os.path.exists(self.__data_cache):
            # No cache so this is new data
            return True

        filename = asset_id + '.json'
        file_path = os.path.join(self.__data_cache, filename)

        if not os.path.isfile(file_path):
            # No file exists so this is new data
            return True

        with open(file_path, mode="r", encoding="utf-8") as f:
            cached_data_hash = f.read()

        data_hash = self.__compute_data_hash(data)
        if cached_data_hash != data_hash:
            os.remove(file_path)
            # The data has changed so flush the cache
            return True

        # Nothing has changed for this data
        return False

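    # Canonical serialisation (sorted keys, no whitespace) keeps the hash
    # stable across dict key orderings, so only real content changes it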
    @classmethod
    def __compute_data_hash(cls, data):
        jdata = json.dumps(data, sort_keys=True, separators=(',', ':'))
        return hashlib_md5(jdata.encode('utf8')).hexdigest()

    def _cache_asset_data_for(self, asset_id, data):
        log.info("Cache asset for: %s", asset_id)
        if not os.path.exists(self.__data_cache):
            log.debug("Creating data cache")
            os.makedirs(self.__data_cache, exist_ok=True)

        filename = asset_id + '.json'
        file_path = os.path.join(self.__data_cache, filename)

        if not os.path.isfile(file_path):
            with open(file_path, mode="w", encoding="utf-8") as f:
                f.write(self.__compute_data_hash(data))
            log.debug("Caching data for asset %s", asset_id)

    MOCK_DATA_FILE = os.path.join('cfg', 'mock-data.json')
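
Below is a minimal standalone sketch of the hash-based change-detection pattern that _has_asset_data_changed_for and _cache_asset_data_for implement above. The helper names (compute_hash, has_changed, remember) and CACHE_DIR are hypothetical, chosen for the sketch only:

import hashlib
import json
import os

CACHE_DIR = '/tmp/asset-cache'  # hypothetical location

def compute_hash(data):
    # Canonical JSON so logically-equal payloads hash identically
    jdata = json.dumps(data, sort_keys=True, separators=(',', ':'))
    return hashlib.md5(jdata.encode('utf8')).hexdigest()

def has_changed(asset_id, data):
    path = os.path.join(CACHE_DIR, asset_id + '.json')
    if not os.path.isfile(path):
        return True  # never cached: treat as new data
    with open(path, encoding='utf-8') as f:
        if f.read() != compute_hash(data):
            os.remove(path)  # stale hash: flush so it can be rewritten
            return True
    return False

def remember(asset_id, data):
    os.makedirs(CACHE_DIR, exist_ok=True)
    with open(os.path.join(CACHE_DIR, asset_id + '.json'),
              mode='w', encoding='utf-8') as f:
        f.write(compute_hash(data))

# First sight counts as changed; an identical payload is then suppressed
payload = {'documentList': [{'name': 'manual.pdf'}]}
if has_changed('1000021', payload):
    remember('1000021', payload)
assert not has_changed('1000021', payload)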
Example #6
class SAPBomAsBuiltIntegrator(IntegratorCallbacks, ThingRunner):

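    # Whitelist of SAP BOM fields forwarded in T2 responses (see __tidy_dict)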
    __TRANSFER_KEYS = frozenset(
        ("ParRecno", "SonRecno", "Matnr", "Descr", "Valfr", "Valto"))

    def __init__(self, config, agent_config):
        super().__init__(config=agent_config)

        if not (isinstance(config, dict)
                and all(section in config
                        for section in ('integrator', 'config'))):
            raise ValueError(
                'Configuration invalid / missing required section')

        # Whilst the integrator core requires particular configuration, top-level sections could be defined to provide
        # parameters specific to this integrator.
        self.__integrator = Integrator(config['integrator'], self.client, self)
        self.__assets = set()
        self.__config = config
        self.__data_cache = self.__config['config']['data-cache']
        self.__req_pool = ThreadPoolExecutor(
            max_workers=self.__config['config']['workers'])

    def on_startup(self):
        log.debug('SAP Bom As Built Integrator Startup')
        self.__integrator.start()

    def main(self):
        log.debug('SAP Bom As Built Integrator Running')
        loop_time = self.__config['config']['loop_time']
        while not self.wait_for_shutdown(loop_time):
            self._process_sap_data()

    def on_shutdown(self, exc_info):
        log.debug('SAP Bom As Built Integrator Shutdown')
        self.__integrator.stop()

    # for IntegratorCallbacks
    def on_asset_created(self, asset_id):
        log.debug('Asset created: %s', asset_id)
        self.__assets.add(asset_id)

    # for IntegratorCallbacks
    def on_asset_deleted(self, asset_id):
        log.debug('Asset deleted: %s', asset_id)
        self.__assets.discard(asset_id)

    # for IntegratorCallbacks
    def on_t2_request(self, request):
        self.__req_pool.submit(self.__process_t2, request)

    # Wrapped because it runs via the thread pool, which discards return values and exceptions
    @log_exceptions(log)
    def __process_t2(self, request):
        log.info('New T2 req for %s - %s(%r)', request.asset_id, request.type_,
                 request.data)

        if request.type_ != T2_REQUEST_SAP_BOMASMAINT:
            log.warning('Ignoring unknown request type %s', request.type_)
            return
        self.__t2_do_bommaint(request)

    def __t2_do_bommaint(self, request):
        decoded = json.loads(request.data.decode('utf-8'))
        data = self._get_bom_as_maintained(request.asset_id, decoded["Valfr"])
        if data:
            to_send = self.__tidy_dict(data['d']['results'])
            self.__integrator.t2_respond(request, "application/json",
                                         json.dumps(to_send).encode('utf8'))
        else:
            self.__integrator.t2_respond_error(
                request, T2ProviderFailureReason.REQ_UNHANDLED)

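    # Reduce each SAP result row to the whitelisted __TRANSFER_KEYS so only
    # the fields the consumer needs are returned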
    def __tidy_dict(self, results):
        ret = []
        for row in results:
            temp = {}
            for key in self.__TRANSFER_KEYS:
                temp[key] = row[key]
            ret.append(temp)
        return ret

    def _get_bom_as_maintained(self, asset_id, valid_from):
        log.info("Get Bom As Maintained Data for: %s %s", asset_id, valid_from)

        data = None

        if self.__config['config']['use_mock_data'] == 1:
            log.debug("Using mock bom data")
            with open(self.MOCK_DATA_FILE, mode="r", encoding="utf-8") as f:
                data = json.load(f)
        else:
            endpoint = self.__config['config']['bomgar']['endpoint_maint']
            usr = self.__config['config']['bomgar']['usr']
            pwd = self.__config['config']['bomgar']['pwd']

            key = 'config.enable_sap_sample_serial_hack'
            if NestedConfig.get(self.__config,
                                key,
                                required=False,
                                default=False,
                                check=bool):
                if asset_id in ('1000021', '1000015'):
                    asset_id = '526104875'

            endpoint = endpoint.replace('XXX_ASSET_ID_XXX', asset_id)
            endpoint = endpoint.replace('XXX_VALID_FROM_XXX', valid_from)
            timeout = int(self.__config['config']['bomgar']['timeout'])

            log.debug("Calling: %s", endpoint)

            try:
                resp = requests.get(endpoint,
                                    auth=(usr, pwd),
                                    verify=False,
                                    timeout=timeout)
                log.debug("Response status: %s", resp.status_code)
                if resp.status_code == requests.codes['ok']:
                    data = resp.json()

            except requests.exceptions.RequestException as ex:
                log.error(ex)

        return data

    def _get_data_for_asset(self, asset_id):
        log.info("Get Bom As Built Data for: %s", asset_id)

        data = None

        if self.__config['config']['use_mock_data'] == 1:
            log.debug("Using mock bom data")
            with open(self.MOCK_DATA_FILE, mode="r", encoding="utf-8") as f:
                data = json.load(f)

        else:
            endpoint = self.__config['config']['bomgar']['endpoint']
            usr = self.__config['config']['bomgar']['usr']
            pwd = self.__config['config']['bomgar']['pwd']

            key = 'config.enable_sap_sample_serial_hack'
            if NestedConfig.get(self.__config,
                                key,
                                required=False,
                                default=False,
                                check=bool):
                if asset_id in ('1000021', '1000015'):
                    asset_id = '526104875'

            endpoint = endpoint.replace('XXX_ASSET_ID_XXX', asset_id)
            timeout = int(self.__config['config']['bomgar']['timeout'])

            log.debug("Calling: %s", endpoint)

            try:
                resp = requests.get(endpoint,
                                    auth=(usr, pwd),
                                    verify=False,
                                    timeout=timeout)
                log.debug("Response status: %s", resp.status_code)
                if resp.status_code == requests.codes['ok']:
                    data = resp.json()

            except requests.exceptions.RequestException as ex:
                log.error(ex)

        return data

    def _process_sap_data(self):
        log.debug("Processing Bom As Built")
        for asset_id in list(self.__assets):
            log.debug("Processing asset: %s", asset_id)
            data = self._get_data_for_asset(asset_id)
            if data is not None:
                if self._has_asset_data_changed_for(asset_id, data):
                    log.info("Publish event for %s", asset_id)

                    items = data['d']['results']
                    parents = [
                        i for i in items if i.get('ParRecno', None) == ''
                    ]
                    event_time = None
                    if parents:
                        event_time = parents[0].get('Valfr', None)
                    if event_time:
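                        # 'Valfr' arrives as a compact SAP timestamp,
                        # e.g. '20211231235959', matching the format below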
                        try:
                            event_time = datetime.datetime.strptime(
                                event_time, '%Y%m%d%H%M%S')
                        except (ValueError, TypeError) as ex:
                            log.error(
                                "Could not create a valid datetime from %s",
                                event_time)
                            log.error(ex)

                    event = SapBomAsBuiltSet(asset_id,
                                             data=data['d']['results'],
                                             time=event_time)
                    log.debug("Event: %s", event)
                    try:
                        self.__integrator.publish_event(event)
                        self._cache_asset_data_for(asset_id, data)

                    # These will all retry
                    except EventPublishFailure as ex:
                        log.error("Event Publish Failure: %s", ex)
                    except AssetUnknown:
                        # Asset disappeared between listing and publish;
                        # it will be retried on the next loop
                        pass

    # Checks to see if the given data for the asset has changed
    # since it was last processed.
    def _has_asset_data_changed_for(self, asset_id, data):
        log.info("Checking asset cache for: %s", asset_id)
        if not os.path.exists(self.__data_cache):
            # No cache so this is new data
            return True

        filename = asset_id + '.json'
        file_path = os.path.join(self.__data_cache, filename)

        if not os.path.isfile(file_path):
            # No file exists so this is new data
            return True

        with open(file_path, mode="r", encoding="utf-8") as f:
            cached_data_hash = f.read()

        data_hash = self.__compute_data_hash(data)
        if cached_data_hash != data_hash:
            os.remove(file_path)
            # The data has changed so flush the cache
            return True

        # Nothing has changed for this data
        return False

    @classmethod
    def __compute_data_hash(cls, data):
        jdata = json.dumps(data, sort_keys=True, separators=(',', ':'))
        return hashlib_md5(jdata.encode('utf8')).hexdigest()

    def _cache_asset_data_for(self, asset_id, data):
        log.info("Cache asset for: %s", asset_id)
        if not os.path.exists(self.__data_cache):
            log.debug("Creating data cache")
            os.makedirs(self.__data_cache, exist_ok=True)

        filename = asset_id + '.json'
        file_path = os.path.join(self.__data_cache, filename)

        if not os.path.isfile(file_path):
            with open(file_path, mode="w", encoding="utf-8") as f:
                f.write(self.__compute_data_hash(data))
            log.debug("Caching data for asset %s", asset_id)

    MOCK_DATA_FILE = os.path.join('cfg', 'mock-ibase-data.json')
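
The XXX_..._XXX placeholder convention both integrators use for endpoint templating can be exercised in isolation. A small sketch with a hypothetical URL and helper name (render_endpoint), mirroring the plain str.replace() strategy of _get_bom_as_maintained:

ENDPOINT_TEMPLATE = ('https://sap.example.com/bom'  # hypothetical URL
                     '?serial=XXX_ASSET_ID_XXX&from=XXX_VALID_FROM_XXX')

def render_endpoint(template, asset_id, valid_from):
    # Same straight replace() strategy as the integrator methods above
    return (template
            .replace('XXX_ASSET_ID_XXX', asset_id)
            .replace('XXX_VALID_FROM_XXX', valid_from))

print(render_endpoint(ENDPOINT_TEMPLATE, '526104875', '20211231235959'))
# -> https://sap.example.com/bom?serial=526104875&from=20211231235959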