def analysis_result(self):
        """
        This method serves an analysis result request for the partner client HTTP service.
        @param self the PartnerHTTPServices itself, it should be PartnerHTTPServices
        @throws PartnerClientError throws if request body is empty
        @throws Exception any error should be raised to caller.
        CherryPy will handle the error and translate it to HTTP code 500 (refer to partnercli#handle_error)
        """
        signature='hfppnetwork.partner.httpservices.PartnerHTTPServices.analysis_result'
        method_enter(signature,{"self":self})
        # Read the data request XML
        request_body = cherrypy.request.body.read().decode("utf-8")
        logging.debug('%s:%s', 'request_body', request_body)
        if len(request_body)==0:
            raise PartnerClientError("request body can not be empty")
        # Parse data request XML
        root = ET.fromstring(request_body)
        request_id = root.findtext('./RequestID')
        study_id = root.findtext('./StudyID')
        result = root.findtext('./Result')

        # Kick off a new thread to handle the request
        handler = AnalysisResultHandler()
        t = Thread(target=handler.handle_analysis_result, args=(request_id, study_id, result))
        t.daemon = False
        t.start()
        method_exit(signature)
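
# Hedged sketch (not part of the original source): the request body parsed by
# analysis_result above is assumed to carry RequestID, StudyID and Result child
# elements; only the child element names come from the findtext() calls, the
# root element name used here is hypothetical.
import xml.etree.ElementTree as ET

sample_analysis_result_xml = (
    '<AnalysisResult>'
    '<RequestID>req-001</RequestID>'
    '<StudyID>study-001</StudyID>'
    '<Result>...base64-encoded, zlib-compressed report...</Result>'
    '</AnalysisResult>'
)
root = ET.fromstring(sample_analysis_result_xml)
assert root.findtext('./RequestID') == 'req-001'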
    def handle_analysis_result(self, request_id, study_id, result):
        """
        This method is used to handle analysis result.
        This method will not throw exceptions. Any error should be caught and logged.
        @param self the AnalysisResultHandler itself, it should be AnalysisResultHandler
        @param request_id the request ID, it is supposed to be a non-None/empty str. Required.
        @param study_id the study ID, it is supposed to be a non-None/empty str. Required.
        @param result the analysis result. Required.
        """
        signature='hfppnetwork.partner.httpservices.AnalysisResultHandler.handle_analysis_result'
        method_enter(signature,{
            "self":self,
            "request_id":request_id,
            "study_id":study_id,
            "result":result
        })
        try:
            #check input arguments
            check_string("request_id",request_id)
            check_string("study_id", study_id)
            check_string("result",result)

            decoded_data = zlib.decompress(base64.b64decode(result))
            file_name = STUDY_REPORT_DIRECTORY + "/" + study_id + ".xlsx"
            with open(file_name, "wb") as out_file:
                out_file.write(decoded_data)

            method_exit(signature)
        except Exception as e:
            # log error
            method_error(signature, e)
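
# Hedged sketch (not part of the original source): handle_analysis_result above
# unpacks the Result value with base64.b64decode followed by zlib.decompress, so
# a sender is assumed to apply the reverse; the helper name below is hypothetical.
import base64
import zlib

def encode_analysis_result(report_bytes):
    """Compress a report and base64-encode it so handle_analysis_result can unpack it."""
    return base64.b64encode(zlib.compress(report_bytes)).decode('ascii')

# Round trip mirroring the handler's decode path.
payload = encode_analysis_result(b'xlsx bytes go here')
assert zlib.decompress(base64.b64decode(payload)) == b'xlsx bytes go here'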
    def data_request(self):
        """
        This method serves a data request for the partner client HTTP service.
        @param self the PartnerHTTPServices itself, it should be PartnerHTTPServices
        @throws PartnerClientError throws if request body is empty
        @throws Exception any error should be raised to caller.
        CherryPy will handle the error and translate it to HTTP code 500 (refer to partnercli#handle_error)
        """
        signature='hfppnetwork.partner.httpservices.PartnerHTTPServices.data_request'
        method_enter(signature,{"self":self})
        # Read the data request XML
        request_body = cherrypy.request.body.read().decode("utf-8")
        logging.debug('%s:%s', 'request_body', request_body)
        if len(request_body)==0:
            raise PartnerClientError("request body can not be empty")
        # Parse data request XML
        root = ET.fromstring(request_body)
        request_id = root.findtext('./RequestID')
        study_id = root.findtext('./StudyID')
        query = root.findtext('./Query')
        expiration_time = isodate.parse_datetime(root.findtext('./ExpirationTime'))
        # CacheAvailable and CacheTimestamp may be absent
        cache_available = 'true' == root.findtext('./CacheAvailable')
        cache_timestamp = None
        if root.findtext('./CacheTimestamp'):
            cache_timestamp = isodate.parse_datetime(root.findtext('./CacheTimestamp'))
        # Kick off a new thread to handle the request
        handler = DataRequestHandler()
        t = Thread(target=handler.handle_data_request, args=(request_id, study_id, query,
                                                             expiration_time, cache_available, cache_timestamp,))
        t.daemon = False
        t.start()
        method_exit(signature)
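
# Hedged sketch (not part of the original source): the data request body parsed
# by data_request above is assumed to look like the XML below; the child element
# names and the ISO-8601 timestamps come from the findtext()/isodate calls, while
# the root element name and the sample values are hypothetical.
import xml.etree.ElementTree as ET
import isodate

sample_data_request_xml = (
    '<DataRequest>'
    '<RequestID>req-001</RequestID>'
    '<StudyID>study-001</StudyID>'
    '<Query>{"file_types": ["beneficiary"], "logical_expressions": ["..."]}</Query>'
    '<ExpirationTime>2030-01-01T00:00:00Z</ExpirationTime>'
    '<CacheAvailable>false</CacheAvailable>'
    '</DataRequest>'
)
root = ET.fromstring(sample_data_request_xml)
expiration_time = isodate.parse_datetime(root.findtext('./ExpirationTime'))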
def handle_error():
    """
      This function is used to handle errors in CherryPy handlers.
      This function sends a 500 response.
    """
    signature = 'hfppnetwork.partner.partnercli.handle_error'
    method_enter(signature)
    cherrypy.response.status = 500
    logging.exception('')
    method_exit(signature)
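
# Hedged sketch (not part of the original source): handle_error is assumed to be
# hooked into CherryPy as the request error_response callback, mirroring the
# cherrypy.config.update() call in the startup code further below.
import cherrypy

cherrypy.config.update({'global': {'request.error_response': handle_error}})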
def can_fulfill_data_request(request_id,
                             study_id,
                             query,
                             expiration_time,
                             cache_available=False,
                             cache_timestamp=None,
                             force_fullfil=False):
    """
      This function is used to decide if the partner should fulfill the data request.
      @param request_id the request ID - it is supposed to be a non-None/empty str. Required.
      @param study_id the study ID - it is supposed to be a non-None/empty str. Required.
      @param query  the query string - it is supposed to be a non-None/empty str. Required.
      @param expiration_time the request expiration time - it is supposed to be a non-None datetime. Required.
      @param cache_available whether cache is available - it is supposed to be a bool. Optional, default to False.
      @param cache_timestamp  the cache timestamp - it is supposed to be a datetime. Optional, default to None.
      @param force_fullfil this parameter is set to True when this method is called by the decision module. Optional, default to False.
      @return True if the partner client can fulfill the data request, False otherwise.
      @throws TypeError throws if any argument isn't of right type
      @throws ValueError throws if any argument isn't valid (refer to the argument documentation)
      @throws PartnerClientError throws if any other error occurred during the operation
    """
    signature = 'hfppnetwork.partner.httpservices.datafulfillment.can_fulfill_data_request'
    method_enter(
        signature, {
            "request_id": request_id,
            "study_id": study_id,
            "query": query,
            "expiration_time": expiration_time,
            "cache_available": cache_available,
            "cache_timestamp": cache_timestamp
        })

    if not PARTNER_IMMEDIATE_FULLFIL and not force_fullfil:
        url = DECISION_MODULE_URL
        values = {
            'request_id':
            request_id,
            'study_id':
            study_id,
            'query':
            query,
            'expiration_time':
            isodate.datetime_isoformat(expiration_time),
            'cache_available': ('true' if cache_available else 'false'),
            'cache_timestamp': ('' if cache_timestamp is None else
                                isodate.datetime_isoformat(cache_timestamp)),
            'status':
            'pending'
        }
        data = urllib.parse.urlencode(values).encode('utf-8')
        urllib.request.urlopen(url, data)

    ret = PARTNER_IMMEDIATE_FULLFIL or force_fullfil
    method_exit(signature, ret)
    return ret
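
# Hedged sketch (not part of the original source): a standalone illustration of
# the form-encoded body that can_fulfill_data_request POSTs to DECISION_MODULE_URL
# when PARTNER_IMMEDIATE_FULLFIL and force_fullfil are both off; all values are
# hypothetical.
import urllib.parse
from datetime import datetime, timezone
import isodate

values = {
    'request_id': 'req-001',
    'study_id': 'study-001',
    'query': '{"file_types": ["beneficiary"]}',
    'expiration_time': isodate.datetime_isoformat(datetime(2030, 1, 1, tzinfo=timezone.utc)),
    'cache_available': 'false',
    'cache_timestamp': '',
    'status': 'pending',
}
data = urllib.parse.urlencode(values).encode('utf-8')  # body for urllib.request.urlopen(url, data)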
 def _remove_file(self,file_name):
     """
        This method is used to remove a file.
        @param self the DataRequestHandler itself, it should be DataRequestHandler
        @param file_name the file name, it is supposed to be a str, can be None/empty.
        This method does not throw exceptions; any error is caught and logged.
     """
     signature='hfppnetwork.partner.httpservices.DataRequestHandler._remove_file'
     method_enter(signature,{
         "self":self,
         "file_name":file_name
     })
     try:
         if file_name is not None:
             check_string("file_name",file_name)
             if file_name and os.path.exists(file_name):
                 os.remove(file_name)
         method_exit(signature)
     except Exception as e:
         method_error(signature, e)
def convert_data(file_type, input_file_name, output_file_name):
    """
      This function is used to convert data file.
      @param file_type: the file type - it is supposed to be a str, not None/empty. Required.
      @param input_file_name: the input file name (including full path),
      this function will assume the file exists - it is supposed to be a str, not None/empty. Required.
      @param output_file_name: the output file name (including full path), this function will assume the file exists,
      hence it will not create the file - it is supposed to be a str, not None/empty. Required.
      @throws TypeError throws if any argument isn't of right type
      @throws ValueError throws if any argument isn't valid (refer to the argument documentation)
      @throws DataConversionError throws if any other error occurred during the operation
    """
    signature = "hfppnetwork.partner.httpservices.dataconversion.convert_data"
    method_enter(
        signature, {"file_type": file_type, "input_file_name": input_file_name, "output_file_name": output_file_name}
    )

    # Acceptable file types
    types_mapping = {
        "beneficiary": "BeneficiarySummary",
        "carrier": "CarrierClaim",
        "inpatient": "InpatientClaim",
        "outpatient": "OutpatientClaim",
        "prescription": "PrescriptionEvent",
    }
    check_string("file_type", file_type)
    if file_type not in types_mapping:
        raise ValueError('File type "' + file_type + '" is not acceptable. Use ' + str(types_mapping))
    check_string("input_file_name", input_file_name)
    check_string("output_file_name", output_file_name)
    if not os.path.exists(input_file_name):
        raise ValueError("input_file_name should be valid file path")
    if not os.path.exists(output_file_name):
        raise ValueError("output_file_name should be valid file path")
    try:
        csv2xml(input_file_name, output_file_name, types_mapping[file_type])
    except:
        raise DataConversionError("Data conversion internal error.")
    method_exit(signature)
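
# Hedged usage sketch (not part of the original source): convert_data requires
# both files to exist already, so a caller typically hands it pre-created
# temporary files, as handle_data_request does below; the CSV is assumed to hold
# claim rows of the given type (an empty file may make csv2xml fail).
import tempfile

csv_name = tempfile.NamedTemporaryFile(delete=False).name  # CSV query result, e.g. from query_data
xml_name = tempfile.NamedTemporaryFile(delete=False).name  # target for the BeneficiarySummary XML
convert_data('beneficiary', csv_name, xml_name)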
 def handle_data_request(self,request_id, study_id, query,expiration_time,
                         cache_available=False,cache_timestamp=None,
                         force_fullfil=False):
     """
        This method is used to handle data request.
        This method will not throw exceptions. Any error should be caught and logged.
        @param self the DataRequestHandler itself, it should be DataRequestHandler
        @param request_id the request ID, it is supposed to be a non-None/empty str. Required.
        @param study_id the study ID, it is supposed to be a non-None/empty str. Required.
        @param query the query string, it is supposed to be a non-None/empty str. Required.
        @param expiration_time the request expiration time, it is supposed to be a non-None datetime. Required.
        @param cache_available whether cache is available, it is supposed to be a bool. Optional, default to False.
        @param cache_timestamp the cache timestamp, it is supposed to be a datetime. Optional, default to None.
        @param force_fullfil this parameter is set to True when this method is called by the decision module.
     """
     signature='hfppnetwork.partner.httpservices.DataRequestHandler.handle_data_request'
     method_enter(signature,{
         "self":self,
         "request_id":request_id,
         "study_id":study_id,
         "query":query,
         "expiration_time":expiration_time,
         "cache_available":cache_available,
         "cache_timestamp":cache_timestamp
     })
     # Dictionary to hold data query result file names
     query_result_file_names = {}
     # Dictionary to hold data conversion result file names
     conversion_result_file_names = {}
     # Parsed query dictionary, data response XML file name and compressed file name
     # (initialized here so the cleanup code in the finally block can always reference them)
     query_dict = None
     response_xml_file_name = None
     compressed_file_name = None
     try:
         #check input arguments
         check_string("request_id",request_id)
         check_string("study_id",study_id)
         check_string("query",query)
         check_datetime("expiration_time",expiration_time)
         check_bool("cache_available",cache_available)
         if cache_timestamp is not None:
            check_datetime("cache_timestamp",cache_timestamp)
         # Parse the query string
         try:
             query_dict = json.loads(query)
         except ValueError as e:
             query_dict = None
             method_error(signature, e)
         # Check if we can fulfill the data request
         can_fulfill_request = can_fulfill_data_request(request_id, study_id, query,
                                                        expiration_time, cache_available,
                                                        cache_timestamp, force_fullfil)
         logging.debug('%s:%s', 'can_fulfill_request', can_fulfill_request)
         #can_fulfill_request
         if query_dict is not None and 'file_types' in query_dict \
             and 'logical_expressions' in query_dict and can_fulfill_request:
             # Can fulfill the request, create temporary files
             for file_type in query_dict['file_types']:
                 query_result_file_names[file_type] = tempfile.NamedTemporaryFile(delete=False).name
                 conversion_result_file_names[file_type] = tempfile.NamedTemporaryFile(delete=False).name
             response_xml_file_name = tempfile.NamedTemporaryFile(delete=False).name
             compressed_file_name = tempfile.NamedTemporaryFile(delete=False).name
             # Query data
             use_cache = query_data(query_dict['file_types'], query_dict['logical_expressions'],
                                    query_result_file_names,
                                    cache_timestamp if cache_available else None)
             with open(response_xml_file_name, 'ab') as response_xml_file:
                 # Write XML
                 xml = '<?xml version="1.0" encoding="utf-8"?>' \
                     '<DataResponse>' \
                     '<RequestID>{request_id}</RequestID>' \
                     '<RequestDenied>false</RequestDenied>' \
                     '<ErrorMessage></ErrorMessage>' \
                     '<Data useCache="{use_cache}"><![CDATA['.\
                     format(request_id=request_id, use_cache='true' if use_cache else 'false')
                 response_xml_file.write(xml.encode('utf-8'))
                 if not use_cache:
                     logging.debug('not use cache will use result from converted data')
                     # Convert data
                     for file_type in query_dict['file_types']:
                         convert_data(file_type, query_result_file_names[file_type],
                                      conversion_result_file_names[file_type])
                     # Aggregate and compress data
                     compressor = zlib.compressobj(level=9)
                     with open(compressed_file_name, 'wb') as out_file:
                         for file_type in query_dict['file_types']:
                             with open(conversion_result_file_names[file_type], 'rb') as in_file:
                                 out_file.write(compressor.compress(in_file.read()))
                         out_file.write(compressor.flush())
                     # Encode in Base64
                     with open(compressed_file_name, 'rb') as in_file:
                         base64.encode(in_file, response_xml_file)
                 # Write XML
                 response_xml_file.write(']]></Data></DataResponse>'.encode('utf-8'))
         # POST XML to Network Node /data_response service
         if datetime.now(timezone.utc) < expiration_time:
             logging.debug('post to data response url %s%s',
                           HFPP_NODE_HTTP_SERVICE_BASE_URL ,'/data_response')
             # Only POST the XML if the request has not been expired
             request = urllib.request.Request(HFPP_NODE_HTTP_SERVICE_BASE_URL + '/data_response')
             request.add_header('Content-Type','application/xml;charset=utf-8')
             request.add_header('x-hfpp-username', HFPP_PARTNER_USERNAME)
             request.add_header('x-hfpp-password', HFPP_PARTNER_PASSWORD)
             if response_xml_file_name is not None and can_fulfill_request:
                 with open(response_xml_file_name, 'rb') as in_file,\
                     mmap.mmap(in_file.fileno(), 0, access=mmap.ACCESS_READ) as data_response_xml:
                     try:
                         resp = urllib.request.urlopen(request, data_response_xml,
                                                       cafile=CA_CERTIFICATE_FILE, cadefault=True)
                         # Parse response XML
                         resp_content = resp.read().decode('utf-8')
                         logging.debug('response code:%s', resp.getcode())
                         logging.debug('response:%s', resp_content)
                     except urllib.error.HTTPError as e:
                         method_error(signature, e)
                         self._handle_error_response(e)
             else:
                 data_response_xml = '<?xml version="1.0" encoding="utf-8"?>' \
                     '<DataResponse>' \
                     '<RequestID>{request_id}</RequestID>' \
                     '<RequestDenied>true</RequestDenied>' \
                     '<ErrorMessage>{waitApproval}</ErrorMessage>' \
                     '<Data></Data>' \
                     '</DataResponse>'.format(request_id=request_id,
                     waitApproval=('' if PARTNER_IMMEDIATE_FULLFIL else 'Waiting Approval'))
                 logging.debug('post data response xml %s', data_response_xml)
                 try:
                      resp = urllib.request.urlopen(request, data_response_xml.encode('utf-8'),
                                                    cafile=CA_CERTIFICATE_FILE, cadefault=True)
                      # Parse response XML
                      resp_content = resp.read().decode('utf-8')
                      logging.debug('response code:%s',resp.getcode())
                      logging.debug('response:%s',resp_content)
                 except urllib.error.HTTPError as e:
                      method_error(signature, e)
                      self._handle_error_response(e)
         else:
             # Request expired, log error
             logging.error('Request expired')
         method_exit(signature)
     except Exception as e:
         # log error
         method_error(signature, e)
     finally:
         if query_dict is not None and 'file_types' in  query_dict:
             # Remove temporary files
             for file_type in query_dict['file_types']:
                 if file_type in query_result_file_names:
                     self._remove_file(query_result_file_names[file_type])
                 if file_type in conversion_result_file_names:
                     self._remove_file(conversion_result_file_names[file_type])
         self._remove_file(compressed_file_name)
         self._remove_file(response_xml_file_name)
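
# Hedged sketch (not part of the original source): handle_data_request above
# compares datetime.now(timezone.utc) against expiration_time, so the expiration
# parsed from the request XML must be timezone-aware; isodate.parse_datetime()
# returns an aware datetime when the value carries an offset such as "Z".
from datetime import datetime, timezone
import isodate

expiration_time = isodate.parse_datetime('2030-01-01T00:00:00Z')
still_valid = datetime.now(timezone.utc) < expiration_time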
def query_data(file_types,
               query_logical_expressions,
               output_file_names,
               cache_timestamp=None):
    """
      This function is used to query data.
      @param file_types the file types - it is supposed to be a str array,
      each item in the array should be a non-None/empty str. Required.
      @param query_logical_expressions the query logical expressions - it is supposed to be a str array,
      each item in the array should be a non-None/empty str. Required.
      @param output_file_names the output file names (including full paths),
      this function will assume the files exist - it is supposed to be a non-None/empty dictionary,
      key is the file type (a non-None/empty str, must be one of the strings in file_types),
      value is the output file name (a non-None/empty str, including full path). Required.
      @param cache_timestamp the cache timestamp - it is supposed to be None or a datetime. Optional, default to None.
      @return True if the cache is used, False otherwise
      @throws TypeError throws if any argument isn't of right type
      @throws ValueError throws if any argument isn't valid (refer to the argument documentation)
      @throws DataQueryError throws if any other error occurred during the operation
    """
    signature = 'hfppnetwork.partner.httpservices.dataappliance.query_data'
    method_enter(
        signature, {
            "file_types": file_types,
            "query_logical_expressions": query_logical_expressions,
            "output_file_names": output_file_names,
            "cache_timestamp": cache_timestamp
        })

    # Parameters checking
    acceptableTypes = [
        'beneficiary', 'carrier', 'inpatient', 'outpatient', 'prescription'
    ]
    check_str_list('file_types', file_types)
    for one_type in file_types:
        if one_type not in acceptableTypes:
            raise ValueError('File type ' + one_type + ' not acceptable.')
    check_str_list('query_logical_expressions', query_logical_expressions)
    if len(query_logical_expressions) != len(file_types):
        raise ValueError(
            'query_logical_expressions and file_types length not match.')
    check_dict('output_file_names', output_file_names)
    if len(output_file_names) != len(file_types):
        raise ValueError('output_file_names and file_types length not match.')
    if cache_timestamp is not None:
        check_datetime('cache_timestamp', cache_timestamp)

    # Check if cache needs to be updated
    try:
        use_cache = check_use_cache(file_types, cache_timestamp)
    except:
        raise DataQueryError('Error occurs during checking cache data.')
    # Loading data from database if not use cache
    if not use_cache:
        try:
            type_index = 0
            for one_type in file_types:
                # Parse and generate query string
                parser = Parser()
                generator = SQLGenerator()
                parse_tree = parser.parse(
                    query_logical_expressions[type_index].strip())
                sql = generator.visit(parse_tree)
                logging.debug("sql %s ", sql)
                # Query data
                # Instantiate relevant classes
                if config.dbconfig["type"] == 'redis':
                    claimPersistence = RedisClaimPersistence()
                elif config.dbconfig["type"] == "mysql":
                    claimPersistence = MySQLClaimPersistence()
                else:
                    raise ValueError("Invalid db type: " +
                                     config.dbconfig["type"])
                claimPersistence.connectionConfig = config.dbconfig
                claimFile = CSVClaimFile()
                processor = ClaimFileProcessor()
                processor.claimPersistence = claimPersistence
                processor.claimFile = claimFile
                processor.queryData(one_type, sql if
                                    (sql and len(sql) > 0) else '1=1', 0, 0,
                                    output_file_names[one_type])
                # Update loop index
                type_index = type_index + 1
        except ParserError:
            raise DataQueryError('Error occurs during parsing query string.')
        except:
            raise DataQueryError('Error occurs during querying data.')

    method_exit(signature, use_cache)
    return use_cache
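
# Hedged usage sketch (not part of the original source): query_data writes each
# file type's rows to a caller-supplied output file and returns whether the cache
# was used; the logical expression grammar is defined by Parser, which is not
# shown here, so the placeholder expression below must be replaced before running.
import tempfile

file_types = ['beneficiary']
query_logical_expressions = ['...']  # placeholder; must follow the Parser grammar
output_file_names = {'beneficiary': tempfile.NamedTemporaryFile(delete=False).name}
# used_cache = query_data(file_types, query_logical_expressions, output_file_names)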
        check_file('CA_CERTIFICATE_FILE', CA_CERTIFICATE_FILE)
        check_file('PARTNER_CERTIFICATE', PARTNER_CERTIFICATE)
        check_file('PARTNER_PRIVATE_KEY', PARTNER_PRIVATE_KEY)
        check_string('HFPP_PARTNER_ID', HFPP_PARTNER_ID)
        check_port('PARTNER_CLIENT_HTTP_SERVICE_PORT',
                   PARTNER_CLIENT_HTTP_SERVICE_PORT)
        check_bool('PARTNER_IMMEDIATE_FULLFIL', PARTNER_IMMEDIATE_FULLFIL)
        check_string('DECISION_MODULE_URL', DECISION_MODULE_URL)
        check_file('STUDY_REPORT_DIRECTORY', STUDY_REPORT_DIRECTORY)
        #badly formed hexadecimal UUID string will throw if not uuid string
        partner_id = uuid.UUID(HFPP_PARTNER_ID)
        logging.debug('hfpp partner id:%s', partner_id)
    except (TypeError, ValueError) as e:
        method_error(signature, e)
        sys.exit(-1)
    conf = {'global': {'request.error_response': handle_error}}
    #configure cherrypy
    cherrypy.config.update(conf)

    wsgi_app = cherrypy.Application(PartnerHTTPServices(), '/callbacks')
    dispatcher = WSGIPathInfoDispatcher({'/': wsgi_app})
    server = CherryPyWSGIServer(('0.0.0.0', PARTNER_CLIENT_HTTP_SERVICE_PORT),
                                dispatcher)
    sslAdapter = BuiltinSSLAdapter(PARTNER_CERTIFICATE, PARTNER_PRIVATE_KEY)
    server.ssl_adapter = sslAdapter
    try:
        server.start()
    except KeyboardInterrupt:
        server.stop()
    method_exit(signature)