def query(self):
    """Query all remote targets for data.

    Runs forever: each pass processes the ingestion cache, then sleeps
    out the remainder of the configured "ingester_interval".

    Args:
        None

    Returns:
        None

    """
    # Initialize key variables
    use_script = False
    _running = False
    config = self.config
    interval = config.ingester_interval()
    script = '{}{}{}'.format(
        _BIN_DIRECTORY, os.sep, PATTOO_INGESTER_SCRIPT)

    # Loop forever, processing the cache once per interval
    while True:
        # Timestamp the start of this pass
        loop_start = time()

        # The lockfile check is only meaningful when the script is used
        if use_script is True:
            _running = check_lockfile()

        if _running is True:
            log_message = ('''\
Ingester is unexpectedly still running. Check your parameters of error logs \
for possible causes.''')
            log.log2warning(20129, log_message)
        else:
            # Process the cache, via the helper script or directly
            if bool(use_script) is True:
                returncode = shared_files.execute(script, die=False)
                success = not bool(returncode)
            else:
                success = files.process_cache()

            if bool(success) is False:
                log_message = ('''\
Ingester failed to run. Please check log files for possible causes.''')
                log.log2warning(20029, log_message)

        # Sleep for whatever is left of the interval. If processing
        # took longer than the interval, log that and don't sleep.
        elapsed = time() - loop_start
        if elapsed < interval:
            pause = abs(interval - elapsed)
            log_message = (
                'Ingester sleeping for {:6.2f}s.'.format(pause))
        else:
            pause = 0
            log_message = ('''Ingestion exceeded configured \
"ingester_interval" parameter by {:6.2f}s.'''.format(elapsed - interval))
        log.log2info(20100, log_message)
        sleep(pause)
def post(self):
    """Send encrypted data to the API server.

    Args:
        None

    Returns:
        result (bool): True if data was posted successfully,
            False if data failed to post

    """
    # A symmetric key must be in place before anything can be posted
    if self.set_encryption() is False:
        return False

    # Nothing to send? Log it and report failure.
    if bool(self._data) is False:
        log_message = ('Blank data. No data to post from '
                       'identifier {}.'.format(self._identifier))
        log.log2warning(1056, log_message)
        return False

    # Encrypt and post the data
    return encrypted_post(
        self._gpg, self._symmetric_key, self._session,
        self._encryption, self._data, self._identifier)
def check_lockfile():
    """Delete lockfile if found and ingester is not running.

    Args:
        None

    Returns:
        running: True if ingester script is running

    """
    # Determine the lockfile location for the ingester agent
    agent_name = 'pattoo_ingester'
    config = Config()
    lockfile = shared_files.lock_file(agent_name, config)

    # Is the ingester process alive?
    running = sysinfo.process_running(PATTOO_INGESTER_SCRIPT)

    # A leftover lockfile with no running process indicates a possible
    # crash. Remove it so the next run isn't blocked.
    if running is False and os.path.exists(lockfile) is True:
        os.remove(lockfile)
        log_message = ('''\
Lock file {} found, but the {} script is not running\
'''.format(lockfile, PATTOO_INGESTER_SCRIPT))
        log.log2warning(20030, log_message)

    return running
def __init__(self, data):
    """Initialize the class.

    Args:
        data: Dict of results from the GraphQL query. Expected shape is
            data['data']['allDatapoints'] with 'edges' and 'pageInfo' keys.

    Returns:
        None

    """
    # Initialize the class
    self._nodes = []
    self._page_info = {}

    # Check for validity
    if bool(data) is True:
        try:
            self._nodes = data['data']['allDatapoints'].get('edges')
        except (KeyError, TypeError, AttributeError):
            # Narrowed from a bare "except:" - these are the only
            # failures a malformed response dict can produce here
            log_message = ('Invalid datapoint data to process.')
            log.log2warning(80012, log_message)
        try:
            self._page_info = data['data']['allDatapoints'].get('pageInfo')
        except (KeyError, TypeError, AttributeError):
            log_message = ('Invalid pageInfo data to process.')
            log.log2warning(80013, log_message)

    # Valid only when at least one node was extracted
    self.valid = bool(self._nodes)
def execute(command, die=True):
    """Run the command UNIX CLI command and record output.

    Args:
        command: CLI command to execute
        die: Die if errors found

    Returns:
        returncode: Return code of command execution

    """
    # Initialize key variables
    messages = []
    stdoutdata = ''.encode()
    stderrdata = ''.encode()
    returncode = 1

    # Split the command string into an argument list for subprocess
    do_command_list = list(command.split(' '))

    # Create the subprocess object
    try:
        process = subprocess.Popen(
            do_command_list,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        stdoutdata, stderrdata = process.communicate()
        returncode = process.returncode
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt and
        # SystemExit are not silently swallowed
        (etype, evalue, etraceback) = sys.exc_info()
        log_message = ('''\
Command failure: [Exception:{}, Exception Instance: {}, Stack Trace: {}]\
'''.format(etype, evalue, etraceback))
        log.log2warning(1052, log_message)

    # Log details if the return code is not 0
    if returncode != 0:
        # Print the Return Code header
        messages.append('Return code:{}'.format(returncode))

        # Print the STDOUT
        for line in stdoutdata.decode().split('\n'):
            messages.append('STDOUT: {}'.format(line))

        # Print the STDERR
        for line in stderrdata.decode().split('\n'):
            messages.append('STDERR: {}'.format(line))

        # Log message
        for log_message in messages:
            log.log2warning(1042, log_message)

        # Die if required after error found
        if bool(die) is True:
            log.log2die(1044, 'Command Failed: {}'.format(command))

    # Return
    return returncode
def stop(self): """Stop the daemon. Args: None Returns: None """ # Get the pid from the pidfile try: with open(self.pidfile, 'r') as pf_handle: pid = int(pf_handle.read().strip()) except IOError: pid = None if not pid: log_message = ('PID file: {} does not exist. Daemon not running?' ''.format(self.pidfile)) log.log2warning(1063, log_message) # Not an error in a restart return # Try killing the daemon process try: while 1: if self.lockfile is None: os.kill(pid, signal.SIGTERM) else: time.sleep(0.3) if os.path.exists(self.lockfile) is True: continue else: os.kill(pid, signal.SIGTERM) time.sleep(0.3) except OSError as err: error = str(err.args) if error.find("No such process") > 0: self.delpid() self.dellock() else: log_message = (str(err.args)) log_message = ('{} - PID file: {}'.format( log_message, self.pidfile)) log.log2die(1068, log_message) except: log_message = ('Unknown daemon "stop" error for PID file: {}' ''.format(self.pidfile)) log.log2die(1066, log_message) # Log success self.delpid() self.dellock() log_message = ('Daemon {} stopped - PID file: {}' ''.format(self.name, self.pidfile)) log.log2info(1071, log_message)
def get(query):
    """Get pattoo API server GraphQL query results.

    Args:
        query: GraphQL query string

    Returns:
        result: Dict of JSON response. None on any failure.

    """
    # Initialize key variables
    success = False
    config = Config()
    result = None

    # Get the data from the GraphQL API
    url = config.web_api_server_url()
    try:
        response = requests.get(url, params={'query': query})

        # Trigger HTTP errors if present
        response.raise_for_status()
        success = True
    except requests.exceptions.Timeout as err:
        # Maybe set up for a retry, or continue in a retry loop
        log_message = ('''\
Timeout when attempting to access {}. Message: {}\
'''.format(url, err))
        log.log2warning(80000, log_message)
    except requests.exceptions.TooManyRedirects as err:
        # Tell the user their URL was bad and try a different one
        log_message = ('''\
Too many redirects when attempting to access {}. Message: {}\
'''.format(url, err))
        log.log2warning(80001, log_message)
    except requests.exceptions.HTTPError as err:
        log_message = ('''\
HTTP error when attempting to access {}. Message: {}\
'''.format(url, err))
        log.log2warning(80002, log_message)
    except requests.exceptions.RequestException as err:
        # catastrophic error. bail.
        log_message = ('''\
Exception when attempting to access {}. Message: {}\
'''.format(url, err))
        log.log2warning(80003, log_message)
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt and
        # SystemExit are not silently swallowed
        log_message = ('''API Failure: [{}, {}, {}]\
'''.format(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]))
        log.log2warning(80004, log_message)

    # Process the data
    if bool(success) is True:
        result = response.json()

    return result
def route_chart_data(idx_datapoint):
    """Get API data from remote host.

    Args:
        idx_datapoint: Datapoint index value to chart

    Returns:
        None

    """
    # Initialize key variables
    okay = False
    reached = False
    payload = []
    config = Config()

    # Determine the lookback window, defaulting to one day
    secondsago = uri.integerize_arg(request.args.get('secondsago'))
    if bool(secondsago) is False:
        secondsago = SECONDS_IN_DAY

    # Build the REST URL for this datapoint's data
    url = ('{}/{}?secondsago={}'.format(
        config.web_api_server_url(graphql=False), idx_datapoint, secondsago))

    # Attempt to retrieve the data
    try:
        reply = requests.get(url)
        reached = True
    except:
        # Most likely no connectivity or the TCP port is unavailable
        error = sys.exc_info()[:2]
        log_message = ('Error contacting URL {}: ({} {})'
                       ''.format(url, error[0], error[1]))
        log.log2info(80010, log_message)

    # Only an HTTP 200 reply counts as success
    if reached is True:
        if reply.status_code == 200:
            okay = True
        else:
            log_message = ('''\
HTTP {} error for receiving data from server {}\
'''.format(reply.status_code, url))
            log.log2warning(80011, log_message)

    # Present the data
    if okay is True:
        payload = reply.json()
    return jsonify(payload)
def post(self):
    """Send encrypted data to the API server.

    Args:
        None

    Returns:
        success: True if data was posted successfully

    """
    # Initialize key variables
    success = False
    symmetric_key = encrypt.generate_key(20)

    # Create a session and post data
    with requests.Session() as session:
        # Turn off HTTP Persistent connection
        session.keep_alive = False

        # Exchange keys with the API server; bail out on failure
        exchanged = key_exchange(
            _KeyExchange(
                encryption=self._encryption,
                session=session,
                key_exchange_url=self.config.agent_api_key_url(),
                symmetric_key_url=self.config.agent_api_validation_url(),
                symmetric_key=symmetric_key))
        if exchanged is False:
            return success

        # Post only when there is something to send
        if bool(self._data) is False:
            log_message = '''\
Blank data. No data to post from identifier {}.'''.format(self._identifier)
            log.log2warning(1056, log_message)
        else:
            success = encrypted_post(
                _EncrypedPost(
                    encryption=self._encryption,
                    session=session,
                    symmetric_key=symmetric_key,
                    encryption_url=self.config.agent_api_encrypted_url(),
                    data=self._data,
                    identifier=self._identifier))

    return success
def delpid(self):
    """Delete the PID file.

    Args:
        None

    Returns:
        None

    """
    # Delete file if present; a failed removal (e.g. a race with
    # another process) is logged rather than raised
    if os.path.exists(self.pidfile) is True:
        try:
            os.remove(self.pidfile)
        except OSError:
            # Narrowed from a bare "except:" - file removal can only
            # fail with an OSError
            log_message = ('PID file {} already deleted'.format(
                self.pidfile))
            log.log2warning(1041, log_message)
def stop(self):
    """Stop the daemon.

    Args:
        None

    Returns:
        None

    """
    # Check for a pidfile to see if the daemon already runs
    pid = _pid(self.pidfile)
    if bool(pid) is False:
        log_message = ('PID file: {} does not exist. Daemon not running?'
                       ''.format(self.pidfile))
        log.log2warning(1063, log_message)
        # Not an error in a restart
        return

    # Try killing the daemon process
    try:
        os.kill(pid, signal.SIGTERM)
    except OSError as err:
        error = str(err.args)
        if error.find('No such process') > 0:
            # Process already exited; just clean up its files
            self.delpid()
            self.dellock()
        else:
            log_message = (str(err.args))
            log_message = ('{} - PID file: {}'.format(
                log_message, self.pidfile))
            log.log2die(1068, log_message)
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt and
        # SystemExit are not converted into a log2die
        log_message = ('Unknown daemon "stopped" error for PID file: {}'
                       ''.format(self.pidfile))
        log.log2die(1066, log_message)

    # Log success
    self.delpid()
    self.dellock()
    log_message = ('Daemon {} stopped - PID file: {}'
                   ''.format(self.name, self.pidfile))
    log.log2info(1071, log_message)
def post(self):
    """Post data to central server.

    Args:
        None

    Returns:
        success: True: if successful

    """
    # Without data there is nothing to do but log and fail
    if bool(self._data) is False:
        log_message = ('''\
Blank data. No data to post from identifier {}.'''.format(self._identifier))
        log.log2warning(1018, log_message)
        return False

    # Delegate to the module-level poster
    return post(self._url, self._data, self._identifier)
def purge(self):
    """Purge cache files.

    Args:
        None

    Returns:
        None

    """
    # Initialize key variables
    filepaths = [filepath for filepath, _ in self._data]

    # Delete cache files after processing
    for filepath in filepaths:
        if os.path.exists(filepath):
            try:
                os.remove(filepath)
            except OSError:
                # Narrowed from a bare "except:" - file removal can
                # only fail with an OSError
                log_message = ('''\
Error deleting cache file {}.'''.format(filepath))
                log.log2warning(20110, log_message)
def connectivity(die=True):
    """Check connectivity to the database.

    Args:
        die: Die if true

    Returns:
        valid: True if connectivity is OK

    """
    # Initialize key variables
    valid = False

    # Issue a trivial query; completing it proves the database is reachable
    try:
        with db_query(20008) as session:
            rows = session.query(DataPoint.idx_datapoint).filter(
                and_(DataPoint.idx_datapoint == 1,
                     DataPoint.checksum == '-1'.encode()))
            for _ in rows:
                break
            valid = True
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt and
        # SystemExit are not converted into a log2exception_die
        _exception = sys.exc_info()
        log.log2exception_die(20115, _exception)

    # Log
    if valid is False:
        log_message = ('''\
No connectivity to database. Make sure the installation script has been run. \
Check log files and do appropriate troubleshooting.''')
        if bool(die) is False:
            log.log2warning(20083, log_message)
        else:
            log.log2die(20112, log_message)

    # Return
    return valid
def poll_target_address(ip_target, address, object2poll, bacnet): """Poll each spoke in parallel. Args: ip_target: Target to poll polltargets: List of PollingPoint objects to poll bacnet: BAC0 connect object Returns: result: Result of the poll """ # Intialize data gathering result = None poller_string = ('{} analogValue {} {}'.format(ip_target, address, object2poll)) try: result = bacnet.read(poller_string) except NoResponseFromController: log_message = ( 'No BACnet response from {}. Timeout.'.format(ip_target)) log.log2warning(60004, log_message) except UnknownObjectError: log_message = ('''\ Unknown BACnet object {} requested from target {} at address {}.\ '''.format(object2poll, ip_target, address)) log.log2warning(60005, log_message) except Exception as reason: log_message = ('BACnet error polling {}. Reason: {}'.format( ip_target, str(reason))) log.log2warning(60006, log_message) except: log_message = ('''Unknown BACnet error polling {}: [{}, {}, {}]\ '''.format(ip_target, sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])) log.log2warning(60007, log_message) # Return return result
def _save_data(data, identifier):
    """Save data to cache file.

    Args:
        data: Dict to save
        identifier: Unique identifier for the source of the data. (AgentID)

    Returns:
        success: True: if successful

    """
    # Initialize key variables
    success = False
    config = Config()
    cache_dir = config.agent_cache_directory(identifier)
    timestamp = int(time() * 1000)

    # Create a unique very long filename to reduce the risk of filename
    # collisions when many posts fail in quick succession
    filename = ('''{}{}{}_{}.json\
'''.format(cache_dir, os.sep, timestamp, identifier))

    # Save data. The redundant bare "except:" that followed this handler
    # was removed: "except Exception" already covers every error json/open
    # can raise, and the bare clause silently swallowed KeyboardInterrupt.
    try:
        with open(filename, 'w') as f_handle:
            json.dump(data, f_handle)
            success = True
    except Exception as err:
        log_message = '{}'.format(err)
        log.log2warning(1030, log_message)

    # Delete file if there is a failure.
    # Helps to protect against full file systems.
    if os.path.isfile(filename) is True and success is False:
        os.remove(filename)
        log_message = ('''\
Deleting corrupted cache file {} for identifier {}.\
'''.format(filename, identifier))
        log.log2warning(1037, log_message)

    # Return
    return success
def _lock(delete=False):
    """Create or remove the ingester lock file.

    Args:
        delete: Delete the file if true, otherwise create it

    Returns:
        success: True if the requested lock operation succeeded
            (docstring previously said "None", but the function has
            always returned this flag)

    """
    # Initialize key variables
    config = Config()
    lockfile = files.lock_file(PATTOO_INGESTER_NAME, config)
    success = False

    # Lock
    if bool(delete) is False:
        if os.path.exists(lockfile) is True:
            log_message = ('''\
Lockfile {} exists. Will not start ingester script. Is another Ingester \
instance running? If not, delete the lockfile and rerun this script.\
'''.format(lockfile))
            log.log2warning(20023, log_message)
        else:
            # Touch the lockfile
            open(lockfile, 'a').close()
            success = True
    else:
        if os.path.exists(lockfile) is True:
            try:
                os.remove(lockfile)
                success = True
            except OSError:
                # Narrowed from a bare "except:" - file removal can
                # only fail with an OSError
                log_message = ('Error deleting lockfile {}.'.format(lockfile))
                log.log2warning(20107, log_message)
        else:
            log_message = ('Lockfile {} not found.'.format(lockfile))
            log.log2warning(20108, log_message)

    return success
def cache_to_keypairs(_data):
    """Convert agent cache data to AgentPolledData object.

    Validates the structure of JSON cache-file data and converts each
    datapoint into a PattooDBrecord. Any structural problem aborts the
    whole conversion with an empty list.

    Args:
        _data: Data read from JSON cache file

    Returns:
        result: Validated cache data. [] if invalid.

    """
    # Initialize key variables
    result = []
    _log_message = 'Invalid cache data.'

    # Basic validation: must be a dict with exactly the expected keys
    if isinstance(_data, dict) is False:
        log.log2warning(1032, _log_message)
        return []
    if len(_data) != len(CACHE_KEYS):
        log.log2warning(1033, _log_message)
        return []
    for key in _data.keys():
        if key not in CACHE_KEYS:
            log.log2warning(1034, _log_message)
            return []

    ####################################################################
    # Verify pattoo_datapoints
    ####################################################################

    # Verify we are getting a dict of datapoints
    if isinstance(_data['pattoo_datapoints'], dict) is False:
        log.log2warning(1035, _log_message)
        return []

    # Verify we are getting the correct key count
    if len(_data['pattoo_datapoints']) != 2:
        log.log2warning(1048, _log_message)
        return []

    # Verify we are getting the correct keys
    for item in ['key_value_pairs', 'datapoint_pairs']:
        if item not in _data['pattoo_datapoints']:
            log.log2warning(1049, _log_message)
            return []

    # Verify there are datapoint defining keys
    if isinstance(
            _data['pattoo_datapoints']['key_value_pairs'], dict) is False:
        log.log2warning(1050, _log_message)
        return []
    if isinstance(
            _data['pattoo_datapoints']['datapoint_pairs'], list) is False:
        log.log2warning(1051, _log_message)
        return []

    # Prepare for datapoint processing
    key_value_pairs = _data['pattoo_datapoints']['key_value_pairs']
    datapoint_pairs = _data['pattoo_datapoints']['datapoint_pairs']

    # Process each datapoint
    for pair_ids in datapoint_pairs:
        item = {}

        # Validate and assign key-values from datapoints
        for pair_id in pair_ids:
            # Lookup on a string of pair_id as the JSON in the cache file is
            # keyed by string integers
            _kv = key_value_pairs.get(str(pair_id))
            if isinstance(_kv, list) is False:
                log.log2warning(1046, _log_message)
                return []
            if len(_kv) != 2:
                log.log2warning(1045, _log_message)
                return []
            (key, value) = _kv
            item[key] = value

        # Assign datapoint values to PattooDBrecord
        pattoo_db_variable = _make_pattoo_db_record(item)
        if bool(pattoo_db_variable) is True:
            result.append(pattoo_db_variable)

    # Return
    return result
def post(url, data, identifier, save=True): """Post data to central server. Args: url: URL to receive posted data identifier: Unique identifier for the source of the data. (AgentID) data: Data dict to post. If None, then uses self._post_data ( Used for testing and cache purging) save: When True, save data to cache directory if postinf fails Returns: success: True: if successful """ # Initialize key variables success = False response = False # Fail if nothing to post if isinstance(data, dict) is False or bool(data) is False: return success # Post data save to cache if this fails try: result = requests.post(url, json=data) response = True except: if save is True: # Save data to cache _save_data(data, identifier) else: # Proceed normally if there is a failure. # This will be logged later pass # Define success if response is True: if result.status_code == 200: success = True else: log_message = ('''\ HTTP {} error for identifier "{}" posted to server {}\ '''.format(result.status_code, identifier, url)) log.log2warning(1017, log_message) # Save data to cache, remote webserver isn't working properly _save_data(data, identifier) # Log message if success is True: log_message = ('''\ Data for identifier "{}" posted to server {}\ '''.format(identifier, url)) log.log2debug(1027, log_message) else: log_message = ('''\ Data for identifier "{}" failed to post to server {}\ '''.format(identifier, url)) log.log2warning(1028, log_message) # Return return success
async def _serial_poller_async(tpp):
    """Poll OPCUA agent data.

    Args:
        tpp: TargetDataPoints object

    Returns:
        target_datapoints: TargetDataPoints object. None if tpp is invalid.

    """
    # Initialize key variables
    connected = False

    # Test for validity
    if isinstance(tpp, TargetPollingPoints) is False:
        return None
    if isinstance(tpp.target, OPCUAauth) is False:
        return None
    if tpp.valid is False:
        return None

    # Create URL for polling
    ip_target = tpp.target.ip_target
    ip_port = tpp.target.ip_port
    username = tpp.target.username
    password = tpp.target.password
    url = 'opc.tcp://{}:{}'.format(ip_target, ip_port)

    # Intialize data gathering
    target_datapoints = TargetDataPoints(ip_target)

    # Create a client object to connect to OPCUA server
    client = Client(url=url)
    client.set_user(username)
    client.set_password(password)

    # Connect
    try:
        await client.connect()
        connected = True
    except:
        log_message = (
            'Authentication for polling target {} is incorrect'.format(url))
        log.log2warning(51011, log_message)
        pass

    if connected is True:
        for point in tpp.data:
            # Make sure we have the right data type
            if isinstance(point, PollingPoint) is False:
                log_message = ('''\
Invalid polling point {} for OPC UA URL {}'''.format(point, url))
                log.log2info(51012, log_message)
                continue

            # Get data
            address = point.address
            try:
                node = client.get_node(address)
                value = await node.read_value()
            except BadNodeIdUnknown:
                log_message = ('''\
OPC UA node {} not found on server {}'''.format(address, url))
                log.log2warning(51015, log_message)
                continue
            except:
                _exception = sys.exc_info()
                log_message = ('OPC UA server communication error')
                log.log2exception(51014, _exception, message=log_message)
                log_message = ('''\
Cannot get value from polling point {} for OPC UA URL {}\
'''.format(address, url))
                log.log2info(51013, log_message)
                continue

            # Create datapoint. NOTE(review): the else-pairing below was
            # reconstructed from mangled formatting - confirm upstream that
            # a non-numeric value with a multiplier should become 0.
            if bool(point.multiplier) is True:
                if is_numeric(value) is True and (
                        is_numeric(point.multiplier) is True):
                    value = value * point.multiplier
                else:
                    value = 0
            datapoint = DataPoint(address, value)
            datapoint.add(DataPointMetadata('OPCUA Server', ip_target))
            target_datapoints.add(datapoint)

        # Disconnect client
        await client.disconnect()

    return target_datapoints
def receive(source): """Handle the agent posting route. Args: source: Unique Identifier of an pattoo agent Returns: Text response of Received """ # Initialize key variables prefix = 'Invalid posted data.' # Read configuration config = Config() cache_dir = config.agent_cache_directory(PATTOO_API_AGENT_NAME) # Get JSON from incoming agent POST try: posted_data = request.json except: # Don't crash if we cannot convert JSON abort(404) # Abort if posted_data isn't a list if isinstance(posted_data, dict) is False: log_message = '{} Not a dictionary'.format(prefix) log.log2warning(20024, log_message) abort(404) if len(posted_data) != len(CACHE_KEYS): log_message = ('''\ {} Incorrect length. Expected length of {}'''.format(prefix, len(CACHE_KEYS))) log.log2warning(20019, log_message) abort(404) for key in posted_data.keys(): if key not in CACHE_KEYS: log_message = '{} Invalid key'.format(prefix) log.log2warning(20018, log_message) abort(404) # Extract key values from posting try: timestamp = posted_data['pattoo_agent_timestamp'] except: _exception = sys.exc_info() log_message = ('API Failure') log.log2exception(20025, _exception, message=log_message) abort(404) # Create filename. Add a suffix in the event the source is posting # frequently. suffix = str(randrange(100000)).zfill(6) json_path = ('{}{}{}_{}_{}.json'.format(cache_dir, os.sep, timestamp, source, suffix)) # Create cache file try: with open(json_path, 'w+') as temp_file: json.dump(posted_data, temp_file) except Exception as err: log_message = '{}'.format(err) log.log2warning(20016, log_message) abort(404) except: _exception = sys.exc_info() log_message = ('API Failure') log.log2exception(20017, _exception, message=log_message) abort(404) # Return return 'OK'
def test_log2warning(self): """Testing function log2warning.""" # Test should not cause script to crash log.log2warning(self.code, self.message)
def _serial_poller(drv):
    """Poll a single Modbus target for register data.

    Args:
        drv: Object whose .target is the ip_target to poll and whose
            .data holds RegisterVariable objects (input and holding
            registers) to read

    Returns:
        ddv: TargetDataPoints for the ip_target

    """
    # Intialize data gathering
    ip_target = drv.target
    ddv = TargetDataPoints(ip_target)

    # Get list of type DataPoint
    datapoints = []
    for _rv in drv.data:
        # Ignore invalid data
        if isinstance(_rv, RegisterVariable) is False:
            continue
        if _rv.valid is False:
            continue

        # Poll
        client = ModbusTcpClient(ip_target)
        if isinstance(_rv, InputRegisterVariable):
            try:
                response = client.read_input_registers(
                    _rv.address, count=_rv.count, unit=_rv.unit)
                key = 'input_register'
            except ConnectionException as _err:
                log_message = ('''\
Cannot connect to target {} to retrieve input register {}, count {}, \
unit {}: {}'''.format(ip_target, _rv.register, _rv.count, _rv.unit,
                      str(_err)))
                log.log2warning(65028, log_message)
                continue
            except Exception:
                # Narrowed from a bare "except:"
                log_message = ('''\
Cause unknown failure with target {} getting input register {}, count {}, \
unit {}'''.format(ip_target, _rv.register, _rv.count, _rv.unit))
                log.log2warning(65030, log_message)
                continue
        elif isinstance(_rv, HoldingRegisterVariable):
            try:
                # NOTE(review): unlike the input register path, count and
                # unit are not passed here - confirm whether intentional
                response = client.read_holding_registers(_rv.address)
                key = 'holding_register'
            except ConnectionException:
                # Fixed log text: this branch previously said
                # "input register" although it handles holding registers
                log_message = ('''\
Cannot connect to target {} to retrieve holding register {}, count {}, \
unit {}'''.format(ip_target, _rv.register, _rv.count, _rv.unit))
                log.log2warning(65032, log_message)
                continue
            except Exception:
                # Narrowed from a bare "except:"
                log_message = ('''\
Cause unknown failure with target {} getting holding register {}, count {}, \
unit {}.
[{}, {}, {}]\
'''.format(ip_target, _rv.register, _rv.count, _rv.unit,
           sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]))
                log.log2warning(65031, log_message)
                continue

        # Process data
        if response.isError() is True:
            _log_modbus(ip_target, _rv, response)
        else:
            values = response.registers
            for data_index, _value in enumerate(values):
                # Do multiplication
                value = _value * _rv.multiplier

                # Create DataPoint and append
                new_key = ('{}_{}'.format(key, _rv.register + data_index))
                datapoint = DataPoint(new_key, value, data_type=DATA_INT)
                datapoint.add(
                    DataPointMetadata('unit', str(_rv.unit).zfill(3)))
                datapoints.append(datapoint)

    ddv.add(datapoints)

    # Return
    return ddv
def _log_modbus(ip_target, registervariable, response):
    """Log error.

    Args:
        ip_target: Target that caused the error
        registervariable: RegisterVariable object
        response: Pymodbus response object

    Returns:
        None

    """
    # Standard Modbus exception codes and their meanings, used to add a
    # human-readable description to the log entry
    exception_codes = {
        1: '''Illegal Function. Function code received in the query is not \
recognized or allowed by slave''',
        2: '''Illegal Data Address. Data address of some or all the required \
entities are not allowed or do not exist in slave''',
        3: '''Illegal Data Value. Value is not accepted by slave''',
        4: '''Slave Target Failure. Unrecoverable error occurred while slave \
was attempting to perform requested action''',
        5: '''Acknowledge. Slave has accepted request and is processing it, \
but a long duration of time is required. This response is returned to \
prevent a timeout error from occurring in the master. Master can next issue \
a Poll Program Complete message to determine whether processing is \
completed''',
        6: '''Slave Target Busy. Slave is engaged in processing a \
long-duration command. Master should retry later''',
        7: '''Negative Acknowledge. Slave cannot perform the programming \
functions. Master should request diagnostic or error information from slave''',
        8: '''Memory Parity Error. Slave detected a parity error in memory. \
Master can retry the request, but service may be required on the \
slave target''',
        10: '''Gateway Path Unavailable. Specialized for Modbus gateways. \
Indicates a misconfigured gateway''',
        11: '''Gateway Target Target Failed to Respond. Specialized for \
Modbus gateways. Sent when slave fails to respond'''
    }

    # Intialize data gathering
    if isinstance(response, ExceptionResponse):
        # Provide more context if required.
        if response.exception_code in exception_codes:
            description = ' Description: {}'.format(
                exception_codes[response.exception_code])
        else:
            description = ''

        # Register does not exist
        log_message = ('''\
Target failure {}: Could not read register {}, count {}, unit {}: \
original code {}, exception code {}, function code {}, check {}, \
protocol ID {}, transaction ID {}, unit ID {}.{}\
'''.format(
            ip_target, registervariable.register, registervariable.count,
            registervariable.unit, response.original_code,
            response.exception_code, response.function_code,
            response.check, response.protocol_id, response.transaction_id,
            response.unit_id, description))
        log.log2warning(65027, log_message)

    elif isinstance(response, ModbusIOException):
        # Target may not be available or not listening on Modbus port
        log_message = ('''\
Pymodbus failure code {}. Message: {}\
'''.format(response.fcode, response.message))
        log.log2warning(65026, log_message)
def crypt_receive():
    """Receive encrypted data from agent.

    Decrypts the symmetrically encrypted POST payload with the key
    stored in the session, validates it against the cache-file schema,
    and writes it to the agent cache directory.

    Args:
        None

    Returns:
        message (str): Reception result
        response (int): HTTP response code

    """
    # Read configuration
    config = Config()
    cache_dir = config.agent_cache_directory(PATTOO_API_AGENT_NAME)

    try:
        # Retrieves Pgpier class
        gpg = get_gnupg(PATTOO_API_AGENT_NAME, config)

        # Sets key ID
        gpg.set_keyid()

        # Checks if a Pgpier object exists
        if gpg is None:
            raise Exception('Could not retrieve Pgpier for {}'.format(
                PATTOO_API_AGENT_NAME))
    except Exception as e:
        response = 500
        message = 'Server error'
        log_msg = 'Could not retrieve Pgpier: >>>{}<<<'.format(e)
        log.log2warning(20175, log_msg)
        return message, response

    # Predefined error message and response
    response = 400
    message = 'Proceed to key exchange first'

    # Block connection if a symmetric key was not stored
    if 'symm_key' not in session:
        message = 'No symmetric key'
        response = 403
        return message, response

    if request.method == 'POST':
        # Get data from agent
        data_json = request.get_json(silent=False)
        data_dict = json.loads(data_json)

        # Retrieved symmetrically encrypted data
        encrypted_data = data_dict['encrypted_data']

        # Symmetrically decrypt data
        data = gpg.symmetric_decrypt(encrypted_data, session['symm_key'])

        # Initialize key variables
        prefix = 'Invalid posted data.'
        posted_data = None
        source = None

        # Extract posted data and source
        try:
            data_extract = json.loads(data)
            posted_data = data_extract['data']
            source = data_extract['source']
        except Exception as e:
            log_message = 'Decrypted data extraction failed: {}'\
                .format(e)
            log.log2warning(20176, log_message)

        # NOTE(review): this "successful" log fires even when extraction
        # failed above; in that case posted_data stays None and the
        # isinstance check below aborts - confirm intent
        log_message = 'Decrypted data extraction successful'
        log.log2info(20177, log_message)

        # Abort if posted_data isn't a list
        if isinstance(posted_data, dict) is False:
            log_message = '{} Not a dictionary'.format(prefix)
            log.log2warning(20178, log_message)
            abort(404)
        if len(posted_data) != len(CACHE_KEYS):
            log_message = ('''{} Incorrect length.
Expected length of {} '''.format(prefix, len(CACHE_KEYS)))
            log.log2warning(20179, log_message)
            abort(404)
        for key in posted_data.keys():
            if key not in CACHE_KEYS:
                log_message = '{} Invalid key'.format(prefix)
                log.log2warning(20180, log_message)
                abort(404)

        # Extract key values from posting
        try:
            timestamp = posted_data['pattoo_agent_timestamp']
        except:
            _exception = sys.exc_info()
            log_message = ('API Failure')
            log.log2exception(20181, _exception, message=log_message)
            abort(404)

        # Create filename. Add a suffix in the event the source is posting
        # frequently.
        suffix = str(randrange(100000)).zfill(6)
        json_path = (
            '{}{}{}_{}_{}.json'.format(
                cache_dir, os.sep, timestamp, source, suffix))

        # Create cache file
        try:
            with open(json_path, 'w+') as temp_file:
                json.dump(posted_data, temp_file)
        except Exception as err:
            log_message = '{}'.format(err)
            log.log2warning(20182, log_message)
            abort(404)
        except:
            # Catches non-Exception BaseExceptions as a last resort
            _exception = sys.exc_info()
            log_message = ('API Failure')
            log.log2exception(20183, _exception, message=log_message)
            abort(404)

        # Return
        message = 'Decrypted and received'
        response = 202
        log.log2info(20184, message)

    return message, response
def _make_pattoo_db_record(item): """Ingest data. Args: item: Dict of key-value pairs DataPoint Returns: pattoo_db_variable: PattooDBrecord object """ # Initialize data valids = [] pattoo_db_variable = None _log_message = 'Invalid cache data.' reserved_keys_non_metadata = [ _ for _ in RESERVED_KEYS if _ != 'pattoo_metadata'] metadata = {} ''' Make sure we have all keys required for creating a PattooDBrecord Omit 'pattoo_metadata' as we need to recreate it. 'pattoo_metadata' was extracted to its component key-value pairs before the agent posted it to the pattoo API ''' for key in reserved_keys_non_metadata: valids.append(key in item.keys()) for key in AGENT_METADATA_KEYS: valids.append(key in item.keys()) if False in valids: log.log2warning(1047, _log_message) return None # Get metadata for item for key, value in sorted(item.items()): if key not in reserved_keys_non_metadata: metadata[key] = value valids.append(isinstance(key, str)) # Work on the data_type if key == 'pattoo_data_type': valids.append(value in [ DATA_FLOAT, DATA_INT, DATA_COUNT64, DATA_COUNT, DATA_STRING, DATA_NONE]) # Append to result if False not in valids: # Add the datasource to the original checksum for better uniqueness checksum = _checksum( item['pattoo_agent_id'], item['pattoo_agent_polled_target'], item['pattoo_checksum']) pattoo_db_variable = PattooDBrecord( pattoo_checksum=checksum, pattoo_key=item['pattoo_key'], pattoo_agent_id=item['pattoo_agent_id'], pattoo_agent_polling_interval=item[ 'pattoo_agent_polling_interval'], pattoo_timestamp=item['pattoo_timestamp'], pattoo_data_type=item['pattoo_data_type'], pattoo_value=item['pattoo_value'], pattoo_agent_polled_target=item['pattoo_agent_polled_target'], pattoo_agent_program=item['pattoo_agent_program'], pattoo_agent_hostname=item['pattoo_agent_hostname'], pattoo_metadata=_keypairs(metadata) ) # Return return pattoo_db_variable
def key_exchange():
    """Process public key exhange.

    POST: the agent sends its email address and public key; both are saved
    (email in the session, key in the keyring) for later validation.
    GET: after a successful POST, the API replies with its own email,
    public key, and a nonce encrypted with the agent's public key.

    Args:
        None

    Returns:
        result: Various responses

    """
    # Initialize key variables
    required_keys = ['pattoo_agent_email', 'pattoo_agent_key']

    # If a symmetric key has already been established, skip
    if 'symm_key' in session:
        log_message = 'Symmetric key already set.'
        log.log2info(20148, log_message)
        return (log_message, 208)

    # Get data from incoming agent POST
    if request.method == 'POST':
        try:
            # Get data from agent
            data_dict = json.loads(request.get_json(silent=False))
        # Narrow catch: a bare "except:" would also swallow
        # SystemExit/KeyboardInterrupt
        except Exception:
            _exception = sys.exc_info()
            log_message = 'Client sent corrupted JSON data'
            log.log2exception(20167, _exception, message=log_message)
            return (log_message, 500)

        # Check for minimal keys
        for key in required_keys:
            if key not in data_dict.keys():
                log_message = '''\
Required JSON key "{}" missing in key exchange'''.format(key)
                log.log2warning(20164, log_message)
                abort(404)

        # Save email in session
        session['email'] = data_dict['pattoo_agent_email']

        # Save agent public key in keyring
        encryption.pimport(data_dict['pattoo_agent_key'])
        return ('Key received', 202)

    # Get data from incoming agent GET
    if request.method == 'GET':
        if 'email' in session:
            # Generate server nonce
            session['nonce'] = hashlib.sha256(str(
                uuid.uuid4()).encode()).hexdigest()

            # Retrieve information from session. Set previously in POST
            agent_fingerprint = encryption.fingerprint(session['email'])

            # Trust agent key
            encryption.trust(agent_fingerprint)

            # Encrypt api nonce with agent public key
            encrypted_nonce = encryption.encrypt(session['nonce'],
                                                 agent_fingerprint)

            data_dict = {
                'api_email': encryption.email,
                'api_key': encryption.pexport(),
                'encrypted_nonce': encrypted_nonce
            }

            # Send api email, public key and encrypted nonce
            log_message = 'API information sent'
            return jsonify(data_dict)

        # Otherwise send error message
        return ('Send email and key first', 403)

    # Return aborted status for any other HTTP method
    abort(400)
def purge(url, identifier):
    """Purge data from cache by posting to central server.

    Args:
        url: URL to receive posted data
        identifier: Unique identifier for the source of the data. (AgentID)

    Returns:
        None

    """
    # Initialize key variables
    config = Config()
    cache_dir = config.agent_cache_directory(identifier)

    # Add files in cache directory to list only if they match the
    # cache suffix
    all_filenames = [
        filename for filename in os.listdir(cache_dir)
        if os.path.isfile(os.path.join(cache_dir, filename))
    ]
    filenames = [
        filename for filename in all_filenames
        if filename.endswith('.json')
    ]

    # Read cache file
    for filename in filenames:
        # Only post files for our own UID value
        if identifier not in filename:
            continue

        # Get the full filepath for the cache file and post
        filepath = os.path.join(cache_dir, filename)
        with open(filepath, 'r') as f_handle:
            try:
                data = json.load(f_handle)
            # Narrowed from a bare "except:" which would also swallow
            # SystemExit/KeyboardInterrupt
            except Exception:
                # Log removal
                log_message = ('''\
Error reading previously cached agent data file {} for identifier {}. May be \
corrupted.'''.format(filepath, identifier))
                log.log2warning(1064, log_message)

                # Delete corrupted file so it is not retried forever
                if os.path.isfile(filepath):
                    os.remove(filepath)
                    log_message = ('''\
Deleting corrupted cache file {} for identifier {}.\
'''.format(filepath, identifier))
                    log.log2warning(1036, log_message)

                # Go to the next file.
                continue

        # Post file
        success = post(url, data, identifier, save=False)

        # Delete file if successful
        if success is True:
            if os.path.exists(filepath):
                os.remove(filepath)

                # Log removal
                log_message = ('''\
Purging cache file {} after successfully contacting server {}\
'''.format(filepath, url))
                log.log2info(1007, log_message)
def _save_data(data, source):
    """Handle the agent posting route.

    Args:
        data: Data dict received from agents
        source: Source identifier used in the cache filename

    Returns:
        success: True if successful

    """
    # Initialize key variables
    success = False
    prefix = 'Invalid posted data.'

    # Read configuration
    config = configuration.ConfigAgentAPId()
    cache_dir = config.agent_cache_directory(PATTOO_API_AGENT_NAME)

    # Abort if data isn't a dict
    if isinstance(data, dict) is False:
        log_message = '{} Not a dictionary'.format(prefix)
        log.log2warning(20024, log_message)
        abort(404)

    if len(data) != len(CACHE_KEYS):
        log_message = ('''\
{} Incorrect length. Expected length of {}'''.format(prefix, len(CACHE_KEYS)))
        log.log2warning(20019, log_message)
        return success

    # Basic integrity check of required JSON fields
    for key in data.keys():
        if key not in CACHE_KEYS:
            log_message = '{} Invalid key'.format(prefix)
            log.log2warning(20018, log_message)
            return success

    # Extract key values from posting
    try:
        timestamp = data['pattoo_agent_timestamp']
    # Narrowed from a bare "except:" which would also swallow
    # SystemExit/KeyboardInterrupt
    except Exception:
        _exception = sys.exc_info()
        log_message = ('API Failure')
        log.log2exception(20025, _exception, message=log_message)
        return success

    # Create filename. Add a suffix in the event the source is posting
    # frequently.
    suffix = str(randrange(100000)).zfill(6)
    json_path = (
        '{}{}{}_{}_{}.json'.format(
            cache_dir, os.sep, timestamp, source, suffix))

    # Create cache file. The original code had an additional bare "except:"
    # after "except Exception" which could only trigger on BaseException
    # (e.g. KeyboardInterrupt); it has been removed as dead code.
    try:
        with open(json_path, 'w+') as temp_file:
            json.dump(data, temp_file)
    except Exception as err:
        log_message = '{}'.format(err)
        log.log2warning(20016, log_message)
        return success

    # Return
    success = True
    return success
def valid_key():
    """Validate the agent by checking its decrypted nonce.

    Decrypts the symmetric key received from the agent, then uses it to
    symmetrically decrypt the nonce sent by the agent and checks that it
    matches the nonce previously issued. On success, the symmetric key is
    stored in the session so it can be attached to cached data for future
    decryption, and the agent's public key is deleted from the keyring.

    Args:
        None

    Returns:
        message (str): Validation response message
        response (int): HTTP response code

    """
    # If a symmetric key has already been established, skip
    if 'symm_key' in session:
        message = 'Symmetric key already set'
        response = 208
        log.log2info(20171, message)
        return message, response

    # Predefined error message and response
    response = 403
    message = 'Proceed to key exchange first'

    # If no nonce is set, inform agent to exchange keys
    if 'nonce' not in session:
        return message, response

    # Read configuration
    config = Config()

    try:
        # Retrieves Pgpier class
        gpg = get_gnupg(PATTOO_API_AGENT_NAME, config)

        # Check for a valid Pgpier object BEFORE using it. (Previously
        # set_keyid() was called first, so a None result raised an
        # AttributeError instead of this descriptive error.)
        if gpg is None:
            raise Exception('Could not retrieve Pgpier for {}'.format(
                PATTOO_API_AGENT_NAME))

        # Sets key ID
        gpg.set_keyid()
    except Exception as e:
        response = 500
        message = 'Server error'
        log_msg = 'Could not retrieve Pgpier: >>>{}<<<'.format(e)
        log.log2warning(20172, log_msg)
        return message, response

    if request.method == 'POST':
        # Get data from incoming agent POST
        try:
            # Get data from agent
            data_json = request.get_json(silent=False)
            data_dict = json.loads(data_json)

            # Retrieved symmetrically encrypted nonce
            encrypted_nonce = data_dict['encrypted_nonce']

            # Retrieved encrypted symmetric key
            encrypted_sym_key = data_dict['encrypted_sym_key']

            # Decrypt symmetric key
            passphrase = gpg.passphrase
            decrypted_symm_key = gpg.decrypt_data(
                encrypted_sym_key, passphrase)

            # Symmetrically decrypt nonce
            nonce = gpg.symmetric_decrypt(encrypted_nonce, decrypted_symm_key)

            # Checks if the decrypted nonce matches one sent
            if nonce != session['nonce']:
                response = 401
                message = 'Nonce does not match'
                return message, response

            # Set symmetric key
            session['symm_key'] = decrypted_symm_key

            # Retrieve information from session
            agent_email = session['email']
            agent_fingerprint = gpg.email_to_key(agent_email)

            # Delete agent public key; its job is done once validated
            result = gpg.del_pub_key(agent_fingerprint)
            session.pop('email', None)
            session.pop('nonce', None)

            response = 200
            message = 'Symmetric key saved. Del public key: {}'\
                .format(result)
            log.log2info(20173, message)
        except Exception as e:
            log_msg = 'Invalid email and key entry: >>>{}<<<'.format(e)
            log.log2warning(20174, log_msg)
            message = 'Message not received'
            response = 400

    return message, response