def process_health_message(message):
    """
    Process each message from the parent invocation event.

    Dispatches the message to each enabled notification channel
    (PagerDuty, Slack, dashboard) and returns True only if every
    enabled channel acknowledged with a MessageId.
    """
    # These should be defined by the component type or resource tags
    # Hard-code for now
    try:
        notify_slack = message.get("NotifySlack", True)
        notify_pagerduty = False
        notify_dashboard = True

        processed = True
        # BUG FIX: the original condition was
        # `if "Source" and "Resource" in message` which only checked for
        # "Resource" ("Source" is a truthy string literal).
        if "Source" in message and "Resource" in message:
            if notify_pagerduty:
                pd_response = notify_pagerduty_sns(message)
                processed = ("MessageId" in pd_response) and processed
            if notify_slack:
                slack_response = notify_slack_sns(message)
                processed = ("MessageId" in slack_response) and processed
            if notify_dashboard:
                dash_response = notify_dashboard_sns(message)
                processed = ("MessageId" in dash_response) and processed
        else:
            LOG.error("Message missing required fields")
            processed = False
    except KeyError as err:
        LOG.error("Error processing health message: %s", err)
        processed = False
    return processed
def send_health_monitoring_data_to_splunk(payload_to_send):
    """
    Send the Health Monitoring payload to Splunk Cloud HEC.

    Logs success at INFO and any failure at ERROR; never raises.
    """
    try:
        splunk_hec_token = get_splunk_hec_token(SPLUNK_HEC_SSM_PARAMETER,
                                                AWS_REGION)
        splunk_hec_endpoint = (
            "https://http-inputs-gds.splunkcloud.com/services/collector")
        headers = {"Authorization": "Splunk " + splunk_hec_token}
        # XXX: verify=False disables TLS certificate validation for this
        # request — review whether this is really intended.
        response = requests.post(splunk_hec_endpoint, payload_to_send,
                                 headers=headers, verify=False)
        if response.status_code != 200:
            # BUG FIX: failures were previously logged at DEBUG level,
            # making them invisible in production; use ERROR instead.
            LOG.error("Received a non 200 HTTP status code from Splunk Cloud HEC")
            LOG.error("Response code: %s: message: %s",
                      response.status_code, response.text)
        else:
            # The redundant `elif response.status_code == 200` collapsed
            # to a plain else — the two branches are exhaustive.
            LOG.info("Successful: message: %s", response.text)
    except (ValueError, KeyError, requests.RequestException):
        # requests.RequestException added: network errors from .post()
        # previously propagated out of this fire-and-forget helper.
        LOG.error("Failed to send health monitoring data to Splunk Cloud HEC")
def audit_ebs(connect, volumes, days=None):
    """
    Build and return the list of volume ids whose 'audit' tag is older
    than ``days`` days (defaults to 1 week). Volumes without an 'audit'
    tag are tagged with the current timestamp instead.

    Raises TypeError when ``volumes`` is not a list.
    """
    if not isinstance(volumes, list):
        raise TypeError("Not a list")
    # Sets days if not specified
    if not days:
        days = 7
    # Tag values are ISO-format datetime strings, so lexicographic
    # comparison matches chronological order.
    date = str(datetime.now() - timedelta(days=days))
    audit_tag = [{'Key': 'audit', 'Value': str(datetime.now())}]
    unused_ebs = []
    for ebs in volumes:
        try:
            # BUG FIX: the original tagged the volume once per *non-audit*
            # tag attribute; now the volume is tagged only when no 'audit'
            # tag exists at all.
            has_audit_tag = False
            for attribute in ebs.tags:
                if attribute.get('Key') == 'audit':
                    has_audit_tag = True
                    # Compares audit dates
                    if attribute.get('Value') < date:
                        LOG.info("INFO: %s ready for deletion", ebs.id)
                        unused_ebs.append(ebs.id)
            if not has_audit_tag:
                LOG.info("INFO: Audit tag added to %s", ebs.id)
                ebs.create_tags(Tags=audit_tag)
        except Exception as err:  # BUG FIX: Python 2 `except Exception, err`
            LOG.error("ERROR: %s", err)
    # BUG FIX: the list was built but never returned.
    return unused_ebs
def main():
    """
    Entry point: scan a directory for resume files, upload each applicant
    to the recruiting API and attach them to a vacancy.
    """
    args = parser.parse_args()
    try:
        uploader = ApplicantUploader(args.token)
    except AssertionError:
        # BUG FIX: "durinng" typo; no placeholders, so a plain string
        # replaces the f-string.
        LOG.error("Error during uploader initialization")
        sys.exit(1)
    if files := scan_directory(args.base_dir, BASE_FILENAME):
        for applicant in get_applicants_info(files, args.row):
            LOG.debug("Start uploading")
            # Upload the resume file
            resume_info = uploader.upload_file(applicant.file_path)
            # Build the JSON body for creating the applicant
            body = uploader.collect_parsed_data(resume_info)
            # Data taken from the .xlsx file takes precedence
            body.update(get_fio(applicant))
            body.update({"money": applicant.salary})
            # Create the applicant
            if response := uploader.upload_applicant(body):
                # Attach the applicant to the vacancy
                applicant_id = response.get("id")
                uploader.set_vacancy(applicant, applicant_id)
def acceptBluetoothConnection(self):
    """Block until a client connects and store the client socket."""
    try:
        self.clientSocket, clientInfo = self.serverSocket.accept()
        LOG.info("Accepted bluetooth connection from %s", clientInfo)
    except bluetooth.BluetoothError:
        # BUG FIX: SystemExit and KeyboardInterrupt were previously caught
        # here too, which prevented a clean Ctrl-C shutdown while blocked
        # in accept(); only Bluetooth failures are handled now.
        LOG.error("Failed to accept bluetooth connection ... ", exc_info=True)
def process_message(message: Dict[str, Any]) -> Any:
    """
    Receive event body forwarded from lambda handler.

    Looks up the handler for ``message["action"]`` in a dispatch table,
    runs it, and returns its success value; returns False on any failure.
    """
    # BUG FIX: success is initialised up front; previously a handler
    # exception before assignment could leave it unbound, and a missing
    # or unknown action raised KeyError out of the function.
    success = False
    try:
        actions = {
            "register": register,
            "commit": commit,
            "usage": usage,
            "audit": audit.start,
            "log_org_membership": audit.log_org_membership,
            "log_org_teams": audit.log_org_teams,
            "log_org_team_membership": audit.log_org_team_membership,
            "log_org_team_repos": audit.log_org_team_repos,
            "log_org_repos": audit.log_org_repos,
            "log_org_repo_contributors": audit.log_org_repo_contributors,
            "log_org_repo_team_members": audit.log_org_repo_team_members,
        }
        action = message["action"]
        process_action = actions[action]
        success = process_action(message)
        if not success:
            LOG.error("Processing failed for %s", action)
    except KeyError as err:
        # Missing "action" key or an action with no registered handler.
        LOG.error("Unknown or missing action: %s", err)
        success = False
    except (audit.IncompleteAuditError, github_api.GithubApiError):
        success = False
    return success
def ebs_usage(connect, volumes):
    """
    Return the volumes that had no read activity in the last 24 hours,
    based on the CloudWatch VolumeReadBytes metric.

    Raises TypeError when ``volumes`` is not a list.
    """
    if not isinstance(volumes, list):
        raise TypeError("Not a list")
    unused_volumes = []
    # `volumes and volumes is not None` was redundant: a non-empty list
    # is never None.
    if volumes:
        try:
            for ebs in volumes:
                response = connect.cloudwatch.get_metric_statistics(
                    Namespace='AWS/EBS',
                    MetricName='VolumeReadBytes',
                    StartTime=datetime.now() - timedelta(days=1),
                    EndTime=datetime.now(),
                    Period=86400,
                    Statistics=['Average'],
                    Dimensions=[{'Name': 'VolumeId', 'Value': ebs.id}],
                )
                # An empty Datapoints list means no reads in the window.
                if 'Datapoints' in response and not response['Datapoints']:
                    LOG.info("INFO: %s is not active", ebs.id)
                    unused_volumes.append(ebs)
        except Exception as err:  # BUG FIX: Python 2 `except Exception, err`
            LOG.error("ERROR: %s", err)
    # BUG FIX: the list was built but never returned.
    return unused_volumes
def _delete(self, resource_type, _id):
    """Request deletion of a resource by id and log the outcome."""
    target = f'{resource_type}/{_id}'
    try:
        response = self.request('get', f'/{resource_type}/delete?id={_id}')
        response.raise_for_status()
    except HTTPError:
        LOG.error(f'Failed to DELETE {target}')
    else:
        LOG.info(f'DELETED {target}')
def splunk_forwarder_event_handler(event, context):
    """Receive and process a Health Monitoring message event."""
    try:
        process_update_dashboard_event(event)
    except (KeyError, ValueError):
        LOG.error("Failed to build Splunk payload for health monitoring data")
def __init__(
    self,
    message: Union[str, TypeError] = "Incomplete audit error",
):
    """Initialise the base exception, keep the message, and log it."""
    super().__init__(message)
    self.message = message
    LOG.error({"error": "GithubApiError", "message": message})
def get_session_var(name, default=None):
    """
    Fetch ``name`` from the session, returning ``default`` when unset.

    Returns None when the session is unavailable (RuntimeError, e.g.
    outside of a request context).
    """
    try:
        value = session.get(name, default)
    except RuntimeError as err:
        # Lazy %s args instead of eager string concatenation.
        LOG.error("Failed to get variable from session: %s", err)
        value = None
    return value
def closeBluetoothSocket(self):
    """Close the client and server sockets, logging any failure."""
    try:
        for sock in (self.clientSocket, self.serverSocket):
            sock.close()
        LOG.info("Bluetooth sockets successfully closed ...")
    except bluetooth.BluetoothError:
        LOG.error("Failed to close the bluetooth sockets ", exc_info=True)
def test_consumer_connection() -> bool:
    """Return True when the consumer answers a HEAD probe, else False."""
    try:
        HELPER.request('HEAD', '/job/describe')
    except HTTPError as her:
        LOG.error(f'Could not connect to consumer: {her}')
        return False
    return True
def Execute(parse_source, parse_state):
    """
    Parse downloaded communication logs for ``parse_source``.

    Fetches log records in ``parse_state`` (defaults to DOWNLOADED, i.e.
    production mode), loads the per-source parser module dynamically and
    runs it on each log file. Failures on one log are logged and do not
    stop the remaining logs from being processed.
    """
    LOG.info('STARTED: %s', os.path.basename(__file__))
    LOG.info("parse_source: '%s'", parse_source)
    if not parse_state:
        # production mode: parsing newly downloaded logs
        parse_state = db.CommunicationLogs.States.DOWNLOADED
    LOG.info("parse_state: '%s'", parse_state)
    communication_logs = db.CommunicationLogs.GetByStateSource(
        state=parse_state,
        source=parse_source,
    )
    LOG.debug(communication_logs)
    for communication_log in communication_logs:
        try:
            # os.path.join instead of manual '/' concatenation.
            file_path = os.path.join(
                settings.DOWNLOAD_DIR,
                communication_log['source'],
                communication_log['file_name'],
            )
            if not os.path.exists(file_path):
                LOG.error('Does not exists: %s', file_path)
                continue
            LOG.info('Parsing: %s', file_path)
            # NOTE(review): `imp` is deprecated (removed in Python 3.12);
            # consider importlib.util.spec_from_file_location.
            parsers = imp.load_source(
                'parsers', 'parsers/' + communication_log['source'] + '.py')
            parser_class = getattr(parsers, communication_log['source'])
            parser = parser_class()
            parser.Parse(communication_log)
        except Exception:
            # BUG FIX: bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt; LOG.exception records the traceback.
            LOG.exception('Failed to parse %s', communication_log)
    LOG.info('COMPLETED')
def send_batch(sqs_entries):
    """
    Send a batch of entries to the SQS queue and log partial failures.

    NOTE(review): `self` is not a parameter here, so it must be captured
    from an enclosing scope — confirm this function is defined inside a
    method/closure.
    """
    # http://boto3.readthedocs.org/en/latest/reference/services/sqs.html#SQS.Queue.sendentries
    result = self.queue.send_messages(Entries=sqs_entries)
    # BUG FIX: when every message fails, 'Successful' is absent (not
    # empty), which previously raised KeyError instead of logging.
    if len(result.get('Successful', ())) != len(sqs_entries):
        LOG.error('Some messages failed to send to SQS: {}'.format(
            result))
def get_ssm_params(path: str) -> Dict[str, str]:
    """
    Get all parameters under ``path`` (recursively, decrypted) and return
    them as a dict keyed by the name with the path prefix stripped.
    Returns an empty dict on API error.
    """
    try:
        # PERF FIX: create the client once, not on every page of results.
        client: SSMClient = boto3.client("ssm")  # type: ignore
        params: Dict[str, str] = {}
        next_token = None
        has_next_page = True
        while has_next_page:
            # Duplicated call sites collapsed: only add NextToken when set.
            kwargs = dict(Path=path, Recursive=True, WithDecryption=True)
            if next_token:
                kwargs["NextToken"] = next_token
            response = client.get_parameters_by_path(**kwargs)
            # Iterate parameters in response and append to dictionary
            for param in response["Parameters"]:
                name = param["Name"].replace(path, "")
                params[name] = param["Value"]
            # Check for next page in results
            has_next_page = "NextToken" in response
            next_token = response.get("NextToken")
    except ClientError as err:
        LOG.error("Failed to get SSM params on path: %s: %s", path, err)
        params = {}
    return params
def update(self, name, phone_number, custom_paths, is_la, group):
    """ Validate the input fields and existing user settings.
        Perform update only if all validation steps pass.
        Return True only if all steps pass. """
    # Each validation/update stage records a bool in `steps`; the final
    # result is True only when every recorded step succeeded.
    error = None
    steps = {}
    # Check user exists
    steps["user_found"] = self.get_details() != {}
    if not steps.get("user_found"):
        error = "Failed to get user details to update."
    # Check input parameters are all set
    steps["inputs_valid"] = all([
        name is not None,
        phone_number is not None,
        custom_paths is not None,
        is_la is not None,
        group is not None,
    ])
    if not steps.get("inputs_valid"):
        error = "The new value for a field is missing or blank."
    # Check the earlier steps have passed
    if all(steps.values()):
        # Build the Cognito attribute list; any helper may raise
        # ValueError for a missing/blank current value.
        user_attributes = []
        try:
            user_attributes += self.__attribute("custom:is_la", is_la)
            user_attributes += self.__attribute("name", self.sanitise_name(name))
            user_attributes += self.__custom_path_attribute(
                is_la, custom_paths, group)
            user_attributes += self.__phone_number_attribute(phone_number)
            self.set_group(group)
        except ValueError:
            error = "The current value for a field is missing or blank."
            steps["current_valid"] = False
    # If all tests have passed try the update
    # (user_attributes is guaranteed to exist here: this condition can
    # only hold if the previous block ran without raising).
    if all(steps.values()):
        steps["updated"] = cognito.update_user(self.email_address,
                                               user_attributes)
        if not steps.get("updated"):
            error = "The fields were valid but the user failed to update."
    if error:
        # Surface the failure to the UI session and the structured log.
        config.set_session_var("error_message", error)
        LOG.error({
            "message": "User operation failed",
            "action": "user.update",
            "status": steps,
        })
    # Return True if valid and updated
    return all(steps.values())
def get_datetime_from_string(string):
    """
    Parse an ISO-8601 timestamp with fractional seconds and a literal
    'Z' suffix (e.g. '2020-01-02T03:04:05.123456Z') into a naive
    datetime. Logs and re-raises on invalid input.
    """
    try:
        return datetime.datetime.strptime(string, '%Y-%m-%dT%H:%M:%S.%fZ')
    except (TypeError, ValueError):
        # BUG FIX: bare `except:` narrowed; the unreachable
        # `return string` that followed the re-raise was removed.
        LOG.error('string=%s', string)
        raise
def load_s3_paths():
    """Load the S3 path list from s3paths.json; empty list on failure."""
    s3paths = []
    try:
        with open("s3paths.json", "r") as s3paths_file:
            s3paths = json.load(s3paths_file)
    except (FileNotFoundError, json.JSONDecodeError) as error:
        LOG.error(error)
    return s3paths
def get_datetime_from_unix_string(unix_string):
    """
    Convert a Unix timestamp (seconds, as a string or number) into a
    naive UTC datetime. Logs and re-raises on invalid input.
    """
    try:
        # NOTE: utcfromtimestamp is deprecated since Python 3.12 but is
        # kept to preserve the existing naive-datetime return type.
        return datetime.datetime.utcfromtimestamp(float(unix_string))
    except (TypeError, ValueError, OverflowError, OSError):
        # BUG FIX: bare `except:` narrowed; the unreachable
        # `return unix_string` after the re-raise was removed.
        LOG.error('unix_string=%s', unix_string)
        raise
def create(self, name, phone_number, custom_paths, is_la, group_name):
    """ Create a new user in Cognito user pool.
        Validate the inputs; a user is only valid if their MFA and
        group settings are correct.
        Return True only if all steps are processed successfully. """
    # Only *failed* validations are recorded in `steps`, so an empty
    # dict (all validations passed) lets the create proceed.
    error = None
    steps = {}
    # Validate email
    if not self.email_address_is_valid():
        steps["email_valid"] = False
        error = "Email address is invalid."
    # Validate phone number
    phone_number = self.sanitise_phone(phone_number)
    if phone_number == "":
        steps["phone_valid"] = False
        error = "Phone number is empty."
    # Validate user custom settings
    if not self.user_paths_are_valid(is_la, custom_paths, group_name):
        steps["paths_valid"] = False
        error = "The granted access permissions are not valid."
    # Only attempt create if all previous steps passed
    if all(steps.values()):
        steps["created"] = cognito.create_user(name, self.email_address,
                                               phone_number, is_la,
                                               custom_paths)
        if steps.get("created"):
            # Post-create configuration; each result is tracked so a
            # partial failure is detectable below.
            steps["set_mfa"] = self.set_mfa_preferences()
            steps["set_settings"] = self.set_user_settings()
            steps["added_to_group"] = self.add_to_group(group_name)
        else:
            error = "Failed to create user."
    if steps.get("created") and not all(steps.values()):
        # If the user was created successfully
        # but the group or SMS 2FA operations fail
        # the user should be disabled.
        # (The inner check repeats the outer condition; it is redundant
        # but harmless.)
        if steps.get("created"):
            cognito.disable_user(self.email_address)
    if error:
        # Surface the failure to the UI session and the structured log.
        config.set_session_var("error_message", error)
        LOG.error({
            "message": "User operation failed",
            "action": "user.create",
            "status": steps,
        })
    # Return True only if all settings were successfully set
    return all(steps.values())
def list_groups_for_user(email):
    """Return the Cognito group listing for ``email``; {} on API error."""
    client = get_boto3_client()
    response = {}
    try:
        response = client.admin_list_groups_for_user(
            UserPoolId=config.env_pool_id(),
            Username=email,
        )
    except CLIENT_EXCEPTIONS as error:
        LOG.error(error)
    return response
def enable_user(email):
    """Enable the Cognito user for ``email`` and return the checked status."""
    client = get_boto3_client()
    response = {}
    try:
        response = client.admin_enable_user(
            UserPoolId=config.env_pool_id(),
            Username=email,
        )
    except CLIENT_EXCEPTIONS as error:
        LOG.error(error)
    return check_response_status_code(response)
def get_user(email):
    """Fetch the Cognito user record for ``email``; {} on API error."""
    cognito_client = get_boto3_client()
    try:
        response = cognito_client.admin_get_user(
            # CONSISTENCY FIX: the sibling helpers resolve the pool id via
            # config.env_pool_id(); this one used
            # config.get("cognito_pool_id") — confirm both resolve the
            # same value.
            UserPoolId=config.env_pool_id(),
            Username=email,
        )
    except CLIENT_EXCEPTIONS as error:
        LOG.error(error)
        response = {}
    return response
def decorated_function(*args, **kwargs):
    """Run the wrapped route only when the session holds login details."""
    if "details" in session:
        return flask_route(*args, **kwargs)
    session["error_message"] = "Login required to access this route"
    LOG.error({
        "action": "access denied",
        "reason": "Login required to access this route",
    })
    return redirect("/403")
def getBluetoothSocket(self):
    """Create the RFCOMM bluetooth server socket."""
    try:
        self.serverSocket = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
        LOG.info(
            "Bluetooth server socket successfully created for RFCOMM service..."
        )
    except bluetooth.BluetoothError:
        # BUG FIX: SystemExit and KeyboardInterrupt were previously
        # swallowed here, preventing clean shutdown; only Bluetooth
        # failures are handled now.
        LOG.error("Failed to create the bluetooth server socket ", exc_info=True)
def test_zb_connection() -> bool:
    """Return True when the Zeebe broker topology can be fetched."""
    zb_conn = HELPER.zb()
    try:
        topology = next(zb_conn.get_topology())
        LOG.debug(f'connected to {topology.brokers}')
    except ZeebeError as zer:
        LOG.error(f'Could not connect to ZB: {zer}')
        return False
    return True
def get_datetime_from_string(string):
    """
    Parse an ISO-8601 timestamp without fractional seconds
    (e.g. '2020-01-02T03:04:05') into a naive datetime.
    Logs and re-raises on invalid input.
    """
    try:
        return datetime.datetime.strptime(string, '%Y-%m-%dT%H:%M:%S')
    except (TypeError, ValueError):
        # BUG FIX: bare `except:` narrowed; the unreachable
        # `return string` after the re-raise and the commented-out
        # dateutil experiment were removed.
        LOG.error('string=%s', string)
        raise
def parse_sns_message(event):
    """ Retrieve SNS message field from lambda invoke event """
    records = event["Records"]
    message = Dict(json.loads(records[0]["Sns"]["Message"]))
    # We don't think SNS sends multiple records in the same invocation
    # but that's an assumption so if it does we can see it happen in
    # the logs and make the case to code for it.
    if len(records) > 1:
        LOG.error("More than one record received from SNS event")
    return message
def update_user(email, attributes):
    """Update the user's Cognito attributes; return the checked status."""
    client = get_boto3_client()
    response = {}
    try:
        response = client.admin_update_user_attributes(
            UserPoolId=config.env_pool_id(),
            Username=email,
            UserAttributes=attributes,
        )
    except (ClientError, ParamValidationError) as error:
        LOG.error(error)
    return check_response_status_code(response)
def delete_ssm_param(param: str) -> bool:
    """ Delete SSM parameter and return status """
    try:
        client = boto3.client("ssm")  # type: ignore
        response = client.delete_parameter(Name=param)
        # delete_parameter returns an empty dict on success
        success: bool = response == {}
    except ClientError as err:
        # BUG FIX: the message said "Failed to set" in a delete helper.
        LOG.error("Failed to delete SSM param: %s: %s", param, err)
        success = False
    return success
def authenticate(context, username, password):
    """
    XML-RPC authentication hook.

    Returns True when authentication is disabled, the backend's verdict
    otherwise, or an xmlrpclib.Fault on unexpected errors (legacy API
    contract: the fault is returned, not raised).
    """
    if not have_authentication:
        return True
    try:
        return authenticateRequest(username, password)
    except Exception as e:  # BUG FIX: Python 2 `except Exception, e` syntax
        msg = 'Authentication failed (%s)' % e
        LOG.error(msg, exc_info=True)
        return xmlrpclib.Fault(123, msg)
def remove_from_group(email, group_name):
    """Remove the user from a Cognito group; return the checked status."""
    client = get_boto3_client()
    response = {}
    try:
        response = client.admin_remove_user_from_group(
            UserPoolId=config.env_pool_id(),
            Username=email,
            GroupName=group_name,
        )
    except CLIENT_EXCEPTIONS as error:
        LOG.error(error)
    return check_response_status_code(response)
def startup():
    """
    Poll test_connections() once per second for up to START_TIMEOUT
    seconds; on success wait 10s for startup to complete and return,
    otherwise log and raise TimeoutError.
    """
    for _ in range(START_TIMEOUT):
        if test_connections():
            # No placeholders, so a plain string (was an f-string).
            LOG.debug('Found Brokers, waiting for startup to complete')
            time.sleep(10)
            return
        time.sleep(1)
    # The timeout message was duplicated; build it once.
    msg = f'System could not connect within {START_TIMEOUT} seconds.'
    LOG.error(msg)
    raise TimeoutError(msg)
def _cleanup(self):
    """
    Remove old and outdated files from the temporary and spool
    directory, at most once every ``cleanup_after`` seconds.
    """
    if time.time() - self.cleanup_last > self.cleanup_after:
        # BUG FIX: the original ended in a dangling `finally:` with no
        # release call; `with` guarantees the lock is released.
        with self._lock:
            try:
                self.__cleanup()
                self.cleanup_last = time.time()
            except Exception as e:  # was Python 2 `except Exception, e`
                LOG.error(e, exc_info=True)
def convertZIP(context, auth_token, zip_archive, converter_name='pdf-prince'):
    """
    Convert a ZIP archive with the given converter.

    Returns the conversion result, or an xmlrpclib.Fault when
    authorization or the conversion fails (legacy API contract: the
    fault is returned, not raised).
    """
    if not authorizeRequest(auth_token):
        msg = 'Authorization failed'
        LOG.error(msg, exc_info=True)
        return xmlrpclib.Fault(123, msg)
    try:
        return context.convertZIP(zip_archive, converter_name)
    except Exception as e:  # BUG FIX: Python 2 `except Exception, e` syntax
        msg = 'Conversion failed (%s)' % e
        LOG.error(msg, exc_info=True)
        return xmlrpclib.Fault(123, msg)
def convertZIPEmail(context, auth_token, zip_archive,
                    converter_name='pdf-prince', sender=None,
                    recipient=None, subject=None, body=None):
    """
    Convert a ZIP archive and email the result.

    Returns the conversion result, or an xmlrpclib.Fault when
    authorization or the conversion fails (legacy API contract: the
    fault is returned, not raised).
    """
    if not authorizeRequest(auth_token):
        msg = 'Authorization failed'
        LOG.error(msg, exc_info=True)
        return xmlrpclib.Fault(123, msg)
    try:
        return context.convertZIPEmail(zip_archive, converter_name,
                                       sender, recipient, subject, body)
    except Exception as e:  # BUG FIX: Python 2 `except Exception, e` syntax
        msg = 'Conversion failed (%s)' % e
        LOG.error(msg, exc_info=True)
        return xmlrpclib.Fault(123, msg)
def delete_vol(connect, volumes):
    """
    Log the volumes that would be deleted (dry run: nothing is actually
    deleted yet). Raises TypeError when ``volumes`` is not a list.
    """
    if not isinstance(volumes, list):
        raise TypeError("Not a list")
    # Currently only printing id
    for ebs in volumes:
        try:
            LOG.info("INFO: %s would have been deleted", ebs)
        except Exception as err:  # BUG FIX: Python 2 `except Exception, err`
            LOG.error("ERROR: %s", err)
def handle_exc(text, obj, exc_info):
    """
    Handle an exception. Currently we log the exception through
    our own logger. 'obj' is currently unused. We might use it later
    to obtain detailed informations.
    """
    # Best-effort enrichment with the object's URL, if available.
    # XXX: this should be replaced with a more elegant solution
    try:
        text = text + ' (%s)' % obj.absolute_url(1)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt.
        pass
    LOG.error(text, exc_info=exc_info)
def on_message(self, stomp_headers, json_encoded_messages):
    """Decode a STOMP frame body as JSON and hand it to the handler."""
    LOG.debug('STOMP headers {}'.format(stomp_headers))
    try:
        messages = json.loads(json_encoded_messages)
    except ValueError as decode_err:
        LOG.error('Failed to decode {} bytes as JSON: {}'.format(
            len(json_encoded_messages), json_encoded_messages))
        LOG.exception(decode_err)
    else:
        try:
            self._handle_multiple_messages(messages)
        except Exception as handler_err:
            LOG.exception(handler_err)
def convertZIPandRedirect(context, auth_token, zip_archive,
                          converter_name='prince-pdf', prefix=None):
    """ This view accepts a ZIP archive through a POST request containing
        all relevant information (similar to the XMLRPC API). However the
        converted output file is not returned to the caller but delivered
        "directly" through the SmartPrintNG server (through an URL
        redirection). The 'prefix' parameter can be used to override the
        basename of filename used within the content-disposition header.
        (This class is only a base class for the related http_ and
        xmlrpc_ view (in order to avoid redundant code).)
    """
    if not authorizeRequest(auth_token):
        msg = 'Authorization failed'
        LOG.error(msg, exc_info=True)
        return xmlrpclib.Fault(123, msg)
    try:
        output_archivename, output_filename = context._processZIP(
            zip_archive, converter_name)
        output_ext = os.path.splitext(output_filename)[1]
        # take ident from archive name
        ident = os.path.splitext(os.path.basename(output_archivename))[0]
        # move output file to spool directory
        dest_filename = os.path.join(context.spool_directory,
                                     '%s%s' % (ident, output_ext))
        rel_output_filename = dest_filename.replace(
            context.spool_directory + os.sep, '')
        shutil.move(output_filename, dest_filename)
        # NOTE(review): host/port of the delivering server are hard-coded;
        # consider moving them into configuration.
        host = 'localhost'
        port = 6543
        prefix = prefix or ''
        location = 'http://%s:%s/deliver?filename=%s&prefix=%s' % (
            host, port, rel_output_filename, prefix)
        return location
    except Exception as e:  # BUG FIX: Python 2 `except Exception, e` syntax
        msg = 'Conversion failed (%s)' % e
        LOG.error(msg, exc_info=True)
        return xmlrpclib.Fault(123, msg)
def on_error(self, headers, message):
    """Log STOMP ERROR frames received from the broker."""
    # Lazy %s args: the string is only built when the record is emitted.
    LOG.error("ERROR: %s %s", headers, message)
def __init__(self, message):
    """Log the error message and keep it on the instance."""
    # FIX: retain the message (as the sibling error class does) so
    # callers can inspect it after construction; previously it was
    # only logged and then discarded.
    self.message = message
    LOG.error(message)
def _decode_stanox(stanox):
    """Resolve a STANOX code to a location; None (after logging) on failure."""
    try:
        location = locations.from_stanox(stanox)
    except locations.LookupFailure:
        LOG.error('Failed to look up STANOX {}.'.format(stanox))
        return None
    return location