def modify_record(args, record_id):
    """Update (PUT) or delete (DELETE) the record with the given id.

    Args:
        args (dict): request payload; for PUT may contain 'datetime',
            'longitude', 'latitude', 'elevation'.
        record_id (int): primary key of the Record to modify.

    Returns:
        str: human-readable success message.

    Raises:
        ClientError: if the record does not exist, the datetime format is
            invalid, or the HTTP method is neither PUT nor DELETE.
    """
    with session_scope() as session:
        record = session.query(Record).filter(
            Record.id == record_id).one_or_none()
        if not record:
            raise ClientError('Oops! Record not found')
        if request.method == 'DELETE':
            session.delete(record)
            response = 'successfully deleted record {}'.format(record_id)
        elif request.method == 'PUT':
            # Validate the datetime before touching the record.
            if args.get('datetime', None):
                dt = args['datetime']
                try:
                    # Parse the timestamp and the trailing offset separately.
                    start = dt[:19]
                    end = dt[19:]
                    datetime.datetime.strptime(start, '%Y-%m-%dT%H:%M:%S')
                    datetime.datetime.strptime(end, '-%H:%M')
                except ValueError:
                    raise ClientError(
                        'Invalid datetime format, should be yyyy-mm-ddTHH:MM:SS-HH:MM'
                    )
                record.datetime = dt
            record.longitude = args.get('longitude', record.longitude)
            record.latitude = args.get('latitude', record.latitude)
            record.elevation = args.get('elevation', record.elevation)
            response = 'successfully updated record {}'.format(record_id)
        else:
            # BUG FIX: any other method previously left `response` unbound
            # and crashed with UnboundLocalError; fail with a clear error.
            raise ClientError('Unsupported method {}'.format(request.method))
        return response
def __validate_search_entry(search_entry):
    """Validate a search request body.

    Args:
        search_entry (dict): must contain 'user_id' and 'query_params',
            and 'user_id' must be non-empty.

    Raises:
        ClientError: if a required field is missing or user_id is empty.
    """
    valid_fields = ['user_id', 'query_params']
    for valid_field in valid_fields:
        if valid_field not in search_entry:
            raise ClientError(valid_field + ' is missing in the request body')
    # BUG FIX: the original reused the (stale) loop variable in this message,
    # so an empty user_id was reported as "query_params is empty".
    if not search_entry['user_id']:
        raise ClientError('user_id is empty in the request body')
def read_items_index(items_len, item_name):
    """Prompt the user for a 1-based item index, or CANCEL_CHOICE to abort.

    Args:
        items_len (int): number of items; valid indexes are 1..items_len.
        item_name (str): label used in the prompt and error messages.

    Returns:
        int: the chosen index, or CANCEL_CHOICE (str) if the user cancelled.

    Raises:
        ClientError: if the input is not a number or is out of range.
    """
    choice = input(
        f'Enter {item_name} index (or "{CANCEL_CHOICE}" to go back to the main menu): '
    )
    if choice == CANCEL_CHOICE:
        return choice
    # BUG FIX: str.isnumeric() accepts characters int() cannot parse (e.g.
    # superscripts like '²'), which crashed on the int() call below.
    try:
        choice = int(choice)
    except ValueError:
        raise ClientError('Must be a number')
    if choice < 1 or choice > items_len:
        raise ClientError(
            f'{item_name.capitalize()} index must be in the proper range')
    return choice
def get_parameters_list(self):
    """Return the data columns that are weather parameters.

    Excludes the datetime-related columns and the date column itself.

    Raises:
        ClientError: if no data has been loaded yet.
    """
    if self._data is None:
        raise ClientError('Initialize data first')
    excluded = set(self._datetime_columns + [self.date_column])
    return [column for column in self._data.columns if column not in excluded]
def get_paginated_list(klass, url, start, limit):
    """Return one page of `klass` rows plus previous/next page URLs.

    Args:
        klass: SQLAlchemy model to page over.
        url (str): base URL used to build previous/next links.
        start (int): 1-based index of the first row of the page.
        limit (int): maximum number of rows per page.

    Raises:
        ClientError: if `start` is beyond the last row.
    """
    with session_scope() as session:
        results = session.query(klass).all()
        count = len(results)
        if count < start:
            raise ClientError('Page not found')

        obj = {'start': start, 'limit': limit, 'count': count}

        # Previous page link (empty string on the first page).
        if start == 1:
            obj['previous'] = ''
        else:
            prev_start = max(1, start - limit)
            prev_limit = start - 1
            obj['previous'] = url + '?start=%d&limit=%d' % (prev_start,
                                                            prev_limit)

        # Next page link (empty string on the last page).
        if start + limit > count:
            obj['next'] = ''
        else:
            obj['next'] = url + '?start=%d&limit=%d' % (start + limit, limit)

        # Slice out this page's rows and serialize them.
        page = results[start - 1:start - 1 + limit]
        obj['results'] = [
            {'id': product.id, 'description': product.description}
            for product in page
        ]
        return obj
def __get_selected_city_index(self):
    """Ask the user to pick one of the session's cities; return its index.

    Raises:
        ClientError: if there are no cities to choose from.
    """
    cities = self._session.get('cities', [])
    if not cities:
        raise ClientError('No available or loaded cities')
    return read_items_index(len(cities), 'city')
def find_by_id(cls, id, includes=None):
    """
    Finds and returns the row with the specified id.

    Args:
        id (int): the id of the row to find.
        includes (list): a list of model relationships to include.
            Defaults to None (no joins).

    Examples:
        The includes argument takes models that can be included with the
        result.

        >>> Comment.find_by_id(1, [Staff])
        Comment {'message': 'Hello', 'staff_id': 2, 'request_id': 1, 'staff': {
            'username': '******', 'avatar_url': 'xyz.com'}}

    Raises:
        ClientError: if a row with the specified id does not exist.
    """
    # BUG FIX: the original used a mutable default argument (includes=[]),
    # which is shared across calls; use None as the sentinel instead.
    includes = includes if includes is not None else []
    query = reduce(lambda query, model: query.join(model), includes,
                   cls.query)
    result = query.filter_by(id=id).first()
    if not result:
        raise ClientError(f'cannot find specified {cls.__name__.lower()}',
                          404)
    return result
def add_character(character_def):
    """Persist a new character definition JSON to the character bucket.

    Args:
        character_def (dict): must contain a 'name' key.

    Returns:
        dict: the character definition that was stored.

    Raises:
        ClientError: if a character with the same name already exists.
    """
    name = character_def["name"]
    if name in get_all_characters():
        raise ClientError("Character {} already exists!".format(name))
    body = json.dumps(character_def)
    __get_bucket().put_object(Key="{}.json".format(name), Body=body)
    return character_def
def add_recrod(args):
    """Create a Record (and its Product if new) from the request args.

    NOTE(review): the name is misspelled ("recrod") but is kept unchanged
    because it is this function's public interface.

    Args:
        args (dict): must contain 'datetime', 'description', 'longitude',
            'latitude', 'elevation'.

    Returns:
        flask.Response: JSON body {'record_id': <new id>}.

    Raises:
        ClientError: if args['datetime'] is not yyyy-mm-ddTHH:MM:SS-HH:MM.
    """
    dt = args['datetime']
    try:
        # Parse the timestamp and the trailing UTC offset separately.
        datetime.datetime.strptime(dt[:19], '%Y-%m-%dT%H:%M:%S')
        datetime.datetime.strptime(dt[19:], '-%H:%M')
    except ValueError:
        raise ClientError(
            'Invalid datetime format, should be yyyy-mm-ddTHH:MM:SS-HH:MM')
    with session_scope() as session:
        existing = session.query(Product).filter(
            Product.description == args['description']).one_or_none()
        if existing:
            product_id = existing.id
        else:
            new_product = Product(description=args['description'])
            session.add(new_product)
            # Flush so the new product gets its primary key assigned.
            session.flush()
            product_id = new_product.id
        record = Record(
            product_id=product_id,
            datetime=args['datetime'],
            longitude=args['longitude'],
            latitude=args['latitude'],
            elevation=args['elevation'],
        )
        session.add(record)
        session.flush()
        return jsonify({'record_id': record.id})
def __upload_zip_file(file_name, key):
    """Upload a local zip file to the packaged voice lines S3 bucket.

    Args:
        file_name (str): local path of the zip file to upload.
        key (str): destination object key in the bucket.

    Raises:
        ClientError: if the upload fails for any reason.
    """
    s3 = boto3.resource('s3')
    try:
        s3.meta.client.upload_file(file_name,
                                   CloudCanvas.get_setting(PACKAGEDVOICELINES),
                                   key)
    # BUG FIX: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt.
    except Exception:
        error_message = 'Could not upload the zip file {}.'.format(key)
        raise ClientError(error_message)
def update_report_header(report):
    """Merge the report's header fields into its DynamoDB header item.

    Fields absent from `report` keep their existing table value, or a
    documented default when the item is new.

    Returns:
        str: 'SUCCESS'.

    Raises:
        ClientError: if the report has no universal_unique_identifier.
    """
    # BUG FIX: use .get() so a missing uuid key raises ClientError instead
    # of KeyError.
    uuid = report.get('universal_unique_identifier')
    if not uuid:
        raise ClientError("Could not find the uuid of this report")
    key = {
        'universal_unique_identifier': uuid,
        'section': 'header'
    }
    existing_item = __get_table().get_item(Key=key).get('Item', {})
    header_value = existing_item.get('value', {})

    # Set the header values according to the report properties. When the
    # report doesn't carry a field, keep the existing table value (or the
    # default for a brand-new item).
    defaults = {
        'jira_status': 'pending',
        'bookmark': 0,
        'report_status': 'unread',
    }
    for field, default in defaults.items():
        # `is not None` (not truthiness) so explicit falsy values such as
        # bookmark=0 still overwrite.
        if report.get(field) is not None:
            header_value[field] = report[field]
        else:
            header_value[field] = header_value.get(field, default)

    report_header = {
        'universal_unique_identifier': uuid,
        'section': 'header',
        'value': header_value,
    }
    __get_table().put_item(Item=report_header)
    return 'SUCCESS'
def delete_character(character):
    """Delete a character's definition file from the S3 bucket.

    Args:
        character (str): the character name.

    Raises:
        ClientError: if the character does not exist.
    """
    # Idiom fix: `not in` instead of `not character in` (PEP 8).
    if character not in get_all_characters():
        raise ClientError(
            "Could not find {} in character list".format(character))
    client = boto3.client('s3')
    # The delete response was never used; don't bind it.
    client.delete_object(
        Bucket=CloudCanvas.get_setting("characterdefs"),
        Key="{}.json".format(character))
def get_character_info(character):
    """Load and return a character's definition from the S3 bucket.

    Args:
        character (str): the character name.

    Returns:
        dict: the parsed character definition JSON.

    Raises:
        ClientError: if the character does not exist.
    """
    # Idiom fix: `not in` instead of `not character in` (PEP 8).
    if character not in get_all_characters():
        raise ClientError(
            "Could not find {} in character list".format(character))
    client = boto3.client('s3')
    response = client.get_object(
        Bucket=CloudCanvas.get_setting("characterdefs"),
        Key="{}.json".format(character))
    return json.loads(response["Body"].read())
def get_issue_types(project_key):
    """Return the names of the issue types available for a Jira project.

    Args:
        project_key (str): the Jira project key.

    Raises:
        ClientError: if the key matches anything other than one project.
    """
    jira_client = get_jira_client()
    # Restrict the createmeta response to the requested project.
    meta = jira_client.createmeta(projectKeys=project_key)
    projects = meta.get('projects', [])
    if len(projects) != 1:
        raise ClientError("Invalid project key {}".format(project_key))
    project = projects[0]
    return [entry.get('name', '') for entry in project.get('issuetypes', [])]
def __add_to_zip(key, url, zip_file_name):
    """Download `url` to /tmp and append it to the zip as entry `key`.

    Skips the download when an entry with that key is already in the zip.

    Raises:
        ClientError: if the download or the zip write fails.
    """
    zf = zipfile.ZipFile(zip_file_name, 'a')
    try:
        if not key.rstrip('/') in zf.namelist():
            urllib.urlretrieve(url, '/tmp/' + key)
            zf.write('/tmp/' + key, key)
    # BUG FIX: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt.
    except Exception:
        error_message = 'Could not add {} to the zip file {}.'.format(
            key, zip_file_name)
        raise ClientError(error_message)
    finally:
        zf.close()
def __create_character_mappings_file(zip_file_name, character_mapping):
    """Write the character mapping JSON to /tmp and add it to the zip.

    Args:
        zip_file_name (str): path of the zip archive to append to.
        character_mapping (dict): mapping data to serialize.

    Raises:
        ClientError: if the mapping cannot be added to the zip file.
    """
    with open('/tmp/character_mapping.json', 'w') as file:
        json.dump(character_mapping, file)
    zf = zipfile.ZipFile(zip_file_name, 'a')
    try:
        zf.write('/tmp/character_mapping.json', '/character_mapping.json')
    # BUG FIX: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt.
    except Exception:
        error_message = 'Could not add character mappings to the zip file {}.'.format(
            zip_file_name)
        raise ClientError(error_message)
    finally:
        zf.close()
def visualize_weather_parameter(self, parameter, save_path=''):
    """Plot one weather parameter against the hour of day and save it.

    Args:
        parameter (str): column name to plot on the Y axis.
        save_path (str): where to write the graph image.

    Raises:
        ClientError: if data is not loaded, the parameter is unknown, or
            the save path is invalid.
    """
    if self._data is None:
        raise ClientError('Initialize data first')
    if parameter not in self.get_parameters_list():
        raise ClientError('This parameter does not exist')
    if not DataManipulator._visualization_path_valid(save_path):
        raise ClientError('Passed graph image path is not valid')

    hours = self._data[self.date_column].apply(lambda value: value.hour)
    values = self._data[parameter]

    plt.plot(hours, values, marker='o', linestyle='dashed')
    plt.xlabel('Hours')
    plt.ylabel(parameter)
    # plt.legend()
    # plt.show()  # requires some GUI matplotlib backend
    # savefig works with a non-GUI backend (like 'agg'); clf resets the
    # figure for the next plot.
    plt.savefig(save_path)
    plt.clf()
def __get_jira_fields(project_key, issue_type):
    """Return the field descriptors for a Jira project/issue-type pair.

    Results are memoized in the module-level jira_field_meta cache.

    Raises:
        ClientError: if the project key or issue type is invalid.
    """
    global jira_field_meta
    if project_key in jira_field_meta and issue_type in jira_field_meta[
            project_key]:
        return jira_field_meta[project_key][issue_type]

    jira_client = get_jira_client()
    # Filter the meta data by project key and issue type name.
    meta = jira_client.createmeta(projectKeys=project_key,
                                  issuetypeNames=issue_type,
                                  expand='projects.issuetypes.fields')

    projects = meta.get('projects', [])
    if len(projects) != 1:
        raise ClientError("Invalid project key {}".format(project_key))
    issue_types = projects[0].get('issuetypes', [])
    if len(issue_types) != 1:
        raise ClientError("Invalid issue type {} ".format(issue_type))
    issue_type_description = issue_types[0]

    fields = []
    for field_id, field_property in iteritems(
            issue_type_description.get('fields', {})):
        # 'project' and 'issuetype' are implied by the lookup itself.
        if field_id != 'project' and field_id != 'issuetype':
            fields.append({
                'id': field_id,
                'name': field_property.get('name', field_id),
                'required': field_property.get('required', True),
                'schema': __get_field_schema(field_property)
            })

    # BUG FIX: the original assigned {issue_type: fields}, wiping any other
    # issue types already cached for this project.
    jira_field_meta.setdefault(project_key, {})[issue_type] = fields
    return fields
def __check_function_parameters(function, parameters):
    """Validate that `parameters` matches the handler function's signature.

    Ensures every non-default parameter receives a value and that no value
    is supplied for which there is no parameter (unless the function takes
    **kwargs).

    Raises:
        ValueError: if an api parameter name collides with the first
            (request) parameter's name.
        ClientError: if required parameters are missing or unexpected
            parameters are present (a 400 response for the client).
    """
    # arg_spec is a named tuple as produced by inspect.getargspec:
    # ArgSpec(args, varargs, keywords, defaults)
    arg_spec = function.arg_spec

    # If the function has a **kwargs parameter, it will soak up all values
    # that don't match parameter names.
    has_kwargs_parameter = arg_spec.keywords is not None

    # Check to see if all expected parameters have a value provided and that
    # there aren't any values for which there are no parameters.
    expected_parameters = set(arg_spec.args)
    # Typo fix: local was previously spelled `unexpected_paramters`.
    unexpected_parameters = set()
    for parameter_name, parameter_value in parameters.iteritems():
        if parameter_name in expected_parameters:
            if parameter_value is not None:
                if parameter_name == arg_spec.args[0]:
                    raise ValueError(
                        'Invalid handler arguments. The first parameter\'s name, {}, matches an api parameter name. Use an unique name for the first parameter, which is always a service.Request object.'
                        .format(parameter_name))
                expected_parameters.remove(parameter_name)
        elif not has_kwargs_parameter:
            # There are no "unexpected" parameters if there is a **kwargs
            # parameter.
            unexpected_parameters.add(parameter_name)

    # The request object is passed as the first parameter, regardless of
    # its name.
    expected_parameters.remove(arg_spec.args[0])

    # Values do not need to be provided for any parameter with a default
    # value. The arg_spec.defaults array contains default values for the
    # last len(arg_spec.defaults) arguments in arg_spec.args.
    if arg_spec.defaults:
        args_index = len(arg_spec.args) - 1
        default_index = len(arg_spec.defaults) - 1
        while default_index >= 0:
            arg_name = arg_spec.args[args_index]
            if arg_name in expected_parameters:
                expected_parameters.remove(arg_name)
            default_index -= 1
            args_index -= 1

    # If there are any expected parameters for which values are not present,
    # or parameters with names that are not expected, generate a 400
    # response for the client.
    if expected_parameters or unexpected_parameters:
        error_message = ''
        if expected_parameters:
            error_message += 'Expected the following parameters: {}.'.format(
                ', '.join(expected_parameters))
        if unexpected_parameters:
            if error_message:
                error_message += ' '
            error_message += 'The following parameters are unexpected: {}.'.format(
                ', '.join(unexpected_parameters))
        # BUG FIX: client-facing message read "for the the API your calling".
        error_message += ' Check the documentation for the API you\'re calling.'
        raise ClientError(error_message)
def post(request, stat=None, additional_data=None):
    """Return leaderboard scores for a stat, optionally paginated.

    Args:
        request: service request object.
        stat (str): the stat to build the leaderboard for.
        additional_data (dict): may contain 'users', 'page', 'page_size'.

    Returns:
        dict: {'scores': [...]}, plus 'current_page', 'page_size' and
        'total_pages' when pagination was requested.

    Raises:
        ClientError: if page or page_size is not an integer.
    """
    if additional_data is None:
        additional_data = {}

    if not stat:
        return {"scores": []}

    page = additional_data.get("page", None)
    page_size = additional_data.get("page_size", None)
    has_page = False
    if page is not None and page_size is not None:
        if not isinstance(page, int):
            raise ClientError("Page param is not an integer")
        if not isinstance(page_size, int):
            raise ClientError("Page size param is not an integer")
        if page_size > 0:
            has_page = True

    leaderboard = score_reader.build_leaderboard(
        stat, additional_data.get("users", []))
    total_entries = len(leaderboard)
    if has_page:
        leaderboard = leaderboard[page * page_size:(page + 1) * page_size]

    response = {"scores": leaderboard}
    if has_page:
        response["current_page"] = page
        response["page_size"] = page_size
        if total_entries == 0:
            response["total_pages"] = 0
        elif total_entries < page_size:
            response["total_pages"] = 1
        else:
            # BUG FIX: use explicit floor division; plain `/` yields a float
            # on Python 3, producing e.g. total_pages == 2.5.
            response["total_pages"] = ((total_entries - 1) // page_size) + 1
    return response
def validate_priority(client_id, priority):
    """
    Checks if priority is invalid.

    Args:
        client_id (int): client id.
        priority (int): priority value.

    Raises:
        ClientError: if priority value is not between 1 and 99.
    """
    # BUG FIX: the docstring and message claimed 0..100, but the check below
    # enforces 1..99 inclusive; the text now matches the enforced range.
    if priority > 99 or priority < 1:
        raise ClientError('priority value must be between 1 and 99', 400)
def __send_jira_ticket(cw, issue):
    """Create a Jira ticket from `issue` and record the outcome metric.

    Args:
        cw: CloudWatch client used for the success/failure metric.
        issue (dict): keyword arguments for jira's create_issue.

    Returns:
        str: the key of the newly created Jira issue.

    Raises:
        ClientError: if Jira rejects the ticket.
    """
    print("Sending ticket", issue)
    new_issue = {}
    try:
        # Create the ticket, then record success.
        new_issue = get_jira_client().create_issue(**issue)
        __write_cloud_watch_metric(cw, "Success")
    except JIRAError as error:
        __write_cloud_watch_metric(cw, "Failure")
        raise ClientError(error.text)
    print("Jira ticket number {}".format(new_issue))
    return new_issue.key
def read_date(prompt_msg):
    """Prompt for an ISO date; return it, None if blank, or CANCEL_CHOICE.

    Args:
        prompt_msg (str): text shown before the format hint.

    Raises:
        ClientError: if the input is not a valid YYYY-MM-DD date.
    """
    date_str = input(
        f'{prompt_msg} (YYYY-MM-DD) or "{CANCEL_CHOICE}" to go back: ')
    if date_str == CANCEL_CHOICE:
        return date_str
    if not date_str:
        return None
    try:
        # Only validates the format; the original string is what's returned.
        date.fromisoformat(date_str)
    except ValueError:
        raise ClientError('Incorrect date format')
    return date_str
def attempt(result):
    """
    Checks the result for errors

    Args:
        result (tuple): result of a marshmallow load or dump

    Returns:
        the (de)serialized data from the result.

    Raises:
        ClientError: if there are errors during (de)serialization
    """
    # Docstring fixed: the parameter is `result` (old docstring said `obj`)
    # and the exception raised is ClientError (old docstring said
    # ServerError, which this function never raises).
    if result.errors:
        raise ClientError(result.errors)
    return result.data
def __generate_presigned_url(key):
    """Return a presigned GET URL for `key` in the packaged voice lines bucket.

    Raises:
        ClientError: if the URL cannot be generated.
    """
    s3_client = boto3.client('s3')
    try:
        presigned_url = s3_client.generate_presigned_url(
            'get_object',
            Params={
                'Bucket': CloudCanvas.get_setting(PACKAGEDVOICELINES),
                'Key': key
            })
    # BUG FIX: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt.
    except Exception:
        error_message = 'Could not generate the presigned URL for {}.'.format(key)
        raise ClientError(error_message)
    return presigned_url
    # NOTE(review): a stray module-level statement followed this function:
    #   tts.get_bucket(PACKAGEDVOICELINES).put_object(Key=file_key,
    #       Body=json.dumps(export_info))
    # It referenced names undefined at module scope (tts, file_key,
    # export_info) and would raise NameError at import; removed.
def __create_speech_definitions_file(zip_file_name, speech_line_definitions,
                                     speech_lines_header):
    """Write speech line definitions to a CSV in /tmp and add it to the zip.

    Args:
        zip_file_name (str): path of the zip archive to append to.
        speech_line_definitions (list): dict rows for the CSV body.
        speech_lines_header (list): CSV column names.

    Raises:
        ClientError: if the CSV cannot be added to the zip file.
    """
    with open('/tmp/speech_line_definitions.csv', 'w') as file:
        writer = csv.DictWriter(file, fieldnames=speech_lines_header)
        writer.writeheader()
        # writerows replaces the manual per-row loop.
        writer.writerows(speech_line_definitions)
    zf = zipfile.ZipFile(zip_file_name, 'a')
    try:
        zf.write('/tmp/speech_line_definitions.csv',
                 '/speech_line_definitions.csv')
    # BUG FIX: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt.
    except Exception:
        error_message = 'Could not add speech line definitions to the zip file {}.'.format(
            zip_file_name)
        raise ClientError(error_message)
    finally:
        zf.close()
def update_report_comment(report):
    """Replace the comments section of a report in the DynamoDB table.

    Returns:
        str: 'SUCCESS'.

    Raises:
        ClientError: if the report has no universal_unique_identifier.
    """
    if not report['universal_unique_identifier']:
        raise ClientError("Could not find the uuid of this report")
    # (The original built an unused `key` dict here; removed.)
    report_comments = {
        'universal_unique_identifier': report['universal_unique_identifier'],
        'section': 'comments',
        'value': {
            'comments': report.get('comments', []),
        }
    }
    __get_table().put_item(Item=report_comments)
    return 'SUCCESS'
def lift_ban(user):
    """Remove `user`'s entry from the ban table, if present.

    Returns:
        str: a human-readable status message.

    Raises:
        ClientError: if the delete operation fails.
    """
    global BAN_TABLE
    __init_globals()
    if not __get_player_ban(user):
        return "Player {} is not banned, no operation to perform".format(user)
    try:
        BAN_TABLE.delete_item(Key={"user": user})
    except ClientError:
        # NOTE(review): this presumably catches botocore's ClientError and
        # re-raises the service-level one — confirm the two names differ
        # in this module's imports.
        raise ClientError("Unban operation failed")
    return "Ban of {} has been lifted".format(user)
def update_report_header(report):
    """Overwrite a report's header item with bookmark and status values.

    Returns:
        str: 'SUCCESS'.

    Raises:
        ClientError: if the report has no universal_unique_identifier.
    """
    # BUG FIX: use .get() so a missing uuid key raises ClientError instead
    # of KeyError.
    uuid = report.get('universal_unique_identifier')
    if not uuid:
        raise ClientError("Could not find the uuid of this report")
    # (The original built an unused `key` dict here; removed.)
    report_header = {
        'universal_unique_identifier': uuid,
        'section': 'header',
        'value': {
            'bookmark': report.get('bookmark', 0),
            'report_status': report.get('report_status', 'unread')
        }
    }
    __get_table().put_item(Item=report_header)
    return 'SUCCESS'
def _visualize_last_lodaded_weather(self):
    """Let the user pick a parameter of the last loaded data and plot it.

    NOTE(review): the name is misspelled ("lodaded") but kept because it is
    this method's public interface.

    Raises:
        ClientError: if no weather data has been loaded.
        DataSynchronizationError: if the loaded data hash does not match
            the visualizer's data.
    """
    last_data_hash = self._session.get('loaded_data_hash', None)
    if not last_data_hash:
        raise ClientError('No weather loaded')
    if last_data_hash != self._data_manipulator.data_hash:
        raise DataSynchronizationError(
            'Loaded data is not synchronized with visualizer')

    print('Available parameters:')
    available = self._data_manipulator.get_parameters_list()
    for position, name in enumerate(available, start=1):
        print(f'{position}. {name}')

    chosen = read_items_index(len(available), 'parameter')
    if chosen == CANCEL_CHOICE:
        return
    target = available[chosen - 1]

    save_path = read_visualization_save_path()
    if save_path == CANCEL_CHOICE:
        return
    self._data_manipulator.visualize_weather_parameter(target,
                                                       save_path=save_path)