async def fetch(self, session, url):
    """Execute call to external target using the proxy server.

    Receives aiohttp session as well as url to be called. Executes the
    request and returns either the content of the response as json or
    raises an exception depending on response.

    :param session: The aiohttp ClientSession used to execute the call.
    :param url: String url ready to be requested.
    :returns: Request response as dict.
    :raises RatelimitException: on 429 or 430 HTTP Code.
    :raises NotFoundException: on 404 HTTP Code.
    :raises Non200Exception: on any other non 200 HTTP Code.
    """
    try:
        # Route the call through the proxy configured on this worker.
        async with session.get(url, proxy=self.proxy) as response:
            # Buffer the body while the connection is open so the
            # json() call after the block can decode it.
            await response.text()
            if response.status == 429:
                self.logging.info(429)
    except aiohttp.ClientConnectionError as err:
        # Connection-level failures are surfaced as a generic non-200.
        self.logging.info("Error %s", err)
        raise Non200Exception()
    # `response` is still bound here: the try block either completed
    # or raised above, so the status checks below are safe.
    if response.status in [429, 430]:
        # NOTE(review): the "Retry-At" header name and its timestamp
        # format look proxy-specific -- confirm against the proxy's
        # contract.
        if "Retry-At" in response.headers:
            self.retry_after = datetime.strptime(
                response.headers["Retry-At"], "%Y-%m-%d %H:%M:%S.%f")
        raise RatelimitException()
    if response.status == 404:
        raise NotFoundException()
    if response.status != 200:
        raise Non200Exception()
    return await response.json(content_type=None)
def navigate_through(path, fs, own, grp, operation_type):
    """Walk a filesystem path down to the parent of its final component.

    Starting from the root directory document, descend through every
    intermediate directory of *path* (for /path/to/resource/file.txt
    the last directory visited is ``resource``), checking ancestor
    permissions at each step.

    Parameters
    ----------
    path : pathlib.PosixPath
        Path to navigate through.
    fs : pymongo.collection.Collection
        MongoDB collection holding the filesystem metadata.
    own : str
        User who required the operation.
    grp : list
        Groups the user belongs to.
    operation_type : str
        Operation required.

    Returns
    -------
    dict
        MongoDB document of the last directory reached.
    """
    # Navigation always begins at the root directory document.
    current = fs.find_one({'name': '/', 'parent': None, 'type': 'd'})
    if not check_permissions(current, 'ancestor', own, grp, operation_type):
        raise AccessDeniedException(current['name'])

    # path = /user/here/the/path/file.txt --> visited parts are
    # ['user', 'here', 'the', 'path'] (everything but root and leaf).
    for part in path.parts[1:-1]:
        if part not in current['directories']:
            raise NotFoundException(part)
        current = fs.find_one({'name': part,
                               'parent': current['_id'],
                               'type': 'd'})
        if not check_permissions(current, 'ancestor', own, grp,
                                 operation_type):
            raise AccessDeniedException(current['name'])
    return current
def raise_exception(payload):
    """Raise the exception matching the "error" field of *payload*.

    Does nothing when the payload carries no "error" key.

    :param payload: API response body to inspect
    :type payload: dict
    :raises: one of the specific exception types mapped below, or
        UnknownException for unrecognised error names
    """
    if "error" not in payload:
        return
    error = payload.get("error")
    error_message = payload.get("errorMessage", "Unknown error")
    cause = payload.get("cause", None)
    # ForbiddenOperationException is the only type that also carries
    # the cause, so it is handled separately from the dispatch table.
    if error == "ForbiddenOperationException":
        raise ForbiddenOperationException(error_message, cause)
    dispatch = {
        "Method Not Allowed": MethodNotAllowedException,
        "Not Found": NotFoundException,
        "IllegalArgumentException": IllegalArgumentException,
        "Unsupported Media Type": UnsupportedMediaTypeException,
    }
    raise dispatch.get(error, UnknownException)(error_message)
def download(self, log_files, sort='time', limit=-1, nfl_filter='',
             output_format='default'):
    """Return profiling data rendered in *output_format*.

    :param log_files: list of profile log paths to load
    :param sort: sort order passed to Stats2.sort_stats
    :param limit: maximum number of entries for the structured formats
    :param nfl_filter: name/file/line filter applied to the stats
    :param output_format: 'python', 'json', 'csv', 'ods', or anything
        else for a raw binary dump of the stats
    :returns: tuple of (data, [('content-type', ...)])
    :raises NotFoundException: if no log file was given
    :raises ProfileException: on any processing error
    """
    if len(log_files) == 0:
        raise NotFoundException(_('no log file found'))
    try:
        # Escape parentheses so they are matched literally by the
        # regex-based stats filtering.  ('\\(' not '\(': the latter is
        # an invalid escape sequence and only worked by accident.)
        nfl_esc = nfl_filter.replace('(', '\\(').replace(')', '\\)')
        # remove the slash that is intentionally added in the URL
        # to avoid failure of filtering stats data.
        if nfl_esc.startswith('/'):
            nfl_esc = nfl_esc[1:]
        stats = Stats2(*log_files)
        stats.sort_stats(sort)
        if output_format == 'python':
            data = self.format_source_code(nfl_filter)
        elif output_format == 'json':
            data = stats.to_json(nfl_esc, limit)
        elif output_format == 'csv':
            data = stats.to_csv(nfl_esc, limit)
        elif output_format == 'ods':
            data = stats.to_ods(nfl_esc, limit)
        else:
            # mkstemp (not the race-prone, deprecated mktemp) creates
            # the file safely; only the path is needed for dump_stats.
            fd, profile_tmp_all = tempfile.mkstemp('.profile', 'all')
            os.close(fd)
            try:
                stats.dump_stats(profile_tmp_all)
                # Context manager guarantees the handle is closed
                # (the original leaked the file object).
                with open(profile_tmp_all) as tmp_file:
                    data = tmp_file.read()
            finally:
                os.remove(profile_tmp_all)
        return data, [('content-type', self.format_dict[output_format])]
    except ODFLIBNotInstalled as ex:
        raise ex
    except Exception as ex:
        raise ProfileException(_('Data download error: %s') % ex)
def update_attachment(self, volumeID, attachmentID, metadata):
    '''Update an existing attachment of a volume.

    The given metadata dict is merged with the attachment's current
    metadata.  Only the following fields may be updated:
    [name, mime, notes, download_count]

    :param volumeID: id of the volume owning the attachment
    :param attachmentID: id of the attachment to update
    :param metadata: dict of fields to merge into the attachment
    :raises ValueError: on a non-modifiable field or wrongly typed value
    :raises NotFoundException: if the attachment does not exist
    '''
    log.debug('updating metadata of attachment {} from volume {}'.format(
        attachmentID, volumeID))
    modifiable_fields = ['name', 'mime', 'notes', 'download_count']
    for k in metadata.keys():
        if k not in modifiable_fields:
            raise ValueError('Not modifiable field given: {}'.format(k))
    # `str` replaces the Python 2-only `basestring`, which raises
    # NameError on Python 3 whenever these checks run.
    if 'name' in metadata and not isinstance(metadata['name'], str):
        raise ValueError("'name' must be a string")
    if 'mime' in metadata and not isinstance(metadata['mime'], str):
        raise ValueError("'mime' must be a string")
    if 'notes' in metadata and not isinstance(metadata['notes'], str):
        raise ValueError("'notes' must be a string")
    if 'download_count' in metadata and not isinstance(
            metadata['download_count'], Integral):
        raise ValueError("'download_count' must be a number")
    rawVolume = self._req_raw_volume(volumeID)
    for attachment in rawVolume['_source']['_attachments']:
        if attachment['id'] == attachmentID:
            attachment.update(metadata)
            self._db.modify_book(volumeID, rawVolume['_source'],
                                 rawVolume['_version'])
            return
    raise NotFoundException(
        'Could not found attachment with id {} in volume {}'.format(
            attachmentID, volumeID))
async def fetch(self, session, url):
    """Execute call to external target using the proxy server.

    Receives aiohttp session as well as url to be called. Executes the
    request and returns either the content of the response as json or
    raises an exception depending on response.

    :param session: The aiohttp ClientSession used to execute the call.
    :param url: String url ready to be requested.
    :returns: Request response as dict.
    :raises RatelimitException: on 429 or 430 HTTP Code.
    :raises NotFoundException: on 404 HTTP Code.
    :raises Non200Exception: on any other non 200 HTTP Code.
    """
    try:
        # Proxy container name is derived from the lowercased server
        # (platform) this worker is bound to.
        async with session.get(url, proxy="http://lightshield_proxy_%s:8000" % self.server.lower()) as response:
            # Buffer the body while the connection is open so the
            # json() call after the block can decode it.
            await response.text()
            if response.status == 429:
                self.logging.info(429)
    except aiohttp.ClientConnectionError as err:
        # Connection-level failures are surfaced as a generic non-200.
        self.logging.info("Error %s", err)
        raise Non200Exception()
    # `response` is still bound here: the try block either completed
    # or raised above, so the status checks below are safe.
    if response.status in [429, 430]:
        if "Retry-After" in response.headers:
            # Back off for at least one second even if the server
            # asks for 0.
            delay = max(1, int(response.headers["Retry-After"]))
            self.retry_after = datetime.now() + timedelta(seconds=delay)
        raise RatelimitException()
    if response.status == 404:
        raise NotFoundException()
    if response.status != 200:
        raise Non200Exception()
    return await response.json(content_type=None)
def getSwitch(self, name):
    """Return the configured class of the switch called *name*.

    :param name: key of the switch inside the configuration
    :raises NotFoundException: when no switch with that name exists
    """
    switches = self.config['switches']
    if name not in switches:
        raise NotFoundException("Switch with name {0} not found".format(name))
    return switches[name]['class']
def delete_volume(self, volumeID):
    """Remove the volume identified by *volumeID* from the database.

    :param volumeID: id of the volume to delete
    :raises NotFoundException: when the backend reports the volume
        as missing
    """
    log.debug("Deleting volume: '{}'".format(volumeID))
    try:
        self._db.delete_book(volumeID)
    except NotFoundError:
        message = "could not found volume with id: '{}'".format(volumeID)
        raise NotFoundException(message)
def delete_category(self, category: DeleteCategoryDTO) -> None:
    """Delete *category*, failing when its uuid is unknown.

    :param category: DTO identifying the category to remove
    :raises NotFoundException: if the category does not exist
    """
    # TODO check that category belong to user
    exists = self.category_dao.check_category_exist(
        category_uuid=category.uuid)
    if not exists:
        raise NotFoundException(exc_data=AppError.CATEGORY_NOT_FOUND)
    self.category_dao.delete_category(category=category)
def fetch(self, url):
    """Fetch *url* and return the body as text.

    :param url: address to request
    :returns: the response body (HTML) as a string
    :raises NotFoundException: when the server answers 404
    """
    response = requests.get(url)
    if response.status_code != 404:
        return response.text
    raise NotFoundException('URL not found: %s' % (url), url)
async def load_filter(filter_id: int, aroio: Aroio = Depends(get_auth_aroio)):
    """Return the convolver filter with id *filter_id*.

    :param filter_id: id of the filter to look up
    :param aroio: authenticated Aroio instance (injected)
    :raises NotFoundException: when no filter has the given id
    """
    # Single pass instead of a membership test plus a second loop;
    # also avoids shadowing the builtin `filter`.
    for flt in aroio.configuration.convolver.filters:
        if flt.id == filter_id:
            return flt
    # Typo fixed: "Not fount" -> "Not found".
    raise NotFoundException(detail=f'Not found filter with id {filter_id}')
async def handle_update(context, state: State, message: Update,
                        request_id: str) -> None:
    """Apply the key/value updates of *message* to *state*, persist it,
    and answer with a 200 response.

    :raises NotFoundException: when no state exists for this request
    """
    if not state:
        raise NotFoundException(request_id)
    updates = message.updates
    for key in updates:
        state.fields[key] = updates[key]
    context.state('state').pack(state)
    await send_response(context, request_id, 200, state)
def get_object(collection, identifier, doc=False):
    """Look up the object stored under *identifier* in *collection*.

    :param collection: backend collection to search
    :param identifier: numeric id of the object
    :param doc: when True return the raw document wrapper instead of
        its decoded data
    :raises NotFoundException: when no matching object exists
    """
    # Renamed from `id` to avoid shadowing the builtin.
    ident = 'ident:%d' % identifier
    docs = find_objects(collection, ident, doc)
    if not docs:
        raise NotFoundException("Couldn't find object")
    if doc:
        return docs[0]
    return docs[0].document.get_data()
def get_attachment(self, volumeID, attachmentID):
    """Return the normalized attachment *attachmentID* of a volume.

    :param volumeID: id of the volume owning the attachment
    :param attachmentID: id of the attachment to fetch
    :raises NotFoundException: if the attachment is not present
    """
    log.debug("Requested attachment '{}' of the volume '{}'".format(
        attachmentID, volumeID))
    rawVolume = self._req_raw_volume(volumeID)
    match = next((a for a in rawVolume['_source']['_attachments']
                  if a['id'] == attachmentID), None)
    if match is not None:
        return Archivant.normalize_attachment(match)
    raise NotFoundException(
        "could not found attachment '{}' of the volume '{}'".format(
            attachmentID, volumeID))
async def handle_subtract_credit(context, state: State,
                                 message: SubtractCredit) -> None:
    """Withdraw ``message.amount`` from the account state and persist it.

    :raises NotFoundException: when the state is missing
    :raises NotEnoughCreditException: when the balance is insufficient
    """
    if not state:
        raise NotFoundException("NA")
    if state.balance < message.amount:
        raise NotEnoughCreditException("NA")
    state.balance -= message.amount
    context.state('state').pack(state)
def plot(self, log_files, sort='time', limit=10, nfl_filter='',
         metric_selected='cc', plot_type='bar'):
    """Plot profile statistics and return the rendered image bytes.

    :param log_files: profile log paths to load
    :param sort: sort order for the stats
    :param limit: maximum number of functions to plot
    :param nfl_filter: name/file/line filter applied to the stats
    :param metric_selected: one of 'nc', 'cc', 'tt', 'ct'
    :param plot_type: 'pie' or anything else for horizontal bars
    :returns: tuple of (image bytes, [('content-type', 'image/jpg')])
    :raises PLOTLIBNotInstalled: if matplotlib is unavailable
    :raises NotFoundException: if no log file was given
    :raises ProfileException: on any plotting failure
    """
    if not PLOTLIB_INSTALLED:
        raise PLOTLIBNotInstalled(_('python-matplotlib not installed.'))
    if len(log_files) == 0:
        raise NotFoundException(_('no log file found'))
    try:
        stats = Stats2(*log_files)
        stats.sort_stats(sort)
        stats_dict = stats.stats
        __, func_list = stats.get_print_list([nfl_filter, limit])
        nfls = []
        performance = []
        names = {'nc': 'Total Call Count',
                 'cc': 'Primitive Call Count',
                 'tt': 'Total Time',
                 'ct': 'Cumulative Time'}
        for func in func_list:
            cc, nc, tt, ct, __ = stats_dict[func]
            metric = {'cc': cc, 'nc': nc, 'tt': tt, 'ct': ct}
            nfls.append(func[2])
            performance.append(metric[metric_selected])
        y_pos = range(len(nfls))
        # Random x error bars are decorative only.
        error = [random.random() for __ in y_pos]
        plt.clf()
        if plot_type == 'pie':
            plt.pie(x=performance, explode=None, labels=nfls,
                    autopct='%1.1f%%')
        else:
            plt.barh(y_pos, performance, xerr=error, align='center',
                     alpha=0.4)
            plt.yticks(y_pos, nfls)
            plt.xlabel(names[metric_selected])
            plt.title('Profile Statistics (by %s)' % names[metric_selected])
            # plt.gcf().tight_layout(pad=1.2)
        # BUG FIX: the original ended with os.close(profile_img), which
        # passes a file *object* where an fd is required -> TypeError,
        # caught below, so every call ended in ProfileException.  A
        # context manager closes the temporary file correctly.
        with tempfile.TemporaryFile() as profile_img:
            plt.savefig(profile_img, format='png', dpi=300)
            profile_img.seek(0)
            data = profile_img.read()
        return data, [('content-type', 'image/jpg')]
    except Exception as ex:
        raise ProfileException(_('plotting results failed due to %s') % ex)
def get_gaia_results(search_form, target_file, page_size, max_pages,
                     start_page=1, valid_ids=None, offset=None):
    """Collect similarity-search ("gaia") results over several pages.

    :param search_form: validated form with 'target' and
        'descriptors_filter' cleaned data
    :param target_file: optional uploaded analysis target
    :param page_size: number of results requested per page
    :param max_pages: hard cap on page requests
    :param start_page: first page to request
    :param valid_ids: optional whitelist of sound ids
    :param offset: explicit offset overriding the page computation
    :returns: (gaia_ids, gaia_count, distance_to_target_data, note)
    :raises ServerErrorException, BadRequestException,
        NotFoundException: mapped from the similarity server status
    """
    gaia_ids = list()
    gaia_count = None
    distance_to_target_data = dict()
    note = None
    try:
        current_page = start_page
        n_page_requests = 1
        # Iterate over gaia result pages.  BUG FIX: the None test must
        # come FIRST -- the original evaluated len(gaia_ids) < None on
        # the first pass, a TypeError on Python 3 (and `is None` is the
        # idiomatic comparison).
        while ((gaia_count is None or len(gaia_ids) < gaia_count)
               and n_page_requests <= max_pages):
            if not offset:
                offset = (current_page - 1) * page_size
            results, count, note = similarity_api_search(
                target=search_form.cleaned_data['target'],
                filter=search_form.cleaned_data['descriptors_filter'],
                num_results=page_size,
                offset=offset,
                target_file=target_file,
                in_ids=valid_ids)
            # Renamed from `id` to avoid shadowing the builtin.
            gaia_ids += [result[0] for result in results]
            gaia_count = count
            if search_form.cleaned_data['target'] or target_file:
                # Save sound distance to target so it can be later used
                # in the view class and added to results.
                distance_to_target_data.update(dict(results))
            current_page += 1
            n_page_requests += 1
    except SimilarityException as e:
        if e.status_code == 500:
            raise ServerErrorException(msg=e.message)
        elif e.status_code == 400:
            raise BadRequestException(msg=e.message)
        elif e.status_code == 404:
            raise NotFoundException(msg=e.message)
        else:
            raise ServerErrorException(
                msg='Similarity server error: %s' % e.message)
    except Exception as e:
        raise ServerErrorException(
            msg='The similarity server could not be reached or some unexpected error occurred.')
    return gaia_ids, gaia_count, distance_to_target_data, note
async def handle_delete_and_transfer_all(context, state: State,
                                         message: DeleteAndTransferAll,
                                         request_id: str) -> None:
    """Move the full balance to another account via a transaction
    invocation that also deletes this account.

    :raises NotFoundException: when the state is missing
    """
    if not state:
        raise NotFoundException(request_id)
    transfer = Transfer(outgoing_id=message.id,
                        incoming_id=message.incoming_id,
                        amount=state.balance)
    context.pack_and_send_transaction_invocation(
        "ycsb-example/delete_function", request_id, transfer)
def create_category(self, category: CreateCategoryDTO) -> Category:
    """Create a root or sub category from *category*.

    A missing or empty parent uuid produces a root category; otherwise
    the parent must already exist.

    :raises NotFoundException: when the parent category is unknown
    """
    parent_uuid = category.parent_uuid
    if not parent_uuid or parent_uuid == "":
        self.logger.debug("no parent category - create root category")
        return self.category_dao.create_root_category(category=category)
    parent_exists = self.category_dao.check_category_exist(
        category_uuid=parent_uuid)
    if not parent_exists:
        raise NotFoundException(exc_data=AppError.CATEGORY_NOT_FOUND)
    self.logger.debug("paren category is present. create sub category")
    # TODO check that parent category belong to user
    return self.category_dao.create_sub_category(category=category)
def download(url, filename):
    '''Fetch *url* and stream the response body into *filename*.

    2. download(url,filename) raises NotFoundException when url
    returns 404 (or any other HTTP error status).
    '''
    try:
        response = requests.get(url)
        response.raise_for_status()
    except requests.exceptions.HTTPError as error:
        raise NotFoundException(str(error))
    with open(filename, 'wb') as out:
        for chunk in response.iter_content(chunk_size=1024):
            out.write(chunk)
def __dl_append(self, url):
    """Download *url* and append its content to a '<name>.txt' file.

    The derived file name is also recorded in the internal file list.
    Raises NotFoundException on HTTP error responses.
    """
    try:
        response = requests.get(url)
        response.raise_for_status()
        base = self.__name_handle(url)
        target = base + '.txt'
        self.__file_name_list.append(target)
        with open(target, 'ab') as out:
            out.flush()
            for chunk in response.iter_content(chunk_size=1024):
                out.write(chunk)
    except requests.exceptions.HTTPError as error:
        raise NotFoundException(str(error))
def do_GET(self):
    """Handle GET requests.

    Only the "/converter" path is served: it expects a JSON body with
    "valute" and "value" keys, converts the value using the current
    currency data, and writes a JSON response.  Any other path yields
    a 404-style error.  All handler-specific errors are ServerException
    subclasses carrying a message and a code; anything else becomes a
    generic 500.
    """
    try:
        if self.path == "/converter":
            # Parse and validate the client-provided JSON body.
            try:
                req_data = json.loads(self.get_request_data())
                logger.info(f"GET {self.path} data: {req_data}")
            except json.decoder.JSONDecodeError:
                raise BadRequestException(
                    "Bad Request: provided data is not json")
            # Fetch and validate the upstream currency data.
            try:
                data = json.loads(get_currency_data())
            except json.decoder.JSONDecodeError:
                raise ServerException(
                    "Failed to validate currency server responce")
            # Missing "valute"/"value" keys (in either payload) are
            # reported as not-found.
            try:
                valute = data["Valute"][req_data["valute"]]
            except KeyError:
                raise NotFoundException("Data Not Found")
            self.json_writer({
                "valute": req_data["valute"],
                "value": req_data["value"],
                # Converted amount, rounded to 4 decimal places.
                "result": round(req_data["value"] * valute["Value"], 4),
            })
        else:
            raise NotFoundException("Page Not Found")
    except ServerException as error:
        # Known, typed errors carry their own message and HTTP code.
        logger.error(f"GET {self.path} error: {error.message}")
        self.json_error_writer(error.message, error.code)
    except Exception:
        # Anything unexpected is logged with traceback and masked as 500.
        logger.error(traceback.format_exc())
        self.json_error_writer("Internal server error", 500)
async def delete_filter(filter_id: int, aroio: Aroio = Depends(get_auth_aroio)):
    """Delete the convolver filter with the given *filter_id*.

    :param filter_id: id of the filter to remove
    :param aroio: authenticated Aroio instance (injected)
    :raises NotFoundException: when no filter has the given id
    """
    filter_to_delete = -1
    for idx, f in enumerate(aroio.configuration.convolver.filters):
        if f.id == filter_id:
            filter_to_delete = idx
    if filter_to_delete == -1:
        # BUG FIX: the message previously interpolated the -1 sentinel
        # index instead of the requested filter id.
        raise NotFoundException(
            detail=f'Filter to delete not found. Filter id: {filter_id}'
        )
    aroio.configuration.convolver.filters.pop(filter_to_delete)
    datasource.save(aroio=aroio)
def find_node(self, id=None, iname=None):
    """Locate a node by its *id* or by its *iname*.

    *id* takes precedence when both are supplied.

    :raises Exception: if neither id nor iname was given
    :raises NotFoundException: if no matching node exists
    """
    if not id and not iname:
        raise Exception("find_node requires id or iname to locate nodes.")
    if id:
        found = self.nodes.get(id)
    else:
        # id is falsy here, so iname must be set (guard above).
        if iname in self.inames:
            return self.inames[iname]
        found = None
    if not found:
        raise NotFoundException("Node not found")
    return found
def delete_attachments(self, volumeID, attachmentsID):
    '''Delete attachments from a volume.

    :param volumeID: id of the volume to modify
    :param attachmentsID: list of attachment ids to remove
    :raises NotFoundException: if any requested attachment is missing
    '''
    log.debug("deleting attachments from volume '{}': {}".format(
        volumeID, attachmentsID))
    rawVolume = self._req_raw_volume(volumeID)
    attachments = rawVolume['_source']['_attachments']
    insID = [a['id'] for a in attachments]
    # check that all requested file are present
    for id in attachmentsID:
        if id not in insID:
            raise NotFoundException(
                "could not found attachment '{}' of the volume '{}'".
                format(id, volumeID))
    # BUG FIX: the original popped indexes computed against the
    # pre-deletion insID list; every pop shifted the remaining
    # elements, so later deletions removed the wrong attachment or
    # raised IndexError.  Rebuilding the list removes exactly the
    # requested attachments regardless of order.
    to_remove = set(attachmentsID)
    rawVolume['_source']['_attachments'] = [
        a for a in attachments if a['id'] not in to_remove]
    self._db.modify_book(volumeID,
                         rawVolume['_source'],
                         version=rawVolume['_version'])
def find_matching_shortcut(self, controller, action):
    """Return the first page whose pickled 'target' extradata points at
    (*controller*, *action*).

    :raises NotFoundException: if no page matches the target
    """
    for page in self.nodes.values():
        if not page.extradata:
            continue
        extra = cPickle.loads(str(page.extradata))
        target = extra.get('target')
        if not target:
            continue
        if target['controller'] == controller and target['action'] == action:
            return page
    raise NotFoundException("Somehow, I couldn't find the page")
def search(self, title: str, artist: Optional[str] = None) -> List[Song]:
    """
    Search lyrics with title & artist.

    Parameters
    ----------
    title : str
        Title of the song to search for.
    artist : Optional[str]
        Artist of the song to search for.

    Returns
    -------
    List[Song]
        Songs whose lyrics resemble the query.

    Raises
    ------
    NotFoundException
        When nothing resembling the query exists.
    """
    matches = self._get_resemble_count(title=title, artist=artist)
    if matches == 0:
        raise NotFoundException("Can't find any lyric.")
    return self._get_resemble(title=title, artist=artist)
def route_not_found_exception():
    """Fallback route handler: always signal a 404 by raising
    NotFoundException."""
    raise NotFoundException()
async def handle_add_credit(context, state: State, message: AddCredit) -> None:
    """Deposit ``message.amount`` into the account state and persist it.

    :raises NotFoundException: when the state is missing
    """
    if not state:
        raise NotFoundException("NA")
    state.balance += message.amount
    context.state('state').pack(state)
async def handle_delete(context, state: State, message: Delete) -> None:
    """Remove the persisted account state for this function instance.

    :raises NotFoundException: when the state is missing
    """
    if not state:
        raise NotFoundException("NA")
    del context['state']