def load_images(blender_output_file_name: str, result_file: str, subtask_id: str) -> Tuple[ndarray, ndarray]:
    """
    Load the Blender-rendered reference image and the provider's result image
    with OpenCV and return them as a pair of ndarrays.

    :param blender_output_file_name: Name of the rendered file inside verifier storage.
    :param result_file: Path to the result image submitted by the provider.
    :param subtask_id: Subtask identifier used for logging and error reporting.
    :raises VerificationError: When loading runs out of memory or OpenCV fails
        to read either file.
    """
    # Read both files with OpenCV.
    cv2 = import_cv2()  # type: ignore
    try:
        image_1 = cv2.imread(  # pylint: disable=no-member
            generate_verifier_storage_file_path(blender_output_file_name))
        image_2 = cv2.imread(  # pylint: disable=no-member
            result_file)
    except MemoryError as exception:
        log(
            logger,
            f'Loading result files into memory exceeded available memory and failed with: {exception}',
            subtask_id=subtask_id,
        )
        raise VerificationError(
            str(exception),
            ErrorCode.VERIFIER_LOADING_FILES_INTO_MEMORY_FAILED,
            subtask_id,
        )
    # If loading fails because of wrong path, cv2.imread does not raise any error but returns None.
    if image_1 is None or image_2 is None:
        log(
            logger,
            # FIX: dropped the needless f-string prefix — the literal has no placeholders (flake8 F541).
            'Loading files using OpenCV fails.',
            subtask_id=subtask_id,
        )
        raise VerificationError(
            'Loading files using OpenCV fails.',
            ErrorCode.VERIFIER_LOADING_FILES_WITH_OPENCV_FAILED,
            subtask_id,
        )
    return (image_1, image_2)
def wrapper(
        request: HttpRequest,
        client_message: message.Message,
        client_public_key: bytes,
        *args: list,
        **kwargs: dict,
) -> Union[HttpResponse, JsonResponse]:
    """
    Reject the request with a signed ServiceRefused(UnsupportedProtocolVersion)
    response when the client's golem-messages version is not supported by
    Concent; otherwise delegate to the wrapped view.
    """
    if not is_given_golem_messages_version_supported_by_concent(request=request):
        log(
            logger,
            f'Wrong version of golem messages. Clients version is {request.META["HTTP_X_Golem_Messages"]}, '
            f'Concent version is {settings.GOLEM_MESSAGES_VERSION}.',
            client_public_key=client_public_key,
        )
        serialized_message = dump(
            message.concents.ServiceRefused(
                reason=message.concents.ServiceRefused.REASON.UnsupportedProtocolVersion,
            ),
            settings.CONCENT_PRIVATE_KEY,
            client_public_key,
        )
        return HttpResponse(serialized_message, content_type='application/octet-stream')
    # BUG FIX: kwargs must be unpacked with `**` — the original `*kwargs` passed
    # only the dict KEYS as extra positional arguments, dropping all values.
    return view(request, client_message, client_public_key, *args, **kwargs)
def wrapper(request: HttpRequest, golem_message: message.Message, client_public_key: bytes) -> HttpResponse:
    """Log a sanitized JSON dump of the incoming Golem message, then delegate to the wrapped view."""
    loggable_json = get_json_from_message_without_redundant_fields_for_logging(golem_message)
    log(logger, str(loggable_json))
    return view(request, golem_message, client_public_key)
def download_archives_from_storage(
        file_transfer_token: message.concents.FileTransferToken,
        subtask_id: str,
        package_paths_to_downloaded_file_names: Dict[str, str],
) -> None:
    """
    Download every file listed in the mapping from the storage cluster into
    local verifier storage, wiping that directory first.

    :param file_transfer_token: Token authorizing the downloads; its signature
        is cleared before each request so the headers can be re-prepared.
    :param subtask_id: Subtask identifier used for logging and error reporting.
    :param package_paths_to_downloaded_file_names: Maps remote package paths to
        the local file names to store them under.
    :raises VerificationError: With VERIFIER_FILE_DOWNLOAD_FAILED when any
        download or local write fails.
    """
    # Remove any files from VERIFIER_STORAGE_PATH.
    clean_directory(settings.VERIFIER_STORAGE_PATH, subtask_id)

    # Download all the files listed in the message from the storage server to local storage.
    for file_path, download_file_name in package_paths_to_downloaded_file_names.items():
        try:
            file_transfer_token.sig = None
            cluster_response = send_request_to_storage_cluster(
                prepare_storage_request_headers(file_transfer_token),
                settings.STORAGE_SERVER_INTERNAL_ADDRESS + CLUSTER_DOWNLOAD_PATH + file_path,
                method='get',
            )
            path_to_store = os.path.join(settings.VERIFIER_STORAGE_PATH, download_file_name)
            store_file_from_response_in_chunks(cluster_response, path_to_store)
        except Exception as exception:
            log(
                logger,
                # FIX: added the missing space before 'ErrorCode' (the two
                # f-strings were concatenated with no separator).
                f'blender_verification_order for SUBTASK_ID {subtask_id} failed with error {exception}. '
                f'ErrorCode: {ErrorCode.VERIFIER_FILE_DOWNLOAD_FAILED.name}',
                # FIX: pass subtask_id so the entry is attributable, consistent
                # with every other log call in this module.
                subtask_id=subtask_id,
            )
            raise VerificationError(
                str(exception),
                ErrorCode.VERIFIER_FILE_DOWNLOAD_FAILED,
                subtask_id=subtask_id,
            )
def is_golem_message_signed_with_key(
    public_key: bytes,
    golem_message: message.base.Message,
) -> bool:
    """
    Validates if given Golem message is signed with given public key.

    :param golem_message: Instance of golem_messages.base.Message object.
    :param public_key: Client public key in bytes.
    :return: True if given Golem message is signed with given public key, otherwise False.
    """
    assert isinstance(golem_message, message.base.Message)

    validate_bytes_public_key(public_key, 'public_key')

    try:
        return golem_message.verify_signature(public_key)
    except MessageError as exception:
        # Signature verification blew up — treat the message as unsigned
        # and record the failure for diagnostics.
        log(
            logger,
            f'There was an exception when validating if golem_message {golem_message.__class__.__name__} is signed '
            f'with public key. Exception: {exception}.',
            client_public_key=public_key,
            logging_level=LoggingLevel.INFO,
        )
        return False
async def _decorated(req: Request) -> Response:
    """
    Call the wrapped handler, retrying with exponential back-off whenever the
    LDAP server cannot be reached. A fresh Server is attached to the registry
    before each retry; once all attempts are spent, a 502 HTTPError is raised.
    """
    registry = req.app["registry"]
    server_uri = registry.server.uri

    attempts_left = max_attempts
    backoff = 1

    while attempts_left:
        try:
            return await handler(req)

        except CannotConnect:
            attempts_left -= 1
            if not attempts_left:
                # Bad gateway
                raise HTTPError(
                    502,
                    f"Cannot establish a connection with LDAP server at {server_uri}")

            # Otherwise, sleep for a bit, then try reconnecting
            log(f"Will attempt to reconnect to LDAP server at {server_uri} in {backoff} seconds...",
                Level.Debug)
            await asyncio.sleep(backoff)
            registry.server = Server(server_uri)

            # Exponential back-off
            backoff *= 2
def safeMakeDirs(dirpath, **options):
    '''make directories recursively for given path, don't throw exception
    if directory exists but if file exists'''
    if not os.path.isdir(dirpath):
        # NOTE(review): `logging.log`/`logging.alert` do not match the stdlib
        # logging API — presumably a project-local wrapper; confirm before
        # running against stdlib logging.
        logging.log('Making directory: "%s"' % dirpath)
        try:
            os.makedirs(dirpath, **options)
        except OSError:
            # BUG FIX: narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit and programming errors. makedirs
            # failures (path exists as a file, permissions) raise OSError.
            logging.alert('Cannot make directory: "%s"' % dirpath)
def server(self, server: ldap.Server) -> None:
    """
    Reattach an LDAP server to every node, in the event of connection
    problems and forcibly expire the registry
    """
    with self._reattach_lock:
        log(f"Reattaching all nodes to {server.uri}", Level.Debug)
        self._server = server
        for node in self._registry.values():
            node.reattach_server(server)
def wrapper(*args: Any, **kwargs: Any) -> None:
    """
    Run the wrapped task, logging any exception (with its traceback) to the
    crash logger before re-raising it unchanged.
    """
    try:
        return task(*args, **kwargs)
    except Exception as exception:
        log(
            crash_logger,
            f'Exception occurred while executing task {task.__name__}: {exception}, Traceback: {traceback.format_exc()}',
            # FIX: dict.get replaces the `x[k] if k in x else None` conditional —
            # same result, one lookup, idiomatic.
            subtask_id=kwargs.get('subtask_id'),
            logging_level=LoggingLevel.ERROR)
        raise
def wrapper(middleman_message: GolemMessageFrame) -> bytes:
    """
    Keep forwarding the frame to middleman until a response arrives or the
    configured retry window (SCI_CALLBACK_RETRIES_TIME) elapses, then raise
    SCICallbackTimeoutError.
    """
    started_at = time()
    attempts_made = 0
    while time() - started_at < settings.SCI_CALLBACK_RETRIES_TIME:
        try:
            return send_request_to_middleman(middleman_message)
        except (SCICallbackTimeoutError, SystemExit):
            log(logger, f"Concent didn't get any response from middleman. Retrying. Retry amount: {attempts_made}.")
            attempts_made += 1
    log(logger, f'error: Concent failed to get response from middleman, in {attempts_made} retries.')
    raise SCICallbackTimeoutError()
def wrapper(
        request: HttpRequest,
        *args: list,
        **kwargs: dict,
) -> Union[HttpResponse, JsonResponse]:
    """
    Return a plain-text 404 response when the client's golem-messages version
    is not supported by Concent; otherwise delegate to the wrapped view.
    """
    if not is_given_golem_messages_version_supported_by_concent(request=request):
        error_message = f"Unsupported protocol version. Client's version is {request.META['HTTP_X_GOLEM_MESSAGES']}, " \
                        f"Concent's version is {settings.GOLEM_MESSAGES_VERSION}."
        log(logger, error_message)
        return HttpResponse(error_message, status=404)
    # BUG FIX: kwargs must be unpacked with `**` — the original `*kwargs` passed
    # only the dict KEYS as extra positional arguments, dropping all values.
    return view(request, *args, **kwargs)
def update_subtask_state(subtask: Subtask, state: str, next_deadline: Union[int, float, None] = None) -> None:
    """
    Move the subtask to a new state, update its deadline, validate and persist
    it, and log the transition.
    """
    previous_state = subtask.state
    subtask.state = state
    if next_deadline is None:
        subtask.next_deadline = None
    else:
        subtask.next_deadline = parse_timestamp_to_utc_datetime(next_deadline)
    subtask.full_clean()
    subtask.save()
    log(
        logger,
        f'Subtask changed its state from {previous_state} to {subtask.state}',
        subtask_id=subtask.subtask_id,
    )
def delete_file(file_path: str, subtask_id: str) -> None:
    """
    Delete a file from verifier storage; a failure to delete is only logged
    as a warning, never raised.
    """
    file_path = os.path.join(settings.VERIFIER_STORAGE_PATH, file_path)
    if not os.path.isfile(file_path):
        return
    try:
        os.unlink(file_path)
    except OSError as exception:
        log(
            logger,
            f'File with path {file_path} was not deleted, exception: {exception}',
            subtask_id=subtask_id,
            logging_level=LoggingLevel.WARNING,
        )
def ensure_enough_result_files_provided(frames: List[int], result_files_list: List[str], subtask_id: str) -> None:
    """
    Verify that at least one result file exists for every frame to render.

    :raises VerificationMismatch: When there are fewer result files than
        frames. A surplus of result files is only logged as a warning.
    """
    if len(frames) > len(result_files_list):
        raise VerificationMismatch(subtask_id=subtask_id)
    elif len(frames) < len(result_files_list):
        log(
            logger,
            # FIX: corrected grammar ("There is more" -> "There are more") and
            # dropped the needless f-string prefix — the literal has no placeholders.
            'There are more result files than frames to render',
            subtask_id=subtask_id,
            logging_level=LoggingLevel.WARNING,
        )
def check_compatibility(subtask: Subtask, client_public_key: bytes) -> None:
    """
    Raise UnsupportedProtocolVersion when the golem-messages protocol version
    stored with the subtask is incompatible with the one Concent runs.
    """
    stored_protocol_version = subtask.task_to_compute.protocol_version
    if is_protocol_version_compatible(stored_protocol_version):
        return

    error_message = (
        f'Unsupported version of golem messages in stored messages. '
        f'Version stored in database is {stored_protocol_version}, '
        f'Concent version is {settings.MAJOR_MINOR_GOLEM_MESSAGES_VERSION}.'
    )
    log(
        logger,
        error_message,
        subtask_id=subtask.subtask_id,
        client_public_key=client_public_key,
    )
    raise UnsupportedProtocolVersion(
        error_message=error_message,
        error_code=ErrorCode.UNSUPPORTED_PROTOCOL_VERSION)
def clean_directory(directory_path: str, subtask_id: Optional[str] = None) -> None:
    """
    Removes all files from given directory path.

    Subdirectories are left in place. A failure to delete any single file is
    logged as a warning and does not interrupt the cleanup.
    """
    # FIX: loop variable renamed from `file`, which shadowed the builtin.
    for file_name in os.listdir(directory_path):
        file_path = os.path.join(directory_path, file_name)
        try:
            if os.path.isfile(file_path):
                os.unlink(file_path)
        except OSError as exception:
            log(
                logger,
                f'File {file_name} in directory {directory_path} was not deleted, exception: {exception}',
                subtask_id=subtask_id,
                logging_level=LoggingLevel.WARNING,
            )
def try_to_upload_blender_output_file(blender_output_file_name: str, output_format: str, subtask_id: str, frame_number: int) -> None:
    """
    Read the image rendered by Blender from verifier storage and upload it to
    the storage cluster under a path derived from the subtask and frame.

    OSError while reading or uploading is logged to the crash logger and
    swallowed; MemoryError is logged and re-raised as VerificationError.
    """
    destination_path = generate_upload_file_path(subtask_id, output_format, frame_number)
    try:
        # Read Blender output file.
        source_path = generate_verifier_storage_file_path(blender_output_file_name)
        with open(source_path, 'rb') as rendered_image:
            image_bytes = rendered_image.read()  # type: bytes
        checksum = 'sha1:' + hashlib.sha1(image_bytes).hexdigest()

        # Generate a FileTransferToken valid for an upload of the image generated by blender.
        transfer_token = create_file_transfer_token_for_concent(
            subtask_id=subtask_id,
            result_package_path=destination_path,
            result_size=len(image_bytes),
            result_package_hash=checksum,
            operation=message.concents.FileTransferToken.Operation.upload,
        )

        # Upload the image.
        upload_file_to_storage_cluster(
            image_bytes,
            destination_path,
            transfer_token,
            settings.CONCENT_PRIVATE_KEY,
            settings.CONCENT_PUBLIC_KEY,
            settings.CONCENT_PUBLIC_KEY,
            settings.STORAGE_SERVER_INTERNAL_ADDRESS,
        )
    except OSError as exception:
        log(crash_logger, str(exception), subtask_id=subtask_id, logging_level=LoggingLevel.ERROR)
    except MemoryError as exception:
        log(
            logger,
            f'Loading result files into memory failed with: {exception}',
            subtask_id=subtask_id,
            logging_level=LoggingLevel.ERROR,
        )
        raise VerificationError(
            str(exception),
            ErrorCode.VERIFIER_LOADING_FILES_INTO_MEMORY_FAILED,
            subtask_id,
        )
def unpack_archives(file_paths: Iterable[str], subtask_id: str) -> None:
    """
    Unpack each downloaded archive with project source, translating zip
    corruption into VerificationError (VERIFIER_UNPACKING_ARCHIVE_FAILED).
    """
    # Verifier unpacks the archive with project source.
    for archive_path in file_paths:
        try:
            unpack_archive(os.path.basename(archive_path))
        except zipfile.BadZipFile as exception:
            log(
                logger,
                f'Verifier failed to unpack the archive with project source with error {exception} '
                f'SUBTASK_ID {subtask_id}. '
                f'ErrorCode: {ErrorCode.VERIFIER_UNPACKING_ARCHIVE_FAILED.name}'
            )
            raise VerificationError(
                str(exception),
                ErrorCode.VERIFIER_UNPACKING_ARCHIVE_FAILED,
                subtask_id,
            )