def __on_process_error(self, error):
    Logs.warning('Error in nanobabel process:')
    Logs.warning(error)
def _on_complex_updated(cls, index, new_complex):
    callbacks = _PluginInstance.__complex_updated_callbacks
    try:
        callbacks[index](new_complex)
    except KeyError:
        Logs.warning('Received an unknown updated complex index:', index)
def on_advanced_settings(self):
    """
    | Called when user presses "Advanced Settings"
    """
    Logs.warning('Callback on_advanced_settings not defined. Ignoring')
def _parse_lines(lines):
    lines = [line.rstrip() for line in lines]
    content = Content()
    chain_idx = 0
    chain_offset = 0
    model_number = 0
    model_done = False
    line_counter = 0
    total_lines = len(lines)
    while line_counter < total_lines:
        line = lines[line_counter]
        try:
            record_type = record_chunk_string(line, 1, 6)
            if record_type == "MODEL":
                # A new model resets chain tracking and reopens record parsing.
                chain_idx = 0
                chain_offset = 0
                model_number += 1
                model_done = False
            if not model_done:
                rec = None
                if record_type == "TER":
                    ter = record_ter(line, line_counter)
                    content.records.append(ter)
                    content.ters.append(ter)
                    chain_idx += 1
                    chain_offset = 0
                if record_type == "ATOM":
                    rec = record_atom(line, line_counter)
                if record_type == "HETATM":
                    rec = record_het_atom(line, line_counter)
                if rec is not None:
                    if len(rec.chain_identifier) <= 0:
                        # No chain identifier in the file: infer a chain break
                        # whenever the residue serial number resets.
                        if rec.residue_serial_number < chain_offset:
                            chain_idx += 1
                            chain_offset = 0
                        elif rec.residue_serial_number >= chain_offset:
                            chain_offset = rec.residue_serial_number
                        rec.chain_identifier = str(chain_idx)
                    model_number = max(model_number, 1)
                    rec.model_number = model_number
                    if rec.residue_name != "SOL":
                        content.records.append(rec)
                        content.atoms.append(rec)
                if record_type == "COMPND":
                    try:
                        rec = record_compnd(line, line_counter)
                        content.records.append(rec)
                        content.compnds.append(rec)
                    except:
                        Logs.warning("Error parsing COMPND:", traceback.format_exc())
                if record_type == "REMARK":
                    rec = record_remark(line, line_counter)
                    content.records.append(rec)
                    if rec.num in content._remarks:
                        content._remarks[rec.num] = content._remarks[rec.num] + "\n" + rec.text
                    else:
                        content._remarks[rec.num] = rec.text
                if record_type == "CRYST":
                    line = "REMARK " + line
                    rec = record_cryst(line, line_counter)
                    content.records.append(rec)
                    content.crysts.append(rec)
                if record_type == "ORIGX":
                    line = "REMARK " + line
                    rec = record_origx(line, line_counter)
                    content.records.append(rec)
                    content.origxs.append(rec)
                if record_type == "SCALE":
                    line = "REMARK " + line
                    rec = record_scale(line, line_counter)
                    content.records.append(rec)
                    content.scales.append(rec)
                if record_type == "MTRIX":
                    rec = record_mtrix(line, line_counter)
                    content.records.append(rec)
                    content.mtrixs.append(rec)
                if record_type == "HELIX":
                    rec = record_helix(line, line_counter)
                    content.records.append(rec)
                    content.helixs.append(rec)
                if record_type == "SHEET":
                    rec = record_sheet(line, line_counter)
                    content.records.append(rec)
                    content.sheets.append(rec)
                if record_type == "ENDMDL":
                    model_done = True
            content.raw.append(line)
        except:
            print("LINE: " + str(line_counter))
            print("PDB Parsing error")
            raise
        line_counter += 1
    content.model_count = model_number
    return content
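# Illustrative sketch (assumption, not part of the original source): the parser
# above is driven by the column-oriented PDB format, in which columns 1-6 of
# every line hold the record name (ATOM, HETATM, TER, REMARK, ...). The exact
# behaviour of record_chunk_string is not shown here; a plain fixed-column
# extraction of that field would look like the hypothetical helper below.
def _pdb_record_name(line):
    # Columns 1-6 (0-based slice [0:6]) hold the record name, padded with spaces.
    return line[0:6].strip()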
def menu(self):
    if not self.__set_first:
        self.__set_first = True
        Logs.warning("The default menu (self.menu) is now deprecated and will be removed in a future version. Please use the ui.Menu() constructor to create the menu.")
    return self.__menu
def on_run(self):
    """
    | Called when user presses "Run"
    """
    Logs.warning('Callback on_run not defined. Ignoring')
def __on_error(self, msg):
    Logs.warning("[DSSP]", msg)
def __on_error(self, msg):
    if "molecule converted" not in msg:
        Logs.warning("[Bond Generation]", msg)
def do_POST(self):
    try:
        parsed_url = urlparse(self.path)
        path = parsed_url.path
        path = urllib.parse.unquote(path)
        content_len = int(self.headers.get('Content-Length'))
        data = self.rfile.read(content_len)
    except:
        Logs.warning("Error trying to parse request:\n", traceback.format_exc())
        self._send_json_error(400, "Parsing problem")
        return

    if not path.startswith('/files'):
        self._send_json_error(403, "Forbidden")
        return

    folder = os.path.join(FILES_DIR, path[7:])

    # no files provided, create folders
    if not content_len:
        if os.path.exists(folder):
            self._send_json_error(400, "Name already exists")
        else:
            os.makedirs(folder)
            self._send_json_success()
        return

    data_manager = DataManager()
    data_manager.data = data
    data_manager.find_boundary()

    done = False
    while not done:
        RequestHandler.read_header(data_manager)
        RequestHandler.read_data(data_manager)
        done = RequestHandler.check_EOF(data_manager)

        file_name = data_manager.file_name
        file_body = data_manager.body

        if file_name == "":
            continue

        # If file is not supported
        if not WebLoaderServer.file_filter(file_name):
            self._send_json_error(400, file_name + " format not supported")
            return

        subfolder = os.path.join(folder, os.path.dirname(file_name))
        if not os.path.exists(subfolder):
            os.makedirs(subfolder)

        file_path = os.path.join(folder, file_name)

        # rename on duplicates: file.txt -> file (n).txt
        reg = r'(.+/)([^/]+?)(?: \((\d+)\))?(\.\w+)'
        (path, name, copy, ext) = re.search(reg, file_path).groups()
        copy = 1 if copy is None else int(copy)
        while os.path.isfile(file_path):
            copy += 1
            file_path = '%s%s (%d)%s' % (path, name, copy, ext)

        # Create file
        with open(file_path, "wb") as f:
            f.write(file_body)

    self._send_json_success()
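# Minimal sketch (assumption, not part of the original source) of the
# rename-on-duplicate pattern used in do_POST above, pulled out as a
# stand-alone pure function. It applies the same regex idea: split the path
# into directory, base name, optional " (n)" copy counter, and extension, then
# bump the counter until the name is free. The helper name _next_free_path is
# hypothetical.
import os
import re

def _next_free_path(file_path):
    reg = r'(.+/)([^/]+?)(?: \((\d+)\))?(\.\w+)'
    path, name, copy, ext = re.search(reg, file_path).groups()
    copy = 1 if copy is None else int(copy)
    while os.path.isfile(file_path):
        copy += 1
        file_path = '%s%s (%d)%s' % (path, name, copy, ext)
    return file_path

# e.g. if 'uploads/model.pdb' already exists on disk,
# _next_free_path('uploads/model.pdb') returns 'uploads/model (2).pdb'.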
def _write(self, message):
    try:
        self.wfile.write(message)
    except:
        Logs.warning("Connection reset while responding", self.client_address)
async def start_process(self, workspace, ff, steps, steepest):
    if sum(1 for _ in workspace.complexes) == 0:
        Logs.message('No structures to minimize')
        return

    input_file = tempfile.NamedTemporaryFile(delete=False, suffix='.sdf', dir=self.temp_dir.name)
    constraints_file = tempfile.NamedTemporaryFile(delete=False, suffix='.txt', dir=self.temp_dir.name)
    output_file = tempfile.NamedTemporaryFile(delete=False, suffix='.pdb', dir=self.temp_dir.name)

    self.__output_lines = []
    self.__updates_done = {}
    self.__packet_id = 0

    saved_atoms, indices = self.__save__atoms(input_file.name, workspace)
    Logs.debug("Wrote input file:", input_file.name)
    self.__save__constraints(constraints_file.name, saved_atoms)
    Logs.debug("Wrote constraints file:", constraints_file.name)

    self.__stream, error = await self.__plugin.create_writing_stream(indices, StreamType.position)
    if error == StreamCreationError.AtomNotFound:
        # User deleted atoms between start_process() and create_writing_stream(),
        # so update the workspace and try again.
        Logs.warning("User deleted atoms while setting up process, retrying")
        updated_workspace = await self.__plugin.request_workspace()
        await self.start_process(updated_workspace, ff, steps, steepest)
        return
    elif error != StreamCreationError.NoError:
        Logs.error(f"Error while creating stream: {error}")
        return

    self.__data_queue = deque()

    cwd_path = self.__nanobabel_dir
    exe = 'nanobabel.exe' if IS_WIN else 'nanobabel'
    exe_path = os.path.join(cwd_path, exe)
    args = [
        'minimize', '-h', '-l', '20',
        '-n', str(steps),
        '-ff', ff,
        '-i', input_file.name,
        '-cx', constraints_file.name,
        '-o', output_file.name
    ]
    if IS_WIN:
        args += ['-dd', 'data']
    if steepest:
        args.append('-sd')
    Logs.debug(args)

    p = Process(exe_path, args, True)
    p.on_error = self.__on_process_error
    p.on_output = self.__on_process_output
    p.on_done = self.__on_process_done

    self.calculation_start_time = time.time()
    log_data = {
        'exe_path': exe_path,
        'force_field': ff,
        'steps': steps,
        'steepest': steepest
    }
    Logs.message("Starting Minimization Process", extra=log_data)
    p.start()

    self.__process = p
    self.__process_running = True
    self.is_running = True
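# Illustrative sketch (assumption, not part of the original source): what the
# argument list assembled in start_process above looks like for a hypothetical
# call with ff='MMFF94', steps=500 and steepest=True on a non-Windows host.
# The temp file paths are placeholders for the NamedTemporaryFile names.
_example_minimize_args = [
    'minimize', '-h', '-l', '20',
    '-n', '500',
    '-ff', 'MMFF94',
    '-i', '/tmp/input.sdf',
    '-cx', '/tmp/constraints.txt',
    '-o', '/tmp/output.pdb',
    '-sd',
]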