def existing_artifact(self, filename, move=False, target_filename=None):
    """
    Add existing artifact, it will be collected into artifact_dir. If
    move=True, the original file will be deleted

    :type filename: str
    :type move: bool
    :type target_filename: str
    """
    self.log.debug("Add existing artifact (move=%s): %s", move, filename)
    if self.artifacts_dir is None:
        self.log.warning("Artifacts dir has not been set, will not copy %s", filename)
        return

    base_name = target_filename if target_filename is not None else os.path.basename(filename)
    destination = os.path.join(self.artifacts_dir, base_name)
    # Record the artifact even if the copy below turns out to be a no-op
    self.__artifacts.append(destination)

    if get_full_path(filename) == get_full_path(destination):
        self.log.debug("No need to copy %s", filename)
        return

    if not os.path.exists(filename):
        self.log.warning("Artifact file not exists: %s", filename)
        return

    if move:
        self.log.debug("Moving %s to %s", filename, destination)
        shutil.move(filename, destination)
    else:
        self.log.debug("Copying %s to %s", filename, destination)
        shutil.copy(filename, destination)
def validate_args(arguments: dict) -> dict:
    """
    Check if the manifest file exists, and if a target directory is specified check
    if it is valid, or if no target directory is specified, use the parent dir
    of the manifest file

    :param arguments: The arguments
    :return: The updated and checked arguments
    """
    manifest_file = get_full_path(arguments["manifest"])
    if not is_valid_path(str(manifest_file)):
        # sys.exit raises SystemExit, so nothing after this runs on failure
        sys.exit(
            "The path specified for the manifest file is invalid\n"
            "Please verify that it exists and/or that you have the right permissions"
        )
    print("The path specified for the manifest file is valid")

    if arguments["directory"] is None:
        print("Download directory not specified, using manifest parent directory")
        target_dir = manifest_file.parent
    else:
        target_dir = get_full_path(arguments["directory"])

    arguments["manifest"] = manifest_file
    arguments["directory"] = target_dir
    arguments["mods_folder"] = target_dir.joinpath("mods")
    if not is_valid_path(str(arguments["mods_folder"])):
        print("Creating folder to store mods in")
        arguments["mods_folder"].mkdir(parents=True)
    return arguments
def move(self, from_path, to_path):
    """
    Move a file or folder to a new path inside the provider's root.
    Return false if the moved file didn't exist

    :param from_path: source path, relative to the provider root
    :param to_path: destination path, relative to the provider root
    :return: True on success, False when the source does not exist
    """
    full_from_path = get_full_path(self.root, from_path)
    full_to_path = get_full_path(self.root, to_path)
    from_base, from_item_name = os.path.split(full_from_path)
    to_base, to_item_name = os.path.split(full_to_path)
    # Bypass the cache: the source must reflect current remote state
    from_item = self.box_item.get_by_path(full_from_path, force_no_cache = True)
    if from_item.not_exists():
        return False
    from_item_id = from_item.get_id()
    from_item_is_folder = from_item.is_folder()
    to_item = self.box_item.get_by_path(full_to_path, force_no_cache = True)
    if to_item.not_exists():
        # Destination itself doesn't exist: target its parent folder instead
        to_item = self.box_item.get_by_path(to_base, force_no_cache = True)
    destination_folder = self.client.folder(to_item.get_id())
    if from_item_is_folder:
        source = self.client.folder(from_item_id)
    else:
        source = self.client.file(from_item_id)
    if from_item_name == to_item_name:
        # Same leaf name: plain move into the destination folder
        source.move(destination_folder)
    else:
        # NOTE(review): with a different leaf name only a rename-in-place is
        # issued; the item is never moved into destination_folder — confirm
        # this is the intended behavior for cross-folder renames.
        source.rename(to_item_name)
    return True
def __init__(self):
    """Load the Telegram bot token and work-file paths, then initialize the base wrapper.

    Reads tg_bot_token from the bundled credentials.json and resolves the
    log/feedback file locations via get_full_path().
    """
    # BUG FIX: the original read the credentials with open(...).read() and
    # never closed the file handle; use a context manager instead.
    credentials_path = get_full_path('telegram_wrapper/workdata/credentials.json')
    with open(credentials_path) as credentials_file:
        TG_TOKEN = json.load(credentials_file)['tg_bot_token']
    log_filename = get_full_path('telegram_wrapper/workdata/log.txt')
    feedback_filename = get_full_path('telegram_wrapper/workdata/feedback.txt')
    TelegramWrapper.__init__(
        self,
        TG_TOKEN=TG_TOKEN,
        log_filename=log_filename,
        feedback_filename=feedback_filename)
def on_rename_buf(self, data):
    """Apply a remote buffer rename: update bookkeeping and move the file or view."""
    dest = utils.get_full_path(data['path'])
    src = utils.get_full_path(data['old_path'])
    parent = os.path.dirname(dest)
    if parent:
        utils.mkdir(parent)
    view = self.get_view(data['id'])
    self.FLOO_BUFS[data['id']]['path'] = data['path']
    # With an open view the editor handles the on-disk rename itself
    if view:
        view.rename(dest)
    else:
        os.rename(src, dest)
def create_artifacts_dir(self, existing_artifacts=(), merged_config=None):
    """
    Create directory for artifacts, directory name based on datetime.now()

    :param existing_artifacts: iterable of file names to copy into the new dir
    :param merged_config: optional Configuration dumped as merged.yml/.json
    """
    if not self.artifacts_dir:
        artifacts_dir = self.config.get(SETTINGS, force_set=True).get("artifacts-dir", self.ARTIFACTS_DIR)
        # The configured value is a strftime() template (e.g. contains %Y-%m-%d)
        self.artifacts_dir = datetime.datetime.now().strftime(artifacts_dir)
        self.artifacts_dir = get_full_path(self.artifacts_dir)
    self.log.info("Artifacts dir: %s", self.artifacts_dir)
    # Expose the location both via internal env and the process environment
    self.env.set({TAURUS_ARTIFACTS_DIR: self.artifacts_dir})
    os.environ[TAURUS_ARTIFACTS_DIR] = self.artifacts_dir
    if not os.path.isdir(self.artifacts_dir):
        os.makedirs(self.artifacts_dir)
    # dump current effective configuration
    dump = self.create_artifact("effective", "")  # TODO: not good since this file not exists
    self.config.set_dump_file(dump)
    self.config.dump()
    # dump merged configuration
    if merged_config:
        merged_config.dump(self.create_artifact("merged", ".yml"), Configuration.YAML)
        merged_config.dump(self.create_artifact("merged", ".json"), Configuration.JSON)
    for artifact in existing_artifacts:
        self.existing_artifact(artifact)
def delete_buf(self, path):
    """deletes a path

    Directories are walked recursively and each non-hidden file is deleted
    through a recursive call; the actual deletion is an event sent to the agent.
    """
    if not path:
        return
    path = utils.get_full_path(path)
    if not self.is_shared(path):
        msg.error("Skipping deleting %s because it is not in shared path %s." % (path, G.PROJECT_PATH))
        return
    if os.path.isdir(path):
        for dirpath, dirnames, filenames in os.walk(path):
            # Don't care about hidden stuff
            dirnames[:] = [d for d in dirnames if d[0] != "."]
            for f in filenames:
                f_path = os.path.join(dirpath, f)
                if f[0] == ".":
                    msg.log("Not deleting buf for hidden file %s" % f_path)
                else:
                    self.delete_buf(f_path)
        return
    buf_to_delete = None
    # Buffers are keyed by id, so linear-search by relative path
    rel_path = utils.to_rel_path(path)
    for buf_id, buf in self.FLOO_BUFS.items():
        if rel_path == buf["path"]:
            buf_to_delete = buf
            break
    if buf_to_delete is None:
        msg.error("%s is not in this room" % path)
        return
    msg.log("deleting buffer ", rel_path)
    event = {"name": "delete_buf", "id": buf_to_delete["id"]}
    self.agent.put(event)
def set_text(self, text):
    """Replace the tracked emacs buffer content and push the new text to emacs."""
    self.emacs_buf = text
    payload = {
        'id': self.buf['id'],
        'full_path': utils.get_full_path(self.buf['path']),
        'buf': text,
    }
    emacs.put('get_buf', payload)
def on_rename_buf(self, data): buf = self.FLOO_BUFS[int(data['id'])] # This can screw up if someone else renames the buffer around the same time as us. Oh well. buf = self.get_buf_by_path(utils.get_full_path(data['path'])) if not buf: return super(Protocol, self).on_rename_buf(data) msg.debug('We already renamed %s. Skipping' % buf['path'])
def test_draw_contour(self): """Test is contour is being drawn accordingly to flags_handles. """ # setup # Input from camera. cv2.namedWindow('test_draw_contour') test_path = utils.get_full_path( 'docs/material_for_testing/back_ground_removed_frame.jpg') test_image = cv2.imread(test_path) # Because image loaded from local, and not received from web-cam, a flip is needed. test_image = cv2.flip(test_image, 1) expected = test_image.copy() flags_handler = FlagsHandler() # Set flags_handler in order to perform the test. flags_handler.lifted = True flags_handler.calibrated = True detector = Detector(flags_handler) # run while flags_handler.quit_flag is False: """ Inside loop, update self._threshold according to flags_handler, Pressing 'c': in order to toggle control (suppose to change contour's color between green and red) Pressing 'l': to raise 'land' flag in flags_handler, in order to be able to break loop (with esc) Pressing esc: break loop. """ detector.input_frame_for_feature_extraction = test_image cv2.imshow('test_draw_contour', detector.input_frame_for_feature_extraction) flags_handler.keyboard_input = cv2.waitKey(1) # teardown cv2.destroyAllWindows()
def on_create_buf(self, data):
    """Run the parent handler, then tell emacs about the newly created buffer."""
    super(Protocol, self).on_create_buf(data)
    payload = {
        'full_path': utils.get_full_path(data['path']),
        'path': data['path'],
        'username': data.get('username', ''),
    }
    emacs.put('create_buf', payload)
def build_docker_image():
    """Build the full freqtrade docker image from the repository root Dockerfile."""
    logger.info('Building full freqtrade docker image...')
    docker_client = docker.from_env()
    build_context = get_full_path([''])
    docker_client.images.build(
        path=build_context,
        dockerfile=r'./Dockerfile',
        tag='freqtradefull:latest',
        rm=True,
    )
def __init__(self, buf, emacs_buf=None):
    """Wrap a shared buffer; ask emacs to create a view when none is supplied."""
    self.buf = buf
    self._emacs_buf = emacs_buf
    if emacs_buf is not None:
        return
    emacs.put('create_view', {
        'full_path': utils.get_full_path(buf['path']),
        'id': buf['id'],
    })
def on_delete_buf(self, data):
    """Forget a deleted buffer and, when configured, remove the local file too."""
    # TODO: somehow tell the user about this. maybe delete on disk too?
    del self.FLOO_BUFS[data["id"]]
    path = utils.get_full_path(data["path"])
    if G.DELETE_LOCAL_FILES:
        utils.rm(path)
        msg.warn("deleted %s because %s told me to." % (path, data.get("username", "the internet")))
    else:
        msg.log("Not deleting %s because delete_local_files is disabled" % path)
def __init__(self):
    """Set up the face processor: logger, Haar cascade detector and crop padding."""
    self.logger = logging.getLogger('face_processor_handler')
    self.logger.setLevel(logging.INFO)
    cascade_path = utils.get_full_path('hallopy/config/haarcascade_frontalface_default.xml')
    self._face_detector = cv2.CascadeClassifier(cascade_path)
    # Extra pixels kept around a detected face
    self._face_padding_x = 20
    self._face_padding_y = 60
    self._preprocessed_input_frame = None
def run_nrpspks_specific_hmmer(seq_record, withinclustergenes, pksnrpsvars):
    """Scan the record's translated sequence for NRPS/PKS domains via HMMER3
    and store the parsed hits on pksnrpsvars (.domaindict / .domaindict2).

    :param seq_record: record whose .seq is translated (to first stop) and scanned
    :param withinclustergenes: unused here (kept for interface compatibility)
    :param pksnrpsvars: result holder updated in place
    """
    # lol this is a true multiFASTA protein file
    #nrpspksfasta = utils.get_specific_multifasta(seq_record, withinclustergenes)
    gene_id = "gene"
    fasta_seq = str(seq_record.seq.translate(to_stop=True))
    nrpspksfasta = ">%s\n%s" % (gene_id, fasta_seq)
    #antiSMASH actually checks for abMotifs here but remove for now, since Ive no idea what it does :P
    #Analyse for C/A/PCP/E/KS/AT/ATd/DH/KR/ER/ACP/TE/TD/COM/Docking/MT/CAL domains
    # note from HMMER3 documentation: "TC thresholds are
    # generally considered to be the score of the lowest-scoring known true positive that
    # is above all known false positives."
    logging.getLogger('user_visible').info("Scanning for NRP domains using HMMER3")
    nrpspksdomain_opts = ["--cut_tc"]
    nrpspksdomain_results = utils.run_hmmscan(utils.get_full_path(__file__, "nrpspksdomains.hmm"), nrpspksfasta, nrpspksdomain_opts)
    # Domain model lengths are needed by the parser to compute hit coverage
    hmmlengthsdict = utils.hmmlengths(utils.get_full_path(__file__, "nrpspksdomains.hmm"))
    pksnrpsvars.domaindict = parse_hmmscan_results(nrpspksdomain_results, hmmlengthsdict)
    pksnrpsvars.domaindict2 = pksnrpsvars.domaindict
    filter_nonterminal_docking_domains(seq_record, pksnrpsvars)
def on_delete_buf(self, data):
    """Drop the buffer from our map; delete the on-disk copy when configured."""
    # TODO: somehow tell the user about this. maybe delete on disk too?
    buf_id = data['id']
    del self.FLOO_BUFS[buf_id]
    path = utils.get_full_path(data['path'])
    if G.DELETE_LOCAL_FILES:
        utils.rm(path)
        who = data.get('username', 'the internet')
        msg.warn('deleted %s because %s told me to.' % (path, who))
    else:
        msg.log('Not deleting %s because delete_local_files is disabled' % path)
def write(self, path, stream):
    """
    Write the stream to the object denoted by path into the stream

    :param path: target path, relative to the provider root
    :param stream: readable stream whose content is uploaded
    :raises Exception: when the created item cannot accept a stream
    """
    full_path = get_full_path(self.root, path)
    item = self.box_item.create_path(full_path, force_no_cache = True)
    # NOTE(review): the upload happens on the is_folder() branch — presumably
    # create_path returns the parent folder that receives the new file, but
    # this reads inverted; confirm against the box_item implementation.
    if item.is_folder():
        item.write_stream(stream)
    else:
        raise Exception('Not a file name')
def stat(self, path):
    """
    Get the info about the object at the given path inside the provider's root,
    or None if the object doesn't exist
    """
    item = self.box_item.get_by_path(get_full_path(self.root, path))
    return None if item.not_exists() else item.get_stat()
def detect_faces(img):
    """Function for detecting faces.

    :returns faces: array with detected faces coordination's.
    """
    cascade_file = utils.get_full_path('hallopy/config/haarcascade_frontalface_default.xml')
    detector = cv2.CascadeClassifier(cascade_file)
    # Haar cascades operate on grayscale input
    grayscale = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    return detector.detectMultiScale(grayscale, 1.3, 5)
def get_view(self, buf_id):
    """Warning: side effects!

    Return the cached view for buf_id; when absent, try to create one from
    the matching emacs buffer.
    """
    view = self.views.get(buf_id)
    if view:
        return view
    buf = self.FLOO_BUFS[buf_id]
    emacs_buf = self.emacs_bufs.get(utils.get_full_path(buf['path']))
    if emacs_buf:
        view = self.create_view(buf, emacs_buf)
    return view
def on_highlight(self, data):
    """Run the parent handler, then forward the highlight ranges to emacs."""
    super(Protocol, self).on_highlight(data)
    buf = self.FLOO_BUFS[data['id']]
    # TODO: save highlights for when user opens the buffer in emacs
    payload = {
        'id': buf['id'],
        'full_path': utils.get_full_path(buf['path']),
        'ranges': data['ranges'],
        'user_id': data['user_id'],
        'username': data.get('username', 'unknown user'),
    }
    emacs.put('highlight', payload)
def delete_recursive(self, path):
    """
    Delete recursively from path. Return the number of deleted files (optional)
    """
    target = self.box_item.get_by_path(get_full_path(self.root, path), force_no_cache=True)
    if target.not_exists():
        return 0
    deleted_count = target.delete()
    # Remote state changed: invalidate the whole path cache
    self.box_item.cache.reset()
    return deleted_count
def browse(self, path):
    """
    List the file or directory at the given path, and its children (if directory)
    """
    normalized_path = get_normalized_path(path)
    item = self.box_item.get_by_path(get_rel_path(get_full_path(self.root, path)))
    if item.not_exists():
        return {'fullPath' : normalized_path, 'exists' : False}
    if not item.is_folder():
        return item.get_as_browse()
    # Directory: describe it along with its immediate children
    return {
        'fullPath' : normalized_path,
        'exists' : True,
        'directory' : True,
        'children' : item.get_children(normalized_path),
        'lastModified' : item.get_last_modified(),
    }
def read(self, path, stream, limit):
    """Copy the content of the file at *path* into *stream*.

    :param path: file path relative to the provider root
    :param stream: writable destination stream
    :param limit: optional byte limit as a string; None or "-1" means no limit
    :raises Exception: when the path does not exist
    """
    full_path = get_full_path(self.root, path)
    byte_range = None
    # BUG FIX: the original used `limit is not "-1"`, an identity comparison
    # on a string literal that is implementation-dependent; compare by value.
    if limit is not None and limit != "-1":
        int_limit = int(limit)
        if int_limit > 0:
            # Inclusive range: first int_limit bytes
            byte_range = (0, int_limit - 1)
    item = self.box_item.get_by_path(full_path)
    if item.not_exists():
        raise Exception('Path doesn t exist')
    shutil.copyfileobj(item.get_stream(byte_range), stream)
def run_nrpspredictor(seq_record, nrpsnames, nrpsseqs, options):
    """Run NRPSPredictor2 to predict NRPS A-domain substrate specificities.

    Writes the A-domain sequences to a FASTA file, produces the .sig input via
    nrpscodepred, invokes the bundled NRPSpredictor2 JAR, and moves the raw
    predictions into options.raw_predictions_outputfolder.

    :param seq_record: record being analysed (unused here, kept for interface)
    :param nrpsnames: names of the extracted A-domain sequences
    :param nrpsseqs: the A-domain sequences themselves
    :param options: run options (record_idx, raw_predictions_outputfolder, eukaryotic)
    """
    #NRPSPredictor: extract AMP-binding + 120 residues N-terminal of this domain, extract 8 Angstrom residues and insert this into NRPSPredictor
    logging.getLogger('user_visible').info(
        "Predicting NRPS A domain substrate specificities by NRPSPredictor")
    with TemporaryDirectory(change=True):
        nrpsseqs_file = "nrpsseqs.fasta"
        NRPSPredictor2_dir = utils.get_full_path(__file__, "NRPSPredictor2")
        utils.writefasta(nrpsnames, nrpsseqs, nrpsseqs_file)
        #Get NRPSPredictor2 code predictions, output sig file for input for NRPSPredictor2 SVMs
        nrpscodepred.run_nrpscodepred(options)
        #Run NRPSPredictor2 SVM
        datadir = path.join(NRPSPredictor2_dir, 'data')
        libdir = path.join(NRPSPredictor2_dir, 'lib')
        jarfile = path.join(NRPSPredictor2_dir, 'build', 'NRPSpredictor2.jar')
        classpath = [jarfile,
                     '%s/java-getopt-1.0.13.jar' % libdir,
                     '%s/Utilities.jar' % libdir,
                     '%s/libsvm.jar' % libdir]
        # BUG FIX: the original only set the separator for linux2/darwin/win32
        # and raised NameError on any other platform (e.g. 'linux' on py3);
        # default to the POSIX ':' everywhere except Windows.
        java_separator = ";" if sys.platform == "win32" else ":"
        commands = ['java',
                    '-Ddatadir=%s' % datadir,
                    '-cp', java_separator.join(classpath),
                    'org.roettig.NRPSpredictor2.NRPSpredictor2',
                    '-i', 'input.sig',
                    '-r', path.join(options.raw_predictions_outputfolder,
                                    "ctg" + str(options.record_idx) + '_nrpspredictor2_svm.txt'),
                    '-s', '1',
                    '-b', options.eukaryotic and '1' or '0']
        out, err, retcode = utils.execute(commands)
        if err != '':
            logging.debug('running nrpspredictor2 gave error %r' % err)
        #Copy NRPSPredictor results and move back to original directory
        codes_file = "ctg" + str(options.record_idx) + "_nrpspredictor2_codes.txt"
        # Remove any stale copy first; narrow the original bare `except` to
        # the OSError family os.remove can actually raise.
        try:
            os.remove(path.join(options.raw_predictions_outputfolder, codes_file))
        except OSError:
            pass
        shutil.move(codes_file, options.raw_predictions_outputfolder)
def _load_base_configs(self):
    """Load the bundled base config plus any machine-level extension configs."""
    bundled = os.path.join(get_full_path(__file__, step_up=1), 'resources', 'base-config.yml')
    base_configs = [bundled]
    machine_dir = get_configs_dir()  # can't refactor machine_dir out - see setup.py
    if not os.path.isdir(machine_dir):
        self.log.debug("No machine configs dir: %s", machine_dir)
    else:
        self.log.debug("Reading extension configs from: %s", machine_dir)
        for cfile in sorted(os.listdir(machine_dir)):
            fname = os.path.join(machine_dir, cfile)
            if os.path.isfile(fname):
                base_configs.append(fname)
    self.log.debug("Base configs list: %s", base_configs)
    self.config.load(base_configs)
def on_delete_buf(self, data):
    """Delete the buffer via the parent handler, then notify emacs on success."""
    buf_id = int(data['id'])
    path = self.FLOO_BUFS[buf_id]['path']
    try:
        super(Protocol, self).on_delete_buf(data)
    except Exception as e:
        msg.debug('Unable to delete buf %s: %s' % (path, str(e)))
        return
    emacs.put('delete_buf', {
        'full_path': utils.get_full_path(path),
        'path': path,
        'username': data.get('username', ''),
    })
def run_nrpspks_specific_hmmer(seq_record, withinclustergenes, pksnrpsvars):
    """Scan the record's translated sequence for NRPS/PKS domains via HMMER3
    and store the parsed hits on pksnrpsvars (.domaindict / .domaindict2).

    :param seq_record: record whose .seq is translated (to first stop) and scanned
    :param withinclustergenes: unused here (kept for interface compatibility)
    :param pksnrpsvars: result holder updated in place
    """
    # lol this is a true multiFASTA protein file
    #nrpspksfasta = utils.get_specific_multifasta(seq_record, withinclustergenes)
    gene_id = "gene"
    fasta_seq = str(seq_record.seq.translate(to_stop=True))
    nrpspksfasta = ">%s\n%s" % (gene_id, fasta_seq)
    #antiSMASH actually checks for abMotifs here but remove for now, since Ive no idea what it does :P
    #Analyse for C/A/PCP/E/KS/AT/ATd/DH/KR/ER/ACP/TE/TD/COM/Docking/MT/CAL domains
    # note from HMMER3 documentation: "TC thresholds are
    # generally considered to be the score of the lowest-scoring known true positive that
    # is above all known false positives."
    logging.getLogger('user_visible').info(
        "Scanning for NRP domains using HMMER3")
    nrpspksdomain_opts = ["--cut_tc"]
    nrpspksdomain_results = utils.run_hmmscan(
        utils.get_full_path(__file__, "nrpspksdomains.hmm"), nrpspksfasta,
        nrpspksdomain_opts)
    # Domain model lengths are needed by the parser to compute hit coverage
    hmmlengthsdict = utils.hmmlengths(
        utils.get_full_path(__file__, "nrpspksdomains.hmm"))
    pksnrpsvars.domaindict = parse_hmmscan_results(nrpspksdomain_results,
                                                   hmmlengthsdict)
    pksnrpsvars.domaindict2 = pksnrpsvars.domaindict
    filter_nonterminal_docking_domains(seq_record, pksnrpsvars)
def on_room_info(self, data):
    """Handle room_info: persist workspace metadata and sync every buffer.

    NOTE: uses dict.iteritems() and str.decode() — Python 2 only code.
    """
    # Success! Reset counter
    self.room_info = data
    self.perms = data["perms"]
    if "patch" not in data["perms"]:
        msg.log("We don't have patch permission. Setting buffers to read-only")
    utils.mkdir(G.PROJECT_PATH)
    # Record how to reconnect to this room alongside the project files
    floo_json = {
        "url": utils.to_room_url(
            {
                "host": self.agent.host,
                "owner": self.agent.owner,
                "port": self.agent.port,
                "room": self.agent.room,
                "secure": self.agent.secure,
            }
        )
    }
    with open(os.path.join(G.PROJECT_PATH, ".floo"), "w") as floo_fd:
        floo_fd.write(json.dumps(floo_json, indent=4, sort_keys=True))
    for buf_id, buf in data["bufs"].iteritems():
        buf_id = int(buf_id)  # json keys must be strings
        buf_path = utils.get_full_path(buf["path"])
        new_dir = os.path.dirname(buf_path)
        utils.mkdir(new_dir)
        self.FLOO_BUFS[buf_id] = buf
        try:
            # Reuse the local copy when its md5 matches the server's
            buf_fd = open(buf_path, "r")
            buf_buf = buf_fd.read().decode("utf-8")
            md5 = hashlib.md5(buf_buf.encode("utf-8")).hexdigest()
            if md5 == buf["md5"]:
                msg.debug("md5 sums match. not getting buffer")
                buf["buf"] = buf_buf
            else:
                raise Exception("different md5")
        except Exception:
            # Local copy missing or stale: touch the file and request content
            try:
                open(buf_path, "a").close()
            except Exception as e:
                msg.debug("couldn't touch file: %s becuase %s" % (buf_path, e))
            self.agent.send_get_buf(buf_id)
    msg.debug(G.PROJECT_PATH)
    self.agent.on_auth()
def on_room_info(self, data):
    """Handle room_info: persist workspace metadata and sync every buffer.

    NOTE: uses dict.iteritems() and str.decode() — Python 2 only code.
    """
    # Success! Reset counter
    self.room_info = data
    self.perms = data['perms']
    if 'patch' not in data['perms']:
        msg.log('We don\'t have patch permission. Setting buffers to read-only')
    utils.mkdir(G.PROJECT_PATH)
    # Record how to reconnect to this room alongside the project files
    floo_json = {
        'url': utils.to_room_url({
            'host': self.agent.host,
            'owner': self.agent.owner,
            'port': self.agent.port,
            'room': self.agent.room,
            'secure': self.agent.secure,
        })
    }
    with open(os.path.join(G.PROJECT_PATH, '.floo'), 'w') as floo_fd:
        floo_fd.write(json.dumps(floo_json, indent=4, sort_keys=True))
    for buf_id, buf in data['bufs'].iteritems():
        buf_id = int(buf_id)  # json keys must be strings
        buf_path = utils.get_full_path(buf['path'])
        new_dir = os.path.dirname(buf_path)
        utils.mkdir(new_dir)
        self.FLOO_BUFS[buf_id] = buf
        try:
            # Reuse the local copy when its md5 matches the server's
            buf_fd = open(buf_path, 'r')
            buf_buf = buf_fd.read().decode('utf-8')
            md5 = hashlib.md5(buf_buf.encode('utf-8')).hexdigest()
            if md5 == buf['md5']:
                msg.debug('md5 sums match. not getting buffer')
                buf['buf'] = buf_buf
            else:
                raise Exception('different md5')
        except Exception:
            # Local copy missing or stale: touch the file and request content
            try:
                open(buf_path, "a").close()
            except Exception as e:
                msg.debug("couldn't touch file: %s becuase %s" % (buf_path, e))
            self.agent.send_get_buf(buf_id)
    msg.debug(G.PROJECT_PATH)
    self.agent.on_auth()
def get_children(self, internal_path):
    """List the direct children of this Box folder as browse-style dicts.

    :param internal_path: path prefix used to build each child's fullPath
    :return: list of dicts with fullPath/exists/directory/size/lastModified
    """
    # CLEANUP: the original computed `full_path` and `intra_path` locals that
    # were never used; they have been removed.
    children = []
    for sub in self.client.folder(self.id).get_items(
            fields=['modified_at', 'name', 'type', 'size']):
        sub_path = get_normalized_path(
            os.path.join(internal_path, sub.name))
        ret = {
            'fullPath': sub_path,
            'exists': True,
            'directory': sub.type == self.BOX_FOLDER,
            'size': sub.size,
            'lastModified': self.get_last_modified(sub)
        }
        children.append(ret)
        # NOTE(review): the cache is keyed on the bare item name rather than
        # the item's relative path — verify this matches cache lookups.
        self.cache.add(get_rel_path(sub.name), sub.id, sub.type)
    return children
def enumerate(self, path, first_non_empty):
    """
    Enumerate files recursively from prefix. If first_non_empty, stop at the first
    non-empty file. If the prefix doesn't denote a file or folder, return None
    """
    full_path = get_full_path(self.root, path)
    normalized_path = get_normalized_path(path)
    item = self.box_item.get_by_path(full_path)
    if item.not_exists():
        return None
    paths = []
    if item.is_folder():
        paths = self.list_recursive(normalized_path, item.id, first_non_empty)
    else:
        # Single file: report just the leaf name.
        # NOTE(review): lastModified is hard-coded to 0 here (int(0) * 1000),
        # unlike the folder listing path — confirm this is intentional.
        paths.append({'path':normalized_path.split("/")[-1], 'size':item.size, 'lastModified':int(0) * 1000})
    return paths
def apply_patches(self, buf, patches):
    """Apply remote edits to the emacs buffer, keeping the cursor in place.

    :param buf: buffer metadata (unused here; self.buf is used instead)
    :param patches: sequence where [0] is the new full text and [2] is a list
                    of (offset, deleted_length, inserted_text) edits
    """
    cursor_offset = self.get_cursor_offset()
    msg.debug('cursor offset is %s bytes' % cursor_offset)
    # Replace the whole buffer text, then tell emacs about the fine-grained edits
    self.emacs_buf = patches[0]
    emacs.put('edit', {
        'id': self.buf['id'],
        'full_path': utils.get_full_path(self.buf['path']),
        'edits': patches[2],
    })
    for patch in patches[2]:
        offset = patch[0]
        length = patch[1]
        patch_text = patch[2]
        # Shift the cursor by the net size change of each edit before it
        if cursor_offset > offset:
            new_offset = len(patch_text) - length
            cursor_offset += new_offset
    self.set_cursor_position(cursor_offset)
def test_find_largest_contours(self): """Test if largest contours is found. """ # setup test_path = utils.get_full_path( 'docs/material_for_testing/back_ground_removed_frame.jpg') test_image = cv2.imread(test_path) # Because image loaded from local, and not received from web-cam, a flip is needed. test_image = cv2.flip(test_image, 1) test_image = cv2.bitwise_not(test_image) max_area_contour = ImageTestTool.get_max_area_contour(test_image) expected_area = ImageTestTool.get_contour_area(max_area_contour) # Create detector flags_handler = FlagsHandler() detector = Detector(flags_handler) # run detector.input_frame_for_feature_extraction = test_image result_area = cv2.contourArea(detector.max_area_contour) assert result_area == expected_area
def __configure(self, configs):
    """Load personal rc plus given configs into the engine, apply aliases and
    CLI overrides, then create the artifacts directory.

    :param configs: list of config file paths from the command line
    """
    self.log.info("Starting with configs: %s", configs)
    if self.options.no_system_configs is None:
        self.options.no_system_configs = False
    bzt_rc = os.path.expanduser(os.path.join('~', ".bzt-rc"))
    if os.path.exists(bzt_rc):
        self.log.debug("Using personal config: %s" % bzt_rc)
    else:
        self.log.debug("Adding personal config: %s", bzt_rc)
        self.log.info("No personal config found, creating one at %s", bzt_rc)
        # Seed the personal rc from the bundled template
        shutil.copy(
            os.path.join(get_full_path(__file__, step_up=1), 'resources',
                         'base-bzt-rc.yml'), bzt_rc)
    merged_config = self.engine.configure(
        [bzt_rc] + configs, not self.options.no_system_configs)
    # apply aliases
    for alias in self.options.aliases:
        cli_aliases = self.engine.config.get('cli-aliases')
        keys = sorted(cli_aliases.keys())
        # The error is passed as the .get() default: unknown alias raises it
        err = TaurusConfigError(
            "'%s' not found in aliases. Available aliases are: %s" %
            (alias, ", ".join(keys)))
        self.engine.config.merge(cli_aliases.get(alias, err))
    if self.options.option:
        overrider = ConfigOverrider(self.log)
        overrider.apply_overrides(self.options.option, self.engine.config)
    if self.__is_verbose():
        CLI.console_handler.setLevel(logging.DEBUG)
    self.engine.create_artifacts_dir(configs, merged_config)
    self.engine.default_cwd = os.getcwd()
    self.engine.eval_env(
    )  # yacky, I don't like having it here, but how to apply it after aliases and artif dir?
def delete_buf(self, path):
    """deletes a path

    Directories are walked recursively and each non-hidden file is deleted
    through a recursive call; the actual deletion is an event sent to the agent.
    """
    if not path:
        return
    path = utils.get_full_path(path)
    if not self.is_shared(path):
        msg.error('Skipping deleting %s because it is not in shared path %s.' % (path, G.PROJECT_PATH))
        return
    if os.path.isdir(path):
        for dirpath, dirnames, filenames in os.walk(path):
            # Don't care about hidden stuff
            dirnames[:] = [d for d in dirnames if d[0] != '.']
            for f in filenames:
                f_path = os.path.join(dirpath, f)
                if f[0] == '.':
                    msg.log('Not deleting buf for hidden file %s' % f_path)
                else:
                    self.delete_buf(f_path)
        return
    buf_to_delete = None
    # Buffers are keyed by id, so linear-search by relative path
    rel_path = utils.to_rel_path(path)
    for buf_id, buf in self.FLOO_BUFS.items():
        if rel_path == buf['path']:
            buf_to_delete = buf
            break
    if buf_to_delete is None:
        msg.error('%s is not in this room' % path)
        return
    msg.log('deleting buffer ', rel_path)
    event = {
        'name': 'delete_buf',
        'id': buf_to_delete['id'],
    }
    self.agent.put(event)
def find_file(self, filename):
    """
    Try to find file or dir in search_path if it was specified. Helps finding files
    in non-CLI environments or relative to config path
    Return path is full and mustn't treat with abspath/etc.

    http/https URLs are downloaded into the artifacts dir and the local
    copy's path is returned instead.

    :param filename: file basename to find
    :type filename: str
    """
    if not filename:
        return filename

    if filename.lower().startswith("http://") or filename.lower().startswith("https://"):
        parsed_url = parse.urlparse(filename)
        downloader = ExceptionalDownloader(self.get_http_client())
        self.log.info("Downloading %s", filename)
        tmp_f_name, http_msg = downloader.get(filename)
        # Pick a destination name: server-suggested (Content-Disposition),
        # else URL path basename, else a name derived from the host
        cd_header = http_msg.get('Content-Disposition', '')
        dest = cd_header.split('filename=')[-1] if cd_header and 'filename=' in cd_header else ''
        if not dest:
            dest = os.path.basename(parsed_url.path)
        fname, ext = os.path.splitext(dest) if dest else (parsed_url.hostname.replace(".", "_"), '.file')
        dest = self.create_artifact(fname, ext)
        self.log.debug("Moving %s to %s", tmp_f_name, dest)
        shutil.move(tmp_f_name, dest)
        return dest
    else:
        filename = os.path.expanduser(filename)  # expanding of '~' is required for check of existence

        # check filename 'as is' and all combinations of file_search_path/filename
        for dirname in [""] + self.file_search_paths:
            location = os.path.join(dirname, filename)
            if os.path.exists(location):
                if dirname:
                    self.log.warning("Guessed location from search paths for %s: %s", filename, location)
                return get_full_path(location)

    self.log.warning("Could not find location at path: %s", filename)
    return filename
def test_draw_axes(self): """Test if detected_out_put_center calculated properly. """ # setup test_path = utils.get_full_path( 'docs/material_for_testing/back_ground_removed_frame.jpg') test_image = cv2.imread(test_path) # Because image loaded from local, and not received from web-cam, a flip is needed. test_image = cv2.flip(test_image, 1) expected = test_image.copy() # Create detector flags_handler = FlagsHandler() detector = Detector(flags_handler) expected_detected_out_put_center = (int(expected.shape[1] / 2), int(expected.shape[0] / 2) + detector.horiz_axe_offset) # run detector.input_frame_for_feature_extraction = test_image cv2.imshow('expected', expected) cv2.imshow('result', detector.input_frame_for_feature_extraction) cv2.waitKey() assert expected_detected_out_put_center == detector.detected_out_put_center
def on_emacs_buffer_list_change(self, req):
    """Reconcile emacs's buffer list (added/deleted/current) with our state.

    NOTE: uses dict.iteritems() — Python 2 only code.
    """
    added = req.get('added') or {}
    for path, text in added.iteritems():
        buf = self.get_buf_by_path(path)
        self.emacs_bufs[path][0] = text
        if not buf:
            msg.debug('no buf for path %s' % path)
            self.create_buf(path, text)
            continue
        view = self.views.get(buf['id'])
        if view is None:
            self.get_view(buf['id'])
        elif view.is_loading():
            # Point the still-loading view at the freshly reported emacs buffer
            view._emacs_buf = self.emacs_bufs[path]
        else:
            msg.debug('view for buf %s already exists. this is not good. we got out of sync' % buf['path'])

    deleted = req.get('deleted') or []
    for path in deleted:
        if self.emacs_bufs.get(path) is None:
            msg.debug('emacs deleted %s but we already deleted it from emacs_bufs' % path)
        # NOTE(review): this del runs even when the entry was reported missing
        # above, which would raise KeyError if the key is truly absent — confirm.
        del self.emacs_bufs[path]
        buf = self.get_buf_by_path(path)
        if buf:
            del self.views[buf['id']]

    seen = set()
    current = req.get('current') or []
    for path in current:
        if self.emacs_bufs.get(path) is None:
            msg.debug('We should have buffer %s in emacs_bufs but we don\'t' % path)
        else:
            seen.add(path)

    # Sanity check: every held view should match a current emacs buffer
    for buf_id, view in self.views.iteritems():
        if utils.get_full_path(view.buf['path']) not in seen:
            msg.debug('We should not have buffer %s in our views but we do.' % view.buf['path'])
def execute_queries_for_branch(aws_key_id, aws_key, args, branch, is_first_branch):
    """
    Checks out the specified branch and runs the provided queries. Copies the event log and
    continuous monitors to `args.output_dir`. `is_first_branch` should be set to True if this is
    the first branch to be tested.

    NOTE: uses print statements — Python 2 only code.
    """
    print_heading("Testing branch '{}'".format(branch))
    execute_shell_command("cd {}; git checkout {}".format(SPARK_DIR, branch))
    jar_dir = args.jar_dir
    if jar_dir is None:
        # No pre-built JAR supplied: compile the branch from source
        print "Compiling spark-monotasks"
        mvn_filepath = path.join("build", "mvn")
        execute_shell_command(
            "cd {}; {} ".format(SPARK_DIR, mvn_filepath) +
            "-Dhadoop.version=2.0.0-cdh4.2.0 -Phive -Phive-thriftserver -DskipTests -e clean package"
        )
    else:
        print "Retrieving JAR"
        jar_filepath = path.join(jar_dir, branch, "*")
        jar_dest_dir = path.join(SPARK_DIR, "assembly", "target", "scala-2.10")
        # TODO: This will cause an error if the file specified by jar_filename does not exist. We should
        # check if the file exists before trying to copy it.
        execute_shell_command("cp -v {} {} ".format(jar_filepath, jar_dest_dir))

    print "Copying spark-monotasks to slaves"
    copy_dir_filepath = utils.get_full_path(path.join("spark-ec2", "copy-dir"))
    execute_shell_command("{} --delete {}".format(copy_dir_filepath, SPARK_DIR))

    for query in args.queries:
        execute_query(aws_key_id, aws_key, args, query, branch, is_first_branch)
def run_nrpspredictor(seq_record, nrpsnames, nrpsseqs, options):
    """Run NRPSPredictor2 to predict NRPS A-domain substrate specificities.

    Writes the A-domain sequences to a FASTA file, produces the .sig input via
    nrpscodepred, invokes the bundled NRPSpredictor2 JAR, and moves the raw
    predictions into options.raw_predictions_outputfolder.

    :param seq_record: record being analysed (unused here, kept for interface)
    :param nrpsnames: names of the extracted A-domain sequences
    :param nrpsseqs: the A-domain sequences themselves
    :param options: run options (record_idx, raw_predictions_outputfolder, eukaryotic)
    """
    #NRPSPredictor: extract AMP-binding + 120 residues N-terminal of this domain, extract 8 Angstrom residues and insert this into NRPSPredictor
    logging.getLogger('user_visible').info("Predicting NRPS A domain substrate specificities by NRPSPredictor")
    with TemporaryDirectory(change=True):
        nrpsseqs_file = "nrpsseqs.fasta"
        NRPSPredictor2_dir = utils.get_full_path(__file__, "NRPSPredictor2")
        utils.writefasta(nrpsnames, nrpsseqs, nrpsseqs_file)
        #Get NRPSPredictor2 code predictions, output sig file for input for NRPSPredictor2 SVMs
        nrpscodepred.run_nrpscodepred(options)
        #Run NRPSPredictor2 SVM
        datadir = path.join(NRPSPredictor2_dir, 'data')
        libdir = path.join(NRPSPredictor2_dir, 'lib')
        jarfile = path.join(NRPSPredictor2_dir, 'build', 'NRPSpredictor2.jar')
        classpath = [jarfile,
                     '%s/java-getopt-1.0.13.jar' % libdir,
                     '%s/Utilities.jar' % libdir,
                     '%s/libsvm.jar' % libdir]
        # BUG FIX: the original only set the separator for linux2/darwin/win32
        # and raised NameError on any other platform (e.g. 'linux' on py3);
        # default to the POSIX ':' everywhere except Windows.
        java_separator = ";" if sys.platform == "win32" else ":"
        commands = ['java',
                    '-Ddatadir=%s' % datadir,
                    '-cp', java_separator.join(classpath),
                    'org.roettig.NRPSpredictor2.NRPSpredictor2',
                    '-i', 'input.sig',
                    '-r', path.join(options.raw_predictions_outputfolder,
                                    "ctg" + str(options.record_idx) + '_nrpspredictor2_svm.txt'),
                    '-s', '1',
                    '-b', options.eukaryotic and '1' or '0']
        out, err, retcode = utils.execute(commands)
        if err != '':
            logging.debug('running nrpspredictor2 gave error %r' % err)
        #Copy NRPSPredictor results and move back to original directory
        codes_file = "ctg" + str(options.record_idx) + "_nrpspredictor2_codes.txt"
        # Remove any stale copy first; narrow the original bare `except` to
        # the OSError family os.remove can actually raise.
        try:
            os.remove(path.join(options.raw_predictions_outputfolder, codes_file))
        except OSError:
            pass
        shutil.move(codes_file, options.raw_predictions_outputfolder)
def save_buf(self, buf):
    """Write the buffer's content to its on-disk location and return the path."""
    path = utils.get_full_path(buf["path"])
    parent_dir = os.path.split(path)[0]
    utils.mkdir(parent_dir)
    # Write bytes explicitly so the encoding is always utf-8
    with open(path, "wb") as fd:
        fd.write(buf["buf"].encode("utf-8"))
    return path
def create_view(buf):
    """Open the buffer's file in the room window and return the resulting view."""
    full_path = utils.get_full_path(buf['path'])
    view = G.ROOM_WINDOW.open_file(full_path)
    if view:
        msg.debug('Created view', view.name() or view.file_name())
    return view
def focus(self, offset):
    """Ask emacs to move point in this view's buffer to the given offset."""
    payload = {
        'id': self.buf['id'],
        'full_path': utils.get_full_path(self.buf['path']),
        'offset': offset,
    }
    emacs.put('focus', payload)
def protocol(self, req):
    """Feed raw socket bytes into the room-message dispatcher.

    `req` is decoded and appended to the internal buffer; each complete
    newline-terminated JSON message is parsed and dispatched on its
    'name' field.  Any trailing partial line stays in self.buf for the
    next call.

    :param req: bytes received from the server (UTF-8 encoded JSON lines)
    """
    self.buf += req.decode('utf-8')
    msg.debug('buf: %s' % self.buf)
    while True:
        # Pull one newline-terminated message off the front of the buffer.
        before, sep, after = self.buf.partition('\n')
        if not sep:
            # No complete line yet; wait for more data.
            break
        try:
            data = json.loads(before)
        except Exception as e:
            msg.error('Unable to parse json: %s' % str(e))
            msg.error('Data: %s' % before)
            raise e
        name = data.get('name')
        if name == 'patch':
            # TODO: we should do this in a separate thread
            Listener.apply_patch(data)
        elif name == 'get_buf':
            # Full buffer contents arrived: update the open view if there
            # is one, otherwise write straight to disk.
            buf_id = data['id']
            listener.BUFS[buf_id] = data
            view = listener.get_view(buf_id)
            if view:
                Listener.update_view(data, view)
            else:
                listener.save_buf(data)
        elif name == 'create_buf':
            listener.BUFS[data['id']] = data
            listener.save_buf(data)
        elif name == 'rename_buf':
            new = utils.get_full_path(data['path'])
            old = utils.get_full_path(data['old_path'])
            new_dir = os.path.split(new)[0]
            if new_dir:
                utils.mkdir(new_dir)
            os.rename(old, new)
            view = listener.get_view(data['id'])
            if view:
                view.retarget(new)
        elif name == 'delete_buf':
            path = utils.get_full_path(data['path'])
            try:
                utils.rm(path)
            except Exception:
                # Best-effort: the file may already be gone.
                pass
            listener.delete_buf(data['id'])
        elif name == 'room_info':
            # Success! Reset counter
            self.retries = G.MAX_RETRIES
            self.room_info = data
            G.PERMS = data['perms']
            if 'patch' not in data['perms']:
                msg.log('We don\'t have patch permission. Setting buffers to read-only')
            # Write a .sublime-project so the workspace shows up as a project.
            project_json = {'folders': [{'path': G.PROJECT_PATH}]}
            utils.mkdir(G.PROJECT_PATH)
            with open(os.path.join(G.PROJECT_PATH, '.sublime-project'), 'wb') as project_fd:
                project_fd.write(
                    json.dumps(project_json, indent=4, sort_keys=True).encode('utf-8'))
            # Record reconnection info for this room in a .floo file.
            floo_json = {
                'url': utils.to_room_url({
                    'host': self.host,
                    'owner': self.owner,
                    'port': self.port,
                    'room': self.room,
                    'secure': self.secure,
                })
            }
            with open(os.path.join(G.PROJECT_PATH, '.floo'), 'w') as floo_fd:
                floo_fd.write(json.dumps(floo_json, indent=4, sort_keys=True))
            # Sync every buffer in the room: keep local copies whose md5
            # matches; request full contents of any that differ or are
            # missing on disk.
            for buf_id, buf in data['bufs'].items():
                buf_id = int(buf_id)  # json keys must be strings
                buf_path = utils.get_full_path(buf['path'])
                new_dir = os.path.dirname(buf_path)
                utils.mkdir(new_dir)
                listener.BUFS[buf_id] = buf
                try:
                    buf_fd = open(buf_path, 'rb')
                    buf_buf = buf_fd.read().decode('utf-8')
                    md5 = hashlib.md5(buf_buf.encode('utf-8')).hexdigest()
                    if md5 == buf['md5']:
                        msg.debug('md5 sums match. not getting buffer')
                        buf['buf'] = buf_buf
                    else:
                        msg.debug('md5 for %s should be %s but is %s. getting buffer'
                                  % (buf['path'], buf['md5'], md5))
                        raise Exception('different md5')
                except Exception as e:
                    # Missing/unreadable file or md5 mismatch: fetch it.
                    msg.debug('Error calculating md5:', e)
                    Listener.get_buf(buf_id)
            self.authed = True
            G.CONNECTED = True
            msg.log('Successfully joined room %s/%s' % (self.owner, self.room))
            if self.on_connect:
                # One-shot callback: fire it, then clear it.
                self.on_connect(self)
                self.on_connect = None
        elif name == 'join':
            msg.log('%s joined the room' % data['username'])
            self.room_info['users'][data['user_id']] = data['username']
        elif name == 'part':
            msg.log('%s left the room' % data['username'])
            try:
                del self.room_info['users'][data['user_id']]
            except Exception as e:
                print('Unable to delete user %s from user list' % (data))
            # Clear the departed user's highlight regions in every view.
            region_key = 'floobits-highlight-%s' % (data['user_id'])
            for window in sublime.windows():
                for view in window.views():
                    view.erase_regions(region_key)
        elif name == 'highlight':
            region_key = 'floobits-highlight-%s' % (data['user_id'])
            Listener.highlight(data['id'], region_key, data['username'],
                               data['ranges'], data.get('ping', False))
        elif name == 'error':
            message = 'Floobits: Error! Message: %s' % str(data.get('msg'))
            msg.error(message)
        elif name == 'disconnect':
            message = 'Floobits: Disconnected! Reason: %s' % str(data.get('reason'))
            msg.error(message)
            sublime.error_message(message)
            self.stop()
        elif name == 'msg':
            self.on_msg(data)
        else:
            msg.debug('unknown name!', name, 'data:', data)
        # Keep whatever followed the newline for the next iteration.
        self.buf = after
def save_buf(self, buf):
    """Persist a buffer dict to disk, creating parent dirs, and return its path."""
    target = utils.get_full_path(buf['path'])
    utils.mkdir(os.path.split(target)[0])
    with open(target, 'wb') as handle:
        handle.write(buf['buf'].encode('utf-8'))
    return target
def protocol(self, req):
    """Feed raw socket data into the room-message dispatcher.

    `req` is appended to the internal buffer; each complete
    newline-terminated JSON message is parsed and dispatched on its
    'name' field.  Any trailing partial line stays in self.buf for the
    next call.  (Python 2 era: no explicit decode, dict.iteritems.)

    :param req: data received from the server (JSON lines)
    """
    self.buf += req
    while True:
        # Pull one newline-terminated message off the front of the buffer.
        before, sep, after = self.buf.partition('\n')
        if not sep:
            # No complete line yet; wait for more data.
            break
        try:
            data = json.loads(before)
        except Exception as e:
            print('Unable to parse json:', e)
            print('Data:', before)
            raise e
        name = data.get('name')
        if name == 'patch':
            # TODO: we should do this in a separate thread
            Listener.apply_patch(data)
        elif name == 'get_buf':
            # Full buffer contents arrived: update the open view if there
            # is one, otherwise write straight to disk.
            buf_id = data['id']
            listener.BUFS[buf_id] = data
            view = listener.get_view(buf_id)
            if view:
                Listener.update_view(data, view)
            else:
                listener.save_buf(data)
        elif name == 'create_buf':
            listener.BUFS[data['id']] = data
            listener.save_buf(data)
        elif name == 'rename_buf':
            new = utils.get_full_path(data['path'])
            old = utils.get_full_path(data['old_path'])
            new_dir = os.path.split(new)[0]
            if new_dir:
                utils.mkdir(new_dir)
            os.rename(old, new)
            view = listener.get_view(data['id'])
            if view:
                view.retarget(new)
        elif name == 'delete_buf':
            path = utils.get_full_path(data['path'])
            utils.rm(path)
            listener.delete_buf(data['id'])
        elif name == 'room_info':
            # Success! Reset counter
            self.retries = G.MAX_RETRIES
            self.room_info = data
            G.PERMS = data['perms']
            if 'patch' not in data['perms']:
                msg.log('We don\'t have patch permission. Setting buffers to read-only')
            # Write a .sublime-project so the workspace shows up as a project.
            project_json = {
                'folders': [
                    {'path': G.PROJECT_PATH}
                ]
            }
            utils.mkdir(G.PROJECT_PATH)
            with open(os.path.join(G.PROJECT_PATH, '.sublime-project'), 'w') as project_fd:
                project_fd.write(json.dumps(project_json, indent=4, sort_keys=True))
            # Record reconnection info for this room in a .floo file.
            floo_json = {
                'url': utils.to_room_url({
                    'host': self.host,
                    'owner': self.owner,
                    'port': self.port,
                    'room': self.room,
                    'secure': self.secure,
                })
            }
            with open(os.path.join(G.PROJECT_PATH, '.floo'), 'w') as floo_fd:
                floo_fd.write(json.dumps(floo_json, indent=4, sort_keys=True))
            # Sync every buffer in the room: keep local copies whose md5
            # matches; request full contents of any that differ or are
            # missing on disk.
            for buf_id, buf in data['bufs'].iteritems():
                buf_id = int(buf_id)  # json keys must be strings
                buf_path = utils.get_full_path(buf['path'])
                new_dir = os.path.dirname(buf_path)
                utils.mkdir(new_dir)
                listener.BUFS[buf_id] = buf
                try:
                    buf_fd = open(buf_path, 'r')
                    buf_buf = buf_fd.read().decode('utf-8')
                    md5 = hashlib.md5(buf_buf.encode('utf-8')).hexdigest()
                    if md5 == buf['md5']:
                        msg.debug('md5 sums match. not getting buffer')
                        buf['buf'] = buf_buf
                    else:
                        msg.debug('md5 for %s should be %s but is %s. getting buffer'
                                  % (buf['path'], buf['md5'], md5))
                        raise Exception('different md5')
                except Exception as e:
                    # Missing/unreadable file or md5 mismatch: fetch it.
                    msg.debug('Error calculating md5:', e)
                    Listener.get_buf(buf_id)
            self.authed = True
            G.CONNECTED = True
            msg.log('Successfully joined room %s/%s' % (self.owner, self.room))
            if self.on_connect:
                # One-shot callback: fire it, then clear it.
                self.on_connect(self)
                self.on_connect = None
        elif name == 'join':
            msg.log('%s joined the room' % data['username'])
            self.room_info['users'][data['user_id']] = data['username']
        elif name == 'part':
            msg.log('%s left the room' % data['username'])
            try:
                del self.room_info['users'][data['user_id']]
            except Exception as e:
                print('Unable to delete user %s from user list' % (data))
            # Clear the departed user's highlight regions in every view.
            region_key = 'floobits-highlight-%s' % (data['user_id'])
            for window in sublime.windows():
                for view in window.views():
                    view.erase_regions(region_key)
        elif name == 'highlight':
            region_key = 'floobits-highlight-%s' % (data['user_id'])
            Listener.highlight(data['id'], region_key, data['username'],
                               data['ranges'], data.get('ping', False))
        elif name == 'error':
            message = 'Floobits: Error! Message: %s' % str(data.get('msg'))
            msg.error(message)
        elif name == 'disconnect':
            message = 'Floobits: Disconnected! Reason: %s' % str(data.get('reason'))
            msg.error(message)
            sublime.error_message(message)
            self.stop()
        elif name == 'msg':
            self.on_msg(data)
        else:
            msg.error('unknown name!', name, 'data:', data)
        # Keep whatever followed the newline for the next iteration.
        self.buf = after
def on_delete_buf(self, data):
    """Drop a buffer from the local registry and remove its file from disk."""
    # TODO: somehow tell the user about this. maybe delete on disk too?
    del self.FLOO_BUFS[data['id']]
    target = utils.get_full_path(data['path'])
    utils.rm(target)
    who = data.get('username', 'the internet')
    msg.warn('deleted %s because %s told me to.' % (target, who))