def normalize_coordinates(tile_fnames_or_dir, output_dir, jar_file):
    """Normalize tilespec coordinates via the NormalizeCoordinates java tool.

    tile_fnames_or_dir -- iterable of tilespec .json files and/or directories
                          (directories are globbed for *.json)
    output_dir         -- directory the java tool writes normalized specs to;
                          also receives the intermediate all_files.txt listing
    jar_file           -- path to the alignment jar placed on the classpath
    """
    # Expand the mixed list of files/directories into a flat file list.
    all_files = []
    for file_or_dir in tile_fnames_or_dir:
        if not os.path.exists(file_or_dir):
            print("{0} does not exist (file/directory), skipping".format(file_or_dir))
            continue
        if os.path.isdir(file_or_dir):
            actual_dir_files = glob.glob(os.path.join(file_or_dir, '*.json'))
            all_files.extend(actual_dir_files)
        else:
            all_files.append(file_or_dir)
    if len(all_files) == 0:
        print "No files for normalization found. Exiting."
        return
    # NOTE(review): this prints the whole file list, not a count — confirm
    # whether len(all_files) was intended.
    print "Normalizing coordinates of {0} files".format(all_files)
    # The java tool consumes a text file listing one file:// URL per line
    # (avoids overly long command lines).
    files_urls = []
    for file_name in all_files:
        tiles_url = utils.path2url(file_name)
        files_urls.append(tiles_url)
    list_file = os.path.join(output_dir, "all_files.txt")
    print "list_file", list_file
    utils.write_list_to_file(list_file, files_urls)
    list_file_url = utils.path2url(list_file)
    java_cmd = 'java -Xmx3g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.NormalizeCoordinates --targetDir {1} {2}'.format(
        jar_file, output_dir, list_file_url)
    utils.execute_shell_command(java_cmd)
def register_defaults(self, project_paths=None):
    """Register the default GES playback and rendering tests.

    Collects .xges project files either from self.args (explicit paths/URIs)
    or by walking project_paths (falling back to self.options.projects_paths),
    then registers one playback test per applicable scenario and one render
    test per encoding-target combination.

    project_paths -- optional directory to scan instead of the configured one
    """
    projects = list()
    if not self.args:
        # Idiom fix: was "project_paths == None"; PEP 8 mandates "is None".
        if project_paths is None:
            path = self.options.projects_paths
        else:
            path = project_paths
        for root, dirs, files in os.walk(path):
            for f in files:
                if not f.endswith(".xges"):
                    continue
                projects.append(utils.path2url(os.path.join(path, root, f)))
    else:
        for proj_uri in self.args:
            if not utils.isuri(proj_uri):
                proj_uri = utils.path2url(proj_uri)
            # NOTE(review): os.path.exists on a file:// URI is False, so
            # arguments already given as URIs are dropped here — confirm
            # whether that is intended.
            if os.path.exists(proj_uri):
                projects.append(proj_uri)
    # Long runs get the heavier scrub scenarios; short runs get bounded ones.
    if self.options.long_limit != 0:
        scenarios = ["none", "scrub_forward_seeking", "scrub_backward_seeking"]
    else:
        scenarios = ["play_15s", "scrub_forward_seeking_full", "scrub_backward_seeking_full"]
    for proj_uri in projects:
        # First, playback cases
        project = XgesProjectDescriptor(proj_uri)
        for scenario_name in scenarios:
            scenario = self._scenarios.get_scenario(scenario_name)
            if scenario is None:
                continue
            # Skip scenarios that need more media than the project provides.
            if scenario.get_min_media_duration() >= (project.get_duration() / utils.GST_SECOND):
                continue
            classname = "ges.playback.%s.%s" % (scenario.name,
                                                os.path.basename(proj_uri).replace(".xges", ""))
            self.add_test(GESPlaybackTest(classname,
                                          self.options,
                                          self.reporter,
                                          project,
                                          scenario=scenario)
                          )
        # And now, rendering cases
        for comb in GES_ENCODING_TARGET_COMBINATIONS:
            classname = "ges.render.%s.%s" % (str(comb).replace(' ', '_'),
                                              os.path.splitext(os.path.basename(proj_uri))[0])
            self.add_test(GESRenderTest(classname, self.options,
                                        self.reporter, project,
                                        combination=comb)
                          )
def optimize_elastic_transform(correspondence_file, tilespec_file, fixed_tiles, output_file, jar_file, conf_fname=None):
    """Run the OptimizeMontageElastic java tool on one tilespec.

    correspondence_file -- json file with point correspondences
    tilespec_file       -- json tilespec to optimize
    fixed_tiles         -- iterable of tile indices to keep fixed, or None
    output_file         -- path passed as --targetPath
    jar_file            -- alignment jar for the java classpath
    conf_fname          -- optional config file; its 'OptimizeMontageElastic'
                           section is turned into extra CLI arguments
    """
    corr_url = utils.path2url(correspondence_file)
    tiles_url = utils.path2url(tilespec_file)
    conf_args = utils.conf_args_from_file(conf_fname, 'OptimizeMontageElastic')
    fixed_str = ""
    # Idiom fix: was "fixed_tiles != None"; PEP 8 mandates "is not None".
    if fixed_tiles is not None:
        fixed_str = "--fixedTiles {0}".format(" ".join(map(str, fixed_tiles)))
    java_cmd = 'java -Xmx10g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.OptimizeMontageElastic {1} --corrfile {2} --tilespecfile {3} {4} --targetPath {5}'.format(
        jar_file, conf_args, corr_url, tiles_url, fixed_str, output_file)
    utils.execute_shell_command(java_cmd)
def list_tests(self):
    """Build (once) and return the list of GES tests.

    Returns the cached self.tests when already populated; otherwise collects
    .xges projects and registers playback tests (one per scenario) plus
    rendering tests (one per encoding combination) for each project.
    """
    if self.tests:
        return self.tests
    projects = list()
    if not self.args:
        path = self.options.projects_paths
        for root, dirs, files in os.walk(path):
            for f in files:
                if not f.endswith(".xges"):
                    continue
                projects.append(utils.path2url(os.path.join(path, root, f)))
    else:
        for proj in self.args:
            if not utils.isuri(proj):
                proj = utils.path2url(proj)
            # NOTE(review): os.path.exists on a file:// URI is False, so
            # args passed as URIs are never appended — confirm intent.
            if os.path.exists(proj):
                projects.append(proj)
    SCENARIOS = ["play_15s",
                 "seek_forward",
                 "seek_backward",
                 "scrub_forward_seeking"]
    for proj in projects:
        # First, playback cases
        for scenario_name in SCENARIOS:
            scenario = self._scenarios.get_scenario(scenario_name)
            if scenario is None:
                continue
            classname = "ges.playback.%s.%s" % (scenario.name,
                                                os.path.basename(proj).replace(".xges", ""))
            self.add_test(GESPlaybackTest(classname,
                                          self.options,
                                          self.reporter,
                                          proj,
                                          scenario=scenario)
                          )
        # And now, rendering cases
        for comb in GES_ENCODING_TARGET_COMBINATIONS:
            classname = "ges.render.%s.%s" % (str(comb).replace(' ', '_'),
                                              os.path.splitext(os.path.basename(proj))[0])
            self.add_test(GESRenderTest(classname, self.options,
                                        self.reporter, proj,
                                        combination=comb)
                          )
    return self.tests
def render_tiles_2d(tiles_fname, output_dir, tile_size, output_type, jar_file, output_pattern=None, blend_type=None, threads_num=None):
    """Render a 2D tilespec into image tiles with the RenderTiles java tool.

    Optional flags (--threads, --blendType, --outputNamePattern) are added to
    the command line only when their corresponding argument is not None.
    """
    tiles_url = utils.path2url(tiles_fname)
    # Each optional flag collapses to "" when its value was not supplied.
    threads_str = "--threads {}".format(threads_num) if threads_num is not None else ""
    blend_str = "--blendType {}".format(blend_type) if blend_type is not None else ""
    pattern_str = "--outputNamePattern {}".format(output_pattern) if output_pattern is not None else ""
    java_cmd = 'java -Xmx32g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.RenderTiles --targetDir {1} --tileSize {2} \
        {3} {4} --outputType {5} {6} --url {7}' .format(
        jar_file, output_dir, tile_size, threads_str, blend_str, output_type, pattern_str, tiles_url)
    utils.execute_shell_command(java_cmd)
def create_meshes(tiles_fname, output_dir, jar_file, threads_num=4): tiles_url = utils.path2url(os.path.abspath(tiles_fname)) # Compute the Sift features `for each tile in the tile spec file java_cmd = 'java -Xmx32g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.SaveMesh --targetDir {1} --threads {2} --inputfile {3}'.format(\ jar_file, output_dir, threads_num, tiles_url) utils.execute_shell_command(java_cmd)
def quote_uri(uri): """Encodes a URI/path according to RFC 2396.""" # Split off the "file:///" part, if present. parts = urlparse.urlsplit(uri, allow_fragments=False) # Make absolutely sure the string is unquoted before quoting again! raw_path = unquote(parts.path) return utils.path2url(raw_path)
def block_matching_test_params(tile_files, jar_file, conf=None, threads_num=4):
    """Run the TestBlockMatchingParameters java tool over tilespec files.

    tile_files  -- iterable of tilespec json files (converted to file:// URLs)
    jar_file    -- alignment jar for the java classpath
    conf        -- extra CLI arguments appended verbatim, or None for none
    threads_num -- worker threads for the java tool (default 4)

    Fix: previously a None conf was formatted straight into the command line,
    producing a literal "None" argument; it now collapses to an empty string.
    """
    conf_str = "" if conf is None else conf
    java_cmd = 'java -Xmx16g -XX:ParallelGCThreads=1 -cp "{0}" org.janelia.alignment.TestBlockMatchingParameters --tilespecFiles {1} \
        --threads {2} {3}'.format(
        jar_file,
        " ".join(utils.path2url(f) for f in tile_files),
        threads_num,
        conf_str)
    utils.execute_shell_command(java_cmd)
def match_multiple_sift_features(tiles_file, features_file, index_pairs, jar, out_fname, conf_args):
    """Match SIFT features for several tile index pairs in one java run.

    index_pairs -- iterable of (a, b) tile-index tuples; each becomes a
                   separate "--indices a:b" argument.
    """
    tiles_url = utils.path2url(os.path.abspath(tiles_file))
    indices_str = " ".join("--indices {}:{}".format(a, b) for a, b in index_pairs)
    java_cmd = 'java -Xmx4g -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.MatchSiftFeatures --tilespecfile {1} --featurefile {2} {3} --targetPath {4} {5}'.format(
        jar, tiles_url, features_file, indices_str, out_fname, conf_args)
    utils.execute_shell_command(java_cmd)
def match_layers_sift_features(tiles_file1, features_file1, tiles_file2, features_file2, out_fname, jar_file, conf=None, threads_num=4):
    """Match SIFT features between two layers via MatchLayersSiftFeatures.

    When matching layers, no need to take the bounding box into account.
    """
    conf_args = utils.conf_args_from_file(conf, 'MatchSiftFeatures')
    ts1_url = utils.path2url(tiles_file1)
    feat1_url = utils.path2url(features_file1)
    ts2_url = utils.path2url(tiles_file2)
    feat2_url = utils.path2url(features_file2)
    java_cmd = 'java -Xmx3g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.MatchLayersSiftFeatures --threads {1} --tilespec1 {2} \
        --featurefile1 {3} --tilespec2 {4} --featurefile2 {5} --targetPath {6} {7}'.format(
        jar_file, threads_num, ts1_url, feat1_url, ts2_url, feat2_url, out_fname, conf_args)
    utils.execute_shell_command(java_cmd)
def filter_ransac(corr_file, compared_url, out_fname, jar_file, conf=None):
    """Filter layer correspondences with the FilterRansac java tool.

    When matching layers, no need to take the bounding box into account.
    """
    conf_args = utils.conf_args_from_file(conf, 'FilterRansac')
    corr_url = utils.path2url(corr_file)
    java_cmd = 'java -Xmx3g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.FilterRansac --inputfile {1} --comparedUrl {2} \
        --targetPath {3} {4}'.format(jar_file, corr_url, compared_url, out_fname, conf_args)
    utils.execute_shell_command(java_cmd)
def quote_uri(uri): """ Encode a URI/path according to RFC 2396, without touching the file:/// part. """ # Split off the "file:///" part, if present. parts = urlparse.urlsplit(uri, allow_fragments=False) # Make absolutely sure the string is unquoted before quoting again! raw_path = unquote(parts.path) return utils.path2url(raw_path)
def _set_rendering_info(self):
    """Compute the render destination file/URL and add the CLI arguments.

    Derives the destination path from the test classname (dots become path
    separators under options.dest), creates the parent directory, converts
    the destination to a URL when needed, and appends "-f PROFILE -o DEST".
    """
    # NOTE(review): the "path" alias is assigned but never used afterwards.
    self.dest_file = path = os.path.join(
        self.options.dest,
        self.classname.replace(".render.", os.sep).replace(".", os.sep))
    utils.mkdir(os.path.dirname(urlparse.urlsplit(self.dest_file).path))
    if not utils.isuri(self.dest_file):
        self.dest_file = utils.path2url(self.dest_file)
    profile = self.get_profile(video_restriction="video/x-raw,format=I420")
    self.add_arguments("-f", profile, "-o", self.dest_file)
def _set_rendering_info(self):
    """Compute the render destination and register the "-f"/"-o" arguments.

    The destination is options.dest joined with the classname, where the
    ".render." segment and remaining dots become path separators.
    """
    # NOTE(review): the "path" alias is assigned but never used afterwards.
    self.dest_file = path = os.path.join(self.options.dest,
                                         self.classname.replace(".render.", os.sep).
                                         replace(".", os.sep))
    utils.mkdir(os.path.dirname(urlparse.urlsplit(self.dest_file).path))
    if not utils.isuri(self.dest_file):
        self.dest_file = utils.path2url(self.dest_file)
    profile = self.get_profile(video_restriction="video/x-raw,format=I420")
    self.add_arguments("-f", profile, "-o", self.dest_file)
def match_layers_by_max_pmcc(jar_file, tiles_file1, tiles_file2, models_file, image_width, image_height, fixed_layers, out_fname, meshes_dir1=None, meshes_dir2=None, conf=None, threads_num=None, auto_add_model=False):
    """Run the MatchLayersByMaxPMCC java tool between two layer tilespecs.

    fixed_layers   -- iterable of layer indices to keep fixed, or None
    meshes_dir1/2  -- optional precomputed mesh directories
    threads_num    -- worker threads; also sizes the java GC thread pool
    auto_add_model -- pass --autoAddModel when True
    conf           -- optional config file; its 'MatchLayersByMaxPMCC'
                      section becomes extra CLI arguments

    Fix: "!= None" comparisons replaced with the PEP 8 "is not None" idiom.
    """
    conf_args = utils.conf_args_from_file(conf, 'MatchLayersByMaxPMCC')
    meshes_str = ''
    if meshes_dir1 is not None:
        meshes_str += ' --meshesDir1 "{0}"'.format(meshes_dir1)
    if meshes_dir2 is not None:
        meshes_str += ' --meshesDir2 "{0}"'.format(meshes_dir2)
    fixed_str = ""
    if fixed_layers is not None:
        fixed_str = "--fixedLayers {0}".format(" ".join(map(str, fixed_layers)))
    threads_str = ""
    if threads_num is not None:
        threads_str = "--threads {0}".format(threads_num)
    auto_add_model_str = ""
    if auto_add_model:
        auto_add_model_str = "--autoAddModel"
    java_cmd = 'java -Xmx16g -XX:ParallelGCThreads={0} -Djava.awt.headless=true -cp "{1}" org.janelia.alignment.MatchLayersByMaxPMCC --inputfile1 {2} --inputfile2 {3} \
        --modelsfile1 {4} --imageWidth {5} --imageHeight {6} {7} {8} {9} --targetPath {10} {11} {12}'.format(
        utils.get_gc_threads_num(threads_num),
        jar_file,
        utils.path2url(tiles_file1),
        utils.path2url(tiles_file2),
        utils.path2url(models_file),
        int(image_width), int(image_height),
        threads_str, auto_add_model_str, meshes_str,
        out_fname, fixed_str, conf_args)
    utils.execute_shell_command(java_cmd)
def cleanup(self): """ Cleanup the options looking after user options have been parsed """ # Get absolute path for main_dir and base everything on that self.main_dir = os.path.abspath(self.main_dir) # default for output_dir is MAINDIR if not self.output_dir: self.output_dir = self.main_dir else: self.output_dir = os.path.abspath(self.output_dir) # other output directories if self.logsdir is None: self.logsdir = os.path.join(self.output_dir, "logs") if self.xunit_file is None: self.xunit_file = os.path.join(self.logsdir, "xunit.xml") if self.dest is None: self.dest = os.path.join(self.output_dir, "rendered") if not os.path.exists(self.dest): os.makedirs(self.dest) if urlparse.urlparse(self.dest).scheme == "": self.dest = path2url(self.dest) if self.no_color: utils.desactivate_colors() if self.clone_dir is None: self.clone_dir = os.path.join(self.main_dir, QA_ASSETS) if not isinstance(self.paths, list): self.paths = [self.paths] if self.generate_info_full is True: self.generate_info = True if self.sync_all is True: self.sync = True if self.update_assets_command == DEFAULT_SYNC_ASSET_COMMAND: self.update_assets_command = DEFAULT_SYNC_ALL_ASSET_COMMAND if not self.sync and not os.path.exists(self.clone_dir) and \ self.clone_dir == os.path.join(self.clone_dir, MEDIAS_FOLDER): printc("Media path (%s) does not exists. Forgot to run --sync ?" % self.clone_dir, Colors.FAIL, True) return False if (self.main_dir != DEFAULT_MAIN_DIR or self.clone_dir != QA_ASSETS) and \ self.testsuites_dir == DEFAULT_TESTSUITES_DIR: self.testsuites_dir = os.path.join(self.main_dir, self.clone_dir, "testsuites") return True
def filter_local_smoothness(corr_file, out_fname, jar_file, conf=None):
    """Filter correspondences with the FilterLocalSmoothness java tool.

    When matching layers, no need to take the bounding box into account.
    """
    conf_args = utils.conf_args_from_file(conf, 'FilterLocalSmoothness')
    corr_url = utils.path2url(corr_file)
    java_cmd = 'java -Xmx3g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.FilterLocalSmoothness --inputfile {1} \
        --targetPath {2} {3}'.format(
        jar_file, corr_url, out_fname, conf_args)
    utils.execute_shell_command(java_cmd)
def onChangeImage(self):
    """Change canvas background image"""
    image_path = self.getImage()
    # User cancelled the file picker — keep the current background.
    if not image_path:
        return
    width, height = imageProp(image_path)
    bkgd_url = path2url(image_path)
    # Push the new background and canvas size into the embedded svg-edit
    # instance via javascript evaluation.
    mw.ImgOccEdit.svg_edit.eval("""
                        svgCanvas.setBackground('#FFF', '%s');
                        svgCanvas.setResolution(%s, %s);
                        //svgCanvas.zoomChanged('', 'canvas');
                    """ %(bkgd_url, width, height))
    self.image_path = image_path
def optimize_layers_affine(tile_files, corr_files, model_files, fixed_layers, out_dir, max_layer_distance, jar_file, conf=None, skip_layers=None, threads_num=4, manual_matches=None):
    """Run the OptimizeLayersAffine java tool across a stack of layers.

    tile_files / corr_files / model_files -- parallel lists of json inputs
    fixed_layers       -- layer indices to keep fixed, or None
    skip_layers        -- layer-range string passed to --skipLayers, or None
    max_layer_distance -- passed as --maxLayersDistance
    manual_matches     -- optional iterable of --manualMatches values
    conf               -- optional config file; its 'OptimizeLayersAffine'
                          section becomes extra CLI arguments

    Fix: "!= None" comparisons replaced with the PEP 8 "is not None" idiom.
    """
    conf_args = utils.conf_args_from_file(conf, 'OptimizeLayersAffine')
    fixed_str = ""
    if fixed_layers is not None:
        fixed_str = " ".join("--fixedLayers {0}".format(str(fixed_layer)) for fixed_layer in fixed_layers)
    skip_str = ""
    if skip_layers is not None:
        skip_str = "--skipLayers {0}".format(skip_layers)
    manual_matches_str = ""
    if manual_matches is not None:
        manual_matches_str = " ".join("--manualMatches {}".format(a) for a in manual_matches)
    # Assuming that at least 4 threads will be allocated for this job, and
    # increasing the number of gc threads to 4 will make it faster.
    java_cmd = 'java -Xmx46g -XX:ParallelGCThreads={0} -Djava.awt.headless=true -cp "{1}" org.janelia.alignment.OptimizeLayersAffine --tilespecFiles {2} --corrFiles {3} \
        --modelFiles {4} {5} {6} --threads {7} --maxLayersDistance {8} {9} --targetDir {10} {11}'.format(
        utils.get_gc_threads_num(threads_num),
        jar_file,
        " ".join(utils.path2url(f) for f in tile_files),
        " ".join(utils.path2url(f) for f in corr_files),
        " ".join(utils.path2url(f) for f in model_files),
        fixed_str, manual_matches_str,
        threads_num, max_layer_distance, skip_str,
        out_dir, conf_args)
    utils.execute_shell_command(java_cmd)
def set_rendering_info(self):
    """Compute the transcode destination and register the "-o" argument.

    Derives the destination from the classname (".transcode." and remaining
    dots become path separators), makes the parent directory, URL-ifies a
    bare path, and picks a video restriction based on the media protocol.
    """
    # NOTE(review): the "path" alias is assigned but never used afterwards.
    self.dest_file = path = os.path.join(self.options.dest,
                                         self.classname.replace(".transcode.", os.sep).
                                         replace(".", os.sep))
    utils.mkdir(os.path.dirname(urlparse.urlsplit(self.dest_file).path))
    if urlparse.urlparse(self.dest_file).scheme == "":
        self.dest_file = path2url(self.dest_file)
    try:
        video_restriction = G_V_PROTOCOL_VIDEO_RESTRICTION_CAPS[self.media_descriptor.get_protocol()]
    except KeyError:
        # No protocol-specific caps restriction known — render unrestricted.
        video_restriction = None
    profile = get_profile(self.combination, video_restriction=video_restriction)
    self.add_arguments("-o", profile)
def render_2d(tiles_fname, output_fname, width, jar_file, threads_num=None):
    """Render a tilespec to a single 2D image via the Render java tool.

    width -- target image width in pixels; -1 requests --fullImage instead.
    """
    tiles_url = utils.path2url(tiles_fname)
    threads_str = "" if threads_num is None else "--threads {}".format(threads_num)
    # A width of -1 is the sentinel for "render at full resolution".
    width_str = "--fullImage" if width == -1 else "--width {}".format(width)
    java_cmd = 'java -Xmx32g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.Render --out {1} {2} \
        {3} --hide --url {4}'.format(
        jar_file, output_fname, width_str, threads_str, tiles_url)
    utils.execute_shell_command(java_cmd)
def match_single_sift_features_and_filter(tiles_file, features_file1, features_file2, jar, out_fname, index_pair, conf_fname=None):
    """Match and filter SIFT features for a single tile index pair.

    index_pair -- 2-sequence (a, b) of tile indices, passed as "--indices a:b".
    """
    tiles_url = utils.path2url(os.path.abspath(tiles_file))
    conf_args = utils.conf_args_from_file(conf_fname, 'MatchSiftFeaturesAndFilter')
    pair_str = "{}:{}".format(index_pair[0], index_pair[1])
    java_cmd = 'java -Xmx3g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.MatchSiftFeaturesAndFilter \
        --tilespecfile {1} --featurefile1 {2} --featurefile2 {3} --indices {4} --targetPath {5} {6}'.format(
        jar, tiles_url, features_file1, features_file2, pair_str, out_fname, conf_args)
    utils.execute_shell_command(java_cmd)
def create_layer_sift_features(tiles_fname, out_fname, jar_file, meshes_dir=None, conf=None, threads_num=4):
    """Compute layer-level SIFT features via ComputeLayerSiftFeatures.

    tiles_fname -- tilespec json file (converted to an absolute file:// URL)
    out_fname   -- output path passed as --targetPath
    meshes_dir  -- optional precomputed meshes directory
    conf        -- optional config file; see note below
    threads_num -- worker threads for the java tool (default 4)
    """
    meshes_str = ''
    if meshes_dir is not None:
        meshes_str = '--meshesDir "{0}"'.format(meshes_dir)
    tiles_url = utils.path2url(os.path.abspath(tiles_fname))
    # NOTE(review): the conf section read here is 'ComputeSiftFeatures'
    # although the java class invoked is ComputeLayerSiftFeatures — confirm
    # the sections are intentionally shared.
    conf_args = utils.conf_args_from_file(conf, 'ComputeSiftFeatures')
    # Compute the SIFT features for each tile in the tile spec file.
    java_cmd = 'java -Xmx11g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.ComputeLayerSiftFeatures --url {1} --targetPath {2} --threads {3} {4} {5}'.format(
        jar_file, tiles_url, out_fname, threads_num, meshes_str, conf_args)
    utils.execute_shell_command(java_cmd)
def create_sift_features(tiles_fname, out_fname, index, jar_file, conf_fname=None, threads_num=None):
    """Compute SIFT features for one tile via the ComputeSiftFeatures tool.

    tiles_fname -- tilespec json file (converted to an absolute file:// URL)
    out_fname   -- output path passed as --targetPath
    index       -- index of the tile inside the tilespec
    conf_fname  -- optional config file; its 'ComputeSiftFeatures' section
                   becomes extra CLI arguments
    threads_num -- optional worker thread count for the java tool

    Fix: "!= None" comparison replaced with the PEP 8 "is not None" idiom.
    """
    threads_str = ""
    if threads_num is not None:
        threads_str = "--threads {0}".format(threads_num)
    tiles_url = utils.path2url(os.path.abspath(tiles_fname))
    conf_args = utils.conf_args_from_file(conf_fname, 'ComputeSiftFeatures')
    # Compute the SIFT features for the requested tile of the tile spec file.
    java_cmd = 'java -Xmx5g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.ComputeSiftFeatures --index {1} \
        --url {2} --targetPath {3} {4} {5}' .format(
        jar_file, index, tiles_url, out_fname, threads_str, conf_args)
    utils.execute_shell_command(java_cmd)
def render_3d(tile_fnames_or_dir_fname, output_dir, from_layer, to_layer, scale, from_x, from_y, to_x, to_y, jar_file, threads_num=1):
    """Render a 3D sub-volume of layers with the Render3D java tool.

    tile_fnames_or_dir_fname -- list file naming the per-layer tilespecs
    from_layer/to_layer      -- inclusive layer range to render
    from_x/from_y/to_x/to_y  -- pixel bounding box of the region
    scale                    -- output downscale factor
    """
    list_file_url = utils.path2url(tile_fnames_or_dir_fname)
    cmd_template = 'java -Xmx32g -XX:ParallelGCThreads=1 -cp "{0}" org.janelia.alignment.Render3D --targetDir {1} --scale {2} \
        --threads {3} --fromLayer {4} --toLayer {5} --fromX {6} --fromY {7} --toX {8} --toY {9} --hide {10}'
    java_cmd = cmd_template.format(jar_file, output_dir, scale, threads_num,
                                   from_layer, to_layer,
                                   from_x, from_y, to_x, to_y, list_file_url)
    utils.execute_shell_command(java_cmd)
def _list_uris(self):
    """Build (once) and return the list of discovered media URIs.

    When no explicit args were given, walks every configured media path and
    feeds each plain media file (skipping directories, .media_info files and
    scenario files) to self._discover_file as a URL/path pair.
    """
    if self._uris:
        return self._uris
    if not self.args:
        # Accept a single path as well as a list of paths.
        if isinstance(self.options.paths, str):
            self.options.paths = [os.path.join(self.options.paths)]
        for path in self.options.paths:
            for root, dirs, files in os.walk(path):
                for f in files:
                    fpath = os.path.join(path, root, f)
                    # Skip metadata/scenario side files; only real media
                    # files are discovered.
                    if os.path.isdir(fpath) or \
                            fpath.endswith(G_V_MEDIA_INFO_EXT) or \
                            fpath.endswith(ScenarioManager.FILE_EXTENDION):
                        continue
                    else:
                        self._discover_file(path2url(fpath), fpath)
    self.debug("Uris found: %s", self._uris)
    return self._uris
def match_multiple_pmcc(tiles_file, index_pairs, fixed_tiles, jar, out_fname, conf_fname=None, threads_num=None):
    """Run the MatchByMaxPMCC java tool for several tile index pairs.

    tiles_file  -- tilespec json file (converted to an absolute file:// URL)
    index_pairs -- iterable of (a, b) tile indices; one "--indices a:b" each
    fixed_tiles -- iterable of tile indices to keep fixed, or None
    conf_fname  -- optional config file; its 'MatchByMaxPMCC' section becomes
                   extra CLI arguments
    threads_num -- optional worker thread count for the java tool

    Fix: "!= None" comparisons replaced with the PEP 8 "is not None" idiom.
    """
    tiles_url = utils.path2url(os.path.abspath(tiles_file))
    fixed_str = ""
    if fixed_tiles is not None:
        fixed_str = "--fixedTiles {0}".format(" ".join(map(str, fixed_tiles)))
    threads_str = ""
    if threads_num is not None:
        threads_str = "--threads {0}".format(threads_num)
    conf_args = utils.conf_args_from_file(conf_fname, 'MatchByMaxPMCC')
    java_cmd = 'java -Xmx27g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.MatchByMaxPMCC --inputfile {1} {2} {3} {4} --targetPath {5} {6}'.format(
        jar, tiles_url, fixed_str,
        " ".join("--indices {}:{}".format(a, b) for a, b in index_pairs),
        threads_str, out_fname, conf_args)
    utils.execute_shell_command(java_cmd)
match_json = os.path.join( matched_sifts_dir, "{0}_{1}_sift_matches.json".format(fname1_prefix, fname2_prefix)) if not os.path.exists(match_json): print "Matching layers' sifts: {0} and {1}".format(i, i + j) match_layers_sift_features(layer_to_ts_json[i], layer_to_sifts[i], \ layer_to_ts_json[i + j], layer_to_sifts[i + j], match_json, args.jar_file, conf) all_matched_sifts_files.append(match_json) # filter and ransac the matched points ransac_fname = os.path.join( after_ransac_dir, "{0}_{1}_filter_ransac.json".format(fname1_prefix, fname2_prefix)) if not os.path.exists(ransac_fname): print "Filter-and-Ransac of layers: {0} and {1}".format(i, i + j) filter_ransac(match_json, path2url(layer_to_ts_json[i]), ransac_fname, args.jar_file, conf) all_model_files.append(ransac_fname) j += 1 matched_after_layers += 1 # Optimize all layers to a single 3d image all_ts_files = layer_to_ts_json.values() create_dir(args.output_dir) ts_list_file = os.path.join(args.workspace_dir, "all_ts_files.txt") write_list_to_file(ts_list_file, all_ts_files) matched_sifts_list_file = os.path.join(args.workspace_dir, "all_matched_sifts_files.txt") write_list_to_file(matched_sifts_list_file, all_matched_sifts_files)
def update_bounding_box(tiles_fname, output_dir, jar_file, threads_num=1):
    """Recompute tilespec bounding boxes via the UpdateBoundingBox java tool.

    tiles_fname -- tilespec json file (converted to a file:// URL)
    output_dir  -- directory passed as --targetDir
    threads_num -- worker threads for the java tool (default 1)
    """
    cmd = 'java -Xmx9g -XX:ParallelGCThreads=1 -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.UpdateBoundingBox --threads {1} --targetDir {2} {3}'.format(
        jar_file, threads_num, output_dir, utils.path2url(tiles_fname))
    utils.execute_shell_command(cmd)
def callImgOccEdit(self):
    """Set up variables, call and prepare ImgOccEdit"""
    width, height = imageProp(self.image_path)
    if not width:
        tooltip("Not a valid image file.")
        return False
    # Styling defaults from the stored configuration.
    ofill = self.sconf['ofill']
    scol = self.sconf['scol']
    swidth = self.sconf['swidth']
    fsize = self.sconf['fsize']
    font = self.sconf['font']
    bkgd_url = path2url(self.image_path)
    opref = self.opref
    onote = self.ed.note
    mode = self.mode
    flds = self.mflds
    deck = mw.col.decks.nameOrNone(opref["did"])
    try:
        # The bare attribute access raises AttributeError when no dialog
        # exists yet; otherwise reuse the existing instance.
        mw.ImgOccEdit is not None
        mw.ImgOccEdit.resetWindow()
        # use existing IO instance when available
    except AttributeError:
        mw.ImgOccEdit = ImgOccEdit(mw)
        mw.ImgOccEdit.setupFields(flds)
        logging.debug("Launching new ImgOccEdit instance")
    dialog = mw.ImgOccEdit
    dialog.switchToMode(self.mode)
    # Build the svg-edit URL with all initialization query parameters.
    url = QUrl.fromLocalFile(svg_edit_path)
    url.setQueryItems(svg_edit_queryitems)
    url.addQueryItem('initFill[color]', ofill)
    url.addQueryItem('dimensions', '{0},{1}'.format(width, height))
    url.addQueryItem('bkgd_url', bkgd_url)
    url.addQueryItem('initStroke[color]', scol)
    url.addQueryItem('initStroke[width]', str(swidth))
    url.addQueryItem('text[font_size]', str(fsize))
    url.addQueryItem('text[font_family]', "'%s', %s" % (font, svg_edit_fonts))
    if mode != "add":
        # Editing an existing note: preload its fields and mask.
        url.addQueryItem('initTool', 'select'),
        for i in flds:
            fn = i["name"]
            if fn in self.ioflds_priv:
                continue
            dialog.tedit[fn].setPlainText(onote[fn].replace(
                '<br />', '\n'))
        svg_url = path2url(opref["omask"])
        url.addQueryItem('url', svg_url)
    else:
        url.addQueryItem('initTool', 'rect'),
    dialog.svg_edit.setUrl(url)
    dialog.deckChooser.deck.setText(deck)
    dialog.tags_edit.setCol(mw.col)
    dialog.tags_edit.setText(opref["tags"])
    # Preserve the configured fields from the original note.
    for i in self.ioflds_prsv:
        if i in onote:
            dialog.tedit[i].setPlainText(onote[i])
    dialog.visible = True
    if mode == "add":
        dialog.show()
    else:
        # modal dialog when editing
        dialog.exec_()
def cleanup(self):
    """ Cleanup the options looking after user options have been parsed """
    # Get absolute path for main_dir and base everything on that
    self.main_dir = os.path.abspath(self.main_dir)
    # default for output_dir is MAINDIR
    if not self.output_dir:
        self.output_dir = self.main_dir
    else:
        self.output_dir = os.path.abspath(self.output_dir)
    # Running under gdb forces serial, debug-friendly settings and requires
    # gdb to actually be installed.
    if self.gdb:
        self.logsdir = "stdout"
        self.debug = True
        self.num_jobs = 1
        try:
            subprocess.check_output("gdb --help", shell=True)
        except subprocess.CalledProcessError:
            printc("Want to use gdb, but not avalaible on the system",
                   Colors.FAIL)
            return False
    # other output directories
    if self.logsdir in ['stdout', 'stderr']:
        # Allow -l stdout/stderr to work like -rl stdout/stderr
        self.redirect_logs = self.logsdir
        self.logsdir = None
    if self.logsdir is None:
        self.logsdir = os.path.join(self.output_dir, "logs")
    if self.xunit_file is None:
        self.xunit_file = os.path.join(self.logsdir, "xunit.xml")
    if self.dest is None:
        self.dest = os.path.join(self.output_dir, "rendered")
    if not os.path.exists(self.dest):
        os.makedirs(self.dest)
    if not os.path.exists(self.logsdir):
        os.makedirs(self.logsdir)
    if self.redirect_logs not in ['stdout', 'stderr', False]:
        printc("Log redirection (%s) must be either 'stdout' or 'stderr'."
               % self.redirect_logs, Colors.FAIL, True)
        return False
    # Normalize the render destination to a URL when it is a bare path.
    if urlparse.urlparse(self.dest).scheme == "":
        self.dest = path2url(self.dest)
    if self.no_color:
        utils.desactivate_colors()
    if self.clone_dir is None:
        self.clone_dir = os.path.join(self.main_dir, QA_ASSETS)
    if not isinstance(self.paths, list):
        self.paths = [self.paths]
    if self.generate_info_full is True:
        self.generate_info = True
    if self.sync_all is True or self.force_sync is True:
        self.sync = True
    # NOTE(review): "self.clone_dir == os.path.join(self.clone_dir,
    # MEDIAS_FOLDER)" can never be true — confirm the intended attribute.
    if not self.sync and not os.path.exists(self.clone_dir) and \
            self.clone_dir == os.path.join(self.clone_dir, MEDIAS_FOLDER):
        printc("Media path (%s) does not exists. Forgot to run --sync ?"
               % self.clone_dir, Colors.FAIL, True)
        return False
    if (self.main_dir != DEFAULT_MAIN_DIR or self.clone_dir != QA_ASSETS) and \
            self.testsuites_dir == DEFAULT_TESTSUITES_DIR:
        self.testsuites_dir = os.path.join(self.main_dir, self.clone_dir, "testsuites")
    # Valgrind runs also require the binary to be present on the system.
    if self.valgrind:
        try:
            subprocess.check_output("valgrind --help", shell=True)
        except subprocess.CalledProcessError:
            printc("Want to use valgrind, but not avalaible on the system",
                   Colors.FAIL)
            return False
    return True
def call_java_debug_match_pmcc(input_file, out_dir, jar_file):
    """Dump debug output for a correspondence file via DebugCorrespondence.

    input_file -- correspondence json file (converted to an absolute URL)
    out_dir    -- directory passed as --targetDir
    jar_file   -- alignment jar for the java classpath
    """
    input_url = utils.path2url(os.path.abspath(input_file))
    cmd = 'java -Xmx4g -Djava.awt.headless=true -cp "{0}" org.janelia.alignment.DebugCorrespondence --inputfile {1} --targetDir {2}'.format(
        jar_file, input_url, out_dir)
    utils.execute_shell_command(cmd)
layers_data[si]['matched_sifts'][sij] = match_json all_matched_sifts_files.append(match_json) # filter and ransac the matched points ransac_fname = os.path.join(after_ransac_dir, "{0}_{1}_filter_ransac.json".format(fname1_prefix, fname2_prefix)) if not os.path.exists(ransac_fname): print "Filter-and-Ransac of layers: {0} and {1}".format(i, i + j) dependencies = [ ] if job_match != None: dependencies.append(job_match) for dep in jobs[si]['sifts']: dependencies.append(dep) for dep in jobs[sij]['sifts']: dependencies.append(dep) job_ransac = FilterRansac(dependencies, path2url(layers_data[si]['ts']), layers_data[si]['matched_sifts'][sij], ransac_fname, \ args.jar_file, conf_fname=args.conf_file_name) #filter_ransac(match_json, path2url(layer_to_ts_json[i]), ransac_fname, args.jar_file, conf) all_running_jobs.append(job_ransac) filter_ransac_jobs.append(job_ransac) layers_data[si]['ransac'][sij] = ransac_fname all_model_files.append(ransac_fname) j += 1 matched_after_layers += 1 # Create a single file that lists all tilespecs and a single file that lists all pmcc matches (the os doesn't support a very long list) ts_list_file = os.path.join(args.workspace_dir, "all_ts_files.txt")
def register_defaults(self, project_paths=None):
    """Register the default GES playback and rendering tests.

    Same contract as the unformatted variant above: collects .xges projects
    from self.args or by walking project_paths / the configured path, then
    registers playback tests per scenario and render tests per combination.
    """
    projects = list()
    if not self.args:
        # NOTE(review): prefer "project_paths is None" (PEP 8).
        if project_paths == None:
            path = self.options.projects_paths
        else:
            path = project_paths
        for root, dirs, files in os.walk(path):
            for f in files:
                if not f.endswith(".xges"):
                    continue
                projects.append(utils.path2url(os.path.join(path, root, f)))
    else:
        for proj_uri in self.args:
            if not utils.isuri(proj_uri):
                proj_uri = utils.path2url(proj_uri)
            # NOTE(review): os.path.exists on a file:// URI is False, so
            # args already given as URIs are dropped — confirm intent.
            if os.path.exists(proj_uri):
                projects.append(proj_uri)
    # Long runs get the heavier scrub scenarios; short runs bounded ones.
    if self.options.long_limit != 0:
        scenarios = [
            "none", "scrub_forward_seeking", "scrub_backward_seeking"
        ]
    else:
        scenarios = [
            "play_15s", "scrub_forward_seeking_full",
            "scrub_backward_seeking_full"
        ]
    for proj_uri in projects:
        # First, playback cases
        project = XgesProjectDescriptor(proj_uri)
        for scenario_name in scenarios:
            scenario = self._scenarios.get_scenario(scenario_name)
            if scenario is None:
                continue
            # Skip scenarios needing more media than the project provides.
            if scenario.get_min_media_duration() >= (
                    project.get_duration() / utils.GST_SECOND):
                continue
            classname = "ges.playback.%s.%s" % (
                scenario.name, os.path.basename(proj_uri).replace(
                    ".xges", ""))
            self.add_test(
                GESPlaybackTest(classname,
                                self.options,
                                self.reporter,
                                project,
                                scenario=scenario))
        # And now, rendering cases
        for comb in GES_ENCODING_TARGET_COMBINATIONS:
            classname = "ges.render.%s.%s" % (str(comb).replace(
                ' ', '_'), os.path.splitext(os.path.basename(proj_uri))[0])
            self.add_test(
                GESRenderTest(classname,
                              self.options,
                              self.reporter,
                              project,
                              combination=comb))
def main():
    """Entry point of gst-validate-launcher: parse options, resolve default
    directories, optionally sync assets / start the HTTP server, then run
    the tests and emit the final report.

    Returns 0 on success, -1 on configuration errors; re-raises any
    exception raised while running tests after stopping the HTTP server.
    """
    parser = argparse.ArgumentParser(formatter_class=Formatter,
                                     prog='gst-validate-launcher',
                                     description=HELP)
    parser.add_argument("-d", "--debug", dest="debug",
                        action="store_true",
                        default=False,
                        help="Let user debug the process on timeout")
    parser.add_argument("-f", "--forever", dest="forever",
                        action="store_true", default=False,
                        help="Keep running tests until one fails")
    parser.add_argument("-F", "--fatal-error", dest="fatal_error",
                        action="store_true", default=False,
                        help="Stop on first fail")
    parser.add_argument("-t", "--wanted-tests", dest="wanted_tests",
                        default=[],
                        action="append",
                        help="Define the tests to execute, it can be a regex"
                        " if it contains defaults_only, only default scenarios"
                        " will be executed")
    parser.add_argument("-b", "--blacklisted-tests", dest="blacklisted_tests",
                        default=[],
                        action="append",
                        help="Define the tests not to execute, it can be a regex.")
    parser.add_argument("-L", "--list-tests", dest="list_tests",
                        action="store_true", default=False,
                        help="List tests and exit")
    parser.add_argument("-m", "--mute", dest="mute",
                        action="store_true", default=False,
                        help="Mute playback output, which mean that we use "
                        "a fakesink")
    parser.add_argument("-n", "--no-color", dest="no_color",
                        action="store_true", default=False,
                        help="Set it to output no colored text in the terminal")
    parser.add_argument("-g", "--generate-media-info", dest="generate_info",
                        action="store_true", default=False,
                        help="Set it in order to generate the missing .media_infos files")
    parser.add_argument("-lt", "--long-test-limit", dest="long_limit",
                        default=utils.LONG_TEST, action='store',
                        help="Defines the limite from which a test is concidered as long (is seconds)"),
    dir_group = parser.add_argument_group(
        "Directories and files to be used by the launcher")
    parser.add_argument('--xunit-file', action='store',
                        dest='xunit_file', metavar="FILE",
                        default=None,
                        help=("Path to xml file to store the xunit report in. "
                              "Default is LOGSDIR/xunit.xml"))
    dir_group.add_argument("-M", "--main-dir", dest="main_dir",
                           default=DEFAULT_MAIN_DIR,
                           help="Main directory where to put files. Default is %s" % DEFAULT_MAIN_DIR)
    dir_group.add_argument("-o", "--output-dir", dest="output_dir",
                           default=None,
                           help="Directory where to store logs and rendered files. Default is MAIN_DIR")
    dir_group.add_argument("-l", "--logs-dir", dest="logsdir",
                           default=None,
                           help="Directory where to store logs, default is OUTPUT_DIR/logs")
    dir_group.add_argument("-R", "--render-path", dest="dest",
                           default=None,
                           help="Set the path to which projects should be rendered, default is OUTPUT_DIR/rendered")
    dir_group.add_argument("-p", "--medias-paths", dest="paths", action="append",
                           default=None,
                           help="Paths in which to look for media files, default is MAIN_DIR/gst-qa-assets/media")
    dir_group.add_argument("-a", "--clone-dir", dest="clone_dir",
                           default=None,
                           help="Paths in which to look for media files, default is MAIN_DIR/gst-qa-assets")
    http_server_group = parser.add_argument_group(
        "Handle the HTTP server to be created")
    http_server_group.add_argument("--http-server-port", dest="http_server_port",
                                   default=8079,
                                   help="Port on which to run the http server on localhost")
    http_server_group.add_argument("-s", "--folder-for-http-server", dest="http_server_dir",
                                   default=None,
                                   help="Folder in which to create an http server on localhost. Default is PATHS")
    http_server_group.add_argument("--http-only", dest="httponly",
                                   default=False, action='store_true',
                                   help="Start the http server and quit")
    assets_group = parser.add_argument_group("Handle remote assets")
    assets_group.add_argument("-u", "--update-assets-command", dest="update_assets_command",
                              default="git fetch %s && git checkout FETCH_HEAD && git annex get ."
                              % (DEFAULT_GST_QA_ASSETS_REPO, ),
                              help="Command to update assets")
    assets_group.add_argument("--get-assets-command", dest="get_assets_command",
                              default="git clone",
                              help="Command to get assets")
    assets_group.add_argument("--remote-assets-url", dest="remote_assets_url",
                              default=DEFAULT_GST_QA_ASSETS_REPO,
                              help="Url to the remote assets (default:%s)" % DEFAULT_GST_QA_ASSETS_REPO)
    assets_group.add_argument("-S", "--sync", dest="sync", action="store_true",
                              default=False, help="Synchronize asset repository")
    assets_group.add_argument("--usage", dest="sync", action=PrintUsage,
                              help="Print usage documentation")
    loggable.init("GST_VALIDATE_LAUNCHER_DEBUG", True, False)
    tests_launcher = _TestsLauncher()
    tests_launcher.add_options(parser)
    (options, args) = parser.parse_known_args()
    # Get absolute path for main_dir and base everything on that
    options.main_dir = os.path.abspath(options.main_dir)
    # default for output_dir is MAINDIR
    if not options.output_dir:
        options.output_dir = options.main_dir
    else:
        options.output_dir = os.path.abspath(options.output_dir)
    # other output directories
    if options.logsdir is None:
        options.logsdir = os.path.join(options.output_dir, "logs")
    if options.xunit_file is None:
        options.xunit_file = os.path.join(options.logsdir, "xunit.xml")
    if options.dest is None:
        options.dest = os.path.join(options.output_dir, "rendered")
    if not os.path.exists(options.dest):
        os.makedirs(options.dest)
    # Normalize the render destination to a URL when it is a bare path.
    if urlparse.urlparse(options.dest).scheme == "":
        options.dest = path2url(options.dest)
    if options.no_color:
        utils.desactivate_colors()
    if options.clone_dir is None:
        options.clone_dir = os.path.join(options.main_dir, QA_ASSETS)
    if options.paths is None:
        options.paths = os.path.join(options.clone_dir, MEDIAS_FOLDER)
    if options.http_server_dir is None:
        options.http_server_dir = options.paths
    # NOTE(review): "options.clone_dir == os.path.join(options.clone_dir,
    # MEDIAS_FOLDER)" can never be true — confirm the intended comparison.
    if not options.sync and not os.path.exists(options.clone_dir) and \
            options.clone_dir == os.path.join(options.clone_dir, MEDIAS_FOLDER):
        printc("Media path (%s) does not exists. Forgot to run --sync ?"
               % options.clone_dir, Colors.FAIL, True)
        return -1
    # Fold the hardcoded blacklist into the user-specified one.
    blacklisted = tests_launcher.get_blacklisted()
    if blacklisted:
        msg = "Currently 'hardcoded' blacklisted tests:\n"
        for name, bug in blacklisted:
            options.blacklisted_tests.append(name)
            msg += "  + %s \n   --> bug: %s\n\n" % (name, bug)
        printc(msg, Colors.FAIL, True)
    tests_launcher.set_settings(options, args)
    # Synchronize the remote asset repository when requested.
    if options.remote_assets_url and options.sync:
        if os.path.exists(options.clone_dir):
            launch_command("cd %s && %s" % (options.clone_dir,
                                            options.update_assets_command),
                           fails=True)
        else:
            launch_command("%s %s %s" % (options.get_assets_command,
                                         options.remote_assets_url,
                                         options.clone_dir),
                           fails=True)
            launch_command("cd %s && %s" % (options.clone_dir,
                                            options.update_assets_command),
                           fails=True)
    # Ensure that the scenario manager singleton is ready to be used
    ScenarioManager().config = options
    tests_launcher.list_tests()
    if options.list_tests:
        l = tests_launcher.tests
        l.sort()
        for test in l:
            printc(test)
        printc("\nNumber of tests: %d" % len (l), Colors.OKGREEN)
        return 0
    httpsrv = HTTPServer(options)
    if tests_launcher.needs_http_server() or options.httponly is True:
        httpsrv.start()
        if options.httponly is True:
            print "Running HTTP server only"
            return
    e = None
    try:
        tests_launcher.run_tests()
        tests_launcher.final_report()
    except Exception as e:
        pass
    finally:
        # Always stop the HTTP server, then propagate any test failure.
        httpsrv.stop()
    if e is not None:
        raise
    return 0
def cleanup(self):
    """Normalize and validate parsed user options.

    Resolves all paths to absolute form, fills in defaults derived from
    main_dir/output_dir, creates output directories, and validates the
    log-redirection setting.

    Returns:
        bool: True when the options are usable, False on a fatal
        configuration error (a message is printed via printc).
    """
    # Get absolute path for main_dir and base everything on that
    self.main_dir = os.path.abspath(self.main_dir)

    # default for output_dir is MAINDIR
    if not self.output_dir:
        self.output_dir = self.main_dir
    else:
        self.output_dir = os.path.abspath(self.output_dir)

    # other output directories
    if self.logsdir in ['stdout', 'stderr']:
        # Allow -l stdout/stderr to work like -rl stdout/stderr
        self.redirect_logs = self.logsdir
        self.logsdir = None
    if self.logsdir is None:
        self.logsdir = os.path.join(self.output_dir, "logs")
    if self.xunit_file is None:
        self.xunit_file = os.path.join(self.logsdir, "xunit.xml")
    if self.dest is None:
        self.dest = os.path.join(self.output_dir, "rendered")
    if not os.path.exists(self.dest):
        os.makedirs(self.dest)
    if not os.path.exists(self.logsdir):
        os.makedirs(self.logsdir)

    # `x not in y` is the idiomatic (and equivalent) form of `not x in y`.
    if self.redirect_logs not in ['stdout', 'stderr', False]:
        printc("Log redirection (%s) must be either 'stdout' or 'stderr'."
               % self.redirect_logs, Colors.FAIL, True)
        return False

    # Turn a bare filesystem path into a file:// URL; leave real URLs alone.
    if urlparse.urlparse(self.dest).scheme == "":
        self.dest = path2url(self.dest)

    if self.no_color:
        utils.desactivate_colors()
    if self.clone_dir is None:
        self.clone_dir = os.path.join(self.main_dir, QA_ASSETS)
    if not isinstance(self.paths, list):
        self.paths = [self.paths]

    if self.generate_info_full is True:
        self.generate_info = True

    if self.sync_all is True:
        self.sync = True
        if self.update_assets_command == DEFAULT_SYNC_ASSET_COMMAND:
            self.update_assets_command = DEFAULT_SYNC_ALL_ASSET_COMMAND

    # NOTE(review): `self.clone_dir == os.path.join(self.clone_dir,
    # MEDIAS_FOLDER)` can only be true when MEDIAS_FOLDER is empty, so this
    # guard is effectively dead; the intent was probably to compare against
    # the media path. Kept as-is to preserve behavior — confirm and fix
    # upstream.
    if not self.sync and not os.path.exists(self.clone_dir) and \
            self.clone_dir == os.path.join(self.clone_dir, MEDIAS_FOLDER):
        printc("Media path (%s) does not exists. Forgot to run --sync ?"
               % self.clone_dir, Colors.FAIL, True)
        return False

    # Non-default layout: look for testsuites under the clone directory.
    if (self.main_dir != DEFAULT_MAIN_DIR or self.clone_dir != QA_ASSETS) and \
            self.testsuites_dir == DEFAULT_TESTSUITES_DIR:
        self.testsuites_dir = os.path.join(self.main_dir, self.clone_dir,
                                           "testsuites")

    return True
def cleanup(self):
    """Normalize and validate parsed user options.

    Resolves all paths to absolute form, fills in defaults derived from
    main_dir/output_dir, creates output directories, validates the
    log-redirection setting, and verifies that gdb/valgrind are present
    when their use was requested.

    Returns:
        bool: True when the options are usable, False on a fatal
        configuration error (a message is printed via printc).
    """

    def _tool_available(tool):
        # Probe the tool by running "<tool> --help" directly (no shell).
        # A missing binary raises OSError, a failing run raises
        # CalledProcessError; both mean the tool is unusable.
        try:
            subprocess.check_output([tool, "--help"])
            return True
        except (subprocess.CalledProcessError, OSError):
            return False

    # Get absolute path for main_dir and base everything on that
    self.main_dir = os.path.abspath(self.main_dir)

    # default for output_dir is MAINDIR
    if not self.output_dir:
        self.output_dir = self.main_dir
    else:
        self.output_dir = os.path.abspath(self.output_dir)

    if self.gdb:
        # gdb needs an interactive, single-process run with logs on stdout.
        self.logsdir = "stdout"
        self.debug = True
        self.num_jobs = 1
        if not _tool_available("gdb"):
            printc("Want to use gdb, but not avalaible on the system",
                   Colors.FAIL)
            return False

    # other output directories
    if self.logsdir in ['stdout', 'stderr']:
        # Allow -l stdout/stderr to work like -rl stdout/stderr
        self.redirect_logs = self.logsdir
        self.logsdir = None
    if self.logsdir is None:
        self.logsdir = os.path.join(self.output_dir, "logs")
    if self.xunit_file is None:
        self.xunit_file = os.path.join(self.logsdir, "xunit.xml")
    if self.dest is None:
        self.dest = os.path.join(self.output_dir, "rendered")
    if not os.path.exists(self.dest):
        os.makedirs(self.dest)
    if not os.path.exists(self.logsdir):
        os.makedirs(self.logsdir)

    if self.redirect_logs not in ['stdout', 'stderr', False]:
        printc(
            "Log redirection (%s) must be either 'stdout' or 'stderr'."
            % self.redirect_logs, Colors.FAIL, True)
        return False

    # Turn a bare filesystem path into a file:// URL; leave real URLs alone.
    if urlparse.urlparse(self.dest).scheme == "":
        self.dest = path2url(self.dest)

    if self.no_color:
        utils.desactivate_colors()
    if self.clone_dir is None:
        self.clone_dir = os.path.join(self.main_dir, QA_ASSETS)
    if not isinstance(self.paths, list):
        self.paths = [self.paths]

    if self.generate_info_full is True:
        self.generate_info = True

    if self.sync_all is True or self.force_sync is True:
        self.sync = True

    # NOTE(review): `self.clone_dir == os.path.join(self.clone_dir,
    # MEDIAS_FOLDER)` can only be true when MEDIAS_FOLDER is empty, so this
    # guard is effectively dead; the intent was probably to compare against
    # the media path. Kept as-is to preserve behavior — confirm and fix
    # upstream.
    if not self.sync and not os.path.exists(self.clone_dir) and \
            self.clone_dir == os.path.join(self.clone_dir, MEDIAS_FOLDER):
        printc(
            "Media path (%s) does not exists. Forgot to run --sync ?"
            % self.clone_dir, Colors.FAIL, True)
        return False

    # Non-default layout: look for testsuites under the clone directory.
    if (self.main_dir != DEFAULT_MAIN_DIR or self.clone_dir != QA_ASSETS) and \
            self.testsuites_dir == DEFAULT_TESTSUITES_DIR:
        self.testsuites_dir = os.path.join(self.main_dir, self.clone_dir,
                                           "testsuites")

    if self.valgrind:
        if not _tool_available("valgrind"):
            printc("Want to use valgrind, but not avalaible on the system",
                   Colors.FAIL)
            return False

    return True