def test_main(resource, capsys):
    in_file = resource('cat.jpg')
    main(in_file)
    stdout, _ = capsys.readouterr()
    assert re.match(r'Found label:.*cat', stdout)
def test_main(capsys):
    in_file = os.path.join(RESOURCES, 'cat.jpg')
    main(in_file)
    stdout, _ = capsys.readouterr()
    assert re.match(r'Found label:.*cat', stdout)
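# The two tests above rely on a `resource` fixture (first variant) or a RESOURCES
# directory constant (second variant) that are not shown here. A minimal conftest.py
# sketch providing both might look like this; the 'resources' directory name is an
# assumption, not taken from the original test suite.
import os
import pytest

RESOURCES = os.path.join(os.path.dirname(__file__), 'resources')

@pytest.fixture
def resource():
    # Return a helper that maps a bare filename to its path under RESOURCES.
    def _resource(filename):
        return os.path.join(RESOURCES, filename)
    return _resource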
def _check_labelling(infile, labelfile):
    ## simple check on the first data line: is the input already labelled?
    isLabelled = False
    with open(infile) as f:
        for line in f:
            info = line.strip().split('\t')[8].split(';')
            label = get_value_from_keycolonvalue_list('mirna_label', info)
            if label == '':
                isLabelled = False
            else:
                isLabelled = True
            break

    if isLabelled:
        return infile
    else:
        print('## No labelling is found, proceed with labelling...')
        outfile = '%s.label' % infile
        lb.main(infile, labelfile, outfile)
        return outfile
def main(f_config, gff_infile, outdir, has_mirna, make_plots):
    ensure_dir(outdir)

    cparser = SafeConfigParser()
    cparser.read(f_config)
    f_params = cparser.get('promi2', 'params')
    listoffeatures = cparser.get('promi2', 'features').split(',')
    labelfile = cparser.get('configs', 'labelfile')

    is_consider_corr = 'corr' in listoffeatures

    ## Make sure no chrM in infile
    _verify_infile(gff_infile)

    ## Extract features
    gff_allfeatures = extractFeatures_given_gff(f_config, gff_infile, outdir,
                                                has_mirna, is_consider_corr)

    ## Don't consider TSS which do not have a partner miRNA
    gff_allfeatures = _filter_keepValidPairs(gff_allfeatures)

    ## Run Promirna
    fo_predictions = os.path.join(
        outdir, 'Predictions.%s.txt' % os.path.basename(gff_infile))
    promi2.promi2(f_params, listoffeatures, gff_allfeatures, fo_predictions)

    ## Label predictions
    fo_labelledpredictions = fo_predictions + '.label'
    label.main(fo_predictions, labelfile, fo_labelledpredictions)

    ## Generate plots
    if make_plots:
        import plots
        outdir_plt = os.path.join(outdir, 'plots')
        plots.main(fo_labelledpredictions, outdir_plt, f_config)

    return fo_labelledpredictions
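# A hypothetical driver for the pipeline main() above, e.g. from a small CLI wrapper.
# The argument order mirrors the signature; the positional paths are placeholders and
# not taken from the original code.
if __name__ == '__main__':
    import sys

    f_config = sys.argv[1]    # .ini file with [promi2] and [configs] sections
    gff_infile = sys.argv[2]  # candidate TSS in GFF format (must not contain chrM)
    outdir = sys.argv[3]

    # has_mirna: the GFF already carries partner-miRNA information;
    # make_plots: also write a plots/ directory under outdir.
    fo_labelled = main(f_config, gff_infile, outdir, has_mirna=True, make_plots=False)
    print('Labelled predictions written to: %s' % fo_labelled)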
def main(lat, lng):
    start_time = time.time()

    # Define our connection string
    options = config.main()
    conn_string = ("host='" + options['host'] +
                   "' dbname='" + options['db_name'] +
                   "' user='" + options['db_user'] +
                   "' password='" + options['db_pass'] + "'")

    # print the connection string we will use to connect
    print("Connecting to database")  # % (conn_string)

    # get a connection; if a connection cannot be made an exception will be raised here
    conn = psycopg2.connect(conn_string)

    # conn.cursor will return a cursor object; you can use this cursor to perform queries
    cur = conn.cursor()

    lat = float(lat)
    lng = float(lng)
    print("Connected!\n")

    sql = (
        "SELECT label, CHAR_LENGTH(label), lng, lat, ST_Distance(geog_def, poi) AS distance_m"
        " FROM " + options['db_prefix'] + "road,"
        " (select ST_MakePoint(%(lng)f, %(lat)f)::geography as poi) as poi"
        " WHERE ST_DWithin(geog_def, poi, 100000)"
        " AND CHAR_LENGTH(label) >= 6"
        " AND label LIKE '%%+%%'"
        " ORDER BY ST_Distance(geog_def, poi)"
        " LIMIT 1;" % {'lat': lat, 'lng': lng})
    print(sql)
    cur.execute(sql)
    rows = cur.fetchall()
    print(time.time() - start_time, "seconds")

    if len(rows) == 0:
        print("Cannot find closest Distance")
        return {'label': '0', 'distance': '0'}
    else:
        for row in rows:
            # convert label STA or - to KM
            km = label.main(row[0])
            print("\nGet Closest Distance:", km['km'])
            return {'label': str(km['km']), 'distance': round(row[4], 2)}
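# Note on the SQL above: lat and lng are cast to float before the %-interpolation, so
# the query is safe in practice, but the more conventional psycopg2 style is to let the
# driver bind the values. A sketch of the same query with named placeholders follows
# (cur, options, lat and lng as in the function above; the table name still has to be
# concatenated because identifiers cannot be passed as bound parameters):
sql = (
    "SELECT label, CHAR_LENGTH(label), lng, lat, ST_Distance(geog_def, poi) AS distance_m"
    " FROM " + options['db_prefix'] + "road,"
    " (SELECT ST_MakePoint(%(lng)s, %(lat)s)::geography AS poi) AS poi"
    " WHERE ST_DWithin(geog_def, poi, 100000)"
    " AND CHAR_LENGTH(label) >= 6"
    " AND label LIKE '%%+%%'"
    " ORDER BY ST_Distance(geog_def, poi)"
    " LIMIT 1;")
cur.execute(sql, {'lat': lat, 'lng': lng})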
def label(self):
    self.labels = label.main(self.path)
    label.label_parse(self.labels)
Hello, world!
""")

#url = "http://localhost:8000/?%s"
#params = urllib.parse.urlencode({'spam': 1, 'eggs': 2, 'bacon': 0})
#try:
#    urlopen(url % params)
#except error.URLError as e: print("URL Error:", e.read(), url)
#except error.HTTPError as e: print("HTTP Error:", e.read(), url)
#sys.exit(0)

lng = 115.719
lat = -0.493033

data = '1,2,3'
x, y, z = data.split(',')
print(x)

km = label.main('STA 29+000')
print(km['km'])

d = distance.main(lat, lng)
time.sleep(0.5)

val = {"lat": lat, "distance": d['distance'], "lng": lng}
# NOTE: re.sub returns a new string; this call does not change d['label'] in place.
re.sub(r'\W+', '', d['label'])
print('get label is=', d['label'])
#sys.exit(0)

print("type", type(d['label']), type(d['distance']))
print(str(d['label']), d['distance'], val['distance'])
print("<p>KM is =", d['label'], "</p>")