def runTest(self):
    for num, info in sorted(samples['small_signals'].items()):
        # Prepare paths #
        orig_bigwig_path = info['bigwig']
        orig_sql_path    = info['sql']
        test_sql_path    = temporary_path('.sql')
        test_bigwig_path = temporary_path('.bigwig')
        # From bigwig to SQL #
        try:
            track.convert(orig_bigwig_path, test_sql_path, assembly='sacCer2')
        except MissingExecutableError as err:
            sys.stderr.write("skipping: {0} ".format(err.message))
            break
        else:
            self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path))
        # From SQL to bigwig #
        try:
            track.convert(test_sql_path, test_bigwig_path)
        except MissingExecutableError as err:
            sys.stderr.write("skipping: {0} ".format(err.message))
            break
        else:
            self.assertTrue(assert_file_equal(orig_bigwig_path, test_bigwig_path, start_a=1, start_b=1))
        # Clean up #
        os.remove(test_sql_path)
        os.remove(test_bigwig_path)
def benchmark_creation(file):
    old_path = file['path']
    new_path = Path(output_directory + old_path.filename + '.sql')
    if os.path.exists(str(new_path)): os.remove(str(new_path))
    with Timer() as timer:
        track.convert(str(old_path), str(new_path))
    file['new_path'] = new_path
    return timer.total_time
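The Timer used above is a context manager that is not defined in this snippet. A minimal sketch of one possible implementation follows, assuming only the total_time attribute that benchmark_creation reads afterwards; the real Timer may record more than this.

import time

class Timer(object):
    """Hypothetical stand-in for the Timer context manager used above."""
    def __enter__(self):
        # Record the start time when the with-block is entered.
        self.start = time.time()
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Expose the elapsed wall-clock time, read as timer.total_time above.
        self.total_time = time.time() - self.start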
def create_tracks():
    for collection, conversion_info in sorted(collection_conversion.items()):
        for track_name, track_info in sorted(samples[collection].items()):
            print Color.ylw + "Creating track '" + track_info['name'] + "'" + Color.end
            from_path = track_info[conversion_info['from']]
            to_path   = track_info[conversion_info['to']]
            if os.path.exists(to_path): os.remove(to_path)
            track.convert(from_path, to_path, assembly=conversion_info['assembly'])
def runTest(self):
    # Prepare paths #
    orig_gzip_path = samples['gzip_tracks'][1]['gzip']
    orig_sql_path  = samples['small_features'][1]['sql']
    test_sql_path  = temporary_path('.sql')
    # From BED to SQL #
    track.convert(orig_gzip_path, test_sql_path, assembly='sacCer2')
    self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path))
    # Clean up #
    os.remove(test_sql_path)
def runTest(self):
    for num, info in sorted(samples['rand_signals'].items()):
        # Prepare paths #
        orig_wig_path = info['wig']
        orig_sql_path = info['sql']
        test_sql_path = temporary_path('.sql')
        # From WIG to SQL #
        track.convert(orig_wig_path, test_sql_path, assembly='sacCer2')
        self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path))
        # Clean up #
        os.remove(test_sql_path)
def runTest(self):
    for num, info in sorted(samples['sga_tracks'].items()):
        # Prepare paths #
        orig_sga_path = info['sga']
        orig_sql_path = info['sql']
        test_sql_path = temporary_path('.sql')
        # From SGA to SQL #
        track.convert(orig_sga_path, test_sql_path, assembly='hg19')
        self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path))
        # Clean up #
        os.remove(test_sql_path)
def runTest(self):
    info = samples['small_signals'][1]
    # Prepare paths #
    orig_sql_path = info['sql']
    orig_sga_path = info['sga']
    test_sga_path = temporary_path('.sga')
    # From SQL to SGA #
    track.convert(orig_sql_path, test_sga_path, assembly='hg19')
    self.assertTrue(assert_file_equal(orig_sga_path, test_sga_path))
    # Clean up #
    os.remove(test_sga_path)
def runTest(self):
    for num, info in sorted(samples['yeast_features'].items()):
        # Prepare paths #
        orig_bed_path = info['bed']
        orig_sql_path = info['sql']
        test_sql_path = temporary_path('.sql')
        # From BED to SQL #
        track.convert(orig_bed_path, test_sql_path, assembly='sacCer2')
        self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path))
        # Clean up #
        os.remove(test_sql_path)
def runTest(self):
    for num, info in sorted(samples['gtf_tracks'].items()):
        # This one is too large #
        if num == 'GenRep': continue
        # Prepare paths #
        orig_gtf_path = info['gtf']
        orig_sql_path = info['sql']
        test_sql_path = temporary_path('.sql')
        # From GTF to SQL #
        track.convert(orig_gtf_path, test_sql_path)
        self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path))
        # Clean up #
        os.remove(test_sql_path)
def tosql(fileinfo, seq_name):
    """Transform an input file into an SQL one."""
    debug('Tosql', 3)
    track.convert(fileinfo.extension and (fileinfo.paths['upload_to'], fileinfo.extension) or fileinfo.paths['upload_to'],
                  fileinfo.paths['store'])
    with track.load(fileinfo.paths['store'], 'sql', readonly=False) as t:
        t.assembly = seq_name
    # debug('tosql : btrack.convert("%s", "%s", chrmeta="%s")' % (fileinfo.paths['upload_to'], fileinfo.paths['store'], seq_name), 3)
    # btrack.convert(fileinfo.paths['upload_to'], fileinfo.paths['store'], chrmeta=seq_name)
    fileinfo.states['instore'] = True
    debug('done', 4)
    return fileinfo
def dump(self, track_id, format, *args, **kw):
    user = handler.user.get_user_in_session(request)
    if not checker.can_download(user.id, track_id):
        flash("You don't have the right to export tracks that are not yours", 'error')
        raise redirect('../')
    _track = DBSession.query(Track).filter(Track.id == track_id).first()
    if format == 'sqlite':
        response.content_type = 'application/x-sqlite3'
        return open(_track.path).read()
    else:
        tmp_file = tempfile.NamedTemporaryFile(delete=True)
        tmp_file.close()
        track.convert(_track.path, (tmp_file.name, format))
        response.content_type = 'text/plain'
        return open(tmp_file.name).read()
def run(create_sql_files=False):
    for format, outcomes in challanges.items():
        for outcome, paths in outcomes.items():
            for path in paths:
                name = os.path.basename(path)
                dest = os.path.splitext(path)[0] + '.sql'
                if os.path.exists(dest): os.remove(dest)
                try:
                    track.convert(path, dest)
                except Exception as err:
                    message(path, name, 'fail', outcome, str(err)[0:160])
                else:
                    message(path, name, 'pass', outcome)
                finally:
                    if not create_sql_files and os.path.exists(dest): os.remove(dest)
def runTest(self):
    for num, info in sorted(samples['small_signals'].items()):
        # Prepare paths #
        orig_bigwig_path = info['bigwig']
        orig_sql_path    = info['sql']
        test_sql_path    = temporary_path('.sql')
        test_bigwig_path = temporary_path('.bigwig')
        # From bigwig to SQL #
        track.convert(orig_bigwig_path, test_sql_path, assembly='sacCer2')
        self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path))
        # From SQL to bigwig #
        track.convert(test_sql_path, test_bigwig_path)
        self.assertTrue(assert_file_equal(orig_bigwig_path, test_bigwig_path, start_a=1, start_b=1))
        # Clean up #
        os.remove(test_sql_path)
        os.remove(test_bigwig_path)
def runTest(self):
    for num, info in sorted(samples['small_features'].items()):
        # Prepare paths #
        orig_bed_path = info['bed']
        orig_sql_path = info['sql']
        test_sql_path = temporary_path('.sql')
        test_bed_path = temporary_path('.bed')
        # From BED to SQL #
        track.convert(orig_bed_path, test_sql_path, assembly='sacCer2')
        self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path))
        # From SQL to BED #
        with track.load(test_sql_path) as t:
            t.roman_to_integer()
        track.convert(test_sql_path, test_bed_path)
        self.assertTrue(assert_file_equal(orig_bed_path, test_bed_path, start_b=1))
        # Clean up #
        os.remove(test_sql_path)
        os.remove(test_bed_path)
def runTest(self):
    for num, info in sorted(samples['gtf_tracks'].items()):
        if num == 'GenRep': continue
        orig_gtf_path = info['gtf']
        orig_sql_path = info['sql']
        test_sql_path = temporary_path('.sql')
        test_gtf_path = temporary_path('.gtf')
        # From GTF to SQL #
        track.convert(orig_gtf_path, test_sql_path)
        self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path))
        # From SQL to GTF #
        # Use Track() instead. #
        track.convert(test_sql_path, test_gtf_path)
        self.assertTrue(assert_file_equal(orig_gtf_path, test_gtf_path, start_b=1))
        # Clean up #
        os.remove(test_sql_path)
        os.remove(test_gtf_path)
def runTest(self): for num, info in sorted(samples["gff_tracks"].items()): # Prepare paths # orig_gff_path = info["gff"] orig_sql_path = info["sql"] test_sql_path = temporary_path(".sql") test_gff_path = temporary_path(".gff") # From GFF to SQL # track.convert(orig_gff_path, test_sql_path, assembly="sacCer2") self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path)) # From SQL to GFF # with track.load(test_sql_path) as t: t.roman_to_integer() track.convert(test_sql_path, test_gff_path) self.assertTrue(assert_file_equal(orig_gff_path, test_gff_path, start_a=1, start_b=1)) # Clean up # os.remove(test_sql_path) os.remove(test_gff_path)
def runTest(self):
    for num, info in sorted(samples['small_signals'].items()):
        # Some files cannot be roundtripped #
        if num == 3 or num == 7: continue
        # Prepare paths #
        orig_wig_path = info['wig']
        orig_sql_path = info['sql']
        test_sql_path = temporary_path('.sql')
        test_wig_path = temporary_path('.wig')
        # From WIG to SQL #
        track.convert(orig_wig_path, test_sql_path, assembly='sacCer2')
        self.assertTrue(assert_file_equal(orig_sql_path, test_sql_path))
        # From SQL to WIG #
        with track.load(test_sql_path) as t:
            t.roman_to_integer()
        track.convert(test_sql_path, test_wig_path)
        self.assertTrue(assert_file_equal(orig_wig_path, test_wig_path, start_b=1))
        # Clean up #
        os.remove(test_sql_path)
        os.remove(test_wig_path)
def device_add(self, dev):
    tdir = os.path.expanduser(self._conf['track_dir'])
    # Log
    prod = dev.get_product()
    ser = dev.get_serial()
    log('device[%s] added (serial=%s)' % (prod, ser))
    self.notify('Device Added', 'Device: %s\nSerial: %s' % (prod, ser))
    # Read tracks
    history = list(reversed(dev.get_history()))
    for h in history:
        # Ignore short tracks
        if h.summary.distance < self._conf['min_distance']: continue
        if h.summary.ride_time < self._conf['min_time']: continue
        # Check if already cached?
        tm = time.gmtime(h.summary.start)
        ts = time.strftime('%Y%m%d%H%M%S', tm)
        n = ts + '.track'
        p = os.path.join(tdir, n)
        if os.path.exists(p): continue
        # Log
        ts = time.strftime('%F %T', tm)
        log('device[%s]: track found %s' % (prod, ts))
        self.notify('Track', 'Start: %s' % ts)
        # Create directory
        if not os.path.exists(tdir):
            os.makedirs(tdir)
        # Get track data (from device)
        t = track.convert(h.merged_segments(True))
        t = track.fixup(t, self._conf)
        # Save in JSON format
        open(p, 'w').write(json.dumps(t))
        log('device[%s]: track cached' % (prod))
def runTest(self):
    in_path = samples['small_features'][1]['sql']
    out_path = temporary_path('.bed')
    track.convert(in_path, out_path)
    #assert_file_equal(out_path, samples['small_features'][1]['bed'])
    os.remove(out_path)
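Distilled from the examples above, the basic pattern is: convert a text format into the SQL backend (passing an assembly when chromosome metadata is needed), optionally open the result with track.load to inspect or modify it, then convert back out. The following is a minimal sketch of that pattern; the file names are placeholders rather than files from the test suite.

import os
import track

# Placeholder paths -- substitute real files.
bed_path = 'features.bed'
sql_path = 'features.sql'
out_path = 'features_roundtrip.bed'

# Text format to SQL backend; the assembly argument supplies chromosome metadata.
track.convert(bed_path, sql_path, assembly='sacCer2')

# The intermediate SQL file can be opened and edited before converting back.
with track.load(sql_path, 'sql', readonly=False) as t:
    print t.assembly

# SQL backend back to a text format.
track.convert(sql_path, out_path)

# Clean up the intermediate file.
os.remove(sql_path)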