def LoadCSV2DB(filename):
    """Import a primer CSV file into the 'primers_primer' MySQL table.

    Each CSV row is expected to hold (..., loc_id, name, seq, ...) in
    columns 1-3; rows with an empty name are skipped.  The linear
    location ID is decoded into a (box, row-letter, column) position.

    Fixes vs. original: the csv loop variable 'row' was shadowed by the
    row-letter computation, and the input file handle was never closed.
    """
    db = MySQLDatabase(host='132.77.80.238', user='******',
                       passwd='a1a1a1', db='primero')
    counter = 1
    with open(filename, 'r') as csv_file:
        for csv_row in csv.reader(csv_file):
            loc_id = int(csv_row[1])
            name = csv_row[2]
            # Sequences sometimes contain stray spaces; strip them.
            seq = csv_row[3].replace(" ", "")
            if name == "":
                continue
            box, row_letter, col = loc_id_to_position(loc_id)
            db.Insert('primers_primer',
                      [counter, name, box, row_letter, col, seq, '', loc_id])
            counter += 1
    db.Commit()


def loc_id_to_position(loc_id):
    """Decode a linear location ID into (box, row_letter, col).

    Boxes hold 81 wells laid out as 9 rows ('A'-'I') x 9 columns (1-9).
    Box and column are 1-based; the row is returned as a letter.
    """
    box, residual = divmod(loc_id, 81)
    row_idx, col_idx = divmod(residual, 9)
    return box + 1, chr(row_idx + ord('A')), col_idx + 1
def main():
    """Dump tecan_readings rows for one experiment as CSV or plain text.

    Prints usage and exits with -1 when --exp_id is missing.  Output
    goes to --output_fname when given, otherwise to stdout.
    """
    opt_parser = MakeOpts()
    options, _ = opt_parser.parse_args(sys.argv)
    if not options.exp_id:
        opt_parser.print_help(sys.stderr)
        sys.exit(-1)
    #print "Importing into database: " + options.sqlite_db_filename
    sys.stderr.write("Experiment ID: %s\n" % options.exp_id)
    if options.plate_num is not None:
        sys.stderr.write("Plate num: %d\n" % options.plate_num)

    db = MySQLDatabase(host='hldbv02', user='******', passwd='a1a1a1',
                       db='tecan')
    # NOTE(review): the query is assembled by string interpolation from
    # command-line values, so it is SQL-injectable; switch to
    # parameterized queries if db.Execute supports them.
    query = 'SELECT plate,reading_label,row,col,time,measurement ' + \
            'FROM tecan_readings WHERE exp_id="%s"' % options.exp_id
    if options.plate_num is not None:
        query += ' AND plate=%d' % options.plate_num
    if options.reading_label is not None:
        query += ' AND reading_label="%s"' % options.reading_label
    query += ' ORDER BY exp_id, plate, reading_label, row, col;'

    if options.output_fname is not None:
        f_out = open(options.output_fname, 'w')
    else:
        f_out = sys.stdout

    if options.csv:
        output = csv.writer(f_out)
        output.writerow(['plate', 'reading_label', 'row', 'col',
                         'time_in_sec', 'measurement'])
    else:
        output = f_out

    for row in db.Execute(query):
        plate, reading_label, row, col, time_in_sec, measurement = row
        if options.csv:
            output.writerow([plate, reading_label, row, col,
                             time_in_sec, measurement])
        else:
            t = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(time_in_sec))
            # BUG FIX: the original format string had no trailing
            # newline, so all records ran together on one line.
            output.write("(%d,%d) %s, %s : %.4f\n"
                         % (row, col, reading_label, t, measurement))

    # BUG FIX: close the output file if we opened one (never stdout).
    if f_out is not sys.stdout:
        f_out.close()
    del db
def main(): options = MakeOpts().parse_args() if options.debug: db = CreateDummyDB() else: db = MySQLDatabase(host=options.host, user='******', port=3306, passwd='a1a1a1', db='tecan') if options.xml_dir: if not os.path.exists(options.xml_dir): error("Directory not found: " + options.xml_dir) xml_fname = GetLatestFile(options.xml_dir) else: xml_fname = options.xml_filename if not os.path.exists(xml_fname): error("File not found: " + xml_fname) print "Importing from file: " + xml_fname header_dom, script_dom, plate_values = tecan.ParseReaderFile(xml_fname) exp_id_dict, plate_id = read_exp_id_csv(options.exp_id_csv[0]) if options.plate not in exp_id_dict: error('The measured plate (%d) does not have an exp_id in the CSV file' % options.plate) exp_id = exp_id_dict[options.plate] MES = {plate_id: plate_values} tecan.WriteToDatabase(MES, db, exp_id) db.Commit() print "Done!" sys.exit(0)
def main(): options = MakeOpts().parse_args() VOL = options.vol LABWARE = 'GRID40SITE3' LIQ = options.liquid_class # We should also state which directory where the evoware could find the worklist file db = MySQLDatabase(host=options.host, user='******', port=3306, passwd='a1a1a1', db='tecan') plate_id = options.iteration % options.num_plates exp_id, max_time = GetLastPlate(db, plate_id, options.reading_label) data = GetMeasuredData(db, exp_id, max_time, plate_id, options.reading_label) dilution_rows = GetDilutionRows(db, exp_id, plate_id, max_time, options.row_split) print "dilution_rows:\n", dilution_rows worklist = [] for split in xrange(dilution_rows.shape[0]): for col in xrange(dilution_rows.shape[1]): row = dilution_rows[split, col] meas = data[row, col] print col, row, meas if (meas > options.threshold) and ( (row+1) % (8/options.row_split) != 0 ): msg = "OD = %f --> dilute cell %s%d into cell %s%d" % (meas, chr(ord('A') + row), col+1, chr(ord('A') + row + 1), col+1) print msg worklist += [UserPrompt(msg)] worklist += [Comm('A',LABWARE,row,col,VOL,LIQ)] worklist += [Comm('D',LABWARE,row+1,col,VOL,LIQ)] #labware,volume and liquid_class would be hard coded for now ... worklist += [Tip()] IncrementRow(db, exp_id, plate_id, col, row+1, max_time) db.Commit() if len(worklist) == 0: sys.exit(0) worklist = Header() + worklist + Footer() f = open(options.worklist[0], 'w') f.write('\n'.join(worklist)) f.close() print "Done!" sys.exit(1)
def main(): options = MakeOpts().parse_args() VOL = options.vol LABWARE_FROM = 'EpnStand' LABWARE_TO = 'GRID40SITE3' LIQ = options.liquid_class # We should also state which directory where the evoware could find the worklist file db = MySQLDatabase(host=options.host, user='******', port=3306, passwd='a1a1a1', db='tecan') plate_id = options.iteration % options.num_plates exp_id, max_time = GetLastPlate(db, plate_id, options.reading_label) worklist = [] for col, row in GetWellsToInject(db, exp_id, max_time, plate_id, options.reading_label, options.threshold): if col < options.column_range: msg = 'Inject into %s%d' % (chr(ord('A') + row), col + 1) print msg worklist += [UserPrompt(msg)] worklist += [Comm('A', LABWARE_FROM, 0, 0, VOL, LIQ)] worklist += [Comm('D', LABWARE_TO, row, col, VOL, LIQ)] worklist += [Tip()] StoreInDB(db, exp_id, plate_id, col, row, max_time) if len(worklist) == 0: sys.exit(0) worklist = Header() + worklist + Footer() f = open(options.worklist[0], 'w') f.write('\n'.join(worklist)) f.close() print "Done!" sys.exit(1)
def Main(): options, _ = MakeOpts().parse_args(sys.argv) assert options.experiment_id and options.plate_id and options.reading_label print 'Reading plate %s from experiment %s' % (options.plate_id, options.experiment_id) db = MySQLDatabase(host='hldbv02', user='******', passwd='a1a1a1', db='tecan') p = Plate96.FromDatabase(db, options.experiment_id, options.plate_id) times, readings, labels = p.SelectReading(options.reading_label) print 'Calculating growth rates' lux_calculator = growth.SlidingWindowGrowthCalculator(window_size=options.window_size, minimum_level=options.lower_bound, maximum_level=options.upper_bound) n = labels.size colors = ColorMap(set(labels)) rates = {} for i in xrange(n): rate, unused_stationary = lux_calculator.CalculateGrowth(times[i,:], readings[i,:]) scaled_rate = rate * 60 * 60 label = labels[i] rates.setdefault(label, []).append(scaled_rate) sorted_labels = sorted(rates.keys(), key=MaybeFloat, reverse=False) xpts = pylab.arange(len(sorted_labels)) ticks = xpts + 0.5 mean_and_err_rates = [MeanWithConfidenceInterval(rates[l]) for l in sorted_labels] means = [t[0] for t in mean_and_err_rates] errs = [t[1] for t in mean_and_err_rates] for label, mean_rate, err in zip(sorted_labels, means, errs): print '%s: %.2g +- %.2g' % (label, mean_rate, err) pylab.figure() pylab.bar(xpts, means) pylab.errorbar(ticks, means, yerr=errs, linestyle='None') pylab.xticks(ticks, sorted_labels, rotation=45) pylab.show()
def main(): options = MakeOpts().parse_args() if options.debug: db = CreateDummyDB() else: db = MySQLDatabase(host=options.host, user='******', port=3306, passwd='a1a1a1', db='tecan') if options.xml_dir: if not os.path.exists(options.xml_dir): print "Directory not found: " + options.xml_dir sys.exit(-1) xml_fname = GetLatestFile(options.xml_dir) else: xml_fname = options.xml_filename if not os.path.exists(xml_fname): print "File not found: " + xml_fname sys.exit(-1) print "Importing from file: " + xml_fname header_dom, script_dom, plate_values = tecan.ParseReaderFile(xml_fname) exp_id = GetExperimentID(options, db, header_dom, script_dom) print "Experiment ID: " + exp_id plate_id = options.iteration % options.num_plates print "Plate ID: %d" % plate_id MES = {plate_id: plate_values} tecan.WriteToDatabase(MES, db, exp_id) db.Commit() print "Done!" sys.exit(0)
def Main(): options, _ = MakeOpts().parse_args(sys.argv) assert options.experiment_id and options.plate_id and options.reading_label assert options.matfile_name print 'Reading plate %s from experiment %s' % (options.plate_id, options.experiment_id) db = MySQLDatabase(host='132.77.80.238', user='******', passwd='a1a1a1', db='tecan') p = Plate96.FromDatabase(db, options.experiment_id, options.plate_id) times, readings, labels = p.SelectReading(options.reading_label) labels = pylab.array(labels, dtype=np.object) print 'Writing data to %s' % options.matfile_name out_data = { 'times': times, 'labels': labels, options.reading_label: readings } sio.savemat(options.matfile_name, out_data, oned_as='column')
def Main():
    """Plot yield and specific growth rate vs. substrate concentration.

    Wells are grouped by label; labels are expected to be numeric
    substrate concentrations (non-numeric labels are skipped).
    Figure 0: yield per input concentration (log-log).
    Figure 2: specific growth rate (/hour) vs. concentration (semilog-x).
    """
    options, _ = MakeOpts().parse_args(sys.argv)
    assert options.experiment_id and options.plate_id and options.reading_label
    print 'Reading plate %s from experiment %s' % (options.plate_id,
                                                   options.experiment_id)
    db = MySQLDatabase(host='132.77.80.238', user='******', passwd='a1a1a1',
                       db='tecan')
    p = Plate96.FromDatabase(db, options.experiment_id, options.plate_id)
    times, readings, labels = p.SelectReading(options.reading_label)

    print 'Calculating growth rates'
    lux_calculator = growth.SlidingWindowGrowthCalculator(
        window_size=options.window_size,
        minimum_level=options.lower_bound,
        maximum_level=options.upper_bound)
    n = labels.size
    colors = ColorMap(set(labels))

    # Group per-well growth rates (converted /sec -> /hour) and
    # stationary levels by their numeric label.
    rates = {}
    stationaries = {}
    for i in xrange(n):
        label = labels[i]
        if not TryFloat(label):
            # Skip wells whose label is not a concentration.
            continue
        rate, stationary = lux_calculator.CalculateGrowth(
            times[i, :], readings[i, :])
        scaled_rate = rate * 60 * 60
        rates.setdefault(label, []).append(scaled_rate)
        stationaries.setdefault(label, []).append(stationary)

    print 'Calculating mean growth rate and error'
    f = pylab.figure(0)
    mean_rates = []
    rate_err = []
    # NOTE(review): mean_rates/rate_err are built in rates.iterkeys()
    # order and later zipped against rates.keys(); the two orders match
    # only because the dict is not mutated in between.
    for label in rates.iterkeys():
        r = pylab.array(rates[label])
        mean_rate, r_err = MeanWithConfidenceInterval(r)
        mean_rates.append(mean_rate)
        rate_err.append(r_err)
        pct = TryFloat(label)
        if not pct:
            continue
        pct *= 100  # fraction -> percent
        # Yield normalized by the input concentration.
        s = pylab.array(stationaries[label]) / pct
        s_mean, s_err = MeanWithConfidenceInterval(s)
        if s_mean == 0.0:
            continue
        pylab.loglog(pct, s_mean, color=colors[label], marker='.',
                     linestyle='None', figure=f, markersize=20, label=label)
        pylab.errorbar(pct, s_mean, yerr=s_err)
    pylab.xlabel('Substrate Concentration (%)')
    pylab.ylabel('Yield per Input Concentration (%s/%%)'
                 % (options.reading_label))
    pylab.legend(loc='upper right')

    # Second figure: growth rate vs. concentration.
    pylab.figure(2)
    concs = pylab.array([float(l) for l in rates.keys()])
    for conc, rate, err in zip(concs, mean_rates, rate_err):
        print '%s: %.2g +- %.2g' % (conc, rate, err)
    idx = range(len(concs))  # NOTE(review): unused
    pylab.semilogx(concs, mean_rates, 'g.')
    pylab.errorbar(concs, mean_rates, yerr=rate_err, fmt=None)
    pylab.xlabel('Substrate Concentration (%)')
    pylab.ylabel('Specific Growth Rate (/hour)')
    pylab.xlim((1.0e-10, concs.max() + 1))
    pylab.show()
def Main():
    """Plot absolute growth rate vs. substrate concentration for two plates.

    Plate '1' is labeled Glucose (black), plate '2' Gluconate (red).
    Each plate's per-label mean rates are filtered to numeric labels,
    plotted with error bars, and overlaid with a smoothed trend line.
    """
    options, _ = MakeOpts().parse_args(sys.argv)
    assert options.experiment_id and options.plate_ids and options.reading_label
    plates = map(str.strip, options.plate_ids.split(','))
    print 'Reading plates %s from experiment %s' % (', '.join(plates),
                                                    options.experiment_id)
    db = MySQLDatabase(host='hldbv02', user='******', passwd='a1a1a1',
                       db='tecan')

    print 'Calculating growth rates'
    growth_calc = growth.SlidingWindowGrowthCalculator(
        window_size=options.window_size,
        minimum_level=options.lower_bound,
        maximum_level=options.upper_bound)

    # Hard-coded display names/colors keyed by plate ID.
    plate_names = {'1': 'Glucose', '2': 'Gluconate'}
    colormap = {'1': 'k', '2': 'r'}

    f1 = pylab.figure(0)
    for plate_id in plates:
        p = Plate96.FromDatabase(db, options.experiment_id, plate_id)
        rates, unused_stationaries = growth_calc.CalculatePlateGrowth(
            p, options.reading_label)
        mean_rates = MeanWithConfidenceIntervalDict(rates)
        means = []
        errors = []
        concs = []
        # Keep only labels that parse as a concentration.
        for label in mean_rates.keys():
            conc = TryFloat(label)
            if conc is False:
                continue
            mean, error = mean_rates[label]
            means.append(mean)
            errors.append(error)
            concs.append(conc)
        means = pylab.array(means)
        errors = pylab.array(errors)
        concs = pylab.array(concs)
        # Normalized values are only used by the disabled subplot below.
        max_mean = max(means)
        norm_means = means / max_mean
        norm_errors = errors / max_mean
        label = plate_names[plate_id]
        color = colormap[plate_id]
        """
        pylab.subplot(121)
        pylab.plot(concs, norm_means, color=color, linestyle='None',
                   marker='.', label=label)
        pylab.errorbar(concs, norm_means, yerr=norm_errors, ecolor=color,
                       fmt=None)
        """
        #pylab.subplot(122)
        # Convert to percent and drop (near-)zero concentrations so the
        # log axis is well-defined.
        pcts = concs * 100
        idx = pylab.find(pcts > 1e-4)
        my_pcts = pcts[idx]
        my_means = means[idx]
        my_errs = errors[idx]
        # Sort by concentration so the trend line is monotone in x.
        order = pylab.argsort(my_pcts)
        my_pcts = my_pcts[order]
        my_means = my_means[order]
        my_errs = my_errs[order]
        pylab.plot(my_pcts, my_means, color=color, linestyle='None',
                   linewidth=4, marker='.', markersize=15, label=label)
        pylab.errorbar(my_pcts, my_means, yerr=my_errs, ecolor=color,
                       fmt=None, linewidth=1)
        # Smooth in log-concentration space.
        smoothed = smoothing.WeightedAverageSmoother(pylab.log(my_pcts),
                                                     my_means, sigma=0.7)
        log_xs = pylab.arange(pylab.log(1e-4), pylab.log(2.2), 1e-3)
        xs = pylab.exp(log_xs)
        ys = smoothed(log_xs)
        pylab.plot(xs, ys, color=color, linewidth=3, linestyle='--')
    """
    pylab.subplot(121)
    pylab.xlabel('CAP Concentration (fraction of standard concentration)')
    pylab.ylabel('Relative Specific Growth Rate (/hour)')
    pylab.xlim(-0.1,0.2)
    """
    """
    pylab.subplot(122)
    pylab.xlabel('CAP Concentration (fraction of standard concentration)')
    pylab.ylabel('Absolute Specific Growth Rate (/hour)')
    """
    pylab.xscale('log')
    pylab.xlabel('Substrate concentration (m/v %)')
    pylab.ylabel('Specific Growth Rate (/hour)')
    #pylab.xlim(-0.1,0.2)
    pylab.legend(loc='upper left')
    pylab.show()
def Main():
    """Compare promoter activities between two growth conditions.

    Reads culture/reporter plates for two sets of plate IDs from one
    experiment, computes promoter activities per strain, renders
    per-strain and summary figures, and writes an HTML report with the
    log2 fold-change between the two conditions.
    """
    options, _ = MakeOpts().parse_args(sys.argv)
    assert options.experiment_id
    assert options.first_plate_ids and options.second_plate_ids
    assert options.culture_label and options.reporter_label
    assert options.output_dir

    # Create the output directory tree (report + figure images).
    if not path.exists(options.output_dir):
        util._mkdir(options.output_dir)
    imgs_path = path.join(options.output_dir, 'imgs/')
    if not path.exists(imgs_path):
        util._mkdir(imgs_path)

    first_plate_ids = map(str.strip, options.first_plate_ids.split(','))
    second_plate_ids = map(str.strip, options.second_plate_ids.split(','))
    labels_to_ignore = set()
    for l in options.labels_to_ignore.split(','):
        labels_to_ignore.add(l.strip())

    print 'Reading plates from experiment %s' % (options.experiment_id)
    db = MySQLDatabase(host='hldbv02', user='******', passwd='a1a1a1',
                       db='tecan')

    # Pipeline stages shared by both conditions.
    filterer = promoter_activity.CultureReporterFilterer(
        options.min_culture_level, options.min_reporter_level)
    reporter_bg_sub = promoter_activity.ReporterBackgroundSubtracter(
        options.background_label)
    culture_shifter = promoter_activity.CultureShifter()
    activity_calc = promoter_activity.ReporterActivityCalculator(
        options.lower_culture_bound, options.upper_culture_bound,
        min_reporter_level=options.lower_reporter_bound,
        window_size=options.window_size)

    first_plate_runners = []
    second_plate_runners = []

    # One runner per condition; each aggregates all of that condition's
    # plates before running.
    print 'Calculating promoter activities for first condition'
    runner1 = PlateActivityRunner(
        options.culture_label, options.reporter_label, filterer,
        culture_shifter, reporter_bg_sub, activity_calc)
    for plate_id in first_plate_ids:
        plate = Plate96.FromDatabase(db, options.experiment_id, plate_id)
        runner1.AddPlate(plate)
    runner1.Run()
    first_plate_runners.append(runner1)

    print 'Calculating promoter activities for second condition'
    runner2 = PlateActivityRunner(
        options.culture_label, options.reporter_label, filterer,
        culture_shifter, reporter_bg_sub, activity_calc)
    for plate_id in second_plate_ids:
        plate = Plate96.FromDatabase(db, options.experiment_id, plate_id)
        runner2.AddPlate(plate)
    runner2.Run()
    second_plate_runners.append(runner2)

    # Unify strain data.
    # NOTE(review): condition names 'Glucose'/'Gluconate' are hard-coded
    # here and in the fold-change computation below.
    print 'Saving figures'
    strains_data = StrainConditionsData(options.background_label)
    for plate_data in first_plate_runners:
        strains_data.AddPlateData('Glucose', plate_data,
                                  ignore_labels=labels_to_ignore)
    for plate_data in second_plate_runners:
        strains_data.AddPlateData('Gluconate', plate_data,
                                  ignore_labels=labels_to_ignore)
    strains_data.MakeStrainFigures(imgs_path)
    summary_fignames = strains_data.MakeSummaryFigures(imgs_path, 'Glucose',
                                                       'Gluconate')
    plate_fignames = strains_data.MakePerPlateFigures(imgs_path)

    # Per-strain fold-change (condition 2 over condition 1), sorted
    # descending by log2 fold-change.
    labels = strains_data.GetStrainLabels()
    condition1_activities, condition1_errs = strains_data.GetMeanMaxActivities(
        labels, 'Glucose')
    condition2_activities, condition2_errs = strains_data.GetMeanMaxActivities(
        labels, 'Gluconate')
    log_1 = np.log2(condition1_activities)
    log_2 = np.log2(condition2_activities)
    diffs = log_2 - log_1
    sorted_diffs = list(np.argsort(diffs))
    sorted_diffs.reverse()
    diffs_data = []
    for i in sorted_diffs:
        logfold = diffs[i]
        fold = np.exp2(logfold)
        if np.isnan(logfold):
            # Missing activity in either condition -> no fold change.
            logfold = None
            fold = None
        diffs_data.append({
            'label': labels[i],
            'fold_change': fold,
            'log_fold': logfold
        })

    # Render the template.
    print 'Writing HTML output'
    template_data = {
        'experiment_id': options.experiment_id,
        'first_plate_ids': first_plate_ids,
        'second_plate_ids': second_plate_ids,
        'culture_label': options.culture_label,
        'reporter_label': options.reporter_label,
        'first_plates': first_plate_runners,
        'second_plates': second_plate_runners,
        'strains_data': strains_data,
        'diffs_data': diffs_data,
        'summary_figure_fnames': summary_fignames,
        'per_plate_figure_fnames': plate_fignames
    }
    template_fname = path.join(options.output_dir, 'results.html')
    templates.render_to_file('compare_promoter_activities.html',
                             template_data, template_fname)
    return
def main():
    """Write an Evoware worklist that advances dilution 'paths' on a plate.

    For each path whose current well exceeds the OD threshold, the
    worklist transfers fresh media plus a culture aliquot into the
    path's next well and records the step in the database.
    Exits 0 when nothing was diluted, 1 after writing a worklist file.
    """
    options = MakeOpts().parse_args()
    path_dict = ReadPathCsv(options)
    VOL = options.vol
    MEDIA_VOL = 150 - VOL  # volume of fresh media in designated well
    LABWARE = 'GRID40SITE3'
    EPNSTAND = 'EpnStand'
    LIQ = options.liquid_class
    # We should also state which directory where the evoware could find
    # the worklist file.
    db = MySQLDatabase(host=options.host, user='******', port=3306,
                       passwd='a1a1a1', db='tecan')
    exp_id_dict, plate_id = read_exp_id_csv(options.exp_id_csv)
    if options.plate not in exp_id_dict:
        error(
            'The measured plate (%d) does not have an exp_id in the CSV file'
            % options.plate)
    exp_id = exp_id_dict[options.plate]
    max_time = GetLastPlate(db, exp_id, plate_id, options.reading_label)
    data = GetMeasuredData(db, exp_id, max_time, plate_id,
                           options.reading_label)
    path_step_dict = GetPathSteps(db, exp_id, plate_id, max_time, path_dict)
    worklist = []
    for path_label, path_step in path_step_dict.iteritems():
        # Current well of this path.
        row, col = path_dict[path_label][path_step]
        meas = data[row, col]
        print path_label, path_step, col, row, meas
        # Dilute only if over threshold and the path has a next well.
        if (meas > options.threshold) and (path_step <
                                           len(path_dict[path_label]) - 1):
            next_row, next_col = path_dict[path_label][path_step + 1]
            msg = "Current plate is : %d ) %s __ OD = %f --> dilute cell %s%d into cell %s%d" % (
                options.plate, exp_id, meas, chr(ord('A') + row), col + 1,
                chr(ord('A') + next_row), next_col + 1)
            print msg
            worklist += [UserPrompt(msg)]
            # Fresh media first, then the culture aliquot.
            worklist += [Comm('A', EPNSTAND, 0, 0, MEDIA_VOL, LIQ)]
            worklist += [
                Comm('D', LABWARE, next_row, next_col, MEDIA_VOL, LIQ)
            ]
            worklist += [Comm('A', LABWARE, row, col, VOL, LIQ)]
            worklist += [Comm('D', LABWARE, next_row, next_col, VOL, LIQ)]
            # labware, volume and liquid_class would be hard coded for now ...
            worklist += [Tip()]
            IncrementRow(db, exp_id, plate_id, path_label, path_step + 1,
                         max_time, row, col, next_row, next_col)
    db.Commit()
    if len(worklist) == 0:
        sys.exit(0)
    worklist = Header() + worklist + Footer()
    f = open(options.worklist[0], 'w')
    f.write('\n'.join(worklist))
    f.close()
    print "Done!"
    sys.exit(1)
def main(): options = MakeOpts().parse_args() db = MySQLDatabase(host=options.host, user='******', port=3306, passwd='a1a1a1', db='tecan') plate_id = options.iteration % options.num_plates exp_id, max_time = GetLastPlate(db, plate_id, options.reading_label) exp_id_url = exp_id.replace(" ", "%20") url = 'http://eladpc1/RoboSite/graph/%s/%s/OD600/Read' % (exp_id_url, plate_id) response = urllib2.urlopen(url) print "Response:", response # Get the URL. This gets the real URL. print "The URL is: ", response.geturl() # Get all data html = response.read().replace( '/js/highcharts.js', 'http://code.highcharts.com/highcharts.js').replace( '/js/modules/exporting.js', 'http://code.highcharts.com/modules/exporting.js') print "Get all data: ", html file = 'attach.html' fh = open(file, "w") fh.write(html) fh.close() subject = "%s --> plate : %d " % (exp_id, plate_id) data = GetMeasuredData(db, exp_id, max_time, plate_id, options.reading_label) csv = '' string = '' for c in range(12): string += "<BR>" for r in range(8): #string += data[r,c].astype('|S6') string += "%d , %d --> %s" % (r + 1, c + 1, data[r, c].astype('|S6')) string += '<BR>' print "%d , %d --> %f" % (r + 1, c + 1, data[r, c]) for r in range(8): csv += "<BR>" for c in range(12): csv += data[r, c].astype('|S6') if (c < 11): csv += ' ,' print "%d , %d --> %f" % (r + 1, c + 1, data[r, c]) print string body = string body += '<BR><BR>' body += csv msg = MIMEMultipart() msg['Subject'] = subject msg['From'] = sender content = MIMEText(body, 'html') msg.attach(content) part = MIMEBase('application', 'octet-stream') part.set_payload(open(file, 'rb').read()) Encoders.encode_base64(part) exp = exp_id + '.html' part.add_header('Content-Disposition', 'attachment; filename="%s"' % exp) msg.attach(part) session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT) session.ehlo() session.starttls() session.login(sender, password) session.sendmail(sender, recipient, msg.as_string()) sys.exit(1) headers = [ "From: " + sender, "Subject: " + subject, 
"To: " + recipient, "MIME-Version: 1.0", "Content-Type: text/html" ] headers = "\r\n".join(headers) session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT) session.ehlo() session.starttls() session.login(sender, password) session.sendmail(sender, recipient, headers + "\r\n\r\n" + body) session.quit() print "Done!" sys.exit(1)