def long_chain_limit(request, upload_form):
    buff = StringIO()
    zfile = zipfile.ZipFile(buff, 'w', zipfile.ZIP_DEFLATED)

    for f in upload_form.cleaned_data["files"]:
        parser = dataparser.DataParser(f)
        homolumo, gap = parser.get_graphs()

        name, _ = os.path.splitext(f.name)
        if len(upload_form.cleaned_data["files"]) > 1:
            zfile.writestr(name + "/output.txt", parser.format_output())
            zfile.writestr(name + "/homolumo.eps", homolumo.getvalue())
            zfile.writestr(name + "/gap.eps", gap.getvalue())
        else:
            zfile.writestr("output.txt", parser.format_output())
            zfile.writestr("homolumo.eps", homolumo.getvalue())
            zfile.writestr("gap.eps", gap.getvalue())

    if len(upload_form.cleaned_data["files"]) > 1:
        name = "output"
    zfile.close()
    buff.flush()

    ret_zip = buff.getvalue()
    buff.close()

    response = HttpResponse(ret_zip, content_type="application/zip")
    response["Content-Disposition"] = "attachment; filename=%s.zip" % name
    return response
def parse_file_list(files):
    """Yield each uploaded file, expanding zip and tar archives into their
    member files."""
    for f in files:
        if f.name.endswith(".zip"):
            with zipfile.ZipFile(f, "r") as zfile:
                names = [x for x in zfile.namelist() if not x.endswith("/")]
                for name in names:
                    # The StringIO used here is the project's wrapper, which
                    # accepts a name argument to mimic uploaded files.
                    newfile = StringIO(zfile.open(name).read(), name=name)
                    yield newfile
        elif f.name.endswith(".tar.bz2") or f.name.endswith(".tar.gz"):
            end = f.name.split(".")[-1]
            with tarfile.open(fileobj=f, mode='r:' + end) as tfile:
                for name in tfile.getnames():
                    if tfile.getmember(name).isfile():
                        newfile = StringIO(tfile.extractfile(name).read(),
                                           name=name)
                        yield newfile
        else:
            yield f
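# Hedged usage sketch (not part of the original module): parse_file_list
# expands zip and tar archives into individual file-like objects and passes
# plain files through untouched. `uploaded_files` is assumed to be a list of
# file objects with a .name attribute, e.g. Django uploads.
def _example_collect_names(uploaded_files):
    return [member.name for member in parse_file_list(uploaded_files)]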
def draw(xvals, yvals, vert, horz):
    offset = 0.25
    scale = 10

    mins = numpy.array([min(xvals), min(yvals)])
    maxs = numpy.array([max(xvals), max(yvals)])
    dimensions = maxs - mins + 2 * offset
    mins = mins - offset
    dimensions *= scale

    WIDTH = int(dimensions[1])
    HEIGHT = int(dimensions[0])

    f = StringIO()
    surface = cairo.SVGSurface(f, WIDTH, HEIGHT)
    ctx = cairo.Context(surface)

    ctx.scale(scale, scale)
    ctx.rotate(math.pi / 2)
    # hack to fix the translation from the rotation
    ctx.translate(0, -dimensions[1] / scale)
    ctx.translate(-mins[0], -mins[1])
    ctx.set_line_width(0.1)

    for i, (x, y) in enumerate(zip(xvals, yvals)):
        # cairo clamps color components to [0, 1], so 255 acts as 1.0 here.
        if i in vert:
            color = (0, 255, 0)   # green
        elif i in horz:
            color = (255, 0, 0)   # red
        else:
            color = (0, 0, 255)   # blue
        ctx.set_source_rgb(*color)
        ctx.arc(x, y, 0.25, 0, 2 * math.pi)
        ctx.fill()

    surface.write_to_png(f)
    # THIS IS REQUIRED BECAUSE OF ISSUES WITH CAIRO.
    del surface
    del ctx
    #############

    string = "data:image/png;base64,"
    string += base64.b64encode(f.getvalue())
    return string
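# Hedged usage sketch (not part of the original module): draw() returns a
# base64 PNG data URI, so the result can be embedded directly in an <img>
# tag. The coordinates and index sets below are made up for illustration.
def _example_embed_drawing():
    xvals = [0.0, 1.0, 2.0]
    yvals = [0.0, 0.5, 0.0]
    uri = draw(xvals, yvals, vert=set([0]), horz=set([2]))
    return '<img src="%s" />' % uri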
def setUp(self):
    self.client = Client()
    user = get_user_model().objects.create_user(**USER)
    user.save()
    self.user = user
    self.template = models.JobTemplate(
        name="test-template-delete",
        creator=self.user,
    )
    self.template.template = File(
        StringIO("template", name="test-template-delete"))
    self.template.save()
def get_ssh_connection(self):
    if self.use_password:
        return get_ssh_connection(self.cluster.hostname,
                                  self.username,
                                  password=self.password,
                                  port=self.cluster.port)
    else:
        # Re-fetch the user so the private key is read from the database
        # rather than a possibly stale in-memory copy.
        user = type(self.user).objects.get(id=self.user.id)
        private = StringIO(user.private_key)
        return get_ssh_connection(self.cluster.hostname,
                                  self.username,
                                  key=private,
                                  port=self.cluster.port)
def save_model(model, errors):
    logger.info("Saving Model")
    with StringIO(name="decay_predictors.pkl") as f:
        cPickle.dump(model, f, protocol=-1)
        f.seek(0)
        pred = Predictor(
            homo_error=errors[0],
            lumo_error=errors[1],
            gap_error=errors[2],
            pickle=File(f),
        )
        pred.save()
def compute_axes_percents(f):
    data = f.read()
    coords = get_coordinates(StringIO(data))
    atom_idxs, elements, orb_values, occupied_orbs = get_numbers(
        StringIO(data))

    body = '\n'.join("%s %f %f %f" % (ele, x, y, z)
                     for ele, (x, y, z) in zip(elements, coords))
    bonds = calculate_bonds(body)
    mol = from_xyz(StringIO(body + "\n\n" + bonds))
    vert_idxs, horz_idxs = get_group_indices(mol)
    vert_idxs = set(vert_idxs)
    horz_idxs = set(horz_idxs)
    center_idxs = [i for i in xrange(len(coords))
                   if i not in vert_idxs and i not in horz_idxs]

    homo_idx = get_homo_index(occupied_orbs)
    homo_values = [x[homo_idx] for x in orb_values]
    lumo_values = [x[homo_idx + 1] for x in orb_values]

    homo_res = calculate_groups(homo_values, atom_idxs, vert_idxs, horz_idxs)
    lumo_res = calculate_groups(lumo_values, atom_idxs, vert_idxs, horz_idxs)

    # X and Y coordinates are flipped in Gaussian files
    image = draw([x[1] for x in coords], [x[0] for x in coords],
                 vert_idxs, horz_idxs)
    return ("H**O", ) + homo_res, ("LUMO", ) + lumo_res, image
def convert_logs(logsets):
    converted = []
    for key in logsets:
        nvals = []
        homovals = []
        lumovals = []
        gapvals = []
        for num, log in logsets[key]:
            parser = fileparser.Log(log)
            nvals.append(num)
            homovals.append(parser["H**O"])
            lumovals.append(parser["LUMO"])
            gapvals.append(parser["ExcitationEnergy1"])

        f = StringIO(name=key)
        f.write(', '.join(nvals) + '\n')
        f.write(', '.join(homovals) + '\n')
        f.write(', '.join(lumovals) + '\n')
        f.write(', '.join(gapvals) + '\n')
        f.seek(0)
        converted.append(f)
    return converted
def get_sftp_connection(self):
    if self.use_password:
        return get_sftp_connection(self.cluster.hostname,
                                   self.username,
                                   password=self.password,
                                   port=self.cluster.port)
    else:
        # Drop the cached profile (if any) so the private key read below is
        # not stale.
        try:
            del self.user._profile_cache
        except AttributeError:
            pass
        private = StringIO(self.user.private_key)
        return get_sftp_connection(self.cluster.hostname,
                                   self.username,
                                   key=private,
                                   port=self.cluster.port)
def parse_log(request, upload_form):
    split_iter = upload_form.cleaned_data['split_iter']
    parser = fileparser.LogSet(split_iter=split_iter)

    for f in upload_form.cleaned_data["files"]:
        parser.parse_file(f)

    output = parser.format_output()
    f2 = StringIO(output)
    if upload_form.cleaned_data['store'] and request.user.is_staff:
        number_added = load_data.main(f2)
        string = "%d datapoint(s) added to database." % number_added
        logger.info(string)
        output += "\n\n\n" + string
    response = HttpResponse(output, content_type="text/plain")
    return response
def draw(self, scale, svg=False, hydrogens=True, colors=True,
         fancy_bonds=True):
    '''Draws a basic image of the molecule.'''
    offset = 0.25
    mins, maxs = self.bounding_box()
    dimensions = (maxs - mins) + 2 * offset
    dimensions *= scale
    mins = mins - offset

    WIDTH = int(dimensions[1])
    HEIGHT = int(dimensions[0])

    f = StringIO()
    surface = cairo.SVGSurface(f, WIDTH, HEIGHT)
    ctx = cairo.Context(surface)

    ctx.scale(scale, scale)
    ctx.rotate(math.pi / 2)
    # hack to fix the translation from the rotation
    ctx.translate(0, -dimensions[1] / scale)
    ctx.translate(-mins[0], -mins[1])
    ctx.set_line_width(0.1)

    def draw_bond(ctx, coords1, coords2, unit, factors):
        for x in factors:
            ctx.move_to(*(x * unit + coords1))
            ctx.line_to(*(x * unit + coords2))
            ctx.stroke()

    ctx.set_source_rgb(*COLORS2['1'])
    for bond in self.bonds:
        if not hydrogens and any(x.element == 'H' for x in bond.atoms):
            continue
        if colors:
            ctx.set_source_rgb(*COLORS2[bond.type])

        coords1 = numpy.array(bond.atoms[0].xyz_tuple[:2])
        coords2 = numpy.array(bond.atoms[1].xyz_tuple[:2])

        temp = (coords2 - coords1)
        mag = numpy.linalg.norm(temp)
        unit = numpy.array([-temp[1] / mag, temp[0] / mag])

        if fancy_bonds:
            if bond.type == '2':
                draw_bond(ctx, coords1, coords2, unit, [0.1, -0.1])
            elif bond.type == '3':
                draw_bond(ctx, coords1, coords2, unit, [0.2, 0.0, -0.2])
            elif bond.type == 'Ar':
                ctx.save()
                ctx.set_dash([0.3, 0.15])
                draw_bond(ctx, coords1, coords2, unit, [0.1, -0.1])
                ctx.restore()
            else:
                draw_bond(ctx, coords1, coords2, unit, [0.0])
        else:
            draw_bond(ctx, coords1, coords2, unit, [0.0])

    for atom in self.atoms:
        if not hydrogens and atom.element == 'H':
            continue
        ctx.set_source_rgb(*COLORS2[atom.element])
        point = atom.xyz_tuple
        ctx.arc(point[0], point[1], 0.25, 0, 2 * math.pi)
        ctx.fill()

    if svg:
        surface.finish()
    else:
        surface.write_to_png(f)
    return f
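# Hedged usage sketch (not part of the original module): draw() returns a
# StringIO buffer holding PNG data when svg=False (or SVG data when
# svg=True). `mol` is assumed to be an instance of the class defining
# draw() above; the output path is illustrative.
def _example_save_drawing(mol, path="molecule.png"):
    buff = mol.draw(scale=50, svg=False)
    with open(path, "wb") as out:
        out.write(buff.getvalue())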
def clean_template(self):
    template = self.cleaned_data.get("template")
    name = self.cleaned_data.get("name")
    return File(StringIO(template, name=name))
def reset_gjf(request, upload_form):
    job_form = JobForm.get_form(request, "{{ name }}")

    errors = []
    strings = []
    names = []
    for f in upload_form.cleaned_data["files"]:
        try:
            parser = fileparser.Log(f)

            name, _ = os.path.splitext(f.name)
            td = False
            if request.REQUEST.get("td_reset"):
                name += '_TD'
                td = True
            strings.append(parser.format_gjf(td=td))
            names.append(name)
        except Exception as e:
            logger.warn(
                "There was an error when trying to reset a gjf: '%s'"
                % str(e))
            errors.append((f.name, e))

    if request.REQUEST.get("gjf_submit"):
        if not job_form.is_valid(request.method):
            if request.is_ajax():
                upload_form_html = render_crispy_form(
                    upload_form, context=RequestContext(request))
                job_form_html = render_crispy_form(
                    job_form, context=RequestContext(request))
                a = {
                    "success": False,
                    "job_form_html": job_form_html,
                    "upload_form_html": upload_form_html,
                }
                return HttpResponse(json.dumps(a),
                                    content_type="application/json")
            c = {
                "job_form": job_form,
                "upload_form": upload_form,
            }
            return render(request, "chem/upload_log.html", c)

        d = dict(job_form.cleaned_data)
        cred = d.pop("credential")
        a = cluster.interface.run_jobs(cred, names, strings, **d)
        a["failed"].extend(errors)
        do_html = request.REQUEST.get("html", False)
        if do_html:
            html = render_to_string("chem/multi_submit.html", a)
            temp = {"success": True, "html": html}
            return HttpResponse(json.dumps(temp),
                                content_type="application/json")
        else:
            return HttpResponse(json.dumps(a),
                                content_type="application/json")

    buff = StringIO()
    zfile = zipfile.ZipFile(buff, 'w', zipfile.ZIP_DEFLATED)
    for name, string in zip(names, strings):
        zfile.writestr("%s.gjf" % name, string)
    if errors:
        temp = ['%s - %s' % (name, error) for (name, error) in errors]
        zfile.writestr("errors.txt", '\n'.join(temp))
    zfile.close()
    buff.flush()

    ret_zip = buff.getvalue()
    buff.close()

    response = HttpResponse(ret_zip, content_type="application/zip")
    response["Content-Disposition"] = "attachment; filename=output.zip"
    return response
def mock_exec_command(string):
    return StringIO(''), StringIO(''), StringIO('')
def mock_exec_command2(string):
    print string
    return StringIO(''), StringIO(''), StringIO('some cp error')
def from_log(file):
    out = fileparser.Log(file)
    return from_gjf(StringIO(out.format_gjf()))
def make_io_triplet(stdin='', stdout='', stderr=''):
    return StringIO(stdin), StringIO(stdout), StringIO(stderr)
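# Hedged test sketch (not part of the original module): these helpers mimic
# the (stdin, stdout, stderr) triplet that paramiko-style exec_command
# returns, so tests can stub out remote commands without a real connection.
def _example_failed_copy():
    stdin, stdout, stderr = mock_exec_command2("cp a.txt b.txt")
    assert stderr.read() == "some cp error"
    # make_io_triplet builds the same shape with custom contents:
    _, out, _ = make_io_triplet(stdout="file listing")
    assert out.read() == "file listing"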
def from_gjf(file):
    """Build a molecule from a Gaussian input (.gjf) file-like object."""
    data = file.read().replace('\r', '')
    parts = data.split("\n\n")

    header = parts[0].strip()
    assert "#" in header
    header_lines = header.split('\n')
    if "#" in header_lines[-1]:
        has_bonds = "connectivity" in header_lines[-1]
        has_redundant = "modredundant" in header_lines[-1]
    else:
        raise Exception("The header is missing a #")

    # title = parts[1].strip()

    other = [x for x in parts[3:] if x.strip()]
    if len(other) < (has_bonds + has_redundant):
        raise Exception(
            "Either the bonds data or redundant coords are missing")

    letter_first = []
    number_first = []
    for part in other:
        if part.strip()[0] in string.letters:
            letter_first.append(part)
        elif part.strip()[0] in string.digits:
            number_first.append(part)

    if has_redundant:
        if len(letter_first) > 1:
            variables, redundant = letter_first
        else:
            redundant = letter_first[0]
            variables = ''
    else:
        if len(letter_first) == 1:
            variables = letter_first[0]
            redundant = ''
        elif len(letter_first) < 1:
            variables = ''
            redundant = ''
        else:
            raise Exception("Too many letter first groups")

    if has_bonds:
        temp = number_first[0]
        bonds = []
        for line in temp.split('\n'):
            comp = line.strip().split()
            if len(comp) < 3:
                continue
            main = comp[0]
            comp = comp[1:]
            for i, x in enumerate(comp[::2]):
                bonds.append("%s %s %s" % (main, x, comp[2 * i + 1]))
        bonds_string = "\n".join(bonds)
    else:
        bonds_string = ''

    body = parts[2].strip()
    start = body.index('\n')
    charge, multiplicity = body[0:start].strip().split()
    geom = body[start + 1:]

    if variables:
        geom = replace_geom_vars(geom, variables)

    if len(geom[:geom.index("\n")].strip().split()) < 4:
        geom = convert_zmatrix_to_cart(geom)

    if not has_bonds:
        bonds_string = calculate_bonds(geom)

    f = StringIO(geom + "\n\n" + bonds_string)
    return from_xyz(f)
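# Hedged usage sketch (not part of the original module): a minimal Gaussian
# input with a route line, blank-line-separated title section, and a
# cartesian geometry. The molecule below (H2) is illustrative only.
def _example_from_gjf():
    gjf = "\n".join([
        "# opt b3lyp/6-31g(d)",
        "",
        "title",
        "",
        "0 1",
        "H 0.000000 0.000000 0.000000",
        "H 0.000000 0.000000 0.740000",
        "",
    ])
    return from_gjf(StringIO(gjf))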