def load_project(self, projdir):
    ''' Activate the project in the specified directory; instantiate
        a file manager and projdirfactory.
    '''
    _clear_insts()
    self.cleanup()

    try:
        # Start a new log file.
        logging.getLogger().handlers[0].doRollover()

        self.files = FileManager('files', path=projdir,
                                 publish_updates=self.publish_updates)
        self.projdirfactory = ProjDirFactory(projdir,
                                             observer=self.files.observer)
        register_class_factory(self.projdirfactory)

        self.proj = Project(projdir)
        repo = get_repo(projdir)
        if repo is None:
            find_vcs()[0](projdir).init_repo()
        self.proj.activate()
    except Exception as err:
        self._error(err, sys.exc_info())
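# A minimal usage sketch, not from the original sources: how a caller such as
# a web request handler might drive load_project() together with
# set_current_project(), which is defined on ConsoleServer later in this
# collection. The names 'cserver' and 'project_dir' are hypothetical.
def open_project(cserver, project_dir):
    # Record the project name first; per the ConsoleServer docstring,
    # load_project() happens too late to affect the first template render.
    cserver.set_current_project(project_dir)
    cserver.load_project(project_dir)
    return cserver.get_project()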
def test_project_export_import(self):
    proj = Project(os.path.join(self.tdir, 'proj1'))
    self.assertEqual(proj.config.items('info'),
                     [('version', '0'), ('description', '')])
    new_info = [('version', 'stinky'), ('description', 'Frobozz rulz!')]
    proj.set_info(dict(new_info))
    proj.activate()
    self._fill_project(proj)
    proj.export(destdir=self.tdir)
    proj.deactivate()

    newproj = project_from_archive(os.path.join(self.tdir,
                                                'proj1%s' % PROJ_FILE_EXT),
                                   proj_name='proj2',
                                   dest_dir=self.tdir)
    self.assertEqual(newproj.path, os.path.join(self.tdir, 'proj2'))
    self.assertEqual(newproj.config.items('info'), new_info)

    try:
        newproj = project_from_archive(os.path.join(self.tdir,
                                                    'proj1%s' % PROJ_FILE_EXT),
                                       dest_dir=self.tdir)
    except Exception, err:
        self.assertTrue(str(err).endswith(' already exists'))
def post(self, project_id):
    forms = {}
    for field in ['projectname', 'description', 'version']:
        if field in self.request.arguments.keys():
            forms[field] = self.request.arguments[field][0]

    pdb = Projects()

    # Existing project.
    if int(project_id) != pdb.predict_next_rowid():
        project = pdb.get(project_id)
        project_is_new = False
    # New project.
    else:
        project = {}
        project['active'] = 0
        project['projpath'] = None
        project_is_new = True

    if 'projectname' not in forms or len(forms['projectname']) == 0:
        project['projectname'] = "Unnamed Project"
    else:
        project['projectname'] = forms['projectname'].strip()

    if 'description' in forms:
        project['description'] = forms['description'].strip()
    else:
        project['description'] = ''

    if 'version' in forms:
        project['version'] = forms['version'].strip()
    else:
        project['version'] = ''

    # if there's no proj dir yet, create an empty one
    if not project['projpath']:
        directory = self.get_project_dir()
        pname = project['projectname']
        project['projpath'] = _get_unique_name(directory, pname)

    if project_is_new:
        pdb.new(project)
        os.mkdir(project['projpath'])
    else:
        for key, value in project.iteritems():
            pdb.set(project_id, key, value)
        pdb.modified(project_id)

    # Update project settings.
    proj = Project(project['projpath'])
    dummy = proj.get_info()  # Just to get required keys.
    info = {}
    for key in dummy:
        info[key] = project[key]
    proj.set_info(info)

    self.redirect("/workspace/project?projpath=" + project['projpath'])
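# The _get_unique_name() helper used above is not included in these snippets.
# A minimal sketch, assuming it mirrors the loop that the project-creation
# handlers below inline (sanitize the name, then append _1, _2, ... until the
# directory name is unused). The regex sanitizer here is a stand-in for the
# clean_filename() helper used elsewhere in these handlers.
import os
import re


def _get_unique_name(directory, name):
    """Return a path under `directory` that does not exist yet (sketch)."""
    base = re.sub(r'[^\w.-]', '_', name.strip())
    unique = base
    i = 1
    while os.path.exists(os.path.join(directory, unique)):
        unique = '%s_%s' % (base, str(i))
        i = i + 1
    return os.path.join(directory, unique)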
def test_using(self):
    proj = Project('a_proj')
    self._fill_project(proj.top)
    proj.top.run()
    self.assertEqual(proj.top.comp1.rval_out, 10.)
    self.assertEqual(proj.top.comp2.rval_out, 40.)
    proj.top.comp1.rval_in = 0.5

    os.chdir(self.tdir)
    proj.export(projname='fooproj')
    fooproj = project_from_archive('fooproj.proj')
    self.assertEqual(fooproj.top.comp1.rval_in, proj.top.comp1.rval_in)
    fooproj.top.run()
    self.assertEqual(fooproj.top.comp1.rval_out, 1.)
    self.assertEqual(fooproj.top.comp2.rval_out, 4.)
def post(self):
    pdb = Projects()

    forms = {}
    for field in ['projectname', 'description', 'version']:
        if field in self.request.arguments.keys():
            forms[field] = self.request.arguments[field][0]

    project = {}
    project['projectname'] = forms['projectname'].strip()
    project['description'] = forms['description'].strip()
    project['version'] = forms['version'].strip()
    project['id'] = pdb.predict_next_rowid()
    project['active'] = 1

    # figure out a unique directory name for the project using
    # the project name and version string
    directory = self.get_project_dir()
    version = project['version']
    pname = project['projectname']
    if len(version):
        filename = clean_filename('%s-%s' % (pname, version))
    else:
        filename = clean_filename(pname)

    unique = filename
    i = 1
    while os.path.exists(os.path.join(directory, unique)):
        unique = '%s_%s' % (filename, str(i))
        i = i + 1

    project['projpath'] = os.path.join(directory, unique)

    pdb.new(project)
    os.mkdir(project['projpath'])

    # Update project settings.
    proj = Project(project['projpath'])
    dummy = proj.get_info()  # Just to get required keys.
    info = {}
    for key in dummy:
        info[key] = project[key]
    proj.set_info(info)

    self.redirect("/workspace/project?projpath=" +
                  quote_plus(project['projpath']))
def post(self):
    pdb = Projects()

    forms = {}
    for field in ['projectname', 'description', 'version']:
        if field in self.request.arguments.keys():
            forms[field] = self.request.arguments[field][0]

    project = {}
    project['projectname'] = forms['projectname'].strip()
    project['description'] = forms['description'].strip()
    project['version'] = forms['version'].strip()
    project['id'] = pdb.predict_next_rowid()
    project['active'] = 1

    # figure out a unique directory name for the project using
    # the project name and version string
    directory = self.get_project_dir()
    version = project['version']
    pname = project['projectname']
    if len(version):
        filename = clean_filename('%s-%s' % (pname, version))
    else:
        filename = clean_filename(pname)

    unique = filename
    i = 1
    while os.path.exists(os.path.join(directory, unique)):
        unique = '%s_%s' % (filename, str(i))
        i = i + 1

    project['projpath'] = os.path.join(directory, unique)

    pdb.new(project)
    os.mkdir(project['projpath'])

    # Update project settings.
    proj = Project(project['projpath'])
    dummy = proj.get_info()  # Just to get required keys.
    info = {}
    for key in dummy:
        info[key] = project[key]
    proj.set_info(info)

    self.redirect("/workspace/project?projpath=" + project['projpath'])
def test_project_export_import(self):
    proj = Project(os.path.join(self.tdir, 'proj1'))
    self._fill_project(proj.top)
    proj.export(destdir=self.tdir)
    proj.deactivate()

    newproj = project_from_archive(os.path.join(self.tdir,
                                                'proj1%s' % PROJ_FILE_EXT),
                                   proj_name='proj2',
                                   dest_dir=self.tdir)
    self.assertEqual(newproj.path, os.path.join(self.tdir, 'proj2'))

    try:
        newproj = project_from_archive(os.path.join(self.tdir,
                                                    'proj1%s' % PROJ_FILE_EXT),
                                       dest_dir=self.tdir)
    except Exception, err:
        self.assertTrue(str(err).endswith(' already exists'))
def get(self, project_id):
    ''' Browser download of a project file. '''
    pdb = Projects()
    project = pdb.get(project_id)
    if project['projpath']:
        dirname = project['projpath']
        if os.path.isdir(dirname):
            proj = Project(dirname)
            tdir = mkdtemp()
            try:
                filename = proj.export(destdir=tdir)
                proj_file = open(filename, 'rb')
                self.set_header('content_type', 'application/octet-stream')
                self.set_header('Content-Length',
                                str(os.path.getsize(filename)))
                form_proj = clean_filename(project['projectname'])
                form_ver = clean_filename(project['version'])
                form_date = strftime('%Y-%m-%d_%H%M%S')
                self.set_header('Content-Disposition',
                                'attachment; filename=%s-%s-%s.proj'
                                % (form_proj, form_ver, form_date))
                try:
                    self.write(proj_file.read())
                finally:
                    proj_file.close()
            finally:
                try:
                    shutil.rmtree(tdir, onerror=onerror)
                except:
                    pass
        else:
            raise HTTPError(dirname, 403,
                            "%s is not a directory" % dirname, None, None)
    else:
        raise HTTPError(project['projpath'], 403,
                        "no file found for %s" % project['projectname'],
                        None, None)
def get(self, project_id):
    ''' Browser download of a project file. '''
    pdb = Projects()
    project = pdb.get(project_id)
    if project['projpath']:
        dirname = project['projpath']
        if os.path.isdir(dirname):
            proj = Project(dirname)
            tdir = mkdtemp()
            try:
                filename = proj.export(destdir=tdir)
                proj_file = open(filename, 'rb')
                self.set_header('content_type', 'application/octet-stream')
                self.set_header('Content-Length',
                                str(os.path.getsize(filename)))
                form_proj = clean_filename(project['projectname'])
                form_ver = clean_filename(project['version'])
                form_date = strftime('%Y-%m-%d_%H%M%S')
                self.set_header('Content-Disposition',
                                'attachment; filename=%s-%s-%s.proj'
                                % (form_proj, form_ver, form_date))
                try:
                    self.write(proj_file.read())
                finally:
                    proj_file.close()
            finally:
                try:
                    shutil.rmtree(tdir)
                except:
                    pass
        else:
            raise HTTPError(dirname, 403,
                            "%s is not a directory" % dirname, None, None)
    else:
        raise HTTPError(project['projpath'], 403,
                        "no file found for %s" % project['projectname'],
                        None, None)
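# clean_filename() is used by the download and creation handlers above but is
# not defined in these snippets. A plausible stand-in (an assumption, not the
# project's actual helper) that strips characters unsafe for the
# Content-Disposition filename:
import re


def clean_filename(name):
    """Return `name` with anything outside [A-Za-z0-9_.-] replaced by '_'."""
    return re.sub(r'[^\w.-]', '_', name.strip())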
def test_using(self):
    proj = Project('a_proj')
    proj.activate()
    self._fill_project(proj)
    top = proj.get('top')
    top.run()
    self.assertEqual(top.comp1.rval_out, 10.)
    self.assertEqual(top.comp2.rval_out, 40.)
    proj.command("top.comp1.rval_in = 0.5")

    os.chdir(self.tdir)
    proj.export(projname='fooproj')
    fooproj = project_from_archive('fooproj.proj')
    fooproj.activate()
    footop = fooproj.get('top')
    self.assertEqual(footop.comp1.rval_in, top.comp1.rval_in)
    footop.run()
    self.assertEqual(footop.comp1.rval_out, 1.)
    self.assertEqual(footop.comp2.rval_out, 4.)
def test_project_export_import(self):
    proj = Project(os.path.join(self.tdir, 'proj1'))
    proj.activate()
    self._fill_project(proj)
    proj.export(destdir=self.tdir)
    proj.deactivate()

    newproj = project_from_archive(os.path.join(self.tdir,
                                                'proj1%s' % PROJ_FILE_EXT),
                                   proj_name='proj2',
                                   dest_dir=self.tdir)
    self.assertEqual(newproj.path, os.path.join(self.tdir, 'proj2'))

    try:
        newproj = project_from_archive(os.path.join(self.tdir,
                                                    'proj1%s' % PROJ_FILE_EXT),
                                       dest_dir=self.tdir)
    except Exception, err:
        self.assertTrue(str(err).endswith(' already exists'))
def post(self, project_id):
    forms = {}
    for field in ['projectname', 'description', 'version']:
        if field in self.request.arguments.keys():
            forms[field] = self.request.arguments[field][0]

    pdb = Projects()

    # Existing project.
    if int(project_id) != pdb.predict_next_rowid():
        project = pdb.get(project_id)
        project_is_new = False
    # New project.
    else:
        project = {}
        project['active'] = 0
        project['projpath'] = None
        project_is_new = True

    if 'projectname' not in forms or len(forms['projectname']) == 0:
        project['projectname'] = "Unnamed Project"
    else:
        project['projectname'] = forms['projectname'].strip()

    if 'description' in forms:
        project['description'] = forms['description'].strip()
    else:
        project['description'] = ''

    if 'version' in forms:
        project['version'] = forms['version'].strip()
    else:
        project['version'] = ''

    directory = forms.get('directory', self.get_project_dir())

    # if there's no proj dir yet, create an empty one
    if not project['projpath']:
        version = project['version']
        pname = project['projectname']
        if len(version):
            filename = clean_filename('%s-%s' % (pname, version))
        else:
            filename = clean_filename(pname)

        unique = filename
        i = 1
        while os.path.exists(os.path.join(directory, unique)):
            unique = '%s_%s' % (filename, str(i))
            i = i + 1

        project['projpath'] = os.path.join(directory, unique)

    if project_is_new:
        pdb.new(project)
        os.mkdir(project['projpath'])
    else:
        for key, value in project.iteritems():
            pdb.set(project_id, key, value)
        pdb.modified(project_id)

    # Update project settings.
    proj = Project(project['projpath'])
    dummy = proj.get_info()  # Just to get required keys.
    info = {}
    for key in dummy:
        info[key] = project[key]
    proj.set_info(info)

    self.redirect("/workspace/project?projpath=" + project['projpath'])
def post(self):
    if not self.request.arguments.has_key("projectname"):
        # First step in the import process.
        # Just get the name, description and version of the
        # project the user wants to import.
        # Then pass this to the form so the user can change it.

        # Go through the process of creating a new project directory
        # so we can read the name, description and version from the
        # settings file.
        sourcefile = self.request.files['projectfile'][0]
        if sourcefile:
            filename = sourcefile['filename']
            if len(filename) > 0:
                unique = _get_unique_name(self.get_project_dir(),
                                          parse_archive_name(filename))
                os.mkdir(unique)

                buff = StringIO.StringIO(sourcefile['body'])
                archive = tarfile.open(fileobj=buff, mode='r:gz')
                archive.extractall(path=unique)

                vcslist = find_vcs()
                if vcslist:
                    vcs = vcslist[0](unique)
                else:
                    vcs = DumbVCS(unique)
                vcs.init_repo()

                # Update project dict with info section of config file.
                proj = Project(unique)
                shutil.rmtree(unique)
                project_info = proj.get_info()

                self.render('projdb/import-metadata-fields.html',
                            projectname=parse_archive_name(unique),
                            description=project_info['description'],
                            version=project_info['version'])

        self.redirect("/")
    else:
        forms = {}
        for field in ['projectname', 'description', 'version']:
            if field in self.request.arguments.keys():
                forms[field] = self.request.arguments[field][0]

        sourcefile = self.request.files['projectfile'][0]
        if sourcefile:
            filename = sourcefile['filename']
            if len(filename) > 0:
                unique = _get_unique_name(self.get_project_dir(),
                                          parse_archive_name(filename))

                pdb = Projects()

                project = {}
                project['id'] = pdb.predict_next_rowid()
                project['active'] = 1
                project['projectname'] = forms['projectname'].strip()
                project['description'] = forms['description'].strip()
                project['version'] = forms['version'].strip()
                project['projpath'] = unique

                os.mkdir(unique)

                buff = StringIO.StringIO(sourcefile['body'])
                archive = tarfile.open(fileobj=buff, mode='r:gz')
                archive.extractall(path=unique)

                vcslist = find_vcs()
                if vcslist:
                    vcs = vcslist[0](unique)
                else:
                    vcs = DumbVCS(unique)
                vcs.init_repo()

                # Update project settings.
                proj = Project(project['projpath'])
                dummy = proj.get_info()  # Just to get required keys.
                info = {}
                for key in dummy:
                    info[key] = project[key]
                proj.set_info(info)

                pdb.new(project)

                self.redirect("/workspace/project?projpath=" +
                              project['projpath'])

        self.redirect("/")
def test_localfile_factory(self):
    proj = Project(os.path.join(self.tdir, 'proj2'))
    proj.activate()
    self._fill_project(proj)
def post(self):
    # The project file is uploaded once to extract the metadata.
    # It is then deleted and the metadata is used to populate another
    # import dialog, giving the user an opportunity to edit the
    # info before importing or cancel the import.
    if not 'projectname' in self.request.arguments:
        # First upload.
        sourcefile = self.request.files['projectfile'][0]
        if sourcefile:
            filename = sourcefile['filename']
            if len(filename) > 0:
                unique = _get_unique_name(self.get_project_dir(),
                                          parse_archive_name(filename))
                tdir = mkdtemp(prefix=unique)

                buff = StringIO.StringIO(sourcefile['body'])
                archive = tarfile.open(fileobj=buff, mode='r:gz')
                archive.extractall(path=tdir)

                proj = Project(tdir)
                project_info = proj.get_info()

                try:
                    shutil.rmtree(tdir, onerror=onerror)
                except:
                    pass

                self.render('projdb/import-metadata-fields.html',
                            projectname=parse_archive_name(unique),
                            description=project_info['description'],
                            version=project_info['version'])
    else:
        # Second upload.
        forms = {}
        for field in ['projectname', 'description', 'version']:
            if field in self.request.arguments.keys():
                forms[field] = self.request.arguments[field][0]

        sourcefile = self.request.files['projectfile'][0]
        if sourcefile:
            filename = sourcefile['filename']
            if len(filename) > 0:
                unique = _get_unique_name(self.get_project_dir(),
                                          parse_archive_name(filename))

                pdb = Projects()

                project = {}
                project['id'] = pdb.predict_next_rowid()
                project['active'] = 1
                project['projectname'] = forms['projectname'].strip()
                project['description'] = forms['description'].strip()
                project['version'] = forms['version'].strip()
                project['projpath'] = unique

                os.mkdir(unique)

                buff = StringIO.StringIO(sourcefile['body'])
                archive = tarfile.open(fileobj=buff, mode='r:gz')
                archive.extractall(path=unique)

                vcslist = find_vcs()
                if vcslist:
                    vcs = vcslist[0](unique)
                else:
                    vcs = DumbRepo(unique)
                vcs.init_repo()

                # Update project settings.
                proj = Project(project['projpath'])
                dummy = proj.get_info()  # Just to get required keys.
                info = {}
                for key in dummy:
                    info[key] = project[key]
                proj.set_info(info)

                pdb.new(project)

                self.redirect("/workspace/project?projpath=" +
                              quote_plus(project['projpath']))

        self.redirect("/")
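# parse_archive_name() is referenced by the import handlers but not shown in
# these snippets. A minimal sketch of assumed behavior: strip any leading
# directories and the project-archive extension (PROJ_FILE_EXT, '.proj'
# elsewhere in this collection) from an uploaded filename.
import os


def parse_archive_name(filename):
    """'path/to/MyProj.proj' -> 'MyProj' (assumed behavior)."""
    root, _ext = os.path.splitext(os.path.basename(filename))
    return root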
class ConsoleServer(cmd.Cmd):
    ''' Object which knows how to load an OpenMDAO project and provides a
        command line interface and methods to interact with that project.
    '''

    def __init__(self, name='', host='', publish_updates=True):
        cmd.Cmd.__init__(self)

        self.intro = 'OpenMDAO ' + __version__ + ' (' + __date__ + ')'
        self.prompt = 'OpenMDAO>> '

        self._hist = []
        self.host = host
        self._projname = ''
        self.proj = None
        self.exc_info = None
        self.publish_updates = publish_updates
        self._publish_comps = {}

        self._log_directory = os.getcwd()
        self._log_handler = None
        self._log_subscribers = 0

        self._partial_cmd = None  # for multi-line commands

        self.projdirfactory = None
        self.files = None

        # make sure we have a ProjFinder in sys.path_hooks
        if not ProjFinder in sys.path_hooks:
            sys.path_hooks = [ProjFinder] + sys.path_hooks

    def set_current_project(self, path):
        """ Set current project name. """
        # Called by ProjectHandler, since load_project() is too late to
        # affect the rendering of the template.
        self._projname = os.path.basename(path)

    def get_current_project(self):
        """ Get current project name. """
        return self._projname

    def _update_roots(self):
        ''' Ensure that all root containers in the project dictionary know
            their own name and are set as top.
        '''
        for k, v in self.proj.items():
            if has_interface(v, IContainer):
                if v.name != k:
                    v.name = k
                if v._call_cpath_updated:
                    set_as_top(v)

    def _update_workflows(self):
        ''' Call :meth:`_update_workflow` on drivers to capture any workflow
            updates now rather than waiting until they are run.
        '''
        for k, v in self.proj.items():
            if has_interface(v, IContainer):
                for driver in [obj for name, obj in v.items(recurse=True)
                               if is_instance(obj, Driver)]:
                    driver._update_workflow()

    def publish_components(self):
        ''' Publish the current component tree and subscribed components. '''
        try:
            publish('components', self.get_components())
            publish('', {'Dataflow': self.get_dataflow('')})
            publish('', {'Workflow': self.get_workflow('')})
        except Exception as err:
            self._error(err, sys.exc_info())
        else:
            comps = self._publish_comps.keys()
            for pathname in comps:
                comp, root = self.get_object(pathname, report=False)
                if comp is None:
                    del self._publish_comps[pathname]
                    publish(pathname, {})
                else:
                    publish(pathname, comp.get_attributes(io_only=False))

    def send_pub_msg(self, msg, topic):
        ''' Publish the given message with the given topic. '''
        publish(topic, msg)

    def _error(self, err, exc_info):
        ''' Publish error message and save stack trace in case it's
            requested.
        '''
        self._partial_cmd = None
        self.exc_info = exc_info
        msg = '%s: %s' % (err.__class__.__name__, err)
        logger.error(msg)
        self._print_error(msg)

    def _print_error(self, msg):
        ''' Publish error message. '''
        try:
            publish('console_errors', msg)
        except:
            logger.error('publishing of message failed')

    def do_trace(self, arg):
        ''' Print remembered trace from last exception. '''
        if self.exc_info:
            exc_type, exc_value, exc_traceback = self.exc_info
            traceback.print_exception(exc_type, exc_value, exc_traceback)
        else:
            print "No trace available."

    def precmd(self, line):
        ''' This method is called after the line has been input but before
            it has been interpreted. If you want to modify the input line
            before execution (for example, variable substitution), do it here.
        '''
        # self._hist += [line.strip()]
        return line

    @modifies_project
    @modifies_state
    def onecmd(self, line):
        self._hist.append(line)
        try:
            cmd.Cmd.onecmd(self, line)
        except Exception as err:
            self._error(err, sys.exc_info())

    def parseline(self, line):
        """Have to override this because base class version strips the lines,
           making multi-line Python commands impossible.
        """
        # line = line.strip()
        if not line:
            return None, None, line
        elif line[0] == '?':
            line = 'help ' + line[1:]
        elif line[0] == '!':
            if hasattr(self, 'do_shell'):
                line = 'shell ' + line[1:]
            else:
                return None, None, line
        i, n = 0, len(line)
        while i < n and line[i] in self.identchars:
            i = i + 1
        cmd, arg = line[:i], line[i:].strip()
        return cmd, arg, line

    def emptyline(self):
        # Default for empty line is to repeat last command - yuck
        if self._partial_cmd:
            self.default('')

    def default(self, line):
        ''' Called on an input line when the command prefix is not
            recognized. In this case we execute the line as Python code.
        '''
        line = line.rstrip()
        if self._partial_cmd is None:
            if line.endswith(':'):
                self._partial_cmd = line
                return
        else:
            if line:
                self._partial_cmd = self._partial_cmd + '\n' + line
            if line.startswith(' ') or line.startswith('\t'):
                return
            else:
                line = self._partial_cmd
                self._partial_cmd = None
        try:
            result = self.proj.command(line)
            if result is not None:
                print result
        except Exception as err:
            self._error(err, sys.exc_info())

    @modifies_project
    @modifies_state
    def run(self, pathname, *args, **kwargs):
        ''' Run the component `pathname`. If no pathname is specified,
            use `top`.
        '''
        pathname = pathname or 'top'
        if pathname in self.proj:
            print "Executing..."
            try:
                comp = self.proj.get(pathname)
                comp.run(*args, **kwargs)
                print "Execution complete."
            except Exception as err:
                self._error(err, sys.exc_info())
        else:
            self._print_error("Execution failed: No %r component was found."
                              % pathname)

    @modifies_project
    @modifies_state
    def execfile(self, filename):
        ''' Execfile in server's globals. '''
        try:
            self.proj.command("execfile('%s', '%s')"
                              % (filename, file_md5(filename)))
        except Exception as err:
            self._error(err, sys.exc_info())

    def get_pid(self):
        ''' Return this server's :attr:`pid`. '''
        return os.getpid()

    def get_project(self):
        ''' Return the current project. '''
        return self.proj

    def get_history(self):
        ''' Return this server's :attr:`_hist`. '''
        return self._hist

    def get_recorded_cmds(self):
        ''' Return this server's :attr:`_recorded_cmds`. '''
        return self._recorded_cmds[:]

    def get_object(self, pathname, report=True):
        ''' Get the container with the specified pathname. Returns the
            container and the name of the root object.
        '''
        cont = None
        parts = pathname.split('.', 1)
        root = parts[0]
        if self.proj and root in self.proj:
            if root == pathname:
                cont = self.proj.get(root)
            else:
                try:
                    root_obj = self.proj.get(root)
                except Exception as err:
                    self._error(err, sys.exc_info())
                else:
                    try:
                        cont = root_obj.get(parts[1])
                    except AttributeError as err:
                        # When publishing, don't report remove as an error.
                        if report:
                            self._error(err, sys.exc_info())
                    except Exception as err:
                        self._error(err, sys.exc_info())
        return cont, root

    def _get_components(self, cont, pathname=None):
        ''' Get a hierarchical list of all the components in the given
            container or dictionary. The name of the root container, if
            specified, is prepended to all pathnames.
        '''
        comps = []
        for k, v in cont.items():
            if is_instance(v, Component):
                comp = {}
                if cont is self.proj._project_globals:
                    comp['pathname'] = k
                else:
                    comp['pathname'] = '.'.join([pathname, k]) if pathname else k
                children = self._get_components(v, comp['pathname'])
                if len(children) > 0:
                    comp['children'] = children
                comp['type'] = str(v.__class__.__name__)
                inames = []
                for klass in list(implementedBy(v.__class__)):
                    inames.append(klass.__name__)
                comp['interfaces'] = inames
                comps.append(comp)
        return comps

    def get_components(self):
        ''' Get hierarchical dictionary of openmdao objects. '''
        return json.dumps(self._get_components(self.proj._project_globals),
                          default=json_default)

    def get_connectivity(self, pathname):
        ''' Get the connectivity data for the assembly with the given
            pathname.
        '''
        connectivity = {}
        asm, root = self.get_object(pathname)
        if asm:
            try:
                connectivity = asm.get_connectivity()
            except Exception as err:
                self._error(err, sys.exc_info())
        return json.dumps(connectivity, default=json_default)

    def get_dataflow(self, pathname):
        ''' Get the structure of the specified assembly or of the global
            namespace if no pathname is specified; consists of the list of
            components and the connections between them (i.e., the dataflow).
        '''
        dataflow = {}
        if pathname and len(pathname) > 0:
            try:
                asm, root = self.get_object(pathname)
                if has_interface(asm, IAssembly):
                    dataflow = asm.get_dataflow()
            except Exception as err:
                self._error(err, sys.exc_info())
        else:
            components = []
            for k, v in self.proj.items():
                if is_instance(v, Component):
                    inames = [cls.__name__
                              for cls in list(implementedBy(v.__class__))]
                    components.append({'name': k,
                                       'pathname': k,
                                       'type': type(v).__name__,
                                       'interfaces': inames,
                                       'python_id': id(v)})
            dataflow['components'] = components
            dataflow['connections'] = []
            dataflow['parameters'] = []
            dataflow['constraints'] = []
            dataflow['objectives'] = []
            dataflow['responses'] = []
        return json.dumps(dataflow, default=json_default)

    def get_available_events(self, pathname):
        ''' Serve a list of events that are available to a driver. '''
        events = []
        if pathname:
            drvr, root = self.get_object(pathname)
            events = drvr.list_available_events()
        return json.dumps(events, default=json_default)

    def get_workflow(self, pathname):
        ''' Get the workflow for the specified driver or assembly. If no
            driver or assembly is specified, get the workflows for all of
            the top-level assemblies.
        '''
        flows = []
        if pathname:
            drvr, root = self.get_object(pathname)
            # allow for request on the parent assembly
            if is_instance(drvr, Assembly):
                drvr = drvr.get('driver')
                pathname = pathname + '.driver'
            if drvr:
                try:
                    flow = drvr.get_workflow()
                except Exception as err:
                    self._error(err, sys.exc_info())
                flows.append(flow)
        else:
            for k, v in self.proj.items():
                if is_instance(v, Assembly):
                    v = v.get('driver')
                if is_instance(v, Driver):
                    flow = v.get_workflow()
                    flows.append(flow)
        return json.dumps(flows, default=json_default)

    def get_attributes(self, pathname):
        ''' Get the attributes of the specified object. '''
        attr = {}
        comp, root = self.get_object(pathname)
        try:
            if comp:
                attr = comp.get_attributes(io_only=False)
            return json.dumps(attr, default=json_default)
        except Exception as err:
            self._error(err, sys.exc_info())

    def get_passthroughs(self, pathname):
        ''' Get the inputs and outputs of the assembly's child components
            and indicate for each whether or not it is a passthrough variable.
        '''
        asm, root = self.get_object(pathname)
        passthroughs = asm.get_passthroughs()
        return json.dumps(passthroughs, default=json_default)

    def get_value(self, pathname):
        ''' Get the value of the object with the given pathname. '''
        try:
            val, root = self.get_object(pathname)
            return val
        except Exception as err:
            self._print_error("error getting value: %s" % err)

    def get_types(self):
        ''' Get a dictionary of types available for creation. '''
        # Don't want to get variable types showing up, so we exclude
        # 'openmdao.variable' from this list.
        keyset = set(plugin_groups.keys())
        exclset = set(['openmdao.variable'])
        groups = list(keyset - exclset)
        return packagedict(get_available_types(groups))

    @modifies_state
    def load_project(self, projdir):
        ''' Activate the project in the specified directory; instantiate
            a file manager and projdirfactory.
        '''
        _clear_insts()
        self.cleanup()

        try:
            # Start a new log file.
            logging.getLogger().handlers[0].doRollover()

            self.files = FileManager('files', path=projdir,
                                     publish_updates=self.publish_updates)
            self.projdirfactory = ProjDirFactory(projdir,
                                                 observer=self.files.observer)
            register_class_factory(self.projdirfactory)

            self.proj = Project(projdir)
            repo = get_repo(projdir)
            if repo is None:
                find_vcs()[0](projdir).init_repo()
            self.proj.activate()
        except Exception as err:
            self._error(err, sys.exc_info())

    @modifies_project
    def commit_project(self, comment=''):
        ''' Save the current project macro and commit to the project repo. '''
        if self.proj:
            try:
                repo = get_repo(self.proj.path)
                repo.commit(comment)
                print 'Committed project in directory ', self.proj.path
            except Exception as err:
                self._error(err, sys.exc_info())
        else:
            self._print_error('No Project to commit')

    @modifies_project
    def revert_project(self, commit_id=None):
        ''' Revert to the most recent commit of the project. '''
        if self.proj:
            try:
                repo = get_repo(self.proj.path)
                repo.revert(commit_id)
                if commit_id is None:
                    commit_id = 'latest'
                print "Reverted project %s to commit '%s'" \
                      % (self.proj.name, commit_id)
            except Exception as err:
                self._error(err, sys.exc_info())
                # give the caller an indication that something went wrong so
                # he can give the proper error response to the http call if
                # desired. Raising an exception here doesn't work.
                return err
        else:
            msg = 'No Project to revert'
            self._print_error(msg)
            return Exception(msg)

    def get_signature(self, classname):
        ''' Get constructor argument signature for `classname`. '''
        try:
            return get_signature(str(classname))
        except Exception as err:
            self._error(err, sys.exc_info())

    def put_object(self, pathname, classname, args=None):
        ''' Create or replace object with the given pathname with a new
            object of the specified type.
        '''
        obj, root = self.get_object(pathname, report=False)
        if obj:
            self.replace_object(pathname, classname, args)
        else:
            self.add_object(pathname, classname, args)

    @modifies_project
    @modifies_state
    def add_object(self, pathname, classname, args):
        ''' Add a new object of the given type to the specified parent. '''
        parentname, _, name = pathname.rpartition('.')
        if isidentifier(name):
            name = name.encode('utf8')
            if args is None:
                args = ''
            cmd = 'create("%s"%s)' % (classname, args)
            if parentname:
                cmd = '%s.add("%s", %s)' % (parentname, name, cmd)
            else:
                cmd = '%s = set_as_top(%s)' % (name, cmd)
            try:
                self.proj.command(cmd)
            except Exception as err:
                self._error(err, sys.exc_info())
        else:
            self._print_error('Error adding object:'
                              ' "%s" is not a valid identifier' % name)

    @modifies_project
    @modifies_state
    def replace_object(self, pathname, classname, args=None):
        ''' Replace existing object with object of the given type. '''
        pathname = pathname.encode('utf8')
        parentname, _, name = pathname.rpartition('.')
        if parentname:
            try:
                self.proj.command('%s.replace("%s", create("%s"))'
                                  % (parentname, name, classname))
            except Exception as err:
                self._error(err, sys.exc_info())
        else:
            self._print_error('Error replacing component, no parent: "%s"'
                              % pathname)

    def cleanup(self):
        ''' Cleanup various resources. '''
        if self.proj:
            self.proj.deactivate()
        if self.projdirfactory:
            self.projdirfactory.cleanup()
            remove_class_factory(self.projdirfactory)
        if self.files:
            self.files.cleanup()

    def get_files(self):
        ''' Get a nested dictionary of files. '''
        try:
            return self.files.get_files(root=self.proj.path)
        except AttributeError:
            return {}

    def get_file(self, filename):
        ''' Get contents of a file. Returns a tuple of (file contents,
            mimetype, encoding). Tuple values will be None if file was
            not found.
        '''
        return self.files.get_file(filename)

    @modifies_project
    def ensure_dir(self, dirname):
        ''' Create directory (does nothing if directory already exists). '''
        return self.files.ensure_dir(dirname)

    @modifies_project
    def write_file(self, filename, contents):
        ''' Write contents to file. '''
        ret = self.files.write_file(filename, contents)
        if not ret is True:
            return ret

    @modifies_project
    def add_file(self, filename, contents):
        ''' Add file. '''
        return self.files.add_file(filename, contents)

    @modifies_project
    def delete_file(self, filename):
        ''' Delete file from project. Returns False if file was not found;
            otherwise returns True.
        '''
        return self.files.delete_file(filename)

    @modifies_project
    def rename_file(self, oldpath, newname):
        ''' Rename file. '''
        return self.files.rename_file(oldpath, newname)

    def install_addon(self, url, distribution):
        print "Installing", distribution, "from", url
        easy_install.main(["-U", "-f", url, distribution])

    def add_subscriber(self, pathname, publish):
        ''' Publish the specified topic. '''
        if pathname in ['', 'components', 'files', 'types',
                        'console_errors', 'file_errors']:
            # these topics are published automatically
            return
        elif pathname == 'log_msgs':
            if publish:
                self._start_log_msgs(pathname)
            else:
                self._stop_log_msgs()
        elif pathname.startswith('/'):
            # treat it as a filename
            if publish:
                Publisher.register(pathname, pathname[1:])
            else:
                Publisher.unregister(pathname)
        else:
            parts = pathname.split('.', 1)
            if len(parts) > 1:
                root = self.proj.get(parts[0])
                if root:
                    rest = parts[1]
                    root.register_published_vars(rest, publish)

            cont, root = self.get_object(pathname)
            if has_interface(cont, IComponent):
                if publish:
                    if pathname in self._publish_comps:
                        self._publish_comps[pathname] += 1
                    else:
                        self._publish_comps[pathname] = 1
                else:
                    if pathname in self._publish_comps:
                        self._publish_comps[pathname] -= 1
                        if self._publish_comps[pathname] < 1:
                            del self._publish_comps[pathname]

    def _start_log_msgs(self, topic):
        """ Start sending log messages. """
        # Need to lock access while we capture state.
        logging._acquireLock()
        try:
            # Flush output.
            for handler in logging.getLogger().handlers:
                handler.flush()

            # Grab previously logged messages.
            log_path = os.path.join(self._log_directory, 'openmdao_log.txt')
            with open(log_path, 'r') as inp:
                line = True  # Just to get things started.
                while line:
                    lines = []
                    for i in range(100):  # Process in chunks.
                        line = inp.readline()
                        if line:
                            lines.append(line)
                        else:
                            break
                    if lines:
                        publish('log_msgs',
                                dict(active=False, text=''.join(lines)))

            # End of historical messages.
            publish('log_msgs', dict(active=False, text=''))

            # Add handler to get any new messages.
            if self._log_handler is None:
                self._log_handler = _LogHandler()
                logging.getLogger().addHandler(self._log_handler)
        except Exception:
            print "Can't initiate logging:"
            traceback.print_exc()
        finally:
            logging._releaseLock()

        self._log_subscribers += 1

    def _stop_log_msgs(self):
        """ Stop sending log messages. """
        self._log_subscribers -= 1
        if self._log_subscribers <= 0:
            if self._log_handler is not None:
                logging.getLogger().removeHandler(self._log_handler)
                self._log_handler = None
            self._log_subscribers = 0

    def is_macro(self, filename):
        return filename.lstrip('/') == \
               os.path.join(os.path.basename(self.proj.macrodir),
                            self.proj.macro)

    def file_forces_reload(self, filename):
        """Returns True if the given file (assumed to be a file in the
        project) has classes that have been instantiated in the current
        process or if the file is a macro file. Note that this doesn't keep
        track of removes/deletions, so if an instance was created earlier
        and then deleted, it will still be reported.
        """
        pdf = self.projdirfactory
        if pdf:
            if self.is_macro(filename):
                return True
            if filename.endswith('.py'):
                filename = filename.lstrip('/')
                filename = os.path.join(self.proj.path, filename)
                info = pdf._files.get(filename)
                if info and _match_insts(info.classes.keys()):
                    return True
        return False
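# Hedged usage sketch, not from the original sources: driving ConsoleServer
# directly, roughly the way the web handlers above do. 'project_dir' is a
# hypothetical path to an existing project directory.
def demo_console_server(project_dir):
    server = ConsoleServer(publish_updates=False)
    server.set_current_project(project_dir)   # name shown in the UI template
    server.load_project(project_dir)          # activates project and factories
    server.onecmd("top.run()")                # unrecognized lines go to Project.command()
    history = server.get_history()
    server.cleanup()
    return history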
def test_new_project_is_valid(self):
    proj = Project(os.path.join(self.tdir, 'proj1'))
    self._fill_project(proj.top)
    self.assertEqual(proj.path, os.path.join(self.tdir, 'proj1'))
    self.assertTrue(_is_valid_project_dir(proj.path))
def commit(self, comment=''):
    p = Project(self.dirpath)
    p.export(destdir=self.repodir)
def post(self):
    # The project file is uploaded once to extract the metadata.
    # It is then deleted and the metadata is used to populate another
    # import dialog, giving the user an opportunity to edit the
    # info before importing or cancel the import.
    if not 'projectname' in self.request.arguments:
        # First upload.
        sourcefile = self.request.files['projectfile'][0]
        try:
            filename = sourcefile['filename']
            if len(filename) > 0:
                unique = _get_unique_name(self.get_project_dir(),
                                          parse_archive_name(filename))
                tdir = mkdtemp(prefix=unique)

                buff = StringIO.StringIO(sourcefile['body'])
                archive = tarfile.open(fileobj=buff, mode='r:gz')
                archive.extractall(path=tdir)

                proj = Project(tdir)
                project_info = proj.get_info()

                try:
                    shutil.rmtree(tdir, onerror=onerror)
                except:
                    pass

                self.render('projdb/import-metadata-fields.html',
                            projectname=parse_archive_name(unique),
                            description=project_info['description'],
                            version=project_info['version'])
        except Exception as err:
            print 'ERROR: could not get metadata from', sourcefile
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback)
            self.redirect('/')
    else:
        # Second upload.
        forms = {}
        for field in ['projectname', 'description', 'version']:
            if field in self.request.arguments.keys():
                forms[field] = self.request.arguments[field][0]

        sourcefile = self.request.files['projectfile'][0]
        try:
            filename = sourcefile['filename']
            if len(filename) > 0:
                unique = _get_unique_name(self.get_project_dir(),
                                          parse_archive_name(filename))

                pdb = Projects()

                project = {}
                project['id'] = pdb.predict_next_rowid()
                project['active'] = 1
                project['projectname'] = forms['projectname'].strip()
                project['description'] = forms['description'].strip()
                project['version'] = forms['version'].strip()
                project['projpath'] = unique

                os.mkdir(unique)

                buff = StringIO.StringIO(sourcefile['body'])
                archive = tarfile.open(fileobj=buff, mode='r:gz')
                archive.extractall(path=unique)

                vcslist = find_vcs()
                if vcslist:
                    vcs = vcslist[0](unique)
                else:
                    vcs = DumbRepo(unique)
                vcs.init_repo()

                # Update project settings.
                proj = Project(project['projpath'])
                dummy = proj.get_info()  # Just to get required keys.
                info = {}
                for key in dummy:
                    info[key] = project[key]
                proj.set_info(info)

                pdb.new(project)

                self.redirect("/workspace/project?projpath=" +
                              quote_plus(project['projpath']))
        except Exception as err:
            print 'ERROR: could not import project from', sourcefile
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback)
            self.redirect('/')