Code Example #1
File: views.py Project: neurodata/ndstore
def buildGraph(request, webargs):
    """Build a graph based on different arguments"""

    try:
        # TODO UA use regex here versus replace. Check ndwsrest for more details.
        args = (webargs.replace(',', '/').split('/'))[0:-1]
        w_token = args[0]

        # TODO UA this will be replaced by NDToken and authentication
        if not request.user.is_superuser:
            m_tokens = Token.objects.filter(
                user=request.user.id) | Token.objects.filter(public=1)
            tokens = []
            for v in m_tokens.values():
                tokens.append(v['token_name'])
            if w_token not in tokens:
                logger.error(
                    "Token {} does not exist or you do not have sufficient permissions to access it."
                    .format(w_token))
                raise NDWSError(
                    "Token {} does not exist or you do not have sufficient permissions to access it."
                    .format(w_token))

        # TODO UA where are you closing this file. we need to ensure that this file is closed before it is returned.
        (file, filename) = ndgraph.genGraphRAMON(*args)
        response = HttpResponse(content_type='text/plain')
        response[
            'Content-Disposition'] = "attachment; filename=\"output.{}\"".format(
                filename)
        response.write(file.read())
        return response

    except Exception as e:
        logger.error(e)
        raise NDWSError(e)
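
For reference, the replace/split/slice above turns a REST-style argument string into a flat positional list; a minimal sketch of that parsing with a made-up webargs value (the real URL grammar is handled in ndwsrest, per the TODO above):

# Sketch of the webargs parsing in buildGraph, using a hypothetical argument string.
webargs = "mytoken/mychannel/annoids/1,2,3/"
args = (webargs.replace(',', '/').split('/'))[0:-1]
print(args)     # ['mytoken', 'mychannel', 'annoids', '1', '2', '3']
print(args[0])  # 'mytoken' -- the token checked against the Token table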
Code Example #2
File: ingestmanager.py Project: neurodata/ndstore
 def validateConfig(self, config_data):
     try:
         ndcg = Configuration(config_data)
         validator = ndcg.get_validator()
         validator.schema = ndcg.schema
         validator.validate_schema()
         ingest_job_json = json.dumps({
             'dataset': ndcg.config_data["database"]["dataset"],
             'project': ndcg.config_data["database"]["project"],
             'channel': ndcg.config_data["database"]["channel"],
             'resolution': ndcg.config_data["ingest_job"]["resolution"],
             'x_start': ndcg.config_data["ingest_job"]["extent"]["x"][0],
             'x_stop': ndcg.config_data["ingest_job"]["extent"]["x"][1],
             'y_start': ndcg.config_data["ingest_job"]["extent"]["y"][0],
             'y_stop': ndcg.config_data["ingest_job"]["extent"]["y"][1],
             'z_start': ndcg.config_data["ingest_job"]["extent"]["z"][0],
             'z_stop': ndcg.config_data["ingest_job"]["extent"]["z"][1],
             't_start': ndcg.config_data["ingest_job"]["extent"]["t"][0],
             't_stop': ndcg.config_data["ingest_job"]["extent"]["t"][1],
             'tile_size_x': ndcg.config_data["ingest_job"]["tile_size"]["x"],
             'tile_size_y': ndcg.config_data["ingest_job"]["tile_size"]["y"],
             'tile_size_z': ndcg.config_data["ingest_job"]["tile_size"]["z"],
             'tile_size_t': ndcg.config_data["ingest_job"]["tile_size"]["t"],
         })
         self.ingest_job = NDIngestJob.fromJson(ingest_job_json)
         self.nd_proj = NDIngestProj(self.ingest_job.project,
                                     self.ingest_job.channel,
                                     self.ingest_job.resolution)
     except jsonschema.ValidationError as e:
         raise NDWSError("Schema validation failed")
     except Exception as e:
         raise NDWSError("Properties not found")
     return True
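
The dictionary lookups above imply a configuration layout roughly like the following. This is a sketch reconstructed from those accesses, with made-up values; the real ndingest schema may require additional fields:

# Sketch of a config_data dict that satisfies the lookups in validateConfig.
# Field names come from the accesses above; the values are illustrative only.
config_data = {
    "database": {
        "dataset": "example_dataset",
        "project": "example_project",
        "channel": "example_channel",
    },
    "ingest_job": {
        "resolution": 0,
        "extent": {
            "x": [0, 2048],
            "y": [0, 2048],
            "z": [0, 64],
            "t": [0, 1],
        },
        "tile_size": {"x": 512, "y": 512, "z": 16, "t": 1},
    },
}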
Code Example #3
File: annotation.py Project: neurodata/ndstore
 def setField ( self, field, value ):
   """Mutator by field name. Then need to store the field."""
   
   if field == 'segmentclass':
     self.segmentclass = value
   elif field == 'parentseed':
     self.parentseed = value
   elif field == 'neuron':
     self.neuron = value
   elif field == 'synapses':
     raise NDWSError ("Cannot set synapses in segments. It is derived from the synapse annotations.")
   elif field == 'organelles':
     raise NDWSError ("Cannot set organelles in segments. It is derived from the organelle annotations.")
   else:
     Annotation.setField ( self, field, value )
Code Example #4
File: ndgraph.py Project: neurodata/ndstore
def getAnnoIds(proj, ch, resolution, xmin, xmax, ymin, ymax, zmin, zmax):
    """Return a list of anno ids restricted by equality predicates. Equalities are alternating in field/value in the url."""

    mins = (xmin, ymin, zmin)
    maxs = (xmax, ymax, zmax)
    offset = proj.datasetcfg.get_offset(resolution)
    # corner is the elementwise max of the requested minima and the offset-adjusted minima
    corner = map(max, zip(*[mins, map(sub, mins, offset)]))
    # dim is the cutout extent along each axis
    dim = map(sub, maxs, mins)

    if not proj.datasetcfg.checkCube(resolution, corner, dim):
        logger.error("Illegal cutout corner={}, dim={}".format(corner, dim))
        raise NDWSError("Illegal cutout corner={}, dim={}".format(corner, dim))

    with closing(SpatialDB(proj)) as sdb:
        cutout = sdb.cutout(ch, corner, dim, resolution)

    if cutout.isNotZeros():
        annoids = np.unique(cutout.data)
    else:
        annoids = np.asarray([0], dtype=np.uint32)

    if annoids[0] == 0:
        return annoids[1:]
    else:
        return annoids
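
The corner/dim computation above relies on Python 2 semantics, where map() returns a list; a small sketch of the same arithmetic with made-up coordinates (on Python 3 the map() results would need to be wrapped in list()):

from operator import sub

# Sketch of the corner/dim arithmetic in getAnnoIds with hypothetical coordinates.
mins, maxs, offset = (100, 200, 10), (400, 600, 20), (50, 50, 0)
corner = map(max, zip(*[mins, map(sub, mins, offset)]))  # elementwise max of mins and mins - offset
dim = map(sub, maxs, mins)                               # extent along each axis
print(corner)  # [100, 200, 10]
print(dim)     # [300, 400, 10]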
Code Example #5
    def newNDProject(self):
        """Create the database for a project"""

        with closing(
                MySQLdb.connect(
                    host=self.pr.host,
                    user=settings.DATABASES['default']['USER'],
                    passwd=settings.DATABASES['default']['PASSWORD'],
                    db=settings.DATABASES['default']['NAME'],
                    connect_timeout=1)) as conn:
            with closing(conn.cursor()) as cursor:

                try:
                    # create the database
                    sql = "CREATE DATABASE {}".format(self.pr.dbname)

                    cursor.execute(sql)
                    conn.commit()

                except MySQLdb.Error as e:
                    logger.error(
                        "Failed to create database for new project {}: {}.".
                        format(e.args[0], e.args[1]))
                    raise NDWSError(
                        "Failed to create database for new project {}: {}.".
                        format(e.args[0], e.args[1]))
Code Example #6
    def deleteNDProject(self):
        """Delete the database for a project"""

        try:
            with closing(
                    MySQLdb.connect(
                        host=self.pr.host,
                        user=settings.DATABASES['default']['USER'],
                        passwd=settings.DATABASES['default']['PASSWORD'],
                        connect_timeout=1)) as conn:
                with closing(conn.cursor()) as cursor:
                    # delete the database
                    sql = "DROP DATABASE {}".format(self.pr.dbname)

                    try:
                        cursor.execute(sql)
                        conn.commit()
                    except MySQLdb.Error as e:
                        # Skipping the error if the database does not exist
                        if e.args[0] == 1008:
                            logger.warning("Database {} does not exist".format(
                                self.pr.dbname))
                            pass
                        else:
                            conn.rollback()
                            logger.error(
                                "Failed to drop project database {}: {}. sql={}"
                                .format(e.args[0], e.args[1], sql))
                            raise NDWSError(
                                "Failed to drop project database {}: {}. sql={}"
                                .format(e.args[0], e.args[1], sql))
        except MySQLdb.OperationalError as e:
            logger.warning(
                "Cannot connect to the server at host {}. {}".format(
                    self.pr.host, e))
Code Example #7
    def updateNDChannelNew(self, channel_name):
        """Create the tables for a channel"""

        ch = NDChannel.fromName(self.pr, channel_name)

        # connect to the database
        with closing(
                MySQLdb.connect(
                    host=self.pr.host,
                    user=settings.DATABASES['default']['USER'],
                    passwd=settings.DATABASES['default']['PASSWORD'],
                    db=self.pr.dbname,
                    connect_timeout=1)) as conn:
            with closing(conn.cursor()) as cursor:

                try:
                    # tables specific to all other non time data
                    for res in self.pr.datasetcfg.resolutions:
                        cursor.execute(
                            "CREATE TABLE {} ( zindex BIGINT, timestamp INT, cube LONGBLOB, PRIMARY KEY(zindex,timestamp))"
                            .format(ch.getNearIsoTable(res)))
                    # Committing at the end
                    conn.commit()
                except MySQLdb.Error as e:
                    logging.error(
                        "Failed to create neariso tables for existing project {}: {}."
                        .format(e.args[0], e.args[1]))
                    raise NDWSError(
                        "Failed to create neariso tables for existing project {}: {}."
                        .format(e.args[0], e.args[1]))
Code Example #8
File: ingestmanager.py Project: neurodata/ndstore
    def createIngestJob(self, user_id, config_data):
        """Create an ingest job based on the posted config data"""

        config_data = json.loads(config_data)
        # validate schema
        if self.validateConfig(config_data):
            try:
                # create the upload queue
                UploadQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.upload_queue = UploadQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.upload_queue = self.upload_queue.url
                # create the ingest queue
                IngestQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.ingest_queue = IngestQueue(
                    self.nd_proj,
                    endpoint_url=ndingest_settings.SQS_ENDPOINT).url
                # create the cleanup queue
                CleanupQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.cleanup_queue = CleanupQueue(
                    self.nd_proj,
                    endpoint_url=ndingest_settings.SQS_ENDPOINT).url
                self.generateUploadTasks()
                self.ingest_job.user_id = user_id
                self.ingest_job.save()
                return NDIngestJob.serialize(self.ingest_job._job)

            except Exception as e:
                print(e)
                raise NDWSError(e)
Code Example #9
File: annotation.py Project: neurodata/ndstore
 def setField ( self, field, value ):
   """Mutator by field name. Then need to store the field."""
   
   if field == 'segments':
     raise NDWSError ("Cannot set segments. It is derived from the neuron field of ANNO_SEGMENTS.")
   else:
     Annotation.setField ( self, field, value )
Code Example #10
  def putAnnotation ( self, ch, anno, options='' ):
    """store an HDF5 annotation to the database"""

    self.annodb.startTxn()

    try:
      # for updates, make sure the annotation exists and is of the right type

      if 'update' in options:
 
        kvdict = self.annodb.getAnnotationKV ( ch, anno.annid )

        # can't update annotations that don't exist
        if kvdict is None:
          raise NDWSError ( "During update no annotation found at id {}".format(anno.annid)  )

        else:
          self.annodb.putAnnotationKV ( ch, anno.annid, anno.toDict(), update=True)
        
      # Write the user chosen annotation id
      else:
        kvdict = anno.toDict()
        self.annodb.putAnnotationKV ( ch, anno.annid, kvdict)

      self.annodb.commit()

    except Exception:
      self.annodb.rollback()
      raise
Code Example #11
File: h5ann.py Project: neurodata/ndstore
def AnnotationtoH5(anno, h5fh):
    """Operate polymorphically on annotations"""

    if anno.__class__ == annotation.AnnSynapse:
        return SynapsetoH5(anno, h5fh)
    elif anno.__class__ == annotation.AnnSeed:
        return SeedtoH5(anno, h5fh)
    elif anno.__class__ == annotation.AnnSegment:
        return SegmenttoH5(anno, h5fh)
    elif anno.__class__ == annotation.AnnNeuron:
        return NeurontoH5(anno, h5fh)
    elif anno.__class__ == annotation.AnnOrganelle:
        return OrganelletoH5(anno, h5fh)
    elif anno.__class__ == annotation.AnnNode:
        return NodetoH5(anno, h5fh)
    elif anno.__class__ == annotation.AnnSkeleton:
        return SkeletontoH5(anno, h5fh)
        return SkeletontoH5(anno, h5fh)
    elif anno.__class__ == annotation.Annotation:
        return BasetoH5(anno, annotation.ANNO_ANNOTATION, h5fh)
    else:
        logger.warning(
            "(AnnotationtoH5) Does not support this annotation type yet. Type = %s"
            % anno.__class__)
        raise NDWSError(
            "(AnnotationtoH5) Does not support this annotation type yet. Type = %s"
            % anno.__class__)
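
The type switch above can also be expressed as a dispatch table; a sketch of that variant, reusing the *toH5 helpers and annotation classes already referenced above (an alternative formulation, not the ndstore implementation):

# Sketch: dispatch-table variant of the type switch in AnnotationtoH5.
_H5_WRITERS = {
    annotation.AnnSynapse: SynapsetoH5,
    annotation.AnnSeed: SeedtoH5,
    annotation.AnnSegment: SegmenttoH5,
    annotation.AnnNeuron: NeurontoH5,
    annotation.AnnOrganelle: OrganelletoH5,
    annotation.AnnNode: NodetoH5,
    annotation.AnnSkeleton: SkeletontoH5,
}

def AnnotationtoH5_dispatch(anno, h5fh):
    writer = _H5_WRITERS.get(anno.__class__)
    if writer is not None:
        return writer(anno, h5fh)
    if anno.__class__ == annotation.Annotation:
        return BasetoH5(anno, annotation.ANNO_ANNOTATION, h5fh)
    raise NDWSError(
        "(AnnotationtoH5) Does not support this annotation type yet. Type = %s"
        % anno.__class__)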
Code Example #12
  def getAnnotation ( self, ch, annid ):
    """Return a RAMON object by identifier"""

    kvdict = self.annodb.getAnnotationKV ( ch, annid )
 
    annotype = int(kvdict['ann_type'])

    # switch on the type of annotation
    if annotype is None:
      return None
    elif annotype == annotation.ANNO_SYNAPSE:
      anno = annotation.AnnSynapse(self.annodb,ch)
    elif annotype == annotation.ANNO_SEED:
      anno = annotation.AnnSeed(self.annodb,ch)
    elif annotype == annotation.ANNO_SEGMENT:
      anno = annotation.AnnSegment(self.annodb,ch)
    elif annotype == annotation.ANNO_NEURON:
      anno = annotation.AnnNeuron(self.annodb,ch)
    elif annotype == annotation.ANNO_ORGANELLE:
      anno = annotation.AnnOrganelle(self.annodb,ch)
    elif annotype == annotation.ANNO_NODE:
      anno = annotation.AnnNode(self.annodb,ch)
    elif annotype == annotation.ANNO_SKELETON:
      anno = annotation.AnnSkeleton(self.annodb,ch)
    elif annotype == annotation.ANNO_ROI:
      anno = annotation.AnnROI(self.annodb,ch)
    elif annotype == annotation.ANNO_ANNOTATION:
      anno = annotation.Annotation(self.annodb,ch)
    else:
      raise NDWSError ( "Unrecognized annotation type {}".format(type) )

    # load the annotation
    anno.fromDict ( kvdict )

    return anno
Code Example #13
File: ndwsnifti.py Project: neurodata/ndstore
def queryNIFTI ( tmpfile, ch, db, proj ): 
  """ Return a NII file that contains the entire DB"""
  
  try:

    # get the header in a fileobj
    nh = NDNiftiHeader.fromChannel(ch)

    cuboid = db.cutout ( ch, (0,0,0), proj.datasetcfg.dataset_dim(0), 0, timerange=ch.time_range) 

    # transpose to nii's xyz format
    niidata = cuboid.data.transpose()

    # for 3-channel FA
    if niidata.dtype == np.uint32:
      niidata = _RGBto3dby8 ( niidata[:,:,:,0] )
      

    # assemble the header and the data and create a nii file
    nii = nibabel.Nifti1Image(niidata, affine=nh.affine, header=nh.header ) 

    # this adds a suffix and save to the tmpfile
    nibabel.save ( nii, tmpfile.name )

  except Exception as e:
    logger.error("Failed to build nii file. Error {}".format(e))
    raise NDWSError("Failed to build nii file. Error {}".format(e))
Code Example #14
 def setReadOnly (self, value):
   if value in [READONLY_TRUE, READONLY_FALSE]:
     self.ch.readonly = value
     self.ch.save()
   else:
     logger.error ( "Wrong Readonly Value {} for Channel {}".format( value, self.channel_name ) )
     raise NDWSError ( "Wrong Readonly Value {} for Channel {}".format( value, self.ch.channel_name ) )
Code Example #15
    def deleteNDChannel(self, channel_name):
        """Delete the keys for a channel"""

        # KL TODO Maybe do this as a transaction or lock the cache when we drop the channel?
        try:
            # removing keys related to kvio data
            # channel pattern to fetch all the keys with project_name&channel_name&
            channel_pattern = "{}&{}&*".format(self.pr.project_name,
                                               channel_name)
            channel_keys = self.client.keys(channel_pattern)
            # delete all the keys with the pattern
            if channel_keys:
                self.client.delete(*channel_keys)
            # removing keys related to kvindex data
            index_min_pattern = "[{}&{}".format(self.pr.project_name,
                                                channel_name)
            index_max_pattern = "+"
            self.client.zremrangebylex(settings.REDIS_INDEX_KEY,
                                       index_min_pattern, index_max_pattern)
            # deleting from s3 and dynamo
            self.s3_proj.deleteNDChannel(channel_name)
        except Exception as e:
            logger.error("Error in deleting channel {}. {}".format(
                channel_name, e))
            raise NDWSError("Error in deleting channel {}. {}".format(
                channel_name, e))
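
KEYS walks the whole Redis keyspace in one blocking call; on a large cache a SCAN-based sweep is the usual alternative. A sketch of that variant using redis-py's scan_iter with the same project&channel& pattern built above:

def delete_keys_by_pattern(client, pattern, batch_size=1000):
    """Delete all keys matching pattern using SCAN instead of KEYS (sketch)."""
    batch = []
    for key in client.scan_iter(match=pattern, count=batch_size):
        batch.append(key)
        if len(batch) >= batch_size:
            client.delete(*batch)
            batch = []
    if batch:
        client.delete(*batch)

# e.g. delete_keys_by_pattern(self.client, "{}&{}&*".format(self.pr.project_name, channel_name))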
Code Example #16
    def deleteNDResolution(self, channel_name, resolution):
        """Delete the resolution for a channel"""

        # KL TODO Maybe do this as a transaction or lock the cache when we drop the channel?
        try:
            # removing keys for kvio
            # resolution pattern to fetch all keys with project_name&channel_name&resolution&
            resolution_pattern = "{}&{}&{}&*".format(self.pr.project_name,
                                                     channel_name, resolution)
            resolution_keys = self.client.keys(resolution_pattern)
            # delete all the keys with pattern
            if resolution_keys:
                self.client.delete(*resolution_keys)
            # removing keys for kvindex data
            index_min_pattern = "[{}&{}&{}".format(self.pr.project_name,
                                                   channel_name, resolution)
            index_max_pattern = "+"
            self.client.zremrangebylex(settings.REDIS_INDEX_KEY,
                                       index_min_pattern, index_max_pattern)
            # deleting from s3 and dynamo
            self.s3_proj.deleteNDResolution(channel_name, resolution)
        except Exception as e:
            logger.error(
                "Error in deleting resolution {} channel {}. {}".format(
                    resolution, channel_name, e))
            raise NDWSError(
                "Error in deleting resolution {} channel {}. {}".format(
                    resolution, channel_name, e))
Code Example #17
 def getRamonTable(self):
   """Return the name of the ramon table"""
   if self.pr.kvengine == MYSQL:
     return "{}_ramon".format(self.ch.channel_name)
   else:
     logger.error("RAMON not support for KV Engine {}".format(self.pr.kvengine))
     raise NDWSError("RAMON not support for KV Engine {}".format(self.pr.kvengine))
Code Example #18
    def deleteNDChannel(self, channel_name):
        """Delete the tables for this channel"""

        ch = NDChannel.fromName(self.pr, channel_name)
        table_list = []

        if ch.channel_type in ANNOTATION_CHANNELS:
            # delete the ids table
            table_list.append(ch.getIdsTable())
            # delete the ramon table
            table_list.append(ch.getRamonTable())

        for res in self.pr.datasetcfg.resolutions:
            # delete the res tables
            table_list.append(ch.getTable(res))
            if self.pr.datasetcfg.scalingoption == ZSLICES:
                table_list.append(ch.getNearIsoTable(res))
            # delete the index tables
            table_list.append(ch.getS3IndexTable(res))
            if ch.channel_type in ANNOTATION_CHANNELS:
                # delete the exceptions tables
                table_list = table_list + [
                    ch.getIdxTable(res),
                    ch.getExceptionsTable(res)
                ]

        try:
            with closing(
                    MySQLdb.connect(
                        host=self.pr.host,
                        user=settings.DATABASES['default']['USER'],
                        passwd=settings.DATABASES['default']['PASSWORD'],
                        db=self.pr.dbname,
                        connect_timeout=1)) as conn:
                with closing(conn.cursor()) as cursor:

                    # delete the tables for this channel
                    sql = "DROP TABLES IF EXISTS {}".format(
                        ','.join(table_list))
                    try:
                        cursor.execute(sql)
                        conn.commit()
                    except MySQLdb.Error as e:
                        # Skip the error if the table or database does not exist
                        if e.args[0] in (1051, 1049):
                            pass
                        else:
                            conn.rollback()
                            logger.error(
                                "Failed to drop channel tables {}: {}. sql={}".
                                format(e.args[0], e.args[1], sql))
                            raise NDWSError(
                                "Failed to drop channel tables {}: {}. sql={}".
                                format(e.args[0], e.args[1], sql))
        except Exception as e:
            logger.warning("Database {} on host {} not found".format(
                self.pr.dbname, self.pr.host))
Code Example #19
def propagate (token, channel_name):
  """Propagate the given project for all resolutions"""

  try:
    buildStack(token, channel_name)
  except Exception as e:
    logger.error("Error in propagate. {}".format(e))
    raise NDWSError("Error in propagate. {}".format(e))
Code Example #20
 def __init__(self, pr):
     """Create the database connection"""
     self.pr = pr
     # Connect to the redis cluster
     try:
         self.client = redis.StrictRedis(host=self.pr.host, port=6379, db=0)
         self.pipe = self.client.pipeline(transaction=False)
         # delete from S3 when deleting from Redis
         self.s3_proj = S3ProjectDB(pr)
     except redis.ConnectionError as e:
         logger.error("Cannot connect to Redis server. {}".format(e))
         raise NDWSError("Cannot connect to Redis server. {}".format(e))
     except Exception as e:
         logger.error(
             "Unknown error while connecting to Redis. {}".format(e))
         raise NDWSError(
             "Unknown error while connecting to Redis. {}".format(e))
Code Example #21
def ingest (token_name, channel_name, resolution, data_url, file_format, file_type):
  """Call the remote ingest function here"""
  
  try:
    ingest_data = IngestData(token_name, channel_name, resolution, data_url, file_format, file_type)
    ingest_data.ingest()
  except Exception as e:
    logger.error("Error in ingest. {}".format(e))
    raise NDWSError("Error in ingest. {}".format(e))
Code Example #22
File: ndproject.py Project: neurodata/ndstore
 def fromTokenName(cls, token_name):
     try:
         tk = Token.objects.get(token_name=token_name)
         pr = Project.objects.get(project_name=tk.project_id)
         return cls(pr)
     except ObjectDoesNotExist as e:
         logger.error("Token {} does not exist. {}".format(token_name, e))
         raise NDWSError("Token {} does not exist. {}".format(
             token_name, e))
Code Example #23
File: annotation.py Project: neurodata/ndstore
 def setField ( self, field, value ):
   """Mutator by field name.  Then need to store the field."""
   
   if field == 'parent':
     self.parent = value
   elif field == 'children':
     raise NDWSError ("Cannot set children.  It is derived from the parent field of ANNO_NODE.")
   else:
     Annotation.setField ( self, field, value )
Code Example #24
File: ndwsprojingest.py Project: neurodata/ndstore
def deleteChannel(webargs, post_data):
    """Delete a list of channels using a JSON file"""

    # Get the token and load the project
    try:
        m = re.match(r"(\w+)/deleteChannel/$", webargs)
        token_name = m.group(1)
    except Exception as e:
        logger.error("Error in URL format")
        raise NDWSError("Error in URL format")
Code Example #25
File: annotation.py Project: neurodata/ndstore
 def setField ( self, field, value ):
   """Mutator by field name.  Then need to store the field."""
   
   if field == 'nodetype':
     self.nodetype = value
   elif field == 'children':
     raise NDWSError ("Cannot set children.  It is derived from the parent field of ANNO_NODE.")
   elif field == 'location':
     self.location = np.array([float(x) for x in value.split(',')], dtype=np.float)
     if len(self.location) != 3:
       raise NDWSError ("Illegal arguments to set field location: %s" % value)
   elif field == 'parent':
     self.parent = value
   elif field == 'radius':
     self.radius = value
   elif field == 'skeleton':
     self.skeleton = value
   else:
     Annotation.setField ( self, field, value )
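
The location branch above expects a comma-separated triple; a small sketch with a made-up value (the original passes dtype=np.float, an alias for the builtin float that recent NumPy versions have removed):

import numpy as np

value = "104.0,220.5,31.0"
location = np.array([float(x) for x in value.split(',')], dtype=float)
print(location)       # [104.  220.5  31. ]
print(len(location))  # 3 -- any other length raises NDWSError above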
Code Example #26
def querySWC(swcfile, ch, db, proj, skelids=None):
    """Query the list of skelids (skeletons) and populate an open file swcfile
     with lines of swc data."""

    db.startTxn()
    try:

        # write out metadata about where this came from
        # ND version number and schema number
        swcfile.write('# ND (NeuroData) Version {} Schema {}\n'.format(
            proj.getNDVersion(), proj.getSchemaVersion()))
        # ND project and channel
        swcfile.write('# Project {} Channel {}\n'.format(
            proj.getProjectName(), ch.getChannelName()))

        # get a skeleton for metadata and populate the comments field
        if skelids is not None:

            skel = db.getAnnotation(ch, skelids[0])

            # write each key value line out as a comment
            for (k, v) in skel.toDict().iteritems():
                # match a comment
                if re.match("^#.*\n", str(v)):
                    swcfile.write(v)
                else:
                    swcfile.write("# {} {}\n".format(k, v))

        # iterate over all nodes
        for skel in skelids:
            for nodeid in db.querySkeletonNodes(ch, skel):

                node = db.getAnnotation(ch, nodeid)

                #RB nodes are floating point values.  let's not scale.
                #      # scale points to resolution
                #      xpos = xpos/(2**res)
                #      ypos = ypos/(2**res)
                #      # check for isotropic
                #      if db.datasetcfg.scalingoption == ISOTROPIC:
                #        zpos = zpos/(2**res)

                # write a node in swc
                # n T x y z R P
                swcfile.write("{} {} {} {} {} {} {}\n".format(
                    node.annid, node.nodetype, node.location[0],
                    node.location[1], node.location[2], node.radius,
                    node.parent))

        db.commit()

    except Exception as e:
        db.rollback()
        logger.warning("Failed to get SWC file {}".format(e))
        raise NDWSError("Failed to get SWC file {}".format(e))
Code Example #27
    def ingest(self):
        """Identify the data style and ingest accordingly"""

        if self.file_format in ['SLICE']:
            self.ingestImageStack()
        elif self.file_format in ['CATMAID']:
            self.ingestCatmaidStack()
        else:
            logger.error("Format {} not supported.".format(self.file_format))
            raise NDWSError("Format {} not supported.".format(
                self.file_format))
Code Example #28
File: annotation.py Project: neurodata/ndstore
 def setField ( self, field, value ):
   """Mutator by field name.  Then need to store the field."""
   
   if field == 'skeletontype':
     self.skeletontype = value
   elif field == 'skeletonnodes':
     raise NDWSError ("Cannot set nodes.  It is derived from the parent field of ANNO_NODE.")
   elif field == 'rootnode':
     self.rootnode = value
   else:
     Annotation.setField ( self, field, value )
Code Example #29
File: ndwsprojingest.py Project: neurodata/ndstore
def extractProjectDict(pr_dict):
    """Generate a project object from the JSON flle"""

    pr = {}
    tk = {}

    try:
        pr['project_name'] = pr_dict['project_name']
    except Exception as e:
        logger.error("Missing required fields")
        raise NDWSError("Missing required fields")
Code Example #30
 def setPropagate (self, value):
   if value in [NOT_PROPAGATED, PROPAGATED]:
     self.ch.propagate = value
     self.setReadOnly ( READONLY_FALSE )
     self.ch.save()
   elif value in [UNDER_PROPAGATION]:
     self.ch.propagate = value
     self.setReadOnly ( READONLY_TRUE )
     self.ch.save()
   else:
     logger.error ( "Wrong Propagate Value {} for Channel {}".format( value, self.ch.channel_name ) )
     raise NDWSError ( "Wrong Propagate Value {} for Channel {}".format( value, self.ch.channel_name ) )