def importDatabase(filename, user):
    '''Read the uploaded Excel file into the database, then delete the file.

    Each spreadsheet row becomes one Patient (diagnosis taken from the 4th
    column) with three Feature records named A/B/C built from the first
    three columns.  NOTE(review): classifier_id is hard-coded to 1.
    '''
    path = os.path.join(current_app.config['UPLOAD_FOLDER'], filename)
    df = pd.read_excel(path)

    for _, row in df.iterrows():
        patient = Patient(user_id=user,
                          status="undiag",
                          diagnose=str(row[3]))
        for name, value in zip('ABC', (row[0], row[1], row[2])):
            patient.features.append(Feature(featureName=name,
                                            featureValue=str(value),
                                            classifier_id=1))
        db.session.add(patient)
        db.session.commit()

    os.remove(path)
Ejemplo n.º 2
0
    def test18_geometryfield(self):
        """Testing the general GeometryField.

        Saves one Feature per geometry kind and verifies each round-trips
        from the database with the correct type and coordinates.
        """
        Feature(name='Point', geom=Point(1, 1)).save()
        Feature(name='LineString', geom=LineString((0, 0), (1, 1),
                                                   (5, 5))).save()
        Feature(name='Polygon',
                geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0),
                                        (0, 0)))).save()
        Feature(name='GeometryCollection',
                geom=GeometryCollection(
                    Point(2, 2), LineString((0, 0), (2, 2)),
                    Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0),
                                       (0, 0))))).save()

        # assertIsInstance instead of assertEqual(True, isinstance(...)):
        # same check, but a failure reports the actual type.
        f_1 = Feature.objects.get(name='Point')
        self.assertIsInstance(f_1.geom, Point)
        self.assertEqual((1.0, 1.0), f_1.geom.tuple)
        f_2 = Feature.objects.get(name='LineString')
        self.assertIsInstance(f_2.geom, LineString)
        self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)

        f_3 = Feature.objects.get(name='Polygon')
        self.assertIsInstance(f_3.geom, Polygon)
        f_4 = Feature.objects.get(name='GeometryCollection')
        self.assertIsInstance(f_4.geom, GeometryCollection)
        # the polygon stored on its own equals the collection's 3rd member
        self.assertEqual(f_3.geom, f_4.geom[2])
Ejemplo n.º 3
0
def vector_catalog_save_layer(tenant, layer, vector_layer, features):
    # Persist a batch of vector features into `layer` for the given tenant,
    # then refresh the layer's bounds, status and schema.
    # NOTE(review): `features` arrives as ids and is resolved to
    # (geometry, properties) pairs by the vector layer's lookup callable —
    # confirm against VECTOR_LAYERS definition.
    connection.close()
    connection.set_schema(tenant)

    features = VECTOR_LAYERS[vector_layer]['geometries_by_id'](features)

    with transaction.atomic():
        # running union of all geometries, used only to compute the
        # layer envelope after the loop
        union = GEOSGeometry('POINT EMPTY')
        keys = None
        for g, props in features:
            # remember the property keys of the first feature seen
            if not keys:
                keys = props.keys()

            # union BEFORE transform: bounds stay in the source SRS while
            # the stored geometry is reprojected to web mercator (3857)
            union = union.union(g)
            g.transform(3857)

            # stable content hash of the geometry, stored as 'shaid'
            s = hashlib.sha1()
            s.update(GeometryCollection(g).ewkb)
            props['shaid'] = s.hexdigest()
            f = Feature(layer=layer,
                        geometry=GeometryCollection(g),
                        properties=props)
            f.save()

        # envelope ring order is (minx,miny),(minx,maxy),(maxx,maxy),
        # (maxx,miny),... so corners 2 and 0 give maxx,maxy + minx,miny
        envelope = union.envelope.coords[0]
        layer.bounds = envelope[2] + envelope[0]
        layer.status = 0
        layer.field_names = list(set(layer.field_names).union(set(keys)))
        layer.schema['properties'] = {n: "str" for n in layer.field_names}
        layer.save()
Ejemplo n.º 4
0
def feature_create():
    """Create a Feature from the JSON request body and register the current
    user as its first Supporter.

    Returns 403 when the user is disabled, 409 when no area is supplied,
    otherwise JSON confirmation with the new feature's id.
    """
    if not flask_login.current_user.is_enabled:
        abort(403)

    obj = Feature()
    for k, v in request.json.iteritems():
        setattr(obj, k, v)

    if obj.area_id is None:
        return make_response(
            jsonify({'validationErrors': ['An area should be specified']}),
            409)

    user = flask_login.current_user
    obj.client_id = user.client.id
    db.session.add(obj)
    # BUG FIX: obj.id is not populated until the INSERT is flushed; without
    # this flush, the Supporter row below was created with feature_id=None.
    db.session.flush()

    # associate as Supporter with the next (lowest) priority for this client
    max_priority = db.session.query(func.max(Supporter.priority)) \
                             .filter(Supporter.client_id == user.client.id).first()
    supporter = Supporter()
    supporter.client_id = user.client.id
    supporter.feature_id = obj.id
    supporter.priority = 0 if max_priority[0] is None else (max_priority[0] +
                                                            1)
    db.session.add(supporter)

    db.session.commit()

    return jsonify({
        'id': obj.id,
        'msgType': 'info',
        'msgText': 'Feature created'
    })
Ejemplo n.º 5
0
Archivo: views.py Proyecto: Carl4/geoq
    def post(self, request, *args, **kwargs):
        """Create a Feature inside an AOI from a posted GeoJSON geometry.

        Reads 'aoi' (id) and 'geometry' (GeoJSON string) from the POST
        body, derives the owning job/project from the AOI, and saves the
        feature.  Validation failures return HTTP 400 with the error
        messages as JSON.
        """
        feature = None
        aoi = request.POST.get('aoi')
        geometry = request.POST.get('geometry')
        geojson = json.loads(geometry)
        properties = geojson.get('properties')

        aoi = AOI.objects.get(id=aoi)
        job = getattr(aoi, 'job')
        project = getattr(job, 'project')
        # the feature template (type) id rides along in the GeoJSON props
        template = properties.get('template') if properties else None

        #TODO: handle exceptions
        if template:
            template = FeatureType.objects.get(id=template)

        attrs = dict(aoi=aoi,
                     job=job,
                     project=project,
                     analyst=request.user,
                     template=template)

        # only the 'geometry' member of the GeoJSON becomes the stored geom
        geometry = geojson.get('geometry')
        attrs['the_geom'] = GEOSGeometry(json.dumps(geometry))

        try:
            response = Feature(**attrs)
            response.full_clean()
            response.save()
        except ValidationError as e:
            return HttpResponse(content=json.dumps(dict(errors=e.messages)), mimetype="application/json", status=400)

        # NOTE(review): wraps the model in a list, so the body is the
        # str() of the Feature — confirm clients expect this shape
        return HttpResponse([response], mimetype="application/json")
Ejemplo n.º 6
0
 def load_features_csv(self, input_file, file_id):
     """Parse a delimited peak-list file into Feature objects.

     Expected columns: feature_id, m/z, RT, intensity[, identification].
     Rows with any other column count are skipped.  Returns [] when the
     file does not exist.
     """
     features = []
     if not os.path.exists(input_file):
         return features
     delim = self.detect_delimiter(input_file)
     with open(input_file, 'rb') as csvfile:
         reader = csv.reader(csvfile, delimiter=delim)
         next(reader, None)  # skip the headers
         for elements in reader:
             # 4- and 5-column rows are identical except for a trailing
             # identification field, which was unused anyway.
             if len(elements) in (4, 5):
                 feature_id = utils.num(elements[0])
                 mz = utils.num(elements[1])
                 rt = utils.num(elements[2])
                 intensity = utils.num(elements[3])
                 features.append(
                     Feature(feature_id, mz, rt, intensity, file_id))
             # BUG FIX: a malformed row used to re-append the feature from
             # the previous iteration (or raise NameError on the first
             # row); such rows are now skipped.
     return features
Ejemplo n.º 7
0
    def post(self):
        """Create a Feature entity under the idea given by "idea-key",
        unless the submitted feature name is empty, then redirect back
        to the referring page."""
        feature_name = self.request.get("feature-name")
        if feature_name != "":
            parent_key = ndb.Key(urlsafe=self.request.get("idea-key"))
            created = Feature(parent=parent_key, title=feature_name)
            created.put()
        self.redirect(self.request.referer)
Ejemplo n.º 8
0
 def test_basic_validation(self):
     """A feature named "Crm.Business" must fail model validation: names
     may contain only letters, numbers and underscore."""
     feature = Feature(
         account=self.account,
         name="Crm.Business",
         )
     try:
         feature.full_clean()
     except ValidationError as e:
         self.assertEqual(e.message_dict, {'__all__': [u'Name must start with a letter and contain only letters, numbers and underscore.']})
     else:
         # BUG FIX: the test previously passed silently when no
         # ValidationError was raised at all.
         self.fail("ValidationError not raised for invalid feature name")
Ejemplo n.º 9
0
 def post(self):
     """Store a new Feature under the idea identified by "idea-key" when a
     non-empty feature name was submitted; always bounce to the referer."""
     name = self.request.get("feature-name")
     if name != "":
         parent = ndb.Key(urlsafe=self.request.get("idea-key"))
         Feature(parent=parent, title=name).put()
     self.redirect(self.request.referer)
Ejemplo n.º 10
0
 def save(self, json_str):
     """Persist a Feature described by *json_str* and return it.

     Only the feature name and description are stored; scenario saving
     is currently disabled.
     """
     payload = json.loads(json_str)['feature']
     saved = Feature().fill(payload['feature_name'],
                            payload['feature_description'], '', '')
     saved.save()
     return saved
Ejemplo n.º 11
0
    def post(self, request, *args, **kwargs):
        """Create a Feature inside an AOI from posted GeoJSON, tagging it
        with the containing county when one matches.

        Validation failures come back as HTTP 400; every response echoes
        the Temp-Point-Id request header so the client can reconcile
        temporary map markers.
        """
        feature = None
        tpi = request.META.get('HTTP_TEMP_POINT_ID', "none")
        aoi = request.POST.get('aoi')
        geometry = request.POST.get('geometry')
        geojson = json.loads(geometry)
        properties = geojson.get('properties')

        aoi = AOI.objects.get(id=aoi)
        job = getattr(aoi, 'job')
        project = getattr(job, 'project')
        # the feature template (type) id rides along in the GeoJSON props
        template = properties.get('template') if properties else None

        # TODO: handle exceptions
        if template:
            template = FeatureType.objects.get(id=template)

        attrs = dict(aoi=aoi,
                     job=job,
                     project=project,
                     analyst=request.user,
                     template=template)

        geometry = geojson.get('geometry')
        geom_obj = GEOSGeometry(json.dumps(geometry))
        attrs['the_geom'] = geom_obj

        # look the county up by the geometry's centroid
        county_list = Counties.objects.filter(
            poly__contains=geom_obj.centroid.wkt)
        county = None
        if len(county_list):
            county = str(county_list[0].name)

        try:
            feature = Feature(**attrs)
            feature.full_clean()
            if not feature.properties:
                feature.properties = {}
            if county:
                feature.properties['county'] = county

            feature.save()
        except ValidationError as e:
            response = HttpResponse(content=json.dumps(
                dict(errors=e.messages)),
                                    mimetype="application/json",
                                    status=400)
            response['Temp-Point-Id'] = tpi
            return response
        # This feels a bit ugly but it does get the GeoJSON into the response
        feature_json = serializers.serialize('json', [
            feature,
        ])
        feature_list = json.loads(feature_json)
        feature_list[0]['geojson'] = feature.geoJSON(True)

        response = HttpResponse(json.dumps(feature_list),
                                mimetype="application/json")
        response['Temp-Point-Id'] = tpi
        return response
Ejemplo n.º 12
0
def process_shapefile(tenant, layer_id, srs):
    # Import an uploaded shapefile into the Layer with pk=layer_id for the
    # given tenant: reproject each record to web mercator (3857), hash it,
    # and bulk-create the Feature rows.  The uploaded files are always
    # removed afterwards (see the finally clause).
    connection.close()
    connection.set_schema(tenant)

    l = Layer.objects.get(pk=layer_id)

    shape_path = "%s/uploads/shapefile/%s/%s.shp" % (settings.MEDIA_ROOT,
                                                     tenant, l.pk)
    try:
        with fiona.open(shape_path, 'r') as collection:
            count = 0

            # layer bounds are kept in WGS84 (4326), independent of the
            # stored 3857 geometries
            min_bounds = OGRGeometry('POINT ({} {})'.format(
                collection.bounds[0], collection.bounds[1]),
                                     srs=srs).transform(4326, clone=True)
            max_bounds = OGRGeometry('POINT ({} {})'.format(
                collection.bounds[2], collection.bounds[3]),
                                     srs=srs).transform(4326, clone=True)

            l.bounds = min_bounds.coords + max_bounds.coords

            features = []
            for index, record in enumerate(collection):
                try:
                    geom = shape(record['geometry'])
                    transformed_geom = OGRGeometry(
                        geom.wkt, srs=srs).transform(3857, clone=True)
                    transformed_geom_collection = GeometryCollection(
                        transformed_geom.geos)

                    # stable content hash stored as the 'shaid' property
                    s = hashlib.sha1()
                    s.update(transformed_geom_collection.ewkb)

                    properties = record['properties']
                    properties['fid'] = index
                    properties['shaid'] = s.hexdigest()
                    features.append(
                        Feature(layer=l,
                                geometry=transformed_geom_collection,
                                properties=properties))
                    count += 1
                except Exception as e:
                    # best-effort import: a bad record is reported and
                    # skipped rather than aborting the whole layer
                    print "Feature excepton", e

            if count == 0:
                raise Exception("Layer needs to have at least one feature")

            Feature.objects.bulk_create(features)

            field_names = collection.schema['properties'].keys()
            field_names.append("fid")
            l.field_names = field_names
            l.properties = collection.schema['properties']
            l.schema = collection.schema
            l.status = 0
            l.save()
    finally:
        # clean up every uploaded shapefile component (.shp/.dbf/.shx/...)
        for path in glob.glob("%s/uploads/shapefile/%s/%s.*" %
                              (settings.MEDIA_ROOT, tenant, l.pk)):
            os.remove(path)
Ejemplo n.º 13
0
 def post(self):
     """Delete the idea identified by "idea-key" together with every
     Feature stored under it, then redirect back to the referer."""
     idea_key = ndb.Key(urlsafe=self.request.get("idea-key"))
     for child in Feature.query(ancestor=idea_key):
         child.key.delete()
     idea_key.delete()
     self.redirect(self.request.referer)
Ejemplo n.º 14
0
    def post(self):
        """Remove the idea given by "idea-key" and all of its child Feature
        entities, then send the user back where they came from."""
        key = ndb.Key(urlsafe=self.request.get("idea-key"))
        children = Feature.query(ancestor=key)
        for child in children:
            child.key.delete()
        key.delete()
        self.redirect(self.request.referer)
def importDatabase(filename, user):
    '''Import the uploaded Excel file into the database, then delete it.

    The last column of each row is taken as the diagnosis; every other
    column becomes a Feature named after its column header, attached to
    the current user's classifier.  Afterwards the classifier's count of
    distinct feature names is refreshed.
    '''
    path = os.path.join(current_app.config['UPLOAD_FOLDER'], filename)
    df = pd.read_excel(path)
    user_id = _request_ctx_stack.top.current_user.get('sub')
    classifier = Classifier.query.filter_by(user_id=user_id).first()

    # (removed unused `rows = df.shape[1]` left over from an earlier draft)
    for index, row in df.iterrows():
        new_patient = Patient(user_id=user,
                              status="undiag",
                              diagnose=str(row[row.size - 1]))
        # every column except the trailing diagnosis column is a feature
        for idx, r in enumerate(row):
            if idx != row.size - 1:
                feature = Feature(featureName=df.columns[idx],
                                  featureValue=str(r),
                                  classifier_id=classifier.id)
                new_patient.features.append(feature)

        db.session.add(new_patient)
        db.session.commit()

    # refresh the number of distinct feature names for this classifier
    r = Feature.query.with_entities(
        Feature.featureName).filter_by(classifier_id=classifier.id).distinct()
    classifier.numberOfFeatureTypes = r.count()
    db.session.add(classifier)
    db.session.commit()

    os.remove(path)
Ejemplo n.º 16
0
def makeFeature(feature):
    """Convert a parsed XML feature element into a Feature instance.

    Reads the ``id`` and ``label`` attributes plus the TYPE/START/END
    child elements; id, start and end are coerced to int.
    """
    return Feature(id=int(feature.get('id')),
                   label=feature.get('label'),
                   type=feature.findtext("TYPE"),
                   start=int(feature.findtext("START")),
                   end=int(feature.findtext("END")))
Ejemplo n.º 17
0
def getStructuredFeaturesFromKey(key):
    """Return a list-of-lists view of every Feature stored under *key*.

    Each entry is [urlsafe key, title, info, structured links,
    canvas_url, last_touch_date_time].
    """
    structured = []
    for feature in Feature.query(ancestor=key):
        links = getStructuredLinks(feature.linkText, feature.linkUrl)
        structured.append([feature.key.urlsafe(), feature.title,
                           feature.info, links, feature.canvas_url,
                           feature.last_touch_date_time])
    return structured
Ejemplo n.º 18
0
def map_features(map_id):
    """GET: return all features of the map as a FeatureCollection.
    POST: append a feature built from the JSON body and echo it back.

    404 for an unknown map, 400 for a POST without a JSON body.
    """
    m = db.session.query(Map).get(map_id)
    if not m:
        abort(404)

    if request.method != 'POST':
        return jsonify(FeatureCollection([f.to_dict() for f in m.features]))

    if not request.json:
        abort(400)

    feature = Feature(request.json)
    m.features.append(feature)
    db.session.add(m)
    db.session.commit()
    return jsonify(feature.to_dict())
Ejemplo n.º 19
0
def add_feature(data, user):
    """Create and persist a Feature from *data* (NAME/GROUP keys),
    recording *user* as the last editor.  Returns the saved instance."""
    created = Feature()
    created.name = data[NAME]
    created.group = data[GROUP]
    created.updated_by = user
    created.save()
    return created
Ejemplo n.º 20
0
 def load_features_csv(self, input_file, file_id):
     """Parse a delimited peak-list file into Feature objects.

     Supported layouts:
       * 6 columns: id, m/z, RT, into, maxo, intb (XCMS-style);
         the feature intensity is taken from maxo.
       * 5 columns: id, m/z, RT, intensity, identification (ignored).
       * 4 columns: id, m/z, RT, intensity.
     Rows with any other column count are skipped.  Returns [] when the
     file does not exist.
     """
     features = []
     if not os.path.exists(input_file):
         return features
     delim = self.detect_delimiter(input_file)
     with open(input_file, 'rb') as csvfile:
         reader = csv.reader(csvfile, delimiter=delim)
         next(reader, None)  # skip the headers
         for elements in reader:
             feature = None
             if len(elements) == 6:
                 feature_id = utils.num(elements[0])
                 mz = utils.num(elements[1])
                 rt = utils.num(elements[2])
                 feature = Feature(feature_id=feature_id, mass=mz, rt=rt, intensity=0)
                 feature.into = utils.num(elements[3])  # integrated peak intensity
                 feature.maxo = utils.num(elements[4])  # maximum peak intensity
                 feature.intb = utils.num(elements[5])  # baseline corrected integrated peak intensities
                 feature.intensity = feature.maxo  # we will use this for now
             elif len(elements) in (4, 5):
                 # the 5th column (identification), when present, is unused
                 feature_id = utils.num(elements[0])
                 mz = utils.num(elements[1])
                 rt = utils.num(elements[2])
                 intensity = utils.num(elements[3])
                 feature = Feature(feature_id, mz, rt, intensity, file_id)
             # BUG FIX: a malformed row used to re-append the previous
             # iteration's feature (or raise NameError on the first row).
             if feature is not None:
                 features.append(feature)
     return features
Ejemplo n.º 21
0
    def post(self, request, *args, **kwargs):
        """Create a Feature inside an AOI from posted GeoJSON, tagging it
        with the containing county when one matches.

        Validation failures return HTTP 400; every response echoes the
        Temp-Point-Id request header so the client can match temporary
        map markers to saved features.
        """
        feature = None
        tpi = request.META.get('HTTP_TEMP_POINT_ID', "none")
        aoi = request.POST.get('aoi')
        geometry = request.POST.get('geometry')
        geojson = json.loads(geometry)
        properties = geojson.get('properties')

        aoi = AOI.objects.get(id=aoi)
        job = getattr(aoi, 'job')
        project = getattr(job, 'project')
        # the feature template (type) id rides along in the GeoJSON props
        template = properties.get('template') if properties else None

        # TODO: handle exceptions
        if template:
            template = FeatureType.objects.get(id=template)

        attrs = dict(aoi=aoi,
                     job=job,
                     project=project,
                     analyst=request.user,
                     template=template)

        geometry = geojson.get('geometry')
        geom_obj = GEOSGeometry(json.dumps(geometry))
        attrs['the_geom'] = geom_obj

        # look the county up by the geometry's centroid
        county_list = Counties.objects.filter(poly__contains=geom_obj.centroid.wkt)
        county = None
        if len(county_list):
            county = str(county_list[0].name)

        try:
            feature = Feature(**attrs)
            feature.full_clean()
            if not feature.properties:
                feature.properties = {}
            if county:
                feature.properties['county'] = county

            feature.save()
        except ValidationError as e:
            response =  HttpResponse(content=json.dumps(dict(errors=e.messages)), mimetype="application/json", status=400)
            response['Temp-Point-Id'] = tpi
            return response
        # This feels a bit ugly but it does get the GeoJSON into the response
        feature_json = serializers.serialize('json', [feature,])
        feature_list = json.loads(feature_json)
        feature_list[0]['geojson'] = feature.geoJSON(True)

        response = HttpResponse(json.dumps(feature_list), mimetype="application/json")
        response['Temp-Point-Id'] = tpi
        return response
Ejemplo n.º 22
0
def edit(id=0):
    """Create or edit a Feature record.

    id == 0 creates a new record, id > 0 edits an existing one, negative
    ids are rejected with a flash message.  Without submitted form data
    the edit page is rendered; with a valid form the record is saved and
    the user is redirected to the display view.
    NOTE(review): the parameter shadows the builtin `id`; kept unchanged
    because the name is part of the route signature.
    """
    setExits()
    id = cleanRecordID(id)
    if id < 0:
        flash("That is not a valid ID")
        return redirect(g.listURL)
        
    if db:
        if not request.form:
            """ if no form object, send the form page """
            # get the Org record if you can
            rec = None
            if id > 0:
                rec = Feature.query.filter_by(ID=id).first_or_404()
                
            return render_template('feature/feature_edit.html', rec=rec)

        #have the request form
        if validForm():
            try:
                if int(id) > 0:
                    rec = Feature.query.get(id)
                else:
                    ## create a new record stub
                    rec = Feature(request.form['featureClass'],request.form['featureValue'])
                    db.session.add(rec)
                #update the record
                rec.featureClass = request.form['featureClass']
                rec.featureValue = request.form['featureValue']
                db.session.commit()
                
                return redirect(url_for('.display'))

            except Exception as e:
                flash(printException('Could not save record. Unknown Error',"error",e))

        # form not valid - redisplay
        return render_template('feature/feature_edit.html', rec=request.form)

    else:
        flash(printException('Could not open database'),"info")

    return redirect(url_for('.display'))
Ejemplo n.º 23
0
def getStructuredFeaturesFromKey(key):
    """Collect every Feature under *key* as a list of 6-element lists:
    [urlsafe key, title, info, structured links, canvas_url,
    last_touch_date_time]."""
    result = []
    for feat in Feature.query(ancestor=key):
        entry = [feat.key.urlsafe(), feat.title, feat.info,
                 getStructuredLinks(feat.linkText, feat.linkUrl),
                 feat.canvas_url, feat.last_touch_date_time]
        result.append(entry)
    return result
Ejemplo n.º 24
0
    def post(self, request, *args, **kwargs):
        """Create a Feature inside an AOI from a posted GeoJSON geometry.

        Reads 'aoi' (id) and 'geometry' (GeoJSON string) from the POST
        body, derives the owning job/project from the AOI, and saves the
        feature.  Validation failures return HTTP 400 with the error
        messages as JSON.
        """
        feature = None
        aoi = request.POST.get('aoi')
        geometry = request.POST.get('geometry')
        geojson = json.loads(geometry)
        properties = geojson.get('properties')

        aoi = AOI.objects.get(id=aoi)
        job = getattr(aoi, 'job')
        project = getattr(job, 'project')
        # the feature template (type) id rides along in the GeoJSON props
        template = properties.get('template') if properties else None

        # TODO: handle exceptions
        if template:
            template = FeatureType.objects.get(id=template)

        attrs = dict(aoi=aoi,
                     job=job,
                     project=project,
                     analyst=request.user,
                     template=template)

        # only the 'geometry' member of the GeoJSON becomes the stored geom
        geometry = geojson.get('geometry')
        attrs['the_geom'] = GEOSGeometry(json.dumps(geometry))

        try:
            response = Feature(**attrs)
            response.full_clean()
            response.save()
        except ValidationError as e:
            return HttpResponse(content=json.dumps(dict(errors=e.messages)),
                                mimetype="application/json",
                                status=400)

        # NOTE(review): wraps the model in a list, so the body is the
        # str() of the Feature — confirm clients expect this shape
        return HttpResponse([response], mimetype="application/json")
Ejemplo n.º 25
0
def create_feature():
    """Create a Feature from the JSON request body.

    Returns 201 with a confirmation message on success; 400 when the body
    is missing or lacks a required field.
    """
    if not request.json:
        abort(400)
    required = ('title', 'description', 'client_id', 'client_priority',
                'target_date', 'product_area_id')
    # BUG FIX: a missing key used to raise KeyError (an HTTP 500); reject
    # the request explicitly instead.
    if any(key not in request.json for key in required):
        abort(400)
    feature = Feature(title=request.json['title'],
                      description=request.json['description'],
                      client_id=request.json['client_id'],
                      client_priority=request.json['client_priority'],
                      target_date=request.json['target_date'],
                      product_area_id=request.json['product_area_id'])

    session.add(feature)
    session.commit()
    return jsonify({'message': 'Added feature'}), 201
Ejemplo n.º 26
0
    def _set_features(self, test_mode=False):
        """Populate self.features with Feature objects, one per tissue.

        For fsid == 0 ("small gene" test mode) genes are assigned at
        random from the database; otherwise the stored feature-set row
        for (fsid, tissue) supplies the name and gene list.  When
        self.gnid_min_max is set, assigned genes are filtered to that
        id range.
        """
        assigned_genes_limit = self.exp_setting.get_assigned_genes_limit()

        for idx, feature in enumerate(self.features):
            if feature is not None:
                # get gnids
                # TODO: cover for fsid == 0 which is the case of samll assgined gene at random
                if self.exp_setting.get_fsid(
                ) == 0:  # small gene mode (test mode)
                    # synthesize a name and draw random genes for this tissue
                    feature_name = 'SAG_RND_t%02d' % (idx + 1)
                    corresp_tissue = idx + 1
                    pars = ('g', corresp_tissue, '0,0',
                            assigned_genes_limit[idx])
                    random_assigned_gene = Pgsql.Common.select_data(
                        sqls.get_gene_tissues_random, pars)
                    assigned_gene = Pgsql.Common.to_list(random_assigned_gene)
                else:
                    # load the stored feature definition for this tissue;
                    # row shape: (name, comma-separated gnids, tissue)
                    res = Pgsql.Common.select_data(
                        sqls.get_features_info_by_fsid_corresp_tissue, (
                            self.exp_setting.get_fsid(),
                            idx + 1,
                        ))
                    feature_name = res[0][0]
                    assigned_genes_str = res[0][1]
                    corresp_tissue = res[0][2]
                    assigned_gene = list(
                        map(int, assigned_genes_str.split(',')))

                #if test_mode:
                #    pars = ('g', corresp_tissue, assigned_genes_str, assigned_genes_limit[idx])
                #    #print('Randomly assigned genes,  Feature name: %s' % feature_name)
                #    #print(sqls.get_gene_tissues_random % pars)
                #    random_assigned_gene = Pgsql.Common.select_data(sqls.get_gene_tissues_random, pars)
                #    assigned_gene = Pgsql.Common.to_list(random_assigned_gene)

                #print('random assigned gene: \n{}'.format(assigned_gene))

                # optionally restrict genes to the configured gnid range
                if self.gnid_min_max:
                    assigned_gene = [
                        gnid for gnid in assigned_gene
                        if self.gnid_min_max[0] <= gnid <= self.gnid_min_max[1]
                    ]
                #print (len(assigned_gene))
                #print(assigned_gene)
                #print('after applying min_max assigned gene: \n{}'.format(assigned_gene))

                self.features[idx] = Feature(
                    name=feature_name,
                    class_size=self.exp_setting.get_class_size(),
                    assigned_genes=assigned_gene,
                    corresp_tissue=corresp_tissue)
Ejemplo n.º 27
0
    def __init__(self, genes):
        """Split *genes* into known and unknown names.

        Each non-empty name is looked up via Feature.search; unknown
        names go to self.bad_entry (list), known names to self.good_entry
        (name -> feature_no dict).
        """
        bad_name = []
        good_name = {}

        for name in genes:
            if name:
                featRow = Feature.search(name)
                # idiom fix: compare against None with `is`, not `==`
                if featRow is None:
                    bad_name.append(name)
                else:
                    good_name[name] = featRow.feature_no
        self.bad_entry = bad_name
        self.good_entry = good_name
Ejemplo n.º 28
0
    def __init__(self, genes):
        """Partition *genes* into resolvable and unresolvable names.

        Non-empty names found by Feature.search land in self.good_entry
        (name -> feature_no); the rest in self.bad_entry (list).
        """
        bad_name = []
        good_name = {}

        for name in genes:
            if name:
                featRow = Feature.search(name)
                # idiom fix: identity comparison with None uses `is`
                if featRow is None:
                    bad_name.append(name)
                else:
                    good_name[name] = featRow.feature_no
        self.bad_entry = bad_name
        self.good_entry = good_name
Ejemplo n.º 29
0
 def load_features_sima(self, input_file, file_id):
     """Parse a SIMA tab-separated output file into Feature objects.

     Columns: mass, charge, intensity, RT[, gt_peak_id, gt_metabolite_id,
     gt_adduct_type].  Mass is divided by charge, and feature ids are
     assigned sequentially from 1.  Returns [] when the file is absent.
     """
     features = []
     if not os.path.exists(input_file):
         return features
     with open(input_file, 'rb') as csvfile:
         reader = csv.reader(csvfile, delimiter='\t')
         for feature_id, elements in enumerate(reader, start=1):
             mass = float(elements[0]) / float(elements[1])
             intensity = utils.num(elements[2])
             rt = utils.num(elements[3])
             feature = Feature(feature_id, mass, rt, intensity, file_id)
             if len(elements) > 4:
                 # ground-truth annotations, only present in synthetic data
                 utils.num(elements[4])  # gt peak id (parsed but unused)
                 feature.gt_metabolite = utils.num(elements[5])
                 feature.gt_adduct = elements[6]
             features.append(feature)
     return features
Ejemplo n.º 30
0
 def load_features_sima(self, input_file, file_id):
     """Read SIMA output (tab-separated) and build Feature objects.

     Each line carries mass, charge, intensity and RT; synthetic data may
     append ground-truth peak/metabolite/adduct columns.  Feature ids are
     numbered from 1 in file order.  Missing files yield [].
     """
     loaded = []
     if not os.path.exists(input_file):
         return loaded
     with open(input_file, 'rb') as csvfile:
         rows = csv.reader(csvfile, delimiter='\t')
         next_id = 1
         for elements in rows:
             mz = float(elements[0]) / float(elements[1])  # mass over charge
             feat = Feature(next_id, mz, utils.num(elements[3]),
                            utils.num(elements[2]), file_id)
             if len(elements) > 4:
                 # for debugging with synthetic data
                 utils.num(elements[4])  # ground-truth peak id (unused)
                 feat.gt_metabolite = utils.num(elements[5])
                 feat.gt_adduct = elements[6]
             loaded.append(feat)
             next_id += 1
     return loaded
Ejemplo n.º 31
0
    def post(self, request, *args, **kwargs):
        """Create a Feature inside an AOI from posted GeoJSON and return
        the serialized feature (with its GeoJSON) as the response body.

        Validation failures return HTTP 400 with the error messages as
        JSON.
        """
        feature = None
        aoi = request.POST.get('aoi')
        geometry = request.POST.get('geometry')
        geojson = json.loads(geometry)
        properties = geojson.get('properties')

        aoi = AOI.objects.get(id=aoi)
        job = getattr(aoi, 'job')
        project = getattr(job, 'project')
        # the feature template (type) id rides along in the GeoJSON props
        template = properties.get('template') if properties else None

        # TODO: handle exceptions
        if template:
            template = FeatureType.objects.get(id=template)

        attrs = dict(aoi=aoi,
                     job=job,
                     project=project,
                     analyst=request.user,
                     template=template)

        geometry = geojson.get('geometry')
        attrs['the_geom'] = GEOSGeometry(json.dumps(geometry))

        try:
            feature = Feature(**attrs)
            feature.full_clean()
            feature.save()
        except ValidationError as e:
            return HttpResponse(content=json.dumps(dict(errors=e.messages)),
                                mimetype="application/json",
                                status=400)

        # This feels a bit ugly but it does get the GeoJSON into the response
        feature_json = serializers.serialize('json', [
            feature,
        ])
        feature_list = json.loads(feature_json)
        feature_list[0]['geojson'] = feature.geoJSON(True)

        return HttpResponse(json.dumps(feature_list),
                            mimetype="application/json")
Ejemplo n.º 32
0
Archivo: views.py Proyecto: Bauerr/geoq
    def post(self, request, *args, **kwargs):
        """Create a Feature inside an AOI from posted GeoJSON and return
        the serialized feature (including its GeoJSON) as JSON.

        Validation failures return HTTP 400 with the error messages.
        """
        feature = None
        aoi = request.POST.get('aoi')
        geometry = request.POST.get('geometry')
        geojson = json.loads(geometry)
        properties = geojson.get('properties')

        aoi = AOI.objects.get(id=aoi)
        job = getattr(aoi, 'job')
        project = getattr(job, 'project')
        # the feature template (type) id rides along in the GeoJSON props
        template = properties.get('template') if properties else None

        # TODO: handle exceptions
        if template:
            template = FeatureType.objects.get(id=template)

        attrs = dict(aoi=aoi,
                     job=job,
                     project=project,
                     analyst=request.user,
                     template=template)

        geometry = geojson.get('geometry')
        attrs['the_geom'] = GEOSGeometry(json.dumps(geometry))

        try:
            feature = Feature(**attrs)
            feature.full_clean()
            feature.save()
        except ValidationError as e:
            return HttpResponse(content=json.dumps(dict(errors=e.messages)), mimetype="application/json", status=400)

        # This feels a bit ugly but it does get the GeoJSON into the response
        feature_json = serializers.serialize('json', [feature,])
        feature_list = json.loads(feature_json)
        feature_list[0]['geojson'] = feature.geoJSON(True)

        return HttpResponse(json.dumps(feature_list), mimetype="application/json")
Ejemplo n.º 33
0
def importexc(request):
    '''Import station point data from an uploaded Excel workbook.

    GET renders the upload form. POST converts the first worksheet of the
    uploaded file into a temporary ESRI shapefile (with a WGS 84 .prj), then
    loads geometry, attribute schema and per-feature attribute values into
    the database before removing the temporary directory.
    '''
    if request.method == "GET":
        form = ImportExcForm()
        return render_to_response("importexc.html",
                                  {'form': form})
    elif request.method == "POST":

        form = ImportExcForm(request.POST,
                             request.FILES)
        if form.is_valid():
            excfile = request.FILES['import_exc']
            character_encoding = request.POST['character_encoding']
            excel_file = xlrd.open_workbook(file_contents=excfile.read())
            # The shapefile is named after the first worksheet
            filename = excel_file.sheet_names()[0]
            dirpath = tempfile.mkdtemp()
            sh = excel_file.sheet_by_index(0)
            w = shapefile.Writer(shapefile.POINT)

            w.field('Station', 'I')
            w.field('Longitude', 'F')
            w.field('Latitude', 'F')
            w.field('Gravel_pc', 'F')
            w.field('Sand_pc', 'F')
            w.field('Mud_pc', 'F')

            for rownum in range(sh.nrows):
                if rownum == 0:
                    continue  # skip the header row

                x_coord = sh.cell_value(rowx=rownum, colx=1)
                y_coord = sh.cell_value(rowx=rownum, colx=2)

                w.point(x_coord, y_coord)

                w.record(Station=sh.cell_value(rowx=rownum, colx=0),
                         Latitude=sh.cell_value(rowx=rownum, colx=2),
                         Longitude=sh.cell_value(rowx=rownum, colx=1),
                         Gravel_pc=sh.cell_value(rowx=rownum, colx=3),
                         Sand_pc=sh.cell_value(rowx=rownum, colx=4),
                         Mud_pc=sh.cell_value(rowx=rownum, colx=5))

            w.save(os.path.join(dirpath, filename))

            # Write a .prj so the shapefile carries its spatial reference.
            # BUGFIX: the WKT keyword is SPHEROID (was misspelled SHEROID,
            # producing an unparsable projection file).
            epsg = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
            with open("%s.prj" % os.path.join(dirpath, filename), "w") as prj:
                prj.write(epsg)

            for item in os.listdir(dirpath):
                if item.endswith(".shp"):
                    shapefileName = item
                    datasource = ogr.Open(os.path.join(dirpath, shapefileName))
                    layer = datasource.GetLayer(0)
                    layerDefinition = layer.GetLayerDefn()
                    srcSpatialRef = layer.GetSpatialRef()
                    geometryType = layer.GetLayerDefn().GetGeomType()
                    geometryName = utils.ogrTypeToGeometryName(geometryType)

                    shpfile = Shpfile(
                        filename=shapefileName,
                        srs_wkt=srcSpatialRef.ExportToWkt(),
                        geom_type=geometryName,
                        encoding=character_encoding
                    )

                    shpfile.save()

                    # Mirror the shapefile's attribute schema in the database
                    attributes = []
                    layerDef = layer.GetLayerDefn()
                    for i in range(layerDef.GetFieldCount()):
                        fieldDef = layerDef.GetFieldDefn(i)
                        attr = Attribute(
                            shpfile=shpfile,
                            name=fieldDef.GetName(),
                            type=fieldDef.GetType(),
                            width=fieldDef.GetWidth(),
                        )
                        attr.save()
                        attributes.append(attr)

                    for i in range(layer.GetFeatureCount()):
                        srcFeature = layer.GetFeature(i)
                        srcGeometry = srcFeature.GetGeometryRef()
                        geometry = GEOSGeometry(srcGeometry.ExportToWkt())
                        geometry = utils.wrapGEOSGeometry(geometry)
                        geometryField = utils.calcGeometryField(geometryName)
                        args = {}
                        args['shpfile'] = shpfile
                        args[geometryField] = geometry
                        feature = Feature(**args)
                        feature.save()

                        # BUGFIX: attribute values must be stored for every
                        # feature; previously this loop sat outside the
                        # feature loop and only ran for the last feature.
                        for attr in attributes:
                            success, result = utils.getOGRFeatureAttribute(
                                attr,
                                srcFeature,
                                character_encoding
                            )
                            if not success:
                                # Roll back: drop the temp dir and the partial import
                                shutil.rmtree(dirpath)
                                shpfile.delete()
                                return result

                            attrValue = AttributeValue(
                                feature=feature,
                                attribute=attr,
                                value=result
                            )
                            attrValue.save()

            shutil.rmtree(dirpath)

    return HttpResponse("data imported!!")
Ejemplo n.º 34
0
def save_features(request, strain_id):
    """
    Parse an uploaded GFF file and store its feature lines for one strain.

    Tab-separated GFF column layout:
    seqid = line[0]
    source = line[1]
    type = line[2]
    start = line[3]
    end = line[4]
    score = line[5]
    strand = line[6] # "+", "-", or "."
    phase = line[7]
    attributes = line[8]

    'contig'/'chromosome' lines are used only to build a seqid -> Contig-name
    map; every other line becomes a Feature row. Assumes all contig lines
    precede feature lines. Redirects to /strains/ when done.
    """
    theStrain = Strain.objects.get(pk = strain_id)
    gffFile = request.FILES['gff_file']
    gff = gffFile.read()
    gff = gff.split('###')[0] # Throw away the sequence
    gff = [x.split('\t') for x in gff.splitlines() if x[0] != '#'] # Throw away the header comments. Now we're left with just the meat of the file

    # Maps GFF seqid -> the Contig name stored in the database
    contigMap = {}
    for seqid, source, featureType, start, end, score, strand, phase, attributes in gff:
        # Decode "key=v1,v2;key2=v3" into {key: [percent-decoded values]}
        attributeParts = attributes.split(';')
        attributeParts = [x.split('=') for x in attributeParts]
        attributeParts = [(x[0], x[1].split(',')) for x in attributeParts]
        attributeParts = [(x[0], [urllib.unquote(y) for y in x[1]]) for x in attributeParts]

        attributeDict = {}
        for key, value in attributeParts:
            attributeDict[key] = value

        if featureType == 'contig':
            # We need to add this to the contigMap
            try:
                # Prefer the dbxref accession (text after the last ':')
                contigName = attributeDict['dbxref'][0].split(':')[-1]
            except KeyError:
                contigName = attributeDict['ID'][0]
            contigMap[seqid] = contigName
        elif featureType == 'chromosome':
            contigMap[seqid] = seqid
        else: # This is an actual feature line. It is assumed that we have already gone through all the contig lines
            theContig = get_object_or_404(Contig, name=contigMap[seqid] ) # Get the Contig we're going to point to
            feature = Feature()
            feature.contig = theContig
            try:
                feature.feature_id = attributeDict['ID'][0]
            except KeyError:
                pass
            else:
                # This one has a name that might be found in the Reference table
                if feature.feature_id.find(theStrain.name) != -1:
                    # Yup, it's one we need to link to the Reference table
                    referenceName = feature.feature_id.split("_")[0]
                    feature.reference = Reference.objects.get(feature_name = referenceName)
                else: # just try and see if there is a reference with this unmodified feature_id
                    try:
                        feature.reference = Reference.objects.get(feature_name = feature.feature_id)
                    except ObjectDoesNotExist:
                        pass

            if 'Parent' in attributeDict:
                # Parent features must already exist on this contig (404 otherwise)
                parent = get_object_or_404(Feature, feature_id = attributeDict['Parent'][0], contig = theContig)
                feature.parent = parent
            feature.feature_type = featureType
            feature.start_coord = int(start)
            feature.stop_coord = int(end)
            feature.strand = strand

            feature.createdDate = datetime.datetime.now()
            feature.modifiedDate = datetime.datetime.now()

            feature.save()
    return HttpResponseRedirect('/strains/')
    def migrate_master_config_to_gateway(self):
        """Migrate thermostat configuration from the master eeprom to the gateway DB.

        Reads every heating/cooling thermostat config from the master, stores
        the valid ones via ThermostatControllerGateway, then flips the
        'thermostats_gateway' feature flag. Returns True when the migration
        succeeds or was already done, False on any failure.
        """
        # TODO: Migrate this code since it uses legacy master models and helpers such as eeprom controller and
        #  master communicator. This cannot be imported/used in Core+ context
        from master.eeprom_models import ThermostatConfiguration, CoolingConfiguration

        # validate if valid config
        # 1. output0 <= 240
        # 2. sensor < 32 or 240
        # 3. timing check e.g. '42:30' is not valid time (255)
        # 4. valid PID params

        def is_valid(config_):
            # BUGFIX: a configured output is one <= 240 (255 means "unset");
            # the comparison was inverted and rejected every valid config.
            if config_.get('output0', 255) > 240:
                return False
            if config_.get('pid_p', 255) == 255:
                return False
            sensor = config_.get('sensor', 255)
            if not (sensor < 32 or sensor == 240):
                return False
            # Reject auto_* schedules containing the 255 sentinel or the
            # '42:30' placeholder, both of which mark an invalid time.
            for key, value in config_.iteritems():
                if key.startswith('auto_') and ('42:30' in value
                                                or 255 in value):
                    return False
            return True

        self._master_communicator.start()

        try:
            # 0. check if migration already done
            f = Feature.get(name='thermostats_gateway')
            if not f.enabled:
                # 1. try to read all config from master and save it in the db
                try:
                    for thermostat_id in xrange(32):
                        for mode, config_mapper in {
                                'heating': ThermostatConfiguration,
                                'cooling': CoolingConfiguration
                        }.iteritems():
                            config = self._eeprom_controller.read(
                                config_mapper, thermostat_id).serialize()
                            if is_valid(config):
                                ThermostatControllerGateway.create_or_update_thermostat_from_v0_api(
                                    thermostat_id, config, mode)
                except Exception:
                    logger.exception(
                        'Error occurred while migrating thermostats configuration from master eeprom.'
                    )
                    return False

                # 2. disable all thermostats on the master
                try:
                    for thermostat_id in xrange(32):
                        # TODO: use new master API to disable thermostat
                        # self._master_communicator.xyz
                        pass
                except Exception:
                    logger.exception(
                        'Error occurred while stopping master thermostats.')
                    return False

                # 3. write flag in database to enable gateway thermostats
                f.enabled = True
                f.save()
            return True
        except Exception:
            logger.exception('Error migrating master thermostats')
            return False
Ejemplo n.º 36
0
    def execute_gene(self, feature_rows, strain_id):
        """Process the GFF rows belonging to a single gene.

        Walks the rows in order, registering the Gene (once), each Transcript
        (once), and every transcript-feature row into the pending write lists
        (self.genes_to_write / transcripts_to_write / features_to_write).
        Rows are skipped when the gene is filtered out or not a proper 'gene'
        entry. Depends on row order: the gene row sets gene_id before its
        transcript/feature rows are seen.
        """
        features = {}
        sequence = None
        transcript = None

        gene_id = None
        min_start = None
        max_end = None

        for feature_row in feature_rows: # Loop through annotation rows in the gff file, all related to the current gene

            # keep track of start and end
            start = feature_row[3]
            end = feature_row[4]
            direction = "forward" if feature_row[6] == "+" else "reverse"
            chromosome_id = feature_row[0]

            feature_type = feature_row[2]
            attribs = feature_row[8].strip()

            # This causes bugs.
            # if feature_type == "gene": # Handle gene entries
                # gene_id = attribs.split(";")[0].split(":")[1] # grab the gene ID - we'll want this for later

            new_gene_id = self.find_attribs_value("ID=Gene", attribs)
            if new_gene_id != None:

                # only deal with proper genes. setting gene_id to None means nothing else will be processed.
                # so it will essentially skip non-"gene" entries.
                if feature_type != "gene":
                    gene_id = None
                    continue

                # Check against filter list if there is one
                if self.filter_genes != None and new_gene_id not in self.filter_genes:
                    # filter list exists, and gene is not in filter list
                    # skip this gene
                    return

                gene_id = new_gene_id

                # add the Gene entry - if it hasn't been already
                if gene_id not in self.genes_seen:
                    gene = Gene(gene_id)
                    self.genes_to_write.append(gene)
                    self.genes_seen[gene_id] = gene

            elif gene_id != None : # Handle transcript entries - if the gene is legit
                transcript_id = self.find_attribs_value("ID=Transcript", attribs)
                if transcript_id != None: # it's a transcript entry

                    # add the Transcript entry - if it hasn't been already
                    transcript_id = self.ensure_unique_transcript_id(transcript_id)

                    if transcript_id not in self.transcripts_seen:
                        transcript = Transcript(
                            id=transcript_id, gene_id=gene_id
                        )
                        self.transcripts_to_write.append(transcript)
                        self.transcripts_seen[transcript.id] = transcript

                else: # Handle transcript feature entries

                    # for some reason, features for a given strain/transcript
                    # combination are not always added

                    transcript_id = self.find_attribs_value("Parent=Transcript", attribs)

                    if transcript_id != None: # it's a transcript feature entry
                        # put a filter here? some elements are not worth storing?
                        self.features_to_write.append(Feature(
                            transcript_id=transcript_id,
                            type_id=feature_row[2],
                            strain_id=strain_id,
                            chromosome_id=chromosome_id,
                            start=start,
                            end=end,
                            direction=direction
                        ))

                    else:
                        pass # this happens for pseudogenes and TEs - which we aint interested in
Ejemplo n.º 37
0
 def _feature_individual(cls, genotype: Genotype, feature_function: Callable[[Genotype], int], index: int=0):
     """Build a Feature at *index* whose number is computed from *genotype*."""
     number = feature_function(genotype)
     return Feature(index=index, number=number)
Ejemplo n.º 38
0
         mime_type='image/' + im.format.lower(),
         source='AVA'
     )
     db_image.save()
     image_id = db_image.pk
 finally:
     try:
         feature = Feature.objects.get(image=image_id, identity='VGG16P5_resize')
     except Feature.DoesNotExist:
         try:
             feat = ext.extract(img_filename)
             feat = norm.normalize(feat)
             Feature.objects(image=image_id, identity='VGG16P5_resize').update_one(
                 set__image=image_id,
                 set__dimension=feat.size,
                 set__model='VGG16P5',
                 set__data=feat.tobytes(),
                 upsert=True
             )
         except:
             pass
     try:
         aestheticInfo = AestheticInfo.objects.get(image=image_id)
     except AestheticInfo.DoesNotExist:
         try:
             AestheticInfo.objects(image=image_id).update_one(
                 set__image=image_id,
                 set__score=score,
                 set__tags=tag,
                 upsert=True
             )
Ejemplo n.º 39
0
 def decorate(request, *args, **kwargs):
     """Run the wrapped view only when the feature flag is enabled; 403 otherwise."""
     if Feature.check_enabled(name):
         return view_function(request, *args, **kwargs)
     return HttpResponseForbidden()
Ejemplo n.º 40
0
 def get(self):
     """Look up gazetteer features by place name and write them as JSON.

     Query params: 'place' (comma-separated names), plus 'type', 'source',
     'limit' and 'offset' (the latter four are read but unused below —
     presumably handled by code outside this view; verify).
     First tries exact Feature matches; when several match, reports whether
     their bounding boxes all intersect. Falls back to keyword search via
     FeatureIndex, returning cross-product pairs whose boxes intersect.
     """
     keywords = []
     place = self.request.get('place', None)
     category = self.request.get('type', None)
     source = self.request.get('source', None)
     limit = self.request.get_range('limit', min_value=1, max_value=100, default=10)
     offset = self.request.get_range('offset', min_value=0, default=0)

     if place:
         # Search Feature on id where id is each name in palce (names comma separated)
         features = Feature.search(place)
         logging.info('FEATURES=%s' % [f.key for f in features])
         n = len(features)
         results = []
         if n == 1: # Exact match on one Feature
             results.append(simplejson.loads(features[0].j))
         elif n > 1: # Exact match on multiple Features
             # Build a bounding box per feature from its stored JSON blob
             bboxes = []
             for feature in features:
                 data = simplejson.loads(feature.j)
                 bboxes.append(
                     BoundingBox.create(
                         data['minx'],
                         data['maxy'],
                         data['maxx'],
                         data['miny']))
             if BoundingBox.intersect_all(bboxes): # Return all features with intersection=True
                 results = dict(
                     features=[simplejson.loads(feature.j) for feature in features],
                     intersection=True)
             else: # Return all features with intersection=False
                 results = dict(
                     features=[simplejson.loads(feature.j) for feature in features],
                     intersection=False)
         if len(results) > 0: # If exact results return them
             self.response.headers["Content-Type"] = "application/json"
             self.response.out.write(simplejson.dumps(results))
             return

     # Search FeatureIndex on keywords derived from each place name
     if place:
         results = set()
         search_results = FeatureIndex.search_place_keywords(place)
         if len(search_results) == 1: # Keyword FeatureIndex hit on single name
             name, features = search_results.popitem()
             results = [simplejson.loads(feature.j) for feature in features]
             self.response.headers["Content-Type"] = "application/json"
             self.response.out.write(simplejson.dumps(results))
             return
         # Perform cross-product intersection tests and return all matching pairs
         for name,features in search_results.iteritems():
             for feature in features:
                 for other_name, other_features in search_results.iteritems():
                     if name == other_name:
                         continue
                     data = simplejson.loads(feature.j)
                     fbbox = BoundingBox.create(data['minx'], data['maxy'], data['maxx'], data['miny'])
                     for other_feature in other_features:
                         data = simplejson.loads(other_feature.j)
                         obbox = BoundingBox.create(data['minx'], data['maxy'], data['maxx'], data['miny'])
                         logging.info('feature=%s, other_feature=%s, fbbox=%s, obbox=%s' % (feature, other_feature, fbbox, obbox))
                         if BoundingBox.intersect_all([fbbox, obbox]):
                             results.update([feature, other_feature])
         self.response.headers["Content-Type"] = "application/json"
         self.response.out.write(simplejson.dumps(list(results)))
Ejemplo n.º 41
0
def migrate(migrator, database, fake=False, **kwargs):
    """Seed the default thermostat group and the gateway-thermostats feature flag."""
    seed_rows = [
        (ThermostatGroup, dict(number=0, name='default', on=True)),
        (Feature, dict(name='thermostats_gateway', enabled=False)),
    ]
    for model, attrs in seed_rows:
        model.get_or_create(**attrs)
    def _match_precursor_bins(self, file_data, mass_tol, rt_tol):
        """Align precursor clusters across files by mass/RT and return matched peak sets.

        Converts each file's clusters into alignment features (mass/RT =
        member averages), greedily matches files pairwise with
        MaxWeightedMatching, maps matched rows back to the original cluster
        objects, then expands each cluster tuple into aligned peak features
        via _match_adduct_features. Returns a list of AlignmentResults.
        Asserts that no features are lost at any stage.
        """
        if self.verbose:
            print "Matching precursor bins"
        sys.stdout.flush()

        # check we aren't missing any features
        input_features_count = 0
        for j in range(len(self.data_list)):
            features = self.data_list[j].features
            input_features_count += len(features)

        alignment_files = []
        # Remembers which original cluster each alignment feature came from
        alignment_feature_to_precursor_cluster = {}
        clustered_features_count = 0
        for j in range(len(self.data_list)):

            file_clusters = file_data[j]

            # TODO: using posterior mass and rts
            # print 'Using posterior mass and rts'
            # file_post_masses = [cluster.mu_mass for cluster in file_clusters]
            # file_post_rts = [cluster.mu_rt for cluster in file_clusters]

            # just using hte average of all the members
            file_post_masses = []
            file_post_rts = []
            for cluster in file_clusters:
                assert len(cluster.best_clustering) > 0, 'Empty cluster detected!!'
                avg_mass = np.array([poss.transformed_mass for peak, poss in cluster.best_clustering]).mean()
                avg_rt = np.array([peak.rt for peak, poss in cluster.best_clustering]).mean()
                file_post_masses.append(avg_mass)
                file_post_rts.append(avg_rt)
                clustered_features_count += len(cluster.best_clustering)

            file_post_fingerprints = [cluster.word_counts for cluster in file_clusters]
            this_file = AlignmentFile("file_" + str(j), self.verbose)

            # One alignment feature (and one single-feature row) per cluster
            peak_id = 0
            row_id = 0
            for n in range(len(file_clusters)):

                cluster = file_clusters[n]
                mass = file_post_masses[n]
                rt = file_post_rts[n]
                intensity = 0
                fingerprint = file_post_fingerprints[n]

                # initialise alignment feature
                alignment_feature = Feature(peak_id, mass, rt, intensity, this_file, fingerprint=fingerprint)
                alignment_feature_to_precursor_cluster[alignment_feature] = cluster

                # initialise row
                alignment_row = AlignmentRow(row_id)
                alignment_row.features.append(alignment_feature)

                peak_id = peak_id + 1
                row_id = row_id + 1
                this_file.rows.append(alignment_row)

            alignment_files.append(this_file)

        # do the matching: fold each file into the running matched result
        Options = namedtuple('Options', 'dmz drt exact_match verbose use_fingerprint')
        my_options = Options(dmz=mass_tol, drt=rt_tol, exact_match=False, verbose=self.verbose, use_fingerprint=False)
        matched_results = AlignmentFile("", True)
        num_files = len(alignment_files)
        input_count = 0
        output_count = 0
        for i in range(num_files):
            if self.verbose:
                print "Processing file %d" % i
            alignment_file = alignment_files[i]
            input_count += len(alignment_file.get_all_features()) + len(matched_results.get_all_features())
            matched_results.reset_aligned_status()
            alignment_file.reset_aligned_status()
            matcher = MaxWeightedMatching(matched_results, alignment_file, my_options)
            matched_results = matcher.do_matching()
            output_count += len(matched_results.get_all_features())
            # matching must neither drop nor duplicate features
            assert input_count == output_count, "input %d output %d" % (input_count, output_count)

        # map the results back to the original bin objects
        results = []
        for row in matched_results.rows:
            temp = []
            for alignment_feature in row.features:
                cluster = alignment_feature_to_precursor_cluster[alignment_feature]
                temp.append(cluster)
            tup = tuple(temp)
            results.append(tup)

        # turn this into a matching of peak features
        total_aligned_features = 0
        alignment_results = []
        for bin_res in results:
            matched_list = self._match_adduct_features(bin_res)
            for features in matched_list:
                total_aligned_features += len(features)
                res = AlignmentResults(peakset=features, prob=1.0)
                alignment_results.append(res)

        assert input_features_count == clustered_features_count
        assert input_features_count == total_aligned_features
        return alignment_results
Ejemplo n.º 43
0
    def build_graph():
        """Wire up the IOC dependency graph for the gateway service.

        Reads the gateway config file, imports the platform-appropriate
        modules (Core+ vs classic, master vs gateway thermostats) so their
        classes self-register in the IOC graph, and registers every
        Injectable value (serial ports, database files, locks, cloud
        settings) the service needs. Registration order matters: modules
        must be imported before Injectable values they consume are used.
        """
        config = ConfigParser()
        config.read(constants.get_config_file())

        # Shared locks for the sqlite-backed controllers
        config_lock = Lock()
        scheduling_lock = Lock()
        metrics_lock = Lock()

        config_database_file = constants.get_config_database_file()

        # TODO: Clean up dependencies more to reduce complexity

        # IOC announcements
        # When below modules are imported, the classes are registerd in the IOC graph. This is required for
        # instances that are used in @Inject decorated functions below, and is also needed to specify
        # abstract implementations depending on e.g. the platform (classic vs core) or certain settings (classic
        # thermostats vs gateway thermostats)
        from power import power_communicator, power_controller
        from plugins import base
        from gateway import (metrics_controller, webservice, scheduling,
                             observer, gateway_api, metrics_collector,
                             maintenance_controller, comm_led_controller,
                             users, pulses, config as config_controller,
                             metrics_caching, watchdog)
        from cloud import events
        _ = (metrics_controller, webservice, scheduling, observer, gateway_api,
             metrics_collector, maintenance_controller, base, events,
             power_communicator, comm_led_controller, users, power_controller,
             pulses, config_controller, metrics_caching, watchdog)
        if Platform.get_platform() == Platform.Type.CORE_PLUS:
            from gateway.hal import master_controller_core
            from master_core import maintenance, core_communicator, ucan_communicator
            from master import eeprom_extension  # TODO: Obsolete, need to be removed
            _ = master_controller_core, maintenance, core_communicator, ucan_communicator
        else:
            from gateway.hal import master_controller_classic
            from master import maintenance, master_communicator, eeprom_extension
            _ = master_controller_classic, maintenance, master_communicator, eeprom_extension

        # Gateway thermostats are used on Core+ or when the feature flag is enabled
        thermostats_gateway_feature = Feature.get_or_none(
            name='thermostats_gateway')
        thermostats_gateway_enabled = thermostats_gateway_feature is not None and thermostats_gateway_feature.enabled
        if Platform.get_platform(
        ) == Platform.Type.CORE_PLUS or thermostats_gateway_enabled:
            from gateway.thermostat.gateway import thermostat_controller_gateway
            _ = thermostat_controller_gateway
        else:
            from gateway.thermostat.master import thermostat_controller_master
            _ = thermostat_controller_master

        # IPC
        Injectable.value(message_client=MessageClient('openmotics_service'))

        # Cloud API
        parsed_url = urlparse(config.get('OpenMotics', 'vpn_check_url'))
        Injectable.value(gateway_uuid=config.get('OpenMotics', 'uuid'))
        Injectable.value(cloud_endpoint=parsed_url.hostname)
        Injectable.value(cloud_port=parsed_url.port)
        Injectable.value(cloud_ssl=parsed_url.scheme == 'https')
        Injectable.value(cloud_api_version=0)

        # User Controller
        Injectable.value(user_db=config_database_file)
        Injectable.value(user_db_lock=config_lock)
        Injectable.value(token_timeout=3600)
        Injectable.value(
            config={
                'username': config.get('OpenMotics', 'cloud_user'),
                'password': config.get('OpenMotics', 'cloud_pass')
            })

        # Configuration Controller
        Injectable.value(config_db=config_database_file)
        Injectable.value(config_db_lock=config_lock)

        # Energy Controller
        power_serial_port = config.get('OpenMotics', 'power_serial')
        Injectable.value(power_db=constants.get_power_database_file())
        if power_serial_port:
            Injectable.value(power_serial=RS485(
                Serial(power_serial_port, 115200, timeout=None)))
        else:
            # No power serial configured: disable the energy stack entirely
            Injectable.value(power_serial=None)
            Injectable.value(power_communicator=None)
            Injectable.value(power_controller=None)

        # Pulse Controller
        Injectable.value(pulse_db=constants.get_pulse_counter_database_file())

        # Scheduling Controller
        Injectable.value(
            scheduling_db=constants.get_scheduling_database_file())
        Injectable.value(scheduling_db_lock=scheduling_lock)

        # Master Controller
        controller_serial_port = config.get('OpenMotics', 'controller_serial')
        Injectable.value(
            controller_serial=Serial(controller_serial_port, 115200))
        if Platform.get_platform() == Platform.Type.CORE_PLUS:
            from master_core.memory_file import MemoryFile, MemoryTypes
            core_cli_serial_port = config.get('OpenMotics', 'cli_serial')
            Injectable.value(cli_serial=Serial(core_cli_serial_port, 115200))
            Injectable.value(passthrough_service=None)  # Mark as "not needed"
            Injectable.value(
                memory_files={
                    MemoryTypes.EEPROM: MemoryFile(MemoryTypes.EEPROM),
                    MemoryTypes.FRAM: MemoryFile(MemoryTypes.FRAM)
                })
            # TODO: Remove; should not be needed for Core
            Injectable.value(
                eeprom_db=constants.get_eeprom_extension_database_file())
        else:
            passthrough_serial_port = config.get('OpenMotics',
                                                 'passthrough_serial')
            Injectable.value(
                eeprom_db=constants.get_eeprom_extension_database_file())
            if passthrough_serial_port:
                Injectable.value(
                    passthrough_serial=Serial(passthrough_serial_port, 115200))
                from master.passthrough import PassthroughService
                _ = PassthroughService  # IOC announcement
            else:
                Injectable.value(passthrough_service=None)

        # Metrics Controller
        Injectable.value(metrics_db=constants.get_metrics_database_file())
        Injectable.value(metrics_db_lock=metrics_lock)

        # Webserver / Presentation layer
        Injectable.value(ssl_private_key=constants.get_ssl_private_key_file())
        Injectable.value(ssl_certificate=constants.get_ssl_certificate_file())
Ejemplo n.º 44
0
    def test06_geometryfield(self):
        "Testing GeometryField."
        # Persist one feature per geometry kind
        Feature(name='Point', geom=Point(1, 1)).save()
        Feature(name='LineString',
                geom=LineString((0, 0), (1, 1), (5, 5))).save()
        Feature(name='Polygon',
                geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0),
                                        (0, 0)))).save()
        Feature(name='GeometryCollection',
                geom=GeometryCollection(
                    Point(2, 2), LineString((0, 0), (2, 2)),
                    Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0),
                                       (0, 0))))).save()

        # Round-trip each one and verify type and coordinates survive
        stored_point = Feature.objects.get(name='Point')
        self.assertEqual(True, isinstance(stored_point.geom, Point))
        self.assertEqual((1.0, 1.0), stored_point.geom.tuple)
        stored_line = Feature.objects.get(name='LineString')
        self.assertEqual(True, isinstance(stored_line.geom, LineString))
        self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)),
                         stored_line.geom.tuple)

        stored_poly = Feature.objects.get(name='Polygon')
        self.assertEqual(True, isinstance(stored_poly.geom, Polygon))
        stored_collection = Feature.objects.get(name='GeometryCollection')
        self.assertEqual(True, isinstance(stored_collection.geom, GeometryCollection))
        # The collection's third member is the same polygon
        self.assertEqual(stored_poly.geom, stored_collection.geom[2])
Ejemplo n.º 45
0
def start_experiment(request, save=True):
    '''
    Handles presses of the 'Start Experiment' and 'Test' buttons in the browser 
    interface
    '''
    #make sure we don't have an already-running experiment
    if exp_tracker.status.value != '':
        http_request_queue.append((request, save))
        return _respond(dict(status="running", msg="Already running task, queuelen=%d!" % len(http_request_queue)))

    # Try to start the task, and if there are any errors, send them to the browser interface
    try:
        data = json.loads(request.POST['data'])

        task =  Task.objects.get(pk=data['task'])
        Exp = task.get(feats=data['feats'].keys())

        entry = TaskEntry(subject_id=data['subject'], task=task)
        params = Parameters.from_html(data['params'])
        entry.params = params.to_json()
        kwargs = dict(subj=entry.subject, task_rec=task, feats=Feature.getall(data['feats'].keys()),
                      params=params)

        # Save the target sequence to the database and link to the task entry, if the task type uses target sequences
        if issubclass(Exp, experiment.Sequence):
            print "creating seq"
            print "data['sequence'] POST data"
            print data['sequence']
            seq = Sequence.from_json(data['sequence'])
            seq.task = task
            if save:
                seq.save()
            entry.sequence = seq
            kwargs['seq'] = seq
        else:
            entry.sequence_id = -1
        
        response = dict(status="testing", subj=entry.subject.name, 
                        task=entry.task.name)

        if save:
            # Save the task entry to database
            entry.save()

            # Link the features used to the task entry
            for feat in data['feats'].keys():
                f = Feature.objects.get(pk=feat)
                entry.feats.add(f.pk)

            response['date'] = entry.date.strftime("%h %d, %Y %I:%M %p")
            response['status'] = "running"
            response['idx'] = entry.id

            # Give the entry ID to the runtask as a kwarg so that files can be linked after the task is done
            kwargs['saveid'] = entry.id
        
        # Start the task FSM and exp_tracker
        exp_tracker.runtask(**kwargs)

        # Return the JSON response
        return _respond(response)

    except Exception as e:
        # Generate an HTML response with the traceback of any exceptions thrown
        import cStringIO
        import traceback
        err = cStringIO.StringIO()
        traceback.print_exc(None, err)
        err.seek(0)
        return _respond(dict(status="error", msg=err.read()))