def __create_cards(self, player, access_token, limit=20):
    """Build up to *limit* Card rows for *player* from their Facebook friends.

    Fetches friend attributes through the Graph API, shuffles them, and
    creates one Card plus "likes"/"friends" Attribute rows per friend that
    has both counts. Returns the (shuffled) friend list.
    """
    client = FacebookClient(access_token)
    player_friends = client.get_friends_attributes()
    random.shuffle(player_friends)
    if player.user.first_name == 'Atol':
        print('DEBUG DOIDO')
        # Debug hack: move friends named 'Natan Costa Lima' to the front.
        # Bug fix: the original swap wrote the matched element to index 0 and
        # then immediately overwrote index `nada` with the old front element,
        # duplicating/undoing entries because it never used the match's own
        # index. Swap the tracked front slot with the match's actual index.
        front = 0
        for idx, friend in enumerate(player_friends):
            if friend['name'] == 'Natan Costa Lima':
                player_friends[front], player_friends[idx] = (
                    player_friends[idx], player_friends[front])
                front += 1
    count = 1
    logging.debug(player_friends)
    for f in player_friends:
        likes_count = f['likes_count']
        friend_count = f['friend_count']
        # Skip friends with missing/zero counts — they would make empty cards.
        if not likes_count or not friend_count:
            continue
        c = Card(player=player, name=f['name'],
                 pic_square=f['pic_square'], order=count)
        c.save()
        Attribute(card=c, name="likes", attr=likes_count).save()
        Attribute(card=c, name="friends", attr=friend_count).save()
        count += 1
        if count == limit + 1:
            break
    return player_friends
def ParseGraphLocal(data, image, verbose=False):
    """Parse one scene-graph dict into a Graph, skipping broken references.

    Relationships whose subject/object and attributes whose object are not
    present in the object map are dropped; skips are tallied in the module
    global `count_skips` ([rel_skips, attr_skips]).
    """
    global count_skips
    objects = []
    object_map = {}
    relationships = []
    attributes = []
    for obj in data['objects']:
        object_map, o_ = MapObject(object_map, obj)
        objects.append(o_)
    for rel in data['relationships']:
        if rel['subject_id'] in object_map and rel['object_id'] in object_map:
            object_map, s = MapObject(object_map, {'object_id': rel['subject_id']})
            v = rel['predicate']
            object_map, o = MapObject(object_map, {'object_id': rel['object_id']})
            rid = rel['relationship_id']
            relationships.append(Relationship(rid, s, v, o, rel['synsets']))
        else:
            # Skip this relationship if we don't have the subject and object in
            # the object_map for this scene graph. Some data is missing in this way.
            count_skips[0] += 1
    if 'attributes' in data:
        for attr in data['attributes']:
            a = attr['attribute']
            if a['object_id'] in object_map:
                attributes.append(
                    Attribute(attr['attribute_id'], a['object_id'], a['names'],
                              a['synsets']))
            else:
                count_skips[1] += 1
    if verbose:
        # Bug fix: the original used a Python-2 print statement, a syntax
        # error under Python 3.
        print('Skipped {} rels, {} attrs total'.format(*count_skips))
    return Graph(image, objects, relationships, attributes)
def ParseGraph(data, image):
    """Build a Graph for *image* from raw API data.

    Creates Object instances from the bounding boxes, then resolves the
    relationship and attribute records against them by id.
    """
    object_map = {}
    objects = []
    # Create the Objects
    for box in data['bounding_boxes']:
        names = [entry['name'] for entry in box['boxed_objects']]
        synsets = [ParseSynset(entry['object_canon'])
                   for entry in box['boxed_objects']]
        obj = Object(box['id'], box['x'], box['y'], box['width'],
                     box['height'], names, synsets)
        object_map[box['id']] = obj
        objects.append(obj)
    # Create the Relationships
    relationships = [
        Relationship(rel['id'], object_map[rel['subject']], rel['predicate'],
                     object_map[rel['object']],
                     ParseSynset(rel['relationship_canon']))
        for rel in data['relationships']]
    # Create the Attributes
    attributes = [
        Attribute(atr['id'], object_map[atr['subject']], atr['attribute'],
                  ParseSynset(atr['attribute_canon']))
        for atr in data['attributes']]
    return Graph(image, objects, relationships, attributes)
def ParseGraphVRD(d):
    """Build a Graph from a VRD-format record *d*.

    Objects are numbered by their position in d['objects']; relationships
    reference objects through the two indices in r['objects'].
    """
    image = Image(d['photo_id'], d['filename'], d['width'], d['height'], '', '')
    index_to_object = {}
    graph_objects = []
    graph_attributes = []
    for obj_idx, raw in enumerate(d['objects']):
        box = raw['bbox']
        node = Object(obj_idx, box['x'], box['y'], box['w'], box['h'],
                      raw['names'], [])
        index_to_object[obj_idx] = node
        graph_objects.append(node)
        # Attach this object's attributes, numbered within the object.
        for attr_idx, raw_attr in enumerate(raw['attributes']):
            graph_attributes.append(
                Attribute(attr_idx, node, raw_attr['attribute'], []))
    graph_relationships = []
    for rel_idx, raw_rel in enumerate(d['relationships']):
        subject = index_to_object[raw_rel['objects'][0]]
        target = index_to_object[raw_rel['objects'][1]]
        predicate = raw_rel['relationship']
        graph_relationships.append(
            Relationship(rel_idx, subject, predicate, target, []))
    return Graph(image, graph_objects, graph_relationships, graph_attributes)
def update(self):
    """Reset per-run state and reload this player's Attributes from the datastore.

    Populates self.attributes (keyed by name), warms self.attribute_types,
    and prepares the default "health"/"energy" attributes if missing.
    """
    self.has_run = False
    self.attributes = {}
    self.effects = {}
    self.restricted = set()
    attrs = Attribute.all().ancestor(self.player).fetch(1000)
    for attr in attrs:
        self.attributes[attr.name] = attr
        # Bug fix: attribute_types is keyed by *name* (see the assignment
        # below); the original tested the entity object itself, so the
        # cache check always missed and every attribute was re-fetched.
        if attr.name not in self.attribute_types:
            self.attribute_types[attr.name] = cached_get_by_key_name(AttributeType, attr.name)
    # if you're missing default attributes, create them
    for name in ["health", "energy"]:
        if name not in self.attributes:
            self.attributes[name] = Attribute.get_or_prepare(self.player, name)
        if name not in self.attribute_types:
            self.attribute_types[name] = cached_get_by_key_name(AttributeType, name)
def save_attributes():
    """Flask endpoint: create an Attribute from the request's JSON body.

    Returns the created attribute as JSON, or a 400 for invalid input and a
    500 when the database write fails.
    """
    try:
        payload = request.get_json()
    except Exception:
        # Bug fix: return a 400 status like the sibling endpoints instead of
        # an implicit 200 with an error body.
        return jsonify(error='Invalid JSON.'), 400
    validation = attributes_validate_required(payload)
    if validation['errors']:
        return jsonify(error={'name': 'invalid_model',
                              'errors': validation['errors']}), 400
    attribute = Attribute(**payload)
    try:
        db.session.add(attribute)
        db.session.commit()
        return jsonify(attribute.json())
    except Exception:
        # IntegrityError subclasses Exception, so listing both was redundant.
        traceback.print_exc()
        db.session.rollback()
        # Bug fix: the original fell off the end here, making the view
        # return None (a server error with no body) after a rollback.
        return jsonify(error='Could not save attribute.'), 500
def get_attributes():
    """Flask endpoint: list all Attributes, newest id first, as JSON."""
    try:
        attributes = Attribute.query.\
            order_by(desc(Attribute.id_attribute)). \
            all()
        # Bug fix: the original loop variable was named `Attribute`, shadowing
        # the model class inside the comprehension.
        return jsonify([attribute.json() for attribute in attributes])
    except Exception:
        traceback.print_exc()
        return jsonify(error='Invalid JSON.'), 400
def save_attributes(business_id, hood_jarray):
    """Persist one Attribute row per "Name: Flag" string for a business.

    Each entry in *hood_jarray* looks like "SomeName: True"/"...: False";
    the text before the first ": " becomes attribute_name and the last
    segment decides the boolean flag.
    """
    for hood in hood_jarray:
        neighborhood = Attribute()
        neighborhood.business_id = business_id
        split_array = hood.split(": ")
        neighborhood.attribute_name = split_array[0]
        # Idiom: the comparison is already a bool — the `True if ... else
        # False` ternary was redundant.
        neighborhood.flag = split_array[-1] == "True"
        neighborhood.save()
def patch_attributes():
    """Flask endpoint: update an existing Attribute identified by id_attribute.

    Returns the submitted attribute data as JSON on success, 400 for invalid
    input, 500 when the database update fails.
    """
    try:
        payload = request.get_json()
    except Exception:
        # Bug fix: return 400 like the sibling endpoints instead of an
        # implicit 200 with an error body.
        return jsonify(error='Invalid JSON.'), 400
    validation = attributes_validate_required(payload)
    if validation['errors']:
        return jsonify(error={'name': 'invalid_model',
                              'errors': validation['errors']}), 400
    # NOTE: this echoes the submitted payload back; it is not re-read from
    # the database after the UPDATE.
    attribute = Attribute(**payload)
    try:
        # Don't shadow the builtin `id`; drop the pk from the update payload.
        attr_id = int(payload['id_attribute'])
        del payload['id_attribute']
        db.session.query(Attribute).filter(
            Attribute.id_attribute == attr_id).update(payload)
        db.session.commit()
        return jsonify(attribute.json())
    except Exception:
        traceback.print_exc()
        db.session.rollback()
        # Bug fix: the original returned None after a rollback.
        return jsonify(error='Could not update attribute.'), 500
def post_smart_report(request):
    """Ingest a SMART report: upsert the Disk, store the report and its attributes.

    Expects 'server', 'body' and 'unix_device' in request.DATA; returns the
    parsed info/attrs and the disk pk.
    """
    server_name = request.DATA.get('server')
    body = request.DATA.get('body')
    unix_device = request.DATA.get('unix_device')
    # Bug fix: the original resolved the Server and built the parser twice;
    # the second Server.objects.get(name=server) was even handed a Server
    # instance instead of the name string.
    server = Server.objects.get(name=server_name)
    parser = SmartParse(body, '6.2')
    try:
        disk = Disk.objects.get(pk=parser.get_pk())
    except ObjectDoesNotExist:
        # Py2-only `except X, e:` syntax removed; `e` was never used.
        disk = Disk(pk=parser.get_pk(), server=server,
                    unix_device=unix_device, **parser.info)
        disk.save()
    report = SmartReport(disk=disk, server=server,
                         firmware=parser.info['firmware'], text=body,
                         parsed=datetime.now(), ip=request.META['REMOTE_ADDR'])
    report.save()
    for attr in parser.attrs:
        Attribute(smart_report=report, name=attr['name'], value=attr['value'],
                  worst=attr['worst'], thresh=attr['thresh'],
                  failed=attr['failed'], raw_value=attr['raw_value']).save()
    return Response({'info': parser.info, 'attrs': parser.attrs,
                     'pk': parser.get_pk()})
def run_effects_at(self, ref_time):
    """Apply every pending effect delta to the player's attributes as of *ref_time*.

    Each entry in ``self.effects`` maps an attribute name to a delta; the
    attribute is fast-forwarded to ``ref_time``, shifted by the delta,
    clamped to its type's range, and persisted. Runs at most once.

    Raises:
        Alert: if this object has already run, or if a restricted attribute
            type would be pushed outside [min_value, max_value].
    """
    if self.has_run:
        raise Alert("This Thespian has already completed running")
    for name, delta in self.effects.items():
        attr_type = self.attribute_types[name]
        # create new attributes if you are acted upon by them
        attr = Attribute.get_or_prepare(self.player, name, attr_type)
        if not attr.is_saved():
            # Fresh (unsaved) attribute: anchor its timeline at ref_time.
            attr.latest_date = ref_time
        current_value = self.fast_forward(attr, ref_time)["value"]
        new_value = current_value + delta
        # Restricted types hard-fail when out of range instead of clamping.
        # NOTE(review): membership is tested against attr_type objects —
        # presumably self.restricted holds AttributeType entities; confirm.
        if attr_type in self.restricted and (new_value < attr_type.min_value or new_value > attr_type.max_value):
            raise Alert("%s has insufficient %s" % (self.player.nickname, name))
        # Non-restricted values are clamped into the allowed range.
        new_value = min(max(new_value, attr_type.min_value), attr_type.max_value)
        attr.latest_value = new_value
        attr.latest_date = ref_time
        attr.put()
    self.has_run = True
def parse_graph(data, image):
    """Helper to parse a Graph object from API data.

    Builds Object instances from the bounding boxes, then resolves
    relationship and attribute records against them by id.
    """
    objects = []
    object_map = {}
    relationships = []
    attributes = []
    # Create the Objects
    for obj in data['bounding_boxes']:
        names = []
        synsets = []
        for bbx_obj in obj['boxed_objects']:
            names.append(bbx_obj['name'])
            synsets.append(parse_synset(bbx_obj['object_canon']))
        object_ = Object(obj['id'], obj['x'], obj['y'], obj['width'],
                         obj['height'], names, synsets)
        object_map[obj['id']] = object_
        objects.append(object_)
    # Create the Relationships
    for rel in data['relationships']:
        relationships.append(
            Relationship(rel['id'], object_map[rel['subject']],
                         rel['predicate'], object_map[rel['object']],
                         parse_synset(rel['relationship_canon'])))
    # Create the Attributes  (stray no-op `pass` statements removed)
    for atr in data['attributes']:
        attributes.append(
            Attribute(atr['id'], object_map[atr['subject']], atr['attribute'],
                      parse_synset(atr['attribute_canon'])))
    return Graph(image, objects, relationships, attributes)
def event_details(request, event_id=None):
    """Django view: read or update the details (attributes/streams) of an Event.

    POST body: JSON with a "details" list; each entry carries details_type
    ("attribute" or "stream"), name, value and mode ("set" or "append").
    GET: returns the event's attributes and streams as JSON.
    """
    # Set a detail/attribute
    if request.method == "POST":
        try:
            event = Event.objects.get(pk=event_id)
            data = json.loads(request.read())
            payload = {
                "event_id": event.id,
                "details": [],
            }
            for detail in data["details"]:
                details_type = detail["details_type"]
                key = detail["name"]
                value = detail["value"]
                mode = detail["mode"]
                if details_type == "attribute":
                    attributes = Attribute.objects.filter(event=event, key=key)
                    # "append" (or no existing row) creates a new Attribute;
                    # "set" collapses any duplicates down to the first row
                    # and overwrites its value.
                    if not attributes or mode == "append":
                        attribute = Attribute(event=event, key=key, value=value)
                        attribute.save()
                    elif mode == "set":
                        for attribute in attributes[1:]:
                            attribute.delete()
                        attribute = attributes[0]
                        attribute.value = value
                        attribute.save()
                elif details_type == "stream":
                    stream = Stream.objects.filter(event=event, name=key)
                    if stream:
                        stream = stream.get()
                        stream_text = value
                        if mode == "append":
                            # Append concatenates onto the existing text.
                            stream_text = stream.text + stream_text
                        stream.text = stream_text
                    else:
                        stream = Stream(event=event, name=key, text=value)
                    stream.save()
                payload["details"].append(detail)
            # Broadcast the accepted details to subscribers.
            publish("event_details", "update", payload, event_id=event.id)
            return json_response({"msg": ""})
        except IntegrityError as err:
            return json_response({"msg": str(err)}, "error", 400)
        except DatabaseError as err:
            return json_response({"msg": str(err)}, "error", 500)
    # Get Details for an event
    if request.method == "GET":
        try:
            data = {}
            event = Event.objects.get(pk=event_id)
            data["event_id"] = event.id
            data["attributes"] = event.attributes()
            data["streams"] = [stream.to_dict() for stream in event.streams()]
            return json_response(data)
        except Event.DoesNotExist as err:
            return json_response({"msg": str(err)}, "error", 404)
    # NOTE(review): any other HTTP method falls through and returns None —
    # presumably methods are restricted upstream; confirm.
def setUp(self):
    """Seed the test database with a '1L' attribute in the 'Opakowanie' group."""
    packaging_group = AttributeGroup.get(name='Opakowanie')
    attribute = Attribute(group=packaging_group, name='1L')
    attribute.save()
def create_attribute(name, description, user_id): name = name.title() attribute = Attribute(name=name, description=description, created_by=user_id) response = attribute_pb2.Attribute(result="failed") try: with session_scope() as session: session.add(attribute) response = attribute_pb2.Attribute( result="success", status=status.STATUS_201_CREATED, message="Attribute created successfully!") session.commit() except IntegrityError as e: with session_scope() as session: try: attribute = session.query(Attribute).filter( Attribute.is_deleted == True, Attribute.name.ilike(name)).first() except DataError: attribute = None if attribute: attribute.name = name attribute.description = description attribute.created_at = datetime.now() attribute.created_by = user_id attribute.updated_at = None attribute.updated_by = None attribute.is_deleted = False attribute.deleted_at = None attribute.deleted_by = None session.commit() response = attribute_pb2.Attribute( result="success", status=status.STATUS_201_CREATED, message="Attribute created successfully!") return response else: session.commit() response.message = "Attribute already exists with same name!" response.status = status.STATUS_403_FORBIDDEN return response except Exception as e: print(e) response.message = "Unexpected error occurred!" response.status = status.STATUS_500_INTERNAL_SERVER_ERROR pass return response
from models import Attribute
from db import session

# Remove attributes on road 88198 whose points all have a positive `a`
# value, deleting in batches of ~100 ids to keep each transaction small.
attributes = list(Attribute.query_by_road(session, 88198))
total = len(attributes)
pending_ids = []


def _flush(id_batch):
    """Delete the given attribute ids in one statement and commit."""
    session.query(Attribute).filter(
        Attribute.id.in_(id_batch)).delete(synchronize_session='fetch')
    session.commit()


for index, attribute in enumerate(attributes):
    if all(point.a > 0 for point in attribute.points):
        pending_ids.append(attribute.id)
    print("{}/{}".format(index, total))
    if index % 100 == 0 and pending_ids:
        _flush(pending_ids)
        pending_ids.clear()

# Flush whatever is left after the loop.
if pending_ids:
    _flush(pending_ids)
def importexc(request):
    """Django view: import an Excel sheet of station samples into the GIS models.

    GET renders the upload form. POST converts the uploaded workbook into a
    temporary point shapefile (Station/Longitude/Latitude/Gravel/Sand/Mud),
    then loads it into Shpfile/Attribute/Feature/AttributeValue records.
    """
    if request.method == "GET":
        form = ImportExcForm()
        return render_to_response("importexc.html", {'form': form})
    elif request.method == "POST":
        form = ImportExcForm(request.POST, request.FILES)
        if not form.is_valid():
            # Bug fix: the original fell through and returned None for an
            # invalid form; re-render the form instead.
            return render_to_response("importexc.html", {'form': form})
        excfile = request.FILES['import_exc']
        character_encoding = request.POST['character_encoding']
        excel_file = xlrd.open_workbook(file_contents=excfile.read())
        filename = excel_file.sheet_names()[0]
        dirpath = tempfile.mkdtemp()
        sh = excel_file.sheet_by_index(0)
        # Build a point shapefile from the spreadsheet rows.
        w = shapefile.Writer(shapefile.POINT)
        w.field('Station', 'I')
        w.field('Longitude', 'F')
        w.field('Latitude', 'F')
        w.field('Gravel_pc', 'F')
        w.field('Sand_pc', 'F')
        w.field('Mud_pc', 'F')
        for rownum in range(1, sh.nrows):  # row 0 is the header row
            x_coord = sh.cell_value(rowx=rownum, colx=1)
            y_coord = sh.cell_value(rowx=rownum, colx=2)
            w.point(x_coord, y_coord)
            w.record(Station=sh.cell_value(rowx=rownum, colx=0),
                     Latitude=sh.cell_value(rowx=rownum, colx=2),
                     Longitude=sh.cell_value(rowx=rownum, colx=1),
                     Gravel_pc=sh.cell_value(rowx=rownum, colx=3),
                     Sand_pc=sh.cell_value(rowx=rownum, colx=4),
                     Mud_pc=sh.cell_value(rowx=rownum, colx=5))
        w.save(os.path.join(dirpath, filename))
        # Bug fix: "SHEROID" is not a valid WKT keyword — it must be
        # "SPHEROID", otherwise the .prj file is rejected by WKT parsers.
        epsg = ('GEOGCS["WGS 84",DATUM["WGS_1984",'
                'SPHEROID["WGS84",6378137,298.257223563]],'
                'PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]')
        with open("%s.prj" % os.path.join(dirpath, filename), "w") as prj:
            prj.write(epsg)
        # Locate the shapefile we just wrote.
        shapefileName = None
        for item in os.listdir(dirpath):
            if item.endswith(".shp"):
                shapefileName = item
        datasource = ogr.Open(os.path.join(dirpath, shapefileName))
        layer = datasource.GetLayer(0)
        srcSpatialRef = layer.GetSpatialRef()
        geometryType = layer.GetLayerDefn().GetGeomType()
        geometryName = utils.ogrTypeToGeometryName(geometryType)
        shpfile = Shpfile(
            filename=shapefileName,
            srs_wkt=srcSpatialRef.ExportToWkt(),
            geom_type=geometryName,
            encoding=character_encoding)
        shpfile.save()
        # Mirror the layer's field definitions as Attribute rows.
        attributes = []
        layerDef = layer.GetLayerDefn()
        for i in range(layerDef.GetFieldCount()):
            fieldDef = layerDef.GetFieldDefn(i)
            attr = Attribute(
                shpfile=shpfile,
                name=fieldDef.GetName(),
                type=fieldDef.GetType(),
                width=fieldDef.GetWidth(),
            )
            attr.save()
            attributes.append(attr)
        # Import every feature and its attribute values.
        for i in range(layer.GetFeatureCount()):
            srcFeature = layer.GetFeature(i)
            srcGeometry = srcFeature.GetGeometryRef()
            geometry = GEOSGeometry(srcGeometry.ExportToWkt())
            geometry = utils.wrapGEOSGeometry(geometry)
            geometryField = utils.calcGeometryField(geometryName)
            args = {'shpfile': shpfile, geometryField: geometry}
            feature = Feature(**args)
            feature.save()
            for attr in attributes:
                success, result = utils.getOGRFeatureAttribute(
                    attr, srcFeature, character_encoding)
                if not success:
                    # Clean up the temp dir and the half-imported shapefile,
                    # then return the error response from the helper.
                    shutil.rmtree(dirpath)
                    shpfile.delete()
                    return result
                AttributeValue(feature=feature, attribute=attr,
                               value=result).save()
        shutil.rmtree(dirpath)
        return HttpResponse("data imported!!")
def parse_to_ABCDModel(self):
    """Very raw implementation of the parser.

    Parses the stored BioCASe/ABCD XML response (self.xml_str) into an
    ABCDModel with a DataProvider and one CollectionItem per unit.

    Returns:
        ABCDModel, or None when the response has no organisation name.

    Raises:
        TypeError: if no XML string was supplied.
        ValueError: if the record-count attributes are not integers.
    """
    if self.xml_str is None:
        raise TypeError('Expected \'str\' type \'None\' provided!')
    e = xml.etree.ElementTree.fromstring(self.xml_str.encode('utf-8'))
    content = e.find(c.abcd_path_content.format(self.biocase_ns_str), self.ns)
    record_count = content.attrib['recordCount']
    record_dropped = content.attrib['recordDropped']
    total_searched_hits = content.attrib['totalSearchHits']
    if record_count is None:
        raise ValueError('\'recordCount\' can\'t be None.')
    if record_dropped is None:
        raise ValueError('\'recordDropped\' can\'t be None.')
    if total_searched_hits is None:
        raise ValueError('\'totalSearchHits\' Count can\'t be None.')
    try:
        record_count = int(record_count)
        record_dropped = int(record_dropped)
        total_searched_hits = int(total_searched_hits)
    except ValueError:
        # Bug fix: the original did `raise ('failed...'.format(...))` — raising
        # a plain string is a TypeError, and the unescaped outer braces in the
        # format string made .format() itself fail. Raise a real exception
        # with the braces escaped.
        raise ValueError(
            'failed to parse attributes to int. provided: '
            '{{recordCount: {0}, recordDropped: {1}, totalSearchHits: {2}}}'
            .format(record_count, record_dropped, total_searched_hits))
    technical_contact_name = e.find(
        c.abcd_path_technical_contact_name.format(self.biocase_ns_str,
                                                  self.abcd_ns_str), self.ns)
    technical_contact_name = technical_contact_name.text if technical_contact_name is not None else None
    technical_contact_email = e.find(
        c.abcd_path_technical_contact_email.format(self.biocase_ns_str,
                                                   self.abcd_ns_str), self.ns)
    technical_contact_email = technical_contact_email.text if technical_contact_email is not None else None
    organisation_name = e.find(
        c.abcd_path_org_representation_name.format(self.biocase_ns_str,
                                                   self.abcd_ns_str), self.ns)
    organisation_name = organisation_name.text if organisation_name is not None else None
    organisation_abbrv = e.find(
        c.abcd_path_org_representation_abbrv.format(self.biocase_ns_str,
                                                    self.abcd_ns_str), self.ns)
    organisation_abbrv = organisation_abbrv.text if organisation_abbrv is not None else None
    organisation_address = e.find(
        c.abcd_path_org_address.format(self.biocase_ns_str,
                                       self.abcd_ns_str), self.ns)
    organisation_address = organisation_address.text if organisation_address is not None else None
    units = e.findall(
        c.abcd_path_unit.format(self.biocase_ns_str, self.abcd_ns_str),
        self.ns)
    if organisation_name is None:
        return None
    data_provider = DataProvider()
    data_provider.org_name = organisation_name
    data_provider.org_abbrv = organisation_abbrv
    data_provider.address = organisation_address
    data_provider.contact_name = technical_contact_name
    data_provider.contact_email = technical_contact_email
    abcd_model = ABCDModel()
    abcd_model.provider = data_provider
    abcd_model.record_count = record_count
    abcd_model.record_dropped = record_dropped
    abcd_model.total_searched_hits = total_searched_hits
    for child in units:
        unit = {}
        collection_item = CollectionItem()
        collection_item.ABCD_model = abcd_model
        # Fix: `iteritems()` is Python-2-only and the yielded value was
        # immediately overwritten anyway — iterate the keys instead.
        for key in c.attrs:
            name = c.attrs[key]['name']
            _relative_path = c.attrs[key][
                'relative_path']  # this should be iterated.
            _full_path = c.attrs[key][
                'full_path']  # this should be iterated.
            value = child.find(_relative_path.format(self.abcd_ns_str),
                               self.ns)
            value = value.text if value is not None else None
            unit[name] = value
            attribute = Attribute()
            attribute.id = key  # this will be 'k' for key (in the dict)
            attribute.name = name
            attribute.relative_path = _relative_path
            attribute.full_path = _full_path
            attribute.values = [value]
            collection_item.add_attribute(attribute)
            if name == 'SourceInstitutionID' and data_provider.org_abbrv is None:
                data_provider.org_abbrv = value
        abcd_model.add_collection_item(collection_item)
    # NOTE(review): this assumes every collection item has a numeric unit-id
    # attribute value — int(None) would raise here; confirm upstream data.
    abcd_model.flag = max([
        int(
            collection_item.get_attribute(
                c.attrs[c.name_unit_id]['name']).values[0])
        for collection_item in abcd_model.collection_items
    ])
    return abcd_model