def _map_entity(self, entity):
    """Copy `entity` into self.to_namespace under kind self.to_kind.

    Raises:
        DataError: if the entity has an ancestor that has not yet been
            copied into the target namespace.
    """
    new_key = Key.from_path(
        self.to_kind, entity.key().id_or_name(), namespace=self.to_namespace
    )

    parent = entity.parent()
    if parent:
        # If the entity has an ancestor then we need to make sure that that
        # ancestor exists in the new namespace as well.
        # BUG FIX: was `parent.is_or_name()` (AttributeError at runtime);
        # the datastore Key API method is `id_or_name()`.
        new_parent_key = Key.from_path(
            parent.kind(), parent.id_or_name(), namespace=self.to_namespace
        )
        # Get() on a list returns None entries for missing keys.
        new_parent_exists = Get([new_parent_key])[0]
        if not new_parent_exists:
            raise DataError(
                "Trying to copy entity with an ancestor (%r) to a new namespace but the "
                "ancestor does not exist in the new namespace. Copy the ancestors first."
                % entity.key()
            )

    def txn():
        existing = Get([new_key])[0]
        if existing and not self.overwrite_existing:
            return

        # Numeric ids must be reserved so the id allocator never hands the
        # same id out again in the target namespace.
        if isinstance(entity.key().id_or_name(), (int, long)):
            reserve_id(self.to_kind, entity.key().id_or_name(), self.to_namespace)

        new_entity = clone_entity(entity, new_key)
        Put(new_entity)

    RunInTransaction(txn)
def post(self):
    """Record a doctor-entered diagnosis (symptoms/disease/medicine) against
    a patient, rendering the outcome through message.html."""
    query = self.request.get('symptoms')
    id= self.request.get('id')
    did= self.request.get('did')
    # Disease field is space-separated in the form input.
    dis = self.request.get('disease').split(' ')
    med = self.request.get('medicine')
    sym = summarize.doctor_symptoms(query)
    q= patient.all(keys_only=True)
    # NOTE(review): ">=" returns the first patient key at or AFTER `id`,
    # not necessarily an exact match — confirm this is intended.
    q.filter("__key__ >=", Key.from_path('patient', id))
    res = q.get()
    if res:
        d= doctor.all(keys_only=True)
        # NOTE(review): same ">=" approximate-match pattern for the doctor key.
        d.filter("__key__ >=", Key.from_path('doctor', did))
        resd = d.get()
        if resd:
            result ="Updated successfully"
            # New record entity parented under the patient key.
            rec = record(parent=res, symptoms = sym, disease =dis,medicine=med, doc = did)
            rec.rec_date =datetime.datetime.now().date()
            rec.put()
        else:
            result ="Invalid DocID"
    else:
        result ="Invalid PatientID"
    template_values = { 'result': result }
    template = jinja_environment.get_template('message.html')
    self.response.out.write(template.render(template_values))
def create_key(db_table, value):
    """Build a datastore Key for `db_table` from a GAEKey or a raw id/name.

    Returns None for integer ids below 1, which are invalid datastore ids.
    """
    if isinstance(value, GAEKey):
        ancestor = value.parent_key()
        real_parent = ancestor.real_key() if ancestor is not None else None
        return Key.from_path(db_table, value.id_or_name(), parent=real_parent)

    if isinstance(value, (int, long)) and value < 1:
        return None
    return Key.from_path(db_table, value)
def acquire_marker(identifier):
    """Acquire (or create) the UniqueMarker for `identifier` on behalf of
    the entity identified by `entity_key`.

    NOTE(review): `entity_key` is a free variable here — presumably this
    function is defined inside a closure that provides it; confirm.

    Raises:
        IntegrityError: if another live entity already owns the marker.
    """
    # Key.from_path expects None for an empty namespace, but Key.namespace() returns ''
    namespace = entity_key.namespace() or None
    identifier_key = Key.from_path(UniqueMarker.kind(), identifier, namespace=namespace)
    marker = UniqueMarker.get(identifier_key)
    if marker:
        # If the marker instance is None, and the marker is older then 5 seconds then we wipe it out
        # and assume that it's stale.
        if not marker.instance and (datetime.datetime.utcnow() - marker.created).seconds > 5:
            marker.delete()
        elif marker.instance and marker.instance != entity_key and key_exists(marker.instance):
            # The identifier encodes "<table>|field:value|..." — decode it to
            # produce a readable constraint-violation message.
            fields_and_values = identifier.split("|")
            table_name = fields_and_values[0]
            fields_and_values = fields_and_values[1:]
            fields = [ x.split(":")[0] for x in fields_and_values ]
            raise IntegrityError("Unique constraint violation for kind {} on fields: {}".format(table_name, ", ".join(fields)))
        else:
            # The marker is ours anyway
            return marker
    marker = UniqueMarker(
        key=identifier_key,
        instance=entity_key if entity_key.id_or_name() else None,  # May be None if unsaved
        created=datetime.datetime.utcnow()
    )
    marker.put()
    return marker
def post(self):
    """Render a patient's stored record history as HTML via message.html."""
    id= self.request.get('id')
    q= patient.all()
    # NOTE(review): ">=" returns the first patient key at or AFTER `id`,
    # not necessarily an exact match — confirm this is intended.
    q.filter("__key__ >=", Key.from_path('patient', id))
    res = q.get()
    result=""
    if res:
        result = result + "Name: "+res.name+"<br> ID: "+id+"<br>"
        # Records are parented under the patient entity; cap at 100.
        recs = record.all().ancestor(res).fetch(100)
        for r in recs:
            result = result + "<br><br>Date: "+str(r.rec_date)+"<br> DoctorID: "+r.doc+"<br>"
            if r.symptoms:
                result =result + "Symptoms Recorded: "+ (','.join(r.symptoms)) + "<br>"
            if r.disease:
                result =result + "Disease Recorded: "+ (','.join(r.disease)) + "<br>"
            if r.medicine:
                result =result + "Medicine Recorded: "+ r.medicine +"<br>"
    else:
        result ="Invalid PatientID"
    template_values = { 'result': result }
    template = jinja_environment.get_template('message.html')
    self.response.out.write(template.render(template_values))
def to_python(self, value):
    """Coerce `value` into a GAEKey (or None).

    Accepts an existing GAEKey, a raw datastore Key, an encoded-key or
    numeric string, or an integer id.
    """
    if value is None:
        return None
    if isinstance(value, GAEKey):
        return value
    if isinstance(value, Key):
        return GAEKey(real_key=value)
    if isinstance(value, basestring):
        # Prefer treating the string as an encoded Key; fall back to
        # interpreting it as a numeric id for this model's kind.
        try:
            return GAEKey(real_key=Key(encoded=value))
        except datastore_errors.BadKeyError:
            return GAEKey(
                real_key=Key.from_path(self.model._meta.db_table, long(value))
            )
    if isinstance(value, (int, long)):
        return GAEKey(real_key=Key.from_path(self.model._meta.db_table, value))
    raise ValidationError("GAEKeyField does not accept %s" % type(value))
def _fetch_entity(self, instance):
    """Fetch the raw datastore entity backing `instance` from the
    connection's configured namespace."""
    entity_key = Key.from_path(
        instance._meta.db_table,
        instance.pk,
        namespace=connection.settings_dict["NAMESPACE"],
    )
    return Get(entity_key)
def get_count(datastore, counter_id):
    """Coroutine: return the current value of the named Counter entity,
    or 0 when the entity or its 'count' property is absent."""
    counter_key = Key.from_path('Counter', counter_id, _app=PROJECT_ID)
    entity = yield datastore.get(counter_key)
    if entity is None:
        raise gen.Return(0)
    raise gen.Return(entity.get('count', 0))
def put_from_message(cls, message):
    """
    Inserts a patient into the DB

    Args:
        message: A PatientPut instance to be inserted.

    Returns:
        The Patient entity that was inserted.
    """
    q = Doctor.all()
    q.filter('__key__ =', Key.from_path('Doctor', message.doctor_email))
    doctor = q.get()

    patient = Patient.get_patient(message.email)
    # Idiom fix: compare against None with `is not None`, not `!=`.
    if patient is not None:
        # Existing patient: preserve previously stored clinical fields.
        new_patient = cls(key_name=message.email,
                          doctor=doctor,
                          first_name=message.first_name,
                          last_name=message.last_name,
                          phone=message.phone,
                          diagnosis=patient.diagnosis,
                          septic_risk=patient.septic_risk,
                          basis_pass=patient.basis_pass)
    else:
        # New patient: seed clinical fields with defaults.
        new_patient = cls(key_name=message.email,
                          doctor=doctor,
                          first_name=message.first_name,
                          last_name=message.last_name,
                          phone=message.phone,
                          diagnosis='No',
                          septic_risk=-1.0)
    new_patient.put()
    return new_patient
def test_blob(self):
    """Round-trip a BlobModel through save/Get and the ORM query path,
    checking the stored blob matches in both cases."""
    instance = BlobModel(data='lalala')
    instance.full_clean()
    instance.save()
    raw_entity = Get(Key.from_path(BlobModel._meta.db_table, instance.pk))
    self.assertEqual(raw_entity['data'], instance.data)
    reloaded = BlobModel.objects.all()[0]
    self.assertEqual(raw_entity['data'], reloaded.data)
def delete(self):
    """Delete all DSPicklePart children in batches of 200, then delete the
    DSPickle root entity itself."""
    from rogerthat.models import DSPicklePart, DSPickle
    while True:
        part_keys = DSPicklePart.all(keys_only=True).ancestor(self.ancestor()).fetch(200)
        if not part_keys:
            break
        db.delete(part_keys)
    db.delete(Key.from_path(DSPickle.kind(), self.key))
def from_service_id(cls, service, id):
    ''' Use this with data returned from get_id(). '''
    # NOTE(review): `use_ancestor` is not defined in this function or its
    # parameters — presumably a module-level flag; confirm it exists.
    if use_ancestor:
        # Ancestor mode: the entity key is parented under the service key.
        return cls.objects.get(key=Key.from_path(
            cls._meta.db_table, long(id), parent=service.key
        ))
    # Flat mode: look up by service reference plus primary key.
    return cls.objects.get(service=service, pk=id)
def get_db_prep_value(self, value, connection, prepared=False):
    """Convert an AncestorKey value into a datastore Key for storage/query.

    Non-AncestorKey values are delegated to the parent field implementation.
    """
    if isinstance(value, AncestorKey):
        # If the key isn't fully initialized pass it through; the
        # InsertCompiler will handle it.
        if value.key_id is None:
            return value
        return Key.from_path(
            self.model._meta.db_table, value.key_id, parent=value._parent_key
        )
    # BUG FIX: forward `prepared` to the parent implementation instead of
    # silently dropping it.
    return super(GAEKeyField, self).get_db_prep_value(
        value, connection, prepared=prepared
    )
def read(key):
    """Load and unpickle the DSPickle stored under `key`.

    The pickled payload is reassembled from DSPicklePart children matching
    the root's current version, in `number` order. Returns an empty
    DSPickler when nothing is stored yet.
    """
    from rogerthat.models import DSPicklePart, DSPickle
    dsp = db.get(Key.from_path(DSPickle.kind(), key))
    if not dsp:
        return DSPickler(key)
    buf = StringIO()
    parts = DSPicklePart.all().ancestor(dsp).filter('version =', dsp.version).order('number')
    for part in parts:
        buf.write(str(part.data))
    buf.seek(0)
    return DSPickler(key, dsp.version, pickle.load(buf))
def get_patient(cls, email):
    """
    Gets patient data

    Args:
        email: Patient e-mail address

    Returns:
        The Patient entity with corresponding e-mail address
    """
    query = cls.all()
    query.filter('__key__ =', Key.from_path('Patient', email))
    return query.get()
def __init__(self, ancestor=None, key_id=None, ancestor_pk=None, ancestor_model=None):
    """Identify an entity by (ancestor, key_id).

    Either pass a saved `ancestor` instance, or both `ancestor_model` and
    `ancestor_pk`.
    """
    if ancestor is None:
        assert ancestor_pk is not None and ancestor_model is not None, "You must provide an ancestor_model and ancestor_pk or an ancestor instance"
        self._parent_model = ancestor_model
    else:
        assert ancestor.pk is not None, "You must provide a parent with a key"
        self._parent_model = type(ancestor)
        ancestor_pk = ancestor.pk

    self._parent_key = Key.from_path(self._parent_model._meta.db_table, ancestor_pk)
    self._parent_cache = ancestor  # None when built from pk/model only
    self.key_id = key_id
def mark_deleted_tx():
    """Transactionally soft-delete the entry `id`, but only when it exists,
    is not already deleted, and belongs to the current user."""
    user = users.get_current_user()
    entry = db.get(Key.from_path(cls.__name__, int(id)))
    # Guard clauses: missing/deleted entry, or wrong/absent user.
    if not entry or entry.deleted:
        return None
    if not user or entry.user_id != user.user_id():
        return None

    entry.modified = now
    entry.deleted = True
    entry.put()
    return entry
def update_tx():
    """Transactionally apply the captured title/notes/complete updates to
    entry `id`, but only when it belongs to the current user."""
    user = users.get_current_user()
    entry = db.get(Key.from_path(cls.__name__, int(id)))
    if not entry or entry.deleted:
        return None
    if not user or entry.user_id != user.user_id():
        return None

    entry.modified = now
    if title:
        entry.title = title
    if notes:
        entry.notes = notes
    if complete:
        # Any non-zero value marks the entry complete.
        entry.complete = int(complete) != 0
    entry.put()
    return entry
def _map_entity(self, entity):
    """Clone `entity` into self.namespace, honouring overwrite_existing."""
    target_key = Key.from_path(
        self.to_kind, entity.key().id_or_name(), namespace=self.namespace
    )

    def txn():
        try:
            already_there = Get(target_key)
        except datastore_errors.EntityNotFoundError:
            already_there = None
        if already_there and not self.overwrite_existing:
            return

        # Numeric ids must be reserved so the id allocator never hands the
        # same id out again in the target namespace.
        original_id = entity.key().id_or_name()
        if isinstance(original_id, (int, long)):
            reserve_id(self.to_kind, original_id, self.namespace)

        Put(clone_entity(entity, target_key))

    RunInTransaction(txn)
def get(self, id):
    """Returns a single todolist_entry, specified by id

    URI Params:
        id - entry id of the entry to return

    Status Codes:
        200(ok) - ok, body includes the todolist_entry
        410(gone) - entry with specified id does not exist
    """
    user = users.get_current_user()
    entry = db.get(Key.from_path("TodolistEntry", int(id)))
    # Only serve entries that exist, are not soft-deleted, and belong to
    # the signed-in user; everything else is reported as gone.
    if entry and not entry.deleted and user and entry.user_id == user.user_id():
        self.response.headers['Content-type'] = 'application/json'
        self.response.out.write(encode_json(entry.to_dict()))
    else:
        self.error(410)
def _acquire_identifiers(identifiers, entity_key):
    """Claim a UniqueMarker for every identifier on behalf of `entity_key`.

    Returns the full list of markers (existing + newly created).

    Raises:
        IntegrityError: if a marker is owned by a different, still-existing
            entity.
    """
    # This must always be in a cross-group transaction, because even if there's only 1 identifier,
    # in the case where that identifier already exists, we then check if its `instance` exists
    assert entity_key
    # Key.from_path expects None for an empty namespace, but Key.namespace() returns ''
    namespace = entity_key.namespace() or None
    identifier_keys = [
        Key.from_path(UniqueMarker.kind(), identifier, namespace=namespace)
        for identifier in identifiers
    ]
    existing_markers = UniqueMarker.get(identifier_keys)
    markers_to_create = []
    markers = []
    for identifier_key, existing_marker in zip(identifier_keys, existing_markers):
        # Backwards compatability: we used to create the markers first in an independent transaction
        # and then create the entity and update the `instance` on the markers. This meant that it
        # was possible that the independent marker creation transaction finished first and the outer
        # transaction failed, causing stale markers to be left behind. We no longer do it this way
        # but we still want to ignore any old stale markers, hence if instance is None we overwrite.
        now = datetime.datetime.utcnow()
        if not existing_marker or existing_marker.instance is None:
            # Missing or stale marker: (re)create it owned by this entity.
            markers_to_create.append(UniqueMarker(
                key=identifier_key,
                instance=entity_key,
                created=now
            ))
        elif existing_marker.instance != entity_key and key_exists(existing_marker.instance):
            # A different, still-existing entity owns the marker: real
            # unique-constraint violation. The marker name encodes
            # "<table>|field:value|..." — decode it for the error message.
            fields_and_values = identifier_key.name().split("|")
            table_name = fields_and_values[0]
            fields_and_values = fields_and_values[1:]
            fields = [ x.split(":")[0] for x in fields_and_values ]
            raise IntegrityError("Unique constraint violation for kind {} on fields: {}".format(table_name, ", ".join(fields)))
        elif existing_marker.instance != entity_key:
            # Marker points at a deleted entity: safe to take over.
            markers_to_create.append(UniqueMarker(
                key=identifier_key,
                instance=entity_key,
                created=now
            ))
        else:
            # The marker is ours anyway
            markers.append(existing_marker)

    db.put(markers_to_create)
    return markers + markers_to_create
def increment_counter(datastore, counter_id, retries):
    """Coroutine: transactionally bump the named Counter entity by one,
    retrying up to `retries` times on commit conflicts."""
    txid = yield datastore.begin_transaction()
    counter_key = Key.from_path('Counter', counter_id, _app=PROJECT_ID)
    entity = yield datastore.get(counter_key, txid=txid)
    if entity is None:
        entity = Entity('Counter', name=counter_id, _app=PROJECT_ID)

    current = entity['count'] if 'count' in entity else 0
    entity['count'] = current + 1
    yield datastore.put(entity, txid=txid)
    try:
        yield datastore.commit(txid)
    except DatastoreError:
        # Contention: re-raise once the retry budget is exhausted.
        if retries < 1:
            raise
        yield increment_counter(datastore, counter_id, retries - 1)
def post(self):
    """Classify a patient's free-text query via summarize.seq1, record the
    outcome as a `record` entity where applicable, and render process.html.

    summarize.seq1 returns (idn, sym, dis); `idn` selects one of five
    outcome branches below.
    """
    query = self.request.get('query')
    id= self.request.get('id')
    idn, sym, dis = summarize.seq1(query)
    q= patient.all(keys_only=True)
    # NOTE(review): ">=" returns the first patient key at or AFTER `id`,
    # not necessarily an exact match — confirm this is intended.
    q.filter("__key__ >=", Key.from_path('patient', id))
    res = q.get()
    #logging.info("res : " + res)
    if res:
        if(idn==1):
            # Branch 1: query judged irrelevant — nothing recorded.
            result = "Please Input Some Relevant Query"
        elif( idn==2):
            # Branch 2: weak symptoms — recorded with no medicine.
            result = "Very weak symptoms. No Medicines required currently, however we suggest to consult a doctor before if symptoms aggravate"
            rec = record(parent=res, symptoms = sym, medicine = 'not_any', doc = 'ehealth')
            rec.rec_date =datetime.datetime.now().date()
            rec.put()
        elif(idn==3):
            # Branch 3: undecidable — recorded for later review.
            result ="No decision could be taken for this set of symptoms. We have recorded this query and will furthur look into it. <br> <b> Please consult a doctor . </b>"
            rec = record(parent=res, symptoms = sym, medicine = 'not_checked', doc = 'ehealth')
            rec.rec_date =datetime.datetime.now().date()
            rec.put()
        elif(idn==4):
            # Branch 4: disease match without a medicine suggestion.
            result ="Your symptoms strongly match the following: "+(','.join(dis))+"<br><b>Please consult a doctor .</b>"
            rec = record(parent=res, symptoms = sym, disease =dis, doc = 'ehealth')
            rec.rec_date =datetime.datetime.now().date()
            rec.put()
        elif(idn==5):
            # Branch 5: single disease plus medicine list in dis[1].
            result ="Your symptoms strongly match "+str(dis[0])+".<br> We suggest based on your medical history, the medicine :"+(','.join(dis[1]))+"."
            rec = record(parent=res, symptoms = sym, disease =[dis[0]],medicine= (','.join(dis[1])), doc = 'ehealth')
            rec.rec_date =datetime.datetime.now().date()
            rec.put()
    else:
        result = "Invalid ID"
    template_values = { 'result': result, 'name':self.request.get('name') }
    template = jinja_environment.get_template('process.html')
    self.response.out.write(template.render(template_values))
def docstree(request, resource_id=False):
    """AJAX view: return a Google Docs folder listing as jsTree JSON nodes.

    Feeds are fetched via the gdata DocsClient and cached in memcache under
    the folder's resource id (or 'root'). Documents already stored as
    PageDocs entities are marked checked.
    """
    response_data = False
    feeds = False
    if request.is_ajax():
        client = gdata.docs.client.DocsClient(source='yourCo-yourAppName-v1')
        client.ssl = True  # Force all API requests through HTTPS
        client.http_client.debug = True  # Set to True for debugging HTTP requests
        # NOTE(review): credentials come from settings; ClientLogin is a
        # long-deprecated Google auth mechanism.
        client.ClientLogin(settings.DOCS_EMAIL, settings.DOCS_PASS, client.source)
        docs = PageDocs.all(keys_only=True)
        if 'id' in request.GET:
            resource_id = request.GET['id']
        if resource_id:
            if resource_id.find('folder:') >= 0:
                # Folder node: list its contents, cached per resource id.
                feeds = memcache.get(resource_id)
                if not feeds:
                    feeds = client.GetDocList(uri='/feeds/default/private/full/%s/contents?showfolders=true' % resource_id)
                    memcache.add(resource_id, feeds, settings.TO_CACHE_DOCS_TIME)
        else:
            # No id: list the root document feed, cached under 'root'.
            feeds = memcache.get('root')
            if not feeds:
                feeds = client.GetDocList(uri='/feeds/default/private/full?showfolders=true')
                memcache.add('root', feeds, settings.TO_CACHE_DOCS_TIME)
        if feeds:
            response_data = []
            for entry in feeds.entry:
                item = {
                    'data': entry.title.text,
                    'metadata': {'id': entry.resource_id.text},
                    'attr':{}
                }
                if entry.get_document_type() == 'folder':
                    # Folders render as closed, expandable nodes.
                    item['state'] = 'closed'
                else:
                    item['attr']['rel'] = 'document'
                    # Pre-check documents that already exist as PageDocs.
                    if Key.from_path('main_pagedocs', entry.resource_id.text) in docs:
                        item['attr']['class'] = 'jstree-checked'
                response_data.append(item)
    return HttpResponse(simplejson.dumps(response_data), mimetype="application/json")
def acquire_marker(identifier):
    """Acquire (or create) the UniqueMarker for `identifier` on behalf of
    the entity identified by `entity_key`.

    NOTE(review): `entity_key` is a free variable here — presumably this
    function is defined inside a closure that provides it; confirm.
    In this variant `marker.instance` is stored as a string-encoded key.

    Raises:
        IntegrityError: if another live entity already owns the marker.
    """
    identifier_key = Key.from_path(UniqueMarker.kind(), identifier)
    marker = UniqueMarker.get(identifier_key)
    if marker:
        # If the marker instance is None, and the marker is older then 5 seconds then we wipe it out
        # and assume that it's stale.
        if not marker.instance and (datetime.datetime.utcnow() - marker.created).seconds > 5:
            marker.delete()
        elif marker.instance and Key(marker.instance) != entity_key and key_exists(Key(marker.instance)):
            raise IntegrityError("Unable to acquire marker for %s" % identifier)
        else:
            # The marker is ours anyway
            return marker
    marker = UniqueMarker(
        key=identifier_key,
        instance=str(entity_key) if entity_key.id_or_name() else None,  # May be None if unsaved
        created=datetime.datetime.utcnow()
    )
    marker.put()
    return marker
def startSim(namespace, unique, globalStop, initialStop, stepRange, goNext):
    """Kick off a desire-simulation run: persist a WorkQueue job record,
    prime the desire cache from the last stored desire key, then run
    doDesires for each remaining step, optionally chaining the next job."""
    from google.appengine.api.datastore import Key
    JobID = meTools.buildJobID(namespace, unique, globalStop, initialStop, stepRange)
    persistStops = meSchema.WorkQueue(key_name = JobID, globalStop = globalStop, initialStop = initialStop)
    meTools.memPut_multi({persistStops.key().name() : persistStops}, priority = 1)
    # NOTE(review): raising BaseException is overly broad — most callers'
    # `except Exception` handlers will not catch it; consider ValueError.
    if not globalStop >= initialStop:
        raise(BaseException('globalStop: %s is not >= lastStopStep: %s' % (globalStop, initialStop)))
    # Latest desire key below the '1000000_...' sentinel; its name starts
    # with the step number (see the split('_')[0] below).
    lastDesire = meSchema.desire.all(keys_only = True).filter('__key__ <', Key.from_path('desire','1000000_0000_00')).order('-__key__').get()
    if lastDesire:
        lastDesireStop = int(lastDesire.name().split('_')[0])
    else:
        lastDesireStop = 1
    desireFunc.primeDesireCache(lastDesireStop)
    for step in range(lastDesireStop, globalStop + 1):
        desireFunc.doDesires(step)
    if goNext == 'true':
        doNext(JobID, 'weeklyDesires','')
def ancestor(self):
    """Return the DSPickle root key that parents this pickle's parts."""
    from rogerthat.models import DSPickle
    return Key.from_path(DSPickle.kind(), self.key)
def get_datastore_key(model, pk):
    """
    Return a datastore.Key for the given model and primary key.
    """
    concrete_kind = get_top_concrete_parent(model)._meta.db_table
    prepped_pk = model._meta.pk.get_prep_value(pk)
    return Key.from_path(concrete_kind, prepped_pk)
def delete():
    """Delete the UniqueMarker entity for every identifier in `identifiers`
    (a free variable supplied by the enclosing scope)."""
    marker_keys = [
        Key.from_path(UniqueMarker.kind(), identifier)
        for identifier in identifiers
    ]
    Delete(marker_keys)
def get(self):
    """Look up a cell by its encoded datastore key (the 'id' request param)
    and return its exec_string as JSON."""
    cell_key = Key(self.request.get('id'))
    cell = db.get(cell_key)
    self.response.out.write(json.dumps({'exec_string': cell.exec_string}))
def create_key(db_table, value):
    """Return a datastore Key for `db_table` and `value`, or None when
    `value` is an integer id below 1 (invalid in the datastore)."""
    is_integer_id = isinstance(value, (int, long))
    if is_integer_id and value < 1:
        return None
    return Key.from_path(db_table, value)
def add_filter(self, column, lookup_type, negated, db_type, value):
    """Translate one Django ORM lookup into App Engine datastore filters,
    handling primary-key batch gets, negation, and the emulated lookup
    types (isnull / in / startswith / range / year)."""
    if value in ([], ()):
        # Empty value list can never match anything.
        self.pk_filters = []
        return

    # Emulated/converted lookups
    if column == self.query.get_meta().pk.column:
        column = '__key__'
        db_table = self.query.get_meta().db_table
        if lookup_type in ('exact', 'in'):
            # Optimization: batch-get by key
            if self.pk_filters is not None:
                raise DatabaseError("You can't apply multiple AND filters "
                                    "on the primary key. "
                                    "Did you mean __in=[...]?")
            if not isinstance(value, (tuple, list)):
                value = [value]
            pks = [create_key(db_table, pk) for pk in value if pk]
            if negated:
                self.excluded_pks = pks
            else:
                self.pk_filters = pks
            return
        else:
            # XXX: set db_type to 'gae_key' in order to allow
            # convert_value_for_db to recognize the value to be a Key and
            # not a str. Otherwise the key would be converted back to a
            # unicode (see convert_value_for_db)
            db_type = 'gae_key'
            key_type_error = 'Lookup values on primary keys have to be' \
                             'a string or an integer.'
            if lookup_type == 'range':
                if isinstance(value, (list, tuple)) and not (
                        isinstance(value[0], (basestring, int, long)) and
                        isinstance(value[1], (basestring, int, long))):
                    raise DatabaseError(key_type_error)
            elif not isinstance(value, (basestring, int, long)):
                raise DatabaseError(key_type_error)
            # for lookup type range we have to deal with a list
            if lookup_type == 'range':
                value[0] = create_key(db_table, value[0])
                value[1] = create_key(db_table, value[1])
            else:
                value = create_key(db_table, value)

    if lookup_type not in OPERATORS_MAP:
        raise DatabaseError("Lookup type %r isn't supported" % lookup_type)

    # We check for negation after lookup_type isnull because it
    # simplifies the code. All following lookup_type checks assume
    # that they're not negated.
    if lookup_type == 'isnull':
        if (negated and value) or not value:
            # TODO/XXX: is everything greater than None?
            op = '>'
        else:
            op = '='
        value = None
    elif negated and lookup_type == 'exact':
        # The datastore has no '!=' — emulate exclude(__exact) as the union
        # of a '<' and a '>' filter pair.
        if self.has_negated_exact_filter:
            raise DatabaseError("You can't exclude more than one __exact "
                                "filter")
        self.has_negated_exact_filter = True
        self._combine_filters(column, db_type, (('<', value), ('>', value)))
        return
    elif negated:
        try:
            op = NEGATION_MAP[lookup_type]
        except KeyError:
            raise DatabaseError("Lookup type %r can't be negated" % lookup_type)
        # Datastore restriction: inequality filters on one column only.
        if self.inequality_field and column != self.inequality_field:
            raise DatabaseError("Can't have inequality filters on multiple "
                                "columns (here: %r and %r)"
                                % (self.inequality_field, column))
        self.inequality_field = column
    elif lookup_type == 'in':
        # Create sub-query combinations, one for each value
        if len(self.gae_query) * len(value) > 30:
            raise DatabaseError("You can't query against more than "
                                "30 __in filter value combinations")
        op_values = [('=', v) for v in value]
        self._combine_filters(column, db_type, op_values)
        return
    elif lookup_type == 'startswith':
        # Emulate prefix matching with a >=/<= pair, using u'\ufffd' as the
        # upper-bound sentinel appended to the prefix.
        self._add_filter(column, '>=', db_type, value)
        if isinstance(value, str):
            value = value.decode('utf8')
        if isinstance(value, Key):
            value = list(value.to_path())
            if isinstance(value[-1], str):
                value[-1] = value[-1].decode('utf8')
            value[-1] += u'\ufffd'
            value = Key.from_path(*value)
        else:
            value += u'\ufffd'
        self._add_filter(column, '<=', db_type, value)
        return
    elif lookup_type in ('range', 'year'):
        self._add_filter(column, '>=', db_type, value[0])
        op = '<=' if lookup_type == 'range' else '<'
        self._add_filter(column, op, db_type, value[1])
        return
    else:
        op = OPERATORS_MAP[lookup_type]

    self._add_filter(column, op, db_type, value)
def add_filter(self, column, lookup_type, negated, db_type, value):
    """Translate one Django ORM lookup into App Engine datastore filters,
    handling primary-key batch gets, negation, and the emulated lookup
    types (isnull / in / startswith / range / year)."""
    if value in ([], ()):
        # Empty value list can never match anything.
        self.pk_filters = []
        return

    # Emulated/converted lookups
    if column == self.query.get_meta().pk.column:
        column = '__key__'
        db_table = self.query.get_meta().db_table
        if lookup_type in ('exact', 'in'):
            # Optimization: batch-get by key
            if self.pk_filters is not None:
                raise DatabaseError("You can't apply multiple AND filters "
                                    "on the primary key. "
                                    "Did you mean __in=[...]?")
            if not isinstance(value, (tuple, list)):
                value = [value]
            pks = [create_key(db_table, pk) for pk in value if pk]
            if negated:
                self.excluded_pks = pks
            else:
                self.pk_filters = pks
            return
        else:
            # XXX: set db_type to 'gae_key' in order to allow
            # convert_value_for_db to recognize the value to be a Key and
            # not a str. Otherwise the key would be converted back to a
            # unicode (see convert_value_for_db)
            db_type = 'gae_key'
            key_type_error = 'Lookup values on primary keys have to be' \
                             'a string or an integer.'
            if lookup_type == 'range':
                if isinstance(value,(list, tuple)) and not(isinstance(
                        value[0], (basestring, int, long)) and \
                        isinstance(value[1], (basestring, int, long))):
                    raise DatabaseError(key_type_error)
            elif not isinstance(value,(basestring, int, long)):
                raise DatabaseError(key_type_error)
            # for lookup type range we have to deal with a list
            if lookup_type == 'range':
                value[0] = create_key(db_table, value[0])
                value[1] = create_key(db_table, value[1])
            else:
                value = create_key(db_table, value)

    if lookup_type not in OPERATORS_MAP:
        raise DatabaseError("Lookup type %r isn't supported" % lookup_type)

    # We check for negation after lookup_type isnull because it
    # simplifies the code. All following lookup_type checks assume
    # that they're not negated.
    if lookup_type == 'isnull':
        if (negated and value) or not value:
            # TODO/XXX: is everything greater than None?
            op = '>'
        else:
            op = '='
        value = None
    elif negated and lookup_type == 'exact':
        # The datastore has no '!=' — emulate exclude(__exact) as the union
        # of a '<' and a '>' filter pair.
        if self.has_negated_exact_filter:
            raise DatabaseError("You can't exclude more than one __exact "
                                "filter")
        self.has_negated_exact_filter = True
        self._combine_filters(column, db_type, (('<', value), ('>', value)))
        return
    elif negated:
        try:
            op = NEGATION_MAP[lookup_type]
        except KeyError:
            raise DatabaseError("Lookup type %r can't be negated" % lookup_type)
        # Datastore restriction: inequality filters on one column only.
        if self.inequality_field and column != self.inequality_field:
            raise DatabaseError("Can't have inequality filters on multiple "
                                "columns (here: %r and %r)"
                                % (self.inequality_field, column))
        self.inequality_field = column
    elif lookup_type == 'in':
        # Create sub-query combinations, one for each value
        if len(self.gae_query) * len(value) > 30:
            raise DatabaseError("You can't query against more than "
                                "30 __in filter value combinations")
        op_values = [('=', v) for v in value]
        self._combine_filters(column, db_type, op_values)
        return
    elif lookup_type == 'startswith':
        # Emulate prefix matching with a >=/<= pair, using u'\ufffd' as the
        # upper-bound sentinel appended to the prefix.
        self._add_filter(column, '>=', db_type, value)
        if isinstance(value, str):
            value = value.decode('utf8')
        if isinstance(value, Key):
            value = list(value.to_path())
            if isinstance(value[-1], str):
                value[-1] = value[-1].decode('utf8')
            value[-1] += u'\ufffd'
            value = Key.from_path(*value)
        else:
            value += u'\ufffd'
        self._add_filter(column, '<=', db_type, value)
        return
    elif lookup_type in ('range', 'year'):
        self._add_filter(column, '>=', db_type, value[0])
        op = '<=' if lookup_type == 'range' else '<'
        self._add_filter(column, op, db_type, value[1])
        return
    else:
        op = OPERATORS_MAP[lookup_type]

    self._add_filter(column, op, db_type, value)
def get_datastore_key(model, pk, namespace):
    """
    Return a datastore.Key for the given model and primary key.
    """
    concrete_kind = get_top_concrete_parent(model)._meta.db_table
    return Key.from_path(concrete_kind, pk, namespace=namespace)
def _fetch_entity(self, instance):
    """Return the raw datastore entity for `instance`, looked up in the
    namespace configured on the connection."""
    ns = connection.settings_dict["NAMESPACE"]
    return Get(Key.from_path(instance._meta.db_table, instance.pk, namespace=ns))
def _release_identifiers(identifiers, namespace):
    """Delete the UniqueMarker entities for every identifier in the given
    namespace, releasing the unique-constraint claims."""
    Delete([
        Key.from_path(UniqueMarker.kind(), identifier, namespace=namespace)
        for identifier in identifiers
    ])
def get_artist(cls, artist):
    """Fetch the Artist entity whose key name is `artist`, or None."""
    artist_key = Key.from_path('Artist', artist)
    return Artist.all().filter('__key__ =', artist_key).get()
# Filtering by entity key name in Google App Engine on Python from google.appengine.api.datastore import Key query.filter("__key__ >=", Key.from_path('User', 'abc'))