def get_kinds(self):
    """Retrieve the names of all user-visible datastore kinds.

    Kinds whose names begin with '_' are GAE-internal metadata kinds
    and are filtered out.

    Returns:
        list of str: non-internal kind names.
    """
    # Comprehension keeps this consistent with the other kind filters
    # used by the backup handlers in this file.
    return [kind for kind in metadata.get_kinds() if not kind.startswith('_')]
def populate(self):
    """Build the 'Intelligence' kind from CSV_FILE, unless it already exists.

    Each CSV row becomes one Intelligence entity keyed by its ISBN
    (column 0) under the DEFAULT_INTELLIGENCE_NAME parent.
    NOTE(review): logging.error is used for ordinary progress messages,
    presumably to make them visible at the default log level — confirm.
    """
    # Abort if the kind is already present in the datastore.
    if u'Intelligence' in metadata.get_kinds():
        logging.error('database Intelligence already exists!')
        # Log the parent key of one existing entity for diagnostics.
        logging.error(Intelligence.query().fetch(1, keys_only=True)[0].parent())
        return
    else:
        logging.error('creating database Intelligence from csv file')
        # 'rb' mode: Python 2 csv usage.
        with open(CSV_FILE, 'rb') as f:
            reader = csv.reader(f)
            for row in reader:
                # Key the entity by ISBN (column 0) under the shared parent.
                key = ndb.Key('Intelligence', row[0], parent=intelligence_key(DEFAULT_INTELLIGENCE_NAME))
                intelligence = key.get()
                if intelligence:
                    # Replace any pre-existing entity with the CSV row.
                    logging.error('found!!!')
                    key.delete()
                intelligence = Intelligence(key=key)
                intelligence.isbn = row[0]
                intelligence.name = row[1]
                # Language is the last column rather than a fixed index.
                intelligence.language = row[-1]
                # Columns 2-7 are the rating dimensions; blank cells become 0.
                rating = StellarRating()
                rating.linguistic = int(row[2]) if row[2] else 0
                rating.logical = int(row[3]) if row[3] else 0
                rating.right_minded = int(row[4]) if row[4] else 0
                rating.intrapersonal = int(row[5]) if row[5] else 0
                rating.interpersonal = int(row[6]) if row[6] else 0
                rating.naturalistic = int(row[7]) if row[7] else 0
                intelligence.rating = rating
                intelligence.update()
def get(self):  # pylint: disable=g-bad-name
    """Kick off the daily Datastore backup task (prod only, once per day)."""
    # Backups only make sense against production data.
    if not env_utils.RunningInProd():
        logging.info('Datastore backups are only run in prod')
        return

    # Skip if a backup was already taken today.
    if _DailyBackupExists():
        logging.info('A backup was already performed today.')
        return

    kind_names = [
        kind for kind in metadata.get_kinds() if not kind.startswith('_')]
    backup_bucket = '%s/%s' % (
        settings.ENV.DATASTORE_BACKUP_BUCKET, _CreateDayString())

    logging.info('Starting a new Datastore backup')
    # Hand the backup off to the builtin datastore_admin handler via a task.
    # Exceptions are intentionally not caught here: anything raised is dealt
    # with in UpvoteRequestHandler and reported as a 500.
    taskqueue.add(
        url='/_ah/datastore_admin/backup.create',
        params={
            'kind': kind_names,
            'name': _BACKUP_PREFIX + '_',  # Date suffix is automatically added.
            'filesystem': 'gs',
            'gs_bucket_name': backup_bucket,
            'queue': constants.TASK_QUEUE.BACKUP,
        },
        target='ah-builtin-python-bundle',
        queue_name=constants.TASK_QUEUE.BACKUP)
    _DATASTORE_BACKUPS.Increment()
def delete(self):
    '''Deletes an entity, a whole kind, or all kinds, per the request URL.

    URL forms handled:
      /api/<type>/<id>  -- delete one entity (plus any 'propogate' properties)
      /api/<type>       -- delete every entity of <type> (requires force=yes)
      /api              -- delete every non-internal kind (requires force=yes)

    Raises:
        SyntaxError: a bulk delete was attempted without force=yes.
        MalformedURLException: the path does not match the /api grammar.
    '''
    logging.getLogger().warn(self.request.path_info)
    match = re.match(r'^/api(?:/(?P<type>\w+)(?:/(?P<id>\w+))?)?$',
                     self.request.path_info)
    if not match:
        raise MalformedURLException(
            'Error when parsing URL - invalid syntax: %s' %
            self.request.path_info)
    object_type = match.group('type')
    object_id = match.group('id')
    if object_type:
        # NOTE: the request parameter is spelled 'propogate' (sic) and is
        # part of the public API, so it must not be renamed.
        property_to_delete = self.request.get('propogate')
        delete_props = property_to_delete.split(
            ',') if property_to_delete else []
        if object_id:
            deleteObjKey = ndb.Key(object_type, int(object_id))
            returned_obj = deleteObjKey.get()
            logging.getLogger().warn(
                'Attempting to delete: %s, with id: %s and got this: %s'
                % (object_type, object_id, returned_obj))
            # Delete requested referenced properties before the entity itself.
            for prop in delete_props:
                res = getattr(returned_obj, prop)
                if res:
                    res.delete()
            deleteObjKey.delete()
        else:
            if self.request.get('force') != 'yes':
                raise SyntaxError(
                    "MUST use 'force'='yes' to do this mode of delete")
            if delete_props:
                # Per-entity walk so referenced properties get deleted too.
                for obj in getattr(model, object_type).query().iter():
                    for prop in delete_props:
                        res = getattr(obj, prop)
                        if res:
                            res.delete()
                    obj.key.delete()
            else:
                ndb.delete_multi(
                    getattr(
                        model, object_type).query().fetch(keys_only=True))
    else:
        if self.request.get('force') != 'yes':
            raise SyntaxError(
                "MUST use 'force'='yes' to do this mode of delete")
        # BUG FIX: metadata.get_kinds() yields kind-name *strings*; the old
        # k.kind_name.startswith(...) raised AttributeError on every run.
        for k in metadata.get_kinds():
            if not k.startswith('_'):
                ndb.delete_multi(
                    getattr(model, k).query().fetch(keys_only=True))
def _write_all(self):
    '''Serialize every datastore entity (or just keys) to the response.

    The 'load' request parameter controls depth: anything other than
    'all' fetches keys only.
    '''
    keys_only = self.request.get('load') != 'all'
    entities = []
    for kind_name in metadata.get_kinds():
        logging.getLogger().info("TYPE:" + kind_name)
        if kind_name.startswith('_'):
            continue  # skip GAE-internal kinds
        entities += getattr(model, kind_name).query().fetch(keys_only=keys_only)
    self.response.headers['Content-Type'] = "application/json"
    self.response.write(parser.get_json_string(entities))
def get(self):  # pylint: disable=g-bad-name
    """Export all non-internal Datastore kinds to Cloud Storage (prod only)."""
    if not env_utils.RunningInProd():
        logging.info('Datastore backups are only run in prod')
        return

    logging.info('Starting a new Datastore backup')

    token, _ = app_identity.get_access_token(
        'https://www.googleapis.com/auth/datastore')
    project = app_identity.get_application_id()

    # One Cloud Storage prefix per day keeps daily backups separated.
    gcs_prefix = 'gs://%s/%s/' % (
        env_utils.ENV.DATASTORE_BACKUP_BUCKET,
        datetime.datetime.utcnow().strftime('%Y_%m_%d'))

    kinds = [kind for kind in metadata.get_kinds()
             if not kind.startswith('_')]

    export_body = {
        'project_id': project,
        'output_url_prefix': gcs_prefix,
        'entity_filter': {
            'kinds': kinds
        }
    }
    export_url = ('https://datastore.googleapis.com/v1/projects/%s:export'
                  % project)

    logging.info('Backing up %d kind(s) to %s', len(kinds), gcs_prefix)
    # Only urlfetch.fetch can raise urlfetch.Error, so the try covers it alone.
    try:
        result = urlfetch.fetch(
            url=export_url,
            payload=json.dumps(export_body),
            method=urlfetch.POST,
            deadline=60,
            headers={
                'Content-Type': 'application/json',
                'Authorization': 'Bearer ' + token
            })
    except urlfetch.Error:
        logging.exception('Datastore backup failed')
        self.response.status_int = httplib.INTERNAL_SERVER_ERROR
        return
    if result.status_code == httplib.OK:
        logging.info(result.content)
        _DATASTORE_BACKUPS.Increment()
    else:
        logging.warning(result.content)
        self.response.status_int = result.status_code
def _write_all(self):
    '''Dump every entity stored in the DB to the response as JSON.

    Passing load=all in the request fetches full entities; any other
    value fetches keys only.
    '''
    fetch_keys_only = self.request.get('load') != 'all'
    collected = []
    for kind in metadata.get_kinds():
        logging.getLogger().info("TYPE:" + kind)
        if not kind.startswith('_'):
            collected.extend(
                getattr(model, kind).query().fetch(
                    keys_only=fetch_keys_only))
    self.response.headers['Content-Type'] = "application/json"
    self.response.write(parser.get_json_string(collected))
def delete_everything():
    """Delete every entity of every non-internal kind from the datastore.

    Kinds whose names begin with '_' are internal to GAE and are skipped.
    Deletes are issued in batches of 1000 keys per ndb.delete_multi call.
    """
    BATCH = 1000
    for kind in metadata.get_kinds():
        if kind.startswith('_'):
            continue  # Ignore kinds that begin with _, they are internal to GAE
        keys = ndb.Query(kind=kind).fetch(keys_only=True)
        # Step through the keys in fixed-size slices. This replaces the old
        # range(len(keys) / 1000 + 1) loop, which relied on integer division
        # and issued one extra empty delete when len(keys) was a multiple
        # of 1000.
        for start in range(0, len(keys), BATCH):
            ndb.delete_multi(keys[start:start + BATCH])
def GetBackupXML():
    """Serialize every non-internal datastore entity to an XML string.

    Returns:
        str: an XML document rooted at <data date="...">, with one element
        per entity (attribute id=key id) and one child element per property.
    """
    # Local import keeps the fix self-contained in this helper.
    from xml.sax.saxutils import escape

    thisdate = datetime.now()
    # Build the document in a list and join once: repeated str += is
    # quadratic over a large datastore.
    parts = ['<?xml version="1.0" encoding="utf-8"?>\r\n<data date="'
             + thisdate.isoformat() + '">\r\n']
    for kind in metadata.get_kinds():
        if kind.startswith('_'):
            continue  # Ignore kinds that begin with _, they are internal to GAE
        for e in ndb.Query(kind=kind).fetch():
            # BUG FIX: e.key.id() may be an int (TypeError on concatenation)
            # and the attribute value was unquoted/unescaped, producing
            # invalid XML. Stringify, escape, and quote it.
            parts.append(
                '<' + kind + ' id="' + escape(str(e.key.id())) + '">\r\n')
            for n, v in e._properties.items():
                # Escape values so markup characters cannot corrupt the XML.
                parts.append(
                    ' <' + n + '>' + escape(str(v)) + '</' + n + '>\r\n')
            parts.append('</' + kind + '>\r\n')
    parts.append('</data>')
    return ''.join(parts)
def delete(self):
    '''Deletes an entity as specified by the request URL.

    Supported paths:
      /api/<type>/<id>  -- delete one entity (and any 'propogate' properties)
      /api/<type>       -- delete all entities of <type>; needs force=yes
      /api              -- delete all non-internal kinds; needs force=yes

    Raises:
        SyntaxError: bulk delete requested without force=yes.
        MalformedURLException: the path does not match the /api grammar.
    '''
    logging.getLogger().warn(self.request.path_info)
    match = re.match(r'^/api(?:/(?P<type>\w+)(?:/(?P<id>\w+))?)?$',
                     self.request.path_info)
    if match:
        object_type = match.group('type')
        object_id = match.group('id')
        if object_type:
            # 'propogate' (sic) is the public parameter name; keep as-is.
            property_to_delete = self.request.get('propogate')
            delete_props = property_to_delete.split(',') if property_to_delete else []
            if object_id:
                deleteObjKey = ndb.Key(object_type, int(object_id))
                returned_obj = deleteObjKey.get()
                logging.getLogger().warn(
                    'Attempting to delete: %s, with id: %s and got this: %s'
                    % (object_type, object_id, returned_obj))
                # Delete requested referenced properties, then the entity.
                for prop in delete_props:
                    res = getattr(returned_obj, prop)
                    if res:
                        res.delete()
                deleteObjKey.delete()
            else:
                if self.request.get('force') == 'yes':
                    if delete_props:
                        for obj in getattr(model, object_type).query().iter():
                            for prop in delete_props:
                                res = getattr(obj, prop)
                                if res:
                                    res.delete()
                            obj.key.delete()
                    else:
                        ndb.delete_multi(
                            getattr(model, object_type).query().fetch(keys_only=True))
                else:
                    raise SyntaxError("MUST use 'force'='yes' to do this mode of delete")
        else:
            if self.request.get('force') == 'yes':
                # BUG FIX: metadata.get_kinds() returns kind-name strings;
                # 'str' has no .kind_name attribute, so the previous code
                # raised AttributeError before deleting anything.
                for k in metadata.get_kinds():
                    if not k.startswith('_'):
                        ndb.delete_multi(
                            getattr(model, k).query().fetch(keys_only=True))
            else:
                raise SyntaxError("MUST use 'force'='yes' to do this mode of delete")
    else:
        raise MalformedURLException(
            'Error when parsing URL - invalid syntax: %s' % self.request.path_info)
def get(self):
    """Export all kinds except Response and SyndicatedPost to Cloud Storage.

    https://cloud.google.com/appengine/docs/standard/python/ndb/admin#Metadata_queries
    """
    backup_kinds = [k for k in metadata.get_kinds() if not k.startswith('_')]
    # NOTE(review): .remove raises ValueError if a kind is absent —
    # presumably both kinds always exist; confirm before relying on that.
    backup_kinds.remove('Response')
    backup_kinds.remove('SyndicatedPost')
    logging.info('Backing up %s', backup_kinds)

    token, _ = app_identity.get_access_token(
        'https://www.googleapis.com/auth/datastore')
    project_id = app_identity.get_application_id()

    export_request = {
        'project_id': project_id,
        'output_url_prefix': ('gs://brid-gy.appspot.com/weekly/' +
                              datetime.datetime.now().strftime('%Y%m%d')),
        'entity_filter': {
            'kinds': backup_kinds,
            # 'namespace_ids': self.request.get_all('namespace_id'),
        },
    }

    try:
        resp = urlfetch.fetch(
            url=('https://datastore.googleapis.com/v1/projects/%s:export'
                 % project_id),
            payload=json_dumps(export_request),
            method=urlfetch.POST,
            headers={
                'Content-Type': 'application/json',
                'Authorization': 'Bearer ' + token,
            })
        if resp.status_code == http.client.OK:
            logging.info(resp.content)
        else:
            logging.error(resp.content)
            self.abort(resp.status_code)
    except urlfetch.Error as e:
        util.interpret_http_exception(e)
        raise
def get(self):
    """Weekly Datastore export of every kind except Response/SyndicatedPost.

    See https://cloud.google.com/appengine/docs/standard/python/ndb/admin#Metadata_queries
    """
    kinds = [kind for kind in metadata.get_kinds()
             if not kind.startswith('_')]
    # Excluded in declaration order; .remove raises if a kind is missing.
    for excluded in ('Response', 'SyndicatedPost'):
        kinds.remove(excluded)
    logging.info('Backing up %s', kinds)

    access_token, _ = app_identity.get_access_token(
        'https://www.googleapis.com/auth/datastore')
    app_id = app_identity.get_application_id()

    body = {
        'project_id': app_id,
        'output_url_prefix': ('gs://brid-gy.appspot.com/weekly/' +
                              datetime.datetime.now().strftime('%Y%m%d')),
        'entity_filter': {
            'kinds': kinds,
            # 'namespace_ids': self.request.get_all('namespace_id'),
        },
    }
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + access_token,
    }
    export_endpoint = (
        'https://datastore.googleapis.com/v1/projects/%s:export' % app_id)

    try:
        result = urlfetch.fetch(url=export_endpoint,
                                payload=json.dumps(body),
                                method=urlfetch.POST,
                                headers=headers)
        if result.status_code == httplib.OK:
            logging.info(result.content)
        else:
            logging.error(result.content)
            self.abort(result.status_code)
    except urlfetch.Error as e:
        util.interpret_http_exception(e)
        raise
def init():
    """Seed the Converter kind with the built-in converter definitions.

    On first run (no Converter kind in the datastore yet) every definition
    is written; afterwards only definitions missing by id are added.
    """
    converters = [
        Converter(
            id="ffmpeg",
            title="ffmpeg beta",
            pnaclBin="converters/ffmpeg/bin/ffmpeg.pexe.nmf",
            isolated=True,
            chdir="/",
            naclSize=10312460,
            fullDesc="""Convert mp4,webm,mkv,mp3 and others Audio/Video with your web browser without sending any data.""",
            shortDesc="Convert your audio/video files",
            ouTag=["h264", "vp8", "vp9", "mkv", "mp4", "webm", "avi", "xvid",
                   "mpg", "mp3", "wav", "oga", "ogv", "mpeg2", "flv"],
            footer="Version 2.1.3 - Original code by ffmpegTeam"),
        Converter(
            id="unecm",
            title="unecm",
            inputExt=".ecm",
            pnaclBin="converters/unecm/bin/unecm.pexe.nmf",
            emscrBin="converters/unecm/bin/unecm.js",
            fullDesc="""Convert .ecm images to playstation iso/bin with your web browser without sending any data.""",
            shortDesc="Convert ecm file to disk images",
            inTag=["ecm"],
            ouTag=["iso", "bin"],
            footer="Original code by Neill Corlett ©2002"),
        Converter(
            id="ecmify",
            title="ecmify",
            inputExt=".iso,.bin",
            pnaclBin="converters/ecmify/bin/ecmify.pexe.nmf",
            fullDesc="""Optimize your iso/bin disk error to make it more compressible with your web browser.""",
            shortDesc="Make your CD image files more compressible",
            inTag=["iso", "bin"],
            ouTag=["ecm"],
            footer="Original code by Neill Corlett ©2002"),
        Converter(
            id="cwebp",
            title="cwebp",
            inputExt=".png,.jpg,.tiff,.webp",
            pnaclBin="converters/webp/bin/cwebp.pexe.nmf",
            fullDesc="""Convert your jpeg,png,tiff images into Webp with your web browser without sending any data.""",
            shortDesc="Convert jpeg/png/tiff to webp image",
            inTag=["jpeg", "png", "tiff", "webp"],
            ouTag=["webp"],
            footer="Version 0.4.2 - Original code by WebP team, licensed under the same terms as WebM"),
        Converter(
            id="dwebp",
            title="dwebp",
            inputExt=".webp",
            pnaclBin="converters/webp/bin/dwebp.pexe.nmf",
            fullDesc="""Convert your webp images into png,tiff,bmp with your web browser without sending any data.""",
            shortDesc="Convert webp to png/tiff image",
            inTag=["webp"],
            ouTag=["png", "tiff", "bmp", "pam", "ppm", "pgm", "yuv"],
            footer="Version 0.4.2 - Original code by WebP team, licensed under the same terms as WebM"),
    ]

    # When the kind is entirely absent, skip the per-id existence checks.
    first_init = "Converter" not in metadata.get_kinds()
    for conv in converters:
        # Derived flags recording which runtime binaries this converter ships.
        conv.hasPnacl = conv.pnaclBin is not None and conv.pnaclBin != ""
        conv.hasEmscr = conv.emscrBin is not None and conv.emscrBin != ""
        if first_init or Converter.get_by_id(conv.key.id()) is None:
            conv.put()
def init():
    """Write the built-in Converter definitions to the datastore.

    All definitions are stored on first run; on later runs only ids not
    yet present are added.
    """
    def _filled(value):
        # True iff the optional binary path is set to a non-empty value.
        return value is not None and value != ""

    first_init = "Converter" not in metadata.get_kinds()

    for entry in [
            Converter(id="ffmpeg",
                      title="ffmpeg beta",
                      pnaclBin="converters/ffmpeg/bin/ffmpeg.pexe.nmf",
                      isolated=True,
                      chdir="/",
                      naclSize=10312460,
                      fullDesc="""Convert mp4,webm,mkv,mp3 and others Audio/Video with your web browser without sending any data.""",
                      shortDesc="Convert your audio/video files",
                      ouTag=["h264", "vp8", "vp9", "mkv", "mp4", "webm",
                             "avi", "xvid", "mpg", "mp3", "wav", "oga",
                             "ogv", "mpeg2", "flv"],
                      footer="Version 2.1.3 - Original code by ffmpegTeam"),
            Converter(id="unecm",
                      title="unecm",
                      inputExt=".ecm",
                      pnaclBin="converters/unecm/bin/unecm.pexe.nmf",
                      emscrBin="converters/unecm/bin/unecm.js",
                      fullDesc="""Convert .ecm images to playstation iso/bin with your web browser without sending any data.""",
                      shortDesc="Convert ecm file to disk images",
                      inTag=["ecm"],
                      ouTag=["iso", "bin"],
                      footer="Original code by Neill Corlett ©2002"),
            Converter(id="ecmify",
                      title="ecmify",
                      inputExt=".iso,.bin",
                      pnaclBin="converters/ecmify/bin/ecmify.pexe.nmf",
                      fullDesc="""Optimize your iso/bin disk error to make it more compressible with your web browser.""",
                      shortDesc="Make your CD image files more compressible",
                      inTag=["iso", "bin"],
                      ouTag=["ecm"],
                      footer="Original code by Neill Corlett ©2002"),
            Converter(id="cwebp",
                      title="cwebp",
                      inputExt=".png,.jpg,.tiff,.webp",
                      pnaclBin="converters/webp/bin/cwebp.pexe.nmf",
                      fullDesc="""Convert your jpeg,png,tiff images into Webp with your web browser without sending any data.""",
                      shortDesc="Convert jpeg/png/tiff to webp image",
                      inTag=["jpeg", "png", "tiff", "webp"],
                      ouTag=["webp"],
                      footer="Version 0.4.2 - Original code by WebP team, licensed under the same terms as WebM"),
            Converter(id="dwebp",
                      title="dwebp",
                      inputExt=".webp",
                      pnaclBin="converters/webp/bin/dwebp.pexe.nmf",
                      fullDesc="""Convert your webp images into png,tiff,bmp with your web browser without sending any data.""",
                      shortDesc="Convert webp to png/tiff image",
                      inTag=["webp"],
                      ouTag=["png", "tiff", "bmp", "pam", "ppm", "pgm", "yuv"],
                      footer="Version 0.4.2 - Original code by WebP team, licensed under the same terms as WebM"),
    ]:
        entry.hasPnacl = _filled(entry.pnaclBin)
        entry.hasEmscr = _filled(entry.emscrBin)
        if first_init or Converter.get_by_id(entry.key.id()) is None:
            entry.put()
def debug_showKinds(): print 'kinds:' for key in metadata.get_kinds(): print ' - ', key