def delete_files(self):
    """Remove every temporary file queued in ``tmp_filenames_to_clean_up``.

    Deletion is best-effort: files that are already gone are ignored.
    """
    self.response.write('Deleting files...\n')
    for tmp_name in self.tmp_filenames_to_clean_up:
        try:
            gcs.delete(tmp_name)
        except gcs.NotFoundError:
            # Already gone -- nothing to clean up.
            pass
def test_cache(self):
    """Test that caching works at some level."""
    first = self.get_build_page()
    # Remove the backing files; a cached page should still be served.
    gcs.delete(self.BUILD_DIR + 'started.json')
    gcs.delete(self.BUILD_DIR + 'finished.json')
    second = self.get_build_page()
    self.assertEqual(str(first), str(second))
def delete(self):
    """Delete a photo's GCS object and its datastore entity.

    Reads the ``photo_key`` request parameter (an ndb urlsafe key).
    Responds 404 for a malformed key or a missing photo, 500 when the GCS
    delete fails; on success the datastore entity is removed.
    """
    logging.info('delete photo')
    photo_key_urlsafe = self.request.get('photo_key')
    try:
        pkey = ndb.Key(urlsafe=photo_key_urlsafe)
        photo = pkey.get()
        logging.info(photo)
    except ProtocolBufferDecodeError as e:
        # Malformed urlsafe key supplied by the client.
        self.error(404)
        logging.error(e)
        self.response.out.write(e)
        return
    # check if there is the photo
    if not photo:
        self.error(404)
        msg = 'photo not found using the urlsafe key provided'
        logging.error(msg)
        self.response.out.write(msg)
        return
    if photo.file_name is not None:
        name = '/%s/%s' % (bucket, photo.file_name)
        # catch any error
        try:
            cloudstorage.delete(name)
        except Exception as e:
            self.error(500)
            logging.error(e)
            self.response.out.write(e)
            return
    # delete the photo on datastore
    pkey.delete()
    return
def delete_files(self):
    """Best-effort removal of this handler's temporary GCS files."""
    self.response.write('Deleting files...\n')
    for path in self.tmp_filenames_to_clean_up:
        try:
            gcs.delete(path)
        except gcs.NotFoundError:
            pass  # already deleted
def post(self):
    """Delete the GCS file named by the ``k`` request parameter.

    Deletion is best-effort: any GCS error is ignored so the request
    always succeeds.
    """
    file_key = self.request.get("k")
    try:
        gcs.delete(file_key)
    except Exception:
        # FIX: was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt. Narrowed to Exception while keeping the
        # original best-effort (silent) semantics.
        pass
def update_linked_image_for_model(self, serving_url, model_id):
    """
    Gets the image's object in the datastore using its serving_url and adds the ID to it
    If the model already has an image linked. Remove it.
    """
    # Find images already linked to this model instance.
    image_q = UploadedImage.all()
    image_q.filter("uploaded_for_type =", self.model_type)
    image_q.filter("uploaded_for_id =", model_id)
    for existing_image in image_q:
        if existing_image.serving_url == serving_url:
            # Already linked to the requested image; keep it.
            continue
        # Stale link: remove the GCS object (best effort) and the entity.
        try:
            gcs.delete(existing_image.gcs_file_path)
        except Exception as e:
            logging.exception(e)
        existing_image.delete()
    if serving_url:
        # Link the new image entity to the model.
        serving_image_q = UploadedImage.all()
        serving_image_q.filter("serving_url =", serving_url)
        image = serving_image_q.get()
        if image:
            image.uploaded_for_id = model_id
            image.put()
        # NOTE(review): reconstructed indentation — the return is assumed to
        # report whether an image entity was found and linked; confirm.
        return bool(image)
def _clean_cloudstore_paths_for_key(cls, root_key, min_start_time_millis):
    """Delete every cloud-storage file recorded under ``root_key``."""
    for gcs_path in cls._collect_cloudstore_paths(root_key):
        try:
            cloudstorage.delete(gcs_path)
        except cloudstorage.NotFoundError:
            logging.info('Cloud storage file %s already deleted', gcs_path)
        else:
            logging.info('Deleted cloud storage file %s', gcs_path)
def OfferInsert(self, offer):
    """Create an offer: store each image as PNG in GCS and save the entity.

    Rejects any image above 6 MiB, deleting the files already written for
    this offer before raising BadRequestException. Returns the stored offer.
    """
    user = self.get_current_user()
    offer.owner_key = user.key
    urls = []
    blobkeys = []
    for image in offer.images:
        if len(image) > 6 * 1024 * 1024:
            # Roll back the files already written for this offer.
            for blobkey in blobkeys:
                gcs.delete(blobkey)
            raise endpoints.BadRequestException(
                "Max. image size is 6*1024*1024 bytes")
        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        filename = "/" + BUCKET_NAME + "/" + str(uuid.uuid4())
        # Re-encode as PNG (rotate by 0 degrees is used as a no-op transform).
        png = images.rotate(image, 0, output_encoding=images.PNG)
        gcs_file = gcs.open(filename, 'w',
                            retry_params=write_retry_params,
                            content_type='image/png')
        # FIX: the original wrote the raw upload (``image``) even though the
        # object is declared image/png and ``png`` was computed; write the
        # converted bytes instead.
        gcs_file.write(png)
        gcs_file.close()
        blobkey = blobstore.create_gs_key("/gs" + filename)
        # Note: the entity stores GCS file names, not blobstore keys.
        blobkeys.append(filename)
        url = images.get_serving_url(blobkey)
        urls.append(url)
    offer.image_urls = urls
    offer.blobkeys = blobkeys
    del offer.images
    offer.put()
    return offer
def attachments_delete(self, request):
    """Remove single attachment for a timeline card"""
    current_user = endpoints.get_current_user()
    if current_user is None:
        raise endpoints.UnauthorizedException("Authentication required.")
    card = ndb.Key("TimelineItem", request.itemId).get()
    # Only the card's owner may delete its attachments.
    if card is None or card.user != current_user:
        raise endpoints.NotFoundException("Attachment not found.")
    if card.attachments is not None:
        for att in card.attachments:
            if att.id == request.attachmentId:
                # Delete attachment from blobstore
                try:
                    gcs.delete(bucket + "/" + att.id)
                except gcs.NotFoundError:
                    pass
                # Remove attachment from timeline card
                # (mutating the list mid-iteration is safe: we return next).
                card.attachments.remove(att)
                card.put()
                return AttachmentResponse(id=att.id)
    raise endpoints.NotFoundException("Attachment not found.")
def delete(self, name):
    """Remove ``name`` from this storage location; missing objects are ignored."""
    target = '{}/{}'.format(self.location, name)
    try:
        gcs.delete(target)
    except gcs.NotFoundError:
        pass  # nothing to delete
def delete(attachment):
    """Delete from GCS Bucket."""
    gcs_path = make_path(attachment.aid)
    try:
        gcs.delete(gcs_path)
    except gcs.NotFoundError:
        # Object already absent; treat as success.
        pass
def post(self, shortname):
    """Delete a tournament and its GCS export, guarded by its codeword."""
    shortname = shortname.lower()
    tourney = Tournament.get_by_id(shortname)
    if not tourney:
        self.response.set_status(404)
        self.response.write('Tournament not found.')
        return
    if self.request.get('codeword') != tourney.codeword:
        self.response.set_status(403)
        self.response.write("You didn't say the magic word.")
        return
    bucket_name = os.environ.get(
        'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
    export_path = '/%s/%s/export.js' % (bucket_name, shortname)
    try:
        gcs.delete(export_path)
    except gcs.NotFoundError:
        pass  # no export file to remove
    tourney.key.delete()
    self.redirect('/')
def post(self):
    """Copy uploaded files to provided destination

    Returns:
        string: path to uploaded path
    """
    if not self.get_file_infos():
        self.abort(400, "No file has been uploaded")
    fileinfo = self.get_file_infos()[0]
    try:
        import cloudstorage as gcs
    except ImportError:
        self.abort(
            500, 'GoogleAppEngineCloudStorageClient module is required')
    # gs_object_name is "/gs/bucket/object"; [3:] strips the "/gs" prefix.
    stat = gcs.stat(fileinfo.gs_object_name[3:])
    destpath = "/".join(stat.filename.split("/")[:-1])
    # Move the upload: copy into its destination folder, then drop the
    # temporary upload object.
    gcs.copy2(fileinfo.gs_object_name[3:], destpath)
    gcs.delete(fileinfo.gs_object_name[3:])
    if spiner.env.is_local_env():
        url = '/_ah/gcs{}'.format(destpath)
    else:
        url = 'https://storage.googleapis.com{}'.format(destpath)
    self.response.write(url)
def delete(self, name):
    """Best-effort delete of ``name`` under this storage location."""
    full_path = '{}/{}'.format(self.location, name)
    try:
        gcs.delete(full_path)
    except gcs.NotFoundError:
        # Missing file counts as deleted.
        pass
def get(self):
    """Delete the streams selected in the management form.

    For each checked stream: remove every picture's GCS object, serving
    URL and blobstore entry, delete the picture and stream entities, drop
    the search-index document, and unlink the stream from the current user.
    NOTE(review): indentation reconstructed from mangled source — the
    placement of ``current_user.put()`` relative to the loop is assumed.
    """
    auth = authenticate(self)
    if auth[0]:
        current_user = User.query(
            User.username == auth[0]._User__email).get()
        form_data = cgi.FieldStorage()
        requests = form_data.getlist("chkDeleteStream")
        index = search.Index(INDEX_NAME)
        for key_str in requests:
            key = ndb.Key(urlsafe=key_str)
            stream = key.get()
            for pic_key in stream.pictures:
                picture = pic_key.get()
                picture_name = picture.name
                filename = '/{}/Pictures'.format(
                    BUCKET_NAME) + "/" + picture_name
                cloudstorage.delete(filename)
                blob_key = picture.image
                images.delete_serving_url(blob_key)
                blobstore.delete(blob_key)
                pic_key.delete()
            key.delete()
            index.delete(key_str)
            current_user.streams_owned.remove(key)
        current_user.put()
        # Brief pause before redirect (presumably to let datastore writes
        # become visible to the next page load).
        time.sleep(.1)
        self.redirect('/ManageStream')
def delete(self, file_path):
    """Delete file from Google App Engine storage bucket

    Arguments:
        self
        file_path: string. '{folder}/(unknown)'. no leading slash

    Returns:
        True if successful, False if unsuccessful
    """
    def _log_failure(exc):
        # Both failure modes are reported identically.
        logging.info(
            'QueryBucketTool.delete(): deleting file from storage bucket was not successful'
        )
        logging.exception(exc)

    try:
        logging.debug(
            'QueryBucketTool.delete(): deleting file from storage bucket')
        cloudstorage.delete('{}/{}'.format(self.bucket_path, file_path))
        logging.debug(
            'QueryBucketTool.delete(): deleting file from storage bucket was successful'
        )
        return True
    except NotFoundError as e:
        _log_failure(e)
        return False
    except Exception as e:
        _log_failure(e)
        return False
def _gs_delete(self, gs_path):
    """Wrapper around cloudstorage.delete that catches NotFoundError."""
    try:
        cloudstorage.delete(filename=gs_path,
                            retry_params=self._retry_params)
    except cloudstorage.NotFoundError:  # pragma: no cover
        # A missing object is not an error for this wrapper.
        pass
def _clean_cloudstore_paths_for_key(cls, root_key, min_start_time_millis):
    """Best-effort removal of all cloud-storage paths tied to ``root_key``."""
    for path in cls._collect_cloudstore_paths(root_key):
        try:
            cloudstorage.delete(path)
        except cloudstorage.NotFoundError:
            logging.info('Cloud storage file %s already deleted', path)
            continue
        logging.info('Deleted cloud storage file %s', path)
def StorageHandler(request, ident):
    """Dispatch GCS maintenance actions: list / basic / read / guid / upload.

    GET serves JSON listings and file reads; POST uploads a file (deleting
    any previous object named in ``name`` first). Errors are returned as
    JSON with error=1. NOTE(review): Python 2 syntax (``except Exception, e``,
    implicit byte strings) — keep this module on Python 2.
    """
    if not ident == 'read':
        response = HttpResponse("", content_type='application/json')
    try:
        if request.method == 'GET':
            if (ident == 'list'):
                ans = list_bucket('/' + get_application_id() + '.appspot.com')
                response.write(
                    simplejson.dumps({
                        'error': 0,
                        'all_objects': ans
                    }))
            elif (ident == 'basic'):
                general(response)
            elif (ident == 'read'):
                nombre = request.GET.get('name', None)
                response = read_file(nombre)
            elif (ident == 'guid'):
                response.write(
                    simplejson.dumps({
                        'error': 0,
                        'uid': generarUID()
                    }))
            else:
                response.write(simplejson.dumps({'error': 0}))
        elif request.method == 'POST':
            archivo = request.FILES['file-0']
            uploaded_file_content = archivo.read()
            uploaded_file_filename = archivo.name
            uploaded_file_type = archivo.content_type
            nombreAnterior = request.POST.get('name', None)
            carpeta = request.POST.get('folder', '')
            if (not nombreAnterior is None):
                # Replacing an existing object: delete the old one first,
                # best-effort.
                try:
                    gcs.delete(nombreAnterior)
                except:
                    pass
            nombre = '/' + app_identity.get_default_gcs_bucket_name(
            ) + carpeta + '/' + generarUID() + '-' + uploaded_file_filename
            write_retry_params = gcs.RetryParams(backoff_factor=1.1)
            gcs_file = gcs.open(nombre,
                                'w',
                                content_type=uploaded_file_type,
                                options={'x-goog-acl': 'public-read'},
                                retry_params=write_retry_params)
            gcs_file.write(uploaded_file_content)
            gcs_file.close()
            response.write(simplejson.dumps({'error': 0, 'id': nombre}))
    except Exception, e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        response = HttpResponse("", content_type='application/json')
        response.write(
            simplejson.dumps({
                'error': 1,
                'msg': 'Error de servidor: ' +
                repr(traceback.format_tb(exc_traceback)) + '->' + str(e)
            }))
def start_restore(self, request_user):
    """Restore a project from the staged importer archive in GCS.

    Reads the base64-encoded zip (deleting the staged file afterwards),
    dispatches on the project JSON's ``version`` field, caches the new
    project id in memcache for the client to poll, and tracks progress in
    the ``Restore`` entity.
    """
    restore_status = Restore.get_by_id(self.id)
    restore_status.restore_status = 'started'
    restore_status.put()
    filename = '/' + server.GCS_BUCKET_NAME + '/importerdata/' + self.id
    f = gcs.open(filename, mode='r')
    base64_project = f.read()
    f.close()
    # The staged upload is one-shot; remove it now that it is in memory.
    gcs.delete(filename)
    project_zip_blob = base64.decodestring(base64_project)
    zip_file = zipfile.ZipFile(StringIO.StringIO(project_zip_blob), 'r')
    info_list = zip_file.infolist()
    # The project JSON is the first member of the archive.
    project_file = zip_file.open(info_list[0], 'r')
    project_json = json.load(project_file)
    # Unversioned and unknown-version archives fall back to the v1 format.
    if 'version' in project_json:
        if project_json['version'] == 1:
            project = self.restore_v1(project_json, request_user, zip_file)
        elif project_json['version'] == 2:
            project = self.restore_v2(project_json, request_user, zip_file)
        else:
            project = self.restore_v1(project_json, request_user, zip_file)
    else:
        project = self.restore_v1(project_json, request_user, zip_file)
    # Result is polled by the client; keep it for up to 10 minutes.
    memcache.add(key=self.id + '_result', value=project.key.id(), time=600)
    restore_status.restore_status = 'finished'
    restore_status.put()
def delete_files(self, filepaths):
    """Delete each of ``filepaths`` from GCS, reporting progress."""
    for path in filepaths:
        self.response.write(' Delete file {}\n'.format(path))
        try:
            gcs.delete(path)
        except gcs.NotFoundError:
            pass  # already gone
def delete(self):
    """Delete the original file and dynamic serving url if it exists
    """
    filepath = request.args.get('filepath')
    if not filepath:
        return make_response_validation_error('filepath', message='Parameter filepath is required')
    try:
        # FIX: the original passed an undefined name ``filename`` here,
        # raising NameError on every request. The 404 message below refers
        # to ``self.bucket``, so the object is assumed to live in that
        # bucket with ``filepath`` relative to it -- TODO confirm.
        cloudstorage.delete('/{}/{}'.format(self.bucket, filepath))
    except cloudstorage.AuthorizationError:
        abort_json(401, "Unauthorized request has been received by GCS.")
    except cloudstorage.ForbiddenError:
        abort_json(403, "Cloud Storage Forbidden Error. GCS replies with a 403 error for many reasons, the most common one is due to bucket permission not correctly setup for your app to access.")
    except cloudstorage.NotFoundError:
        abort_json(404, filepath + " not found on GCS in bucket " + self.bucket)
    except cloudstorage.TimeoutError:
        abort_json(408, 'Remote timed out')
    # TODO get the query string and delete file if asked to
    blobstore_filename = u'/gs/{}/{}'.format(bucket_name, filepath)
    blob_key = blobstore.create_gs_key(blobstore_filename)
    try:
        images.delete_serving_url(blob_key)
    except images.AccessDeniedError:
        abort_json(403, "App Engine Images API Access Denied Error. Files has already been deleted from Cloud Storage")
    except images.ObjectNotFoundError:
        pass
    return '', 204
def test_cache(self):
    """Test that caching works at some level."""
    before = self.get_build_page()
    gcs.delete(self.BUILD_DIR + 'started.json')
    gcs.delete(self.BUILD_DIR + 'finished.json')
    after = self.get_build_page()
    # Even with the source files gone, the cached page must match.
    self.assertEqual(str(before), str(after))
def delete_files(bucket, filenames, ignore_missing=False):
    """Deletes multiple files stored in GS.

    Arguments:
      bucket: a bucket that contains the files.
      filenames: list of file paths to delete (relative to a bucket root).
      ignore_missing: if True, will silently skip missing files, otherwise
          will print a warning to log.

    Returns:
      An empty list so this function can be used with functions that expect
      the RPC to return a Future.
    """
    # The GCS client library has no batch delete; remove files one at a time.
    retry_params = _make_retry_params()
    for name in filenames:
        full_path = '/%s/%s' % (bucket, name)
        try:
            cloudstorage.delete(full_path, retry_params=retry_params)
        except cloudstorage.errors.NotFoundError:
            if not ignore_missing:
                logging.warning(
                    'Trying to delete a GS file that\'s not there: /%s/%s',
                    bucket, name)
    return []
def _delete_file(self, bucketFile, ftype=None):
    """Drop ``bucketFile`` from the local cache, then from cloud storage."""
    log.info("Deleting: %s" % bucketFile)
    self.delCache(bucketFile, ftype)
    try:
        cloudstorage.delete(bucketFile)
    except cloudstorage.NotFoundError:
        pass  # already absent in GCS
def _delete_file(self, bucketFile, ftype=None):
    """Evict ``bucketFile`` from the cache and delete it from cloud storage."""
    log.info("Deleting: %s" % bucketFile)
    self.delCache(bucketFile, ftype)
    try:
        cloudstorage.delete(bucketFile)
    except cloudstorage.NotFoundError:
        # Nothing left to delete in GCS.
        pass
def delete(self):
    """ delete filename in GCS and BlobFiles """
    # NOTE: Python 2 except syntax; a missing file is only logged, never
    # raised to the caller.
    try:
        gcs.delete(self.gcs_filename)
    except gcs.NotFoundError, e:
        logging.warning('GCS file %s NOT FOUND : %s' % (self.gcs_filename, e))
def post(self, key):
    """Handle a Minecraft world archive upload for the server ``key``.

    Validates the uploaded archive, copies it into the server's archive
    location, flashes a status message, always attempts to delete the
    temporary upload object, then redirects home.
    """
    try:
        object_name = None
        server = self.get_server_by_key(key)
        file_infos = self.get_file_infos()
        if file_infos:
            file_info = file_infos[0]
            # gs_object_name is "/gs/bucket/object"; strip the "/gs" prefix.
            object_name = file_info.gs_object_name[3:]
            gcs_file = cloudstorage.open(object_name)
            if validate_server_archive(gcs_file):
                prefix = "/{0}/".format(gcs.get_default_bucket_name())
                # index() raises ValueError (caught below) if the object is
                # outside our default bucket.
                object_name.index(prefix)
                gcs_object_name = object_name[len(prefix):]
                gcs.copy_archive(server.key.urlsafe(), gcs_object_name)
                message = u'Minecraft world successfully uploaded'
                self.session.add_flash(message, level='info')
            else:
                message = u'Invalid minecraft world archive'
                logging.error(message)
                self.session.add_flash(message, level='error')
        else:
            message = u'No file chosen'
            self.session.add_flash(message, level='error')
    except Exception as e:
        message = u'Minecraft world archive could not be uploaded (Reason: {0})'.format(e)
        logging.error(message)
        self.session.add_flash(message, level='error')
    try:
        # Best-effort cleanup of the temporary upload object.
        if object_name:
            cloudstorage.delete(object_name)
    except Exception as e:
        logging.error("Problem deleting uploaded server archive {0} (Reason: {1})".format(object_name, e))
    self.redirect(webapp2.uri_for('home', server_key=server.url_key))
def Check(EmployeeDataFilePath, emailID, fallOutReport):
    """Validate an uploaded employee xlsx and e-mail the outcome.

    Converts the workbook to TSV, validates the rows, then mails either a
    fall-out report (on success, when ``fallOutReport`` == 1) or an error
    report (on failure). The generated report file is removed from GCS
    after the mail is sent. Returns 1 when there were no errors, else 0.
    NOTE(review): indentation reconstructed from mangled source.
    """
    FileName = EmployeeDataFilePath.split('/')[-1].split(".xlsx")[0]
    EmployeeDataFilePathTsv = XlsxToTsv(EmployeeDataFilePath)
    FieldId, Employees, TotalEmployee = EmployeeData(EmployeeDataFilePathTsv)
    Errors = ErrorList(Employees, TotalEmployee, ProductionTemplateFileName)
    if len(Errors) == 0:
        if fallOutReport == 1:
            FileName = FallOutReportXlsx(FieldId, Employees, TotalEmployee,
                                         ProductionTemplateFileName, FileName)
            Message = open("SuccessEmailBody.txt").read()
            Subject = "Success Factor Upload FallOut Report"
            mail.send_mail(sender=sender_email_id.format(
                app_identity.get_application_id()),
                to=emailID,
                subject=Subject,
                body=Message,
                attachments=[(FileName, gcs.open(FileName).read())])
            # Report has been mailed; drop the temporary GCS copy.
            gcs.delete(FileName)
    else:
        FileName = XlsxErrorReport(Errors, FileName)
        Message = open("ErrorEmailBody.txt").read()
        Subject = "Success Factor Upload File Error"
        mail.send_mail(sender=sender_email_id.format(
            app_identity.get_application_id()),
            to=emailID,
            subject=Subject,
            body=Message,
            attachments=[(FileName, gcs.open(FileName).read())])
        gcs.delete(FileName)
    if len(Errors) == 0:
        return 1
    else:
        return 0
def finish(self):
    """ Called when all shards have finished processing """
    if self.get_meta().generate_error_csv:
        self.error_csv_filename = self._error_csv_filename()
        with cloudstorage.open(self.error_csv_filename, 'w') as f:
            # Concat all error csvs from shards into 1 file
            has_written = False
            for shard in ImportShard.objects.filter(
                    task_id=self.pk, task_model_path=self.model_path):
                if not shard.error_csv_filename:
                    continue
                # If this is the first row, write the column headers
                if not has_written:
                    data = json.loads(shard.source_data_json)[0]
                    cols = getattr(self, "detected_columns", sorted(data.keys())) + [
                        "errors"
                    ]
                    csvwriter = csv.writer(f)
                    csvwriter.writerow(cols)
                    has_written = True
                # Write the shard's error file into the master file
                f.write(cloudstorage.open(shard.error_csv_filename).read())
                # Shard file is consumed; delete it.
                cloudstorage.delete(shard.error_csv_filename)
        if has_written:
            # Create a blobstore key for the GCS file
            blob_key = create_gs_key('/gs%s' % self.error_csv_filename)
            self.error_csv = '%s/errors.csv' % blob_key
            self.save()
        else:
            # No shard produced errors; remove the empty master file.
            cloudstorage.delete(self.error_csv_filename)
def delete_files(bucket, filenames, ignore_missing=False):
    """Deletes multiple files stored in GS.

    Arguments:
      bucket: a bucket that contains the files.
      filenames: list of file paths to delete (relative to a bucket root).
      ignore_missing: if True, will silently skip missing files, otherwise
          will print a warning to log.

    Returns:
      An empty list so this function can be used with functions that expect
      the RPC to return a Future.
    """
    # No batch delete in the GCS client library, so delete one by one.
    retry_params = _make_retry_params()
    for filename in filenames:
        path = '/%s/%s' % (bucket, filename)
        try:
            cloudstorage.delete(path, retry_params=retry_params)
        except cloudstorage.errors.NotFoundError:
            if ignore_missing:
                continue
            logging.warning(
                'Trying to delete a GS file that\'s not there: /%s/%s',
                bucket, filename)
    return []
def delete(self, path, silent=False):
    """Delete ``path`` under this storage root.

    With ``silent`` set, a missing object is ignored; otherwise the
    NotFoundError propagates to the caller.
    """
    try:
        absolute = os.path.join(self.root, path.lstrip('/'))
        cloudstorage.delete(absolute)
    except cloudstorage.errors.NotFoundError:
        if silent:
            return
        raise
def delete(attachment):
    """Delete from GCS Bucket."""
    try:
        gcs.delete(make_path(attachment.aid))
    except gcs.NotFoundError:
        pass  # already deleted
def import_text(self, text, title):
    """Import plain text as a new project, one paragraph per tag.

    When ``text`` is empty, the content is read from the staged GCS
    importer file (which is deleted after reading). Progress counters and
    status are updated per paragraph; accumulated entities are written in
    bulk at the end.
    """
    self.sent_tok = nltk.data.load(
        'lib/nltk_data/tokenizers/punkt/english.pickle')
    if not text:
        filename = '/' + server.GCS_BUCKET_NAME + '/importerdata/' + self.id
        f = gcs.open(filename, mode='r')
        text = f.read()
        f.close()
        # The staged upload is single-use; remove it once read.
        gcs.delete(filename)
    self.create_new_project(title)
    index = 0
    paragraphs = text.split('\n')
    self.set_tag_count(len(paragraphs))
    self.reset_tag_processed()
    self.set_status('running')
    for par in paragraphs:
        self.import_text_paragraph(par, parent=None)
        index += 1
        self.incr_tag_processed()
    # Bulk-write everything accumulated during paragraph import.
    ndb.put_multi(self.concept_array.values())
    ndb.put_multi(self.attribute_array.values())
    ndb.put_multi(self.crawlcontext_array)
    ndb.put_multi(self.phrasing_array)
    ndb.put_multi(self.prems_array)
    self.index_phrasings(self.phrasing_array)
    self.project.put()
    # Result id is polled by the client via memcache.
    memcache.add(key=str(self.id) + '_result', value=self.project.key.id())
    self.set_status('finished')
def InputFormatting(fileName):
    """Parse a tab-separated GCS file into per-column value lists.

    The first line holds column titles (lower-cased). Empty cells inherit
    the most recent non-empty value seen in that column (fill-down). The
    source file is deleted after parsing.

    Returns (LevelTitle, Output, TotalRecord).
    NOTE: Python 2 code (print statement, xrange). ``FileNotFoundError``
    is presumably imported/defined elsewhere in this module -- confirm.
    """
    try:
        fileHandle = gcs.open(fileName, "r")
    except FileNotFoundError:
        print "FILE NOT FOUND"
        sys.exit()
    LevelTitle = fileHandle.readline().split("\t")
    LevelTitle = [level.lower() for level in LevelTitle]
    missingValue = [""] * len(LevelTitle)
    Lines = fileHandle.read()
    TotalRecord = 0
    TotalLevels = len(LevelTitle)
    Output = {}
    for Level in LevelTitle:
        Output[Level] = []
    for line in Lines.splitlines():
        TotalRecord += 1
        level = line.split("\t")
        # Normalise: lower-case and drop everything from the first ".0".
        level = [x.lower().split(".0")[0] for x in level]
        for i in xrange(0, len(LevelTitle)):
            if level[i] == "":
                # Fill-down with the last non-empty value for this column.
                Output[LevelTitle[i]].append(missingValue[i])
            else:
                Output[LevelTitle[i]].append(level[i])
                missingValue[i] = level[i]
    gcs.delete(fileName)
    return (LevelTitle, Output, TotalRecord)
def InputFormatting(fileName):
    """Read a tab-separated GCS file and split it into per-column lists.

    Line one provides the (lower-cased) column titles; blank cells are
    filled down from the previous non-blank value in the same column.
    Deletes the source file when done and returns
    (LevelTitle, Output, TotalRecord).
    NOTE: Python 2 code (print statement, xrange); ``FileNotFoundError``
    is assumed to be defined elsewhere in this module -- confirm.
    """
    try:
        fileHandle = gcs.open(fileName, "r")
    except FileNotFoundError:
        print "FILE NOT FOUND"
        sys.exit()
    LevelTitle = fileHandle.readline().split("\t")
    LevelTitle = [level.lower() for level in LevelTitle]
    missingValue = [""] * len(LevelTitle)
    Lines = fileHandle.read()
    TotalRecord = 0
    TotalLevels = len(LevelTitle)
    Output = {}
    for Level in LevelTitle:
        Output[Level] = []
    for line in Lines.splitlines():
        TotalRecord += 1
        level = line.split("\t")
        # Lower-case each cell and truncate at the first ".0" occurrence.
        level = [x.lower().split(".0")[0] for x in level]
        for i in xrange(0, len(LevelTitle)):
            if level[i] == "":
                # Empty cell: reuse the last seen value for this column.
                Output[LevelTitle[i]].append(missingValue[i])
            else:
                Output[LevelTitle[i]].append(level[i])
                missingValue[i] = level[i]
    gcs.delete(fileName)
    return (LevelTitle, Output, TotalRecord)
def delete_file(filename):
    """Best-effort delete of ``filename`` from the configured bucket."""
    if not filename:
        return
    try:
        gcs.delete(get_bucket() + filename)
    except gcs.NotFoundError:
        pass  # already removed
def delete_file(self, filename):
    """Deletes the file from CloudStorage."""
    target = self.bucket + '/' + filename
    try:
        gcs.delete(target)
    except gcs.NotFoundError:
        # A missing file counts as deleted.
        pass
def _CleanUpOldExportResult(user_id, date):
    """Delete a user's export result when it is not newer than ``date``."""
    result = ndb.Key(ExportRatingsResult, user_id).get()
    if not result or result.date > date:
        return
    # Remove the exported file first, then the tracking entity.
    gcs.delete(result.filename)
    result.key.delete()
def delete_file(self, filename):
    """Remove ``filename`` from this handler's CloudStorage bucket."""
    try:
        gcs.delete('/'.join((self.bucket, filename)))
    except gcs.NotFoundError:
        pass  # nothing to remove
def delete_files(self):
    """Delete every temporary file created by this handler, logging each."""
    self.response.write('Deleting files...\n')
    for name in self.tmp_filenames_to_clean_up:
        self.response.write('Deleting file {}\n'.format(name))
        try:
            cloudstorage.delete(name)
        except cloudstorage.NotFoundError:
            pass  # already gone
def delete_files(self):
    """Report and remove each temporary GCS file for this request."""
    self.response.write('Deleting files...\n')
    for tmp in self.tmp_filenames_to_clean_up:
        self.response.write('Deleting file {}\n'.format(tmp))
        try:
            cloudstorage.delete(tmp)
        except cloudstorage.NotFoundError:
            # Treat missing files as already cleaned up.
            pass
def delete(self, filename):
    """Delete method will remove a file from GCS, provided an absolute file path."""
    try:
        gcs.delete(filename)
    except gcs.NotFoundError:
        return 'GCS File Not Found'
    return "{} deleted.".format(filename)
def delete_files(self):
    """Delete the accumulated temp files, printing progress to stdout."""
    print('Deleting files...\n')
    for target in self.tmp_filenames_to_clean_up:
        print('Deleting file %s\n' % target)
        try:
            gcs.delete(target)
        except gcs.NotFoundError:
            pass  # already removed
def delete(file_name, retry_params=None):
    """Delete ``file_name`` from GCS.

    Returns True on success, False when the file did not exist.
    """
    try:
        gcs.delete(file_name, retry_params)
    except gcs.NotFoundError:
        return False
    return True
def delete_file():
    """Delete every object under ``folder`` and return to the listing page."""
    # GCS helpfully decodes UTF-8 for you, is a bit weird because it won't
    # accept unicode when creating an object.
    for entry in cloudstorage.listbucket(folder):
        cloudstorage.delete(entry.filename.encode('utf-8'))
    return bottle.redirect('/list')
def testStrict(self):
    """Tests that fail_on_missing_input works properly."""
    # Stage ten small files in the (stubbed) GCS bucket for the input reader.
    gcs_files = []
    for num in range(10):
        gcs_file = "/los_buckets/file%s" % num
        with cloudstorage.open(gcs_file, "w") as buf:
            buf.write(str(num + 100))
        gcs_files.append("file%s" % num)
    input_class = (input_readers.__name__ + "." +
                   input_readers._GoogleCloudStorageInputReader.__name__)

    def _RunMR(fail_on_missing_input=None):
        """Clears the state and runs a single (strict or not) MR."""
        self._ClearMapperData()
        input_reader_dict = {
            "bucket_name": "los_buckets",
            "objects": gcs_files,
        }
        # None means "leave the option unset" (exercise the default).
        if fail_on_missing_input is not None:
            input_reader_dict["fail_on_missing_input"] = fail_on_missing_input
        mr_id = control.start_map(
            "job1",
            __name__ + "." + "_input_reader_memory_mapper",
            input_class,
            {
                "input_reader": input_reader_dict,
            },
            shard_count=10)
        test_support.execute_until_empty(self.taskqueue)
        return mr_id

    # All files are there. Default, strict and non-strict MRs should work.
    _RunMR(None)
    self.assertEqual([str(num + 100) for num in range(10)],
                     sorted(_memory_mapper_data))
    _RunMR(False)
    self.assertEqual([str(num + 100) for num in range(10)],
                     sorted(_memory_mapper_data))
    _RunMR(True)
    self.assertEqual([str(num + 100) for num in range(10)],
                     sorted(_memory_mapper_data))
    # Now remove a file.
    cloudstorage.delete("/los_buckets/file5")
    # Non-strict MR still works but some output is not there.
    mr_id = _RunMR(False)
    self.assertEqual([str(num + 100) for num in [0, 1, 2, 3, 4, 6, 7, 8, 9]],
                     sorted(_memory_mapper_data))
    self.assertEquals(model.MapreduceState.get_by_job_id(mr_id).result_status,
                      model.MapreduceState.RESULT_SUCCESS)
    # Strict MR fails.
    mr_id = _RunMR(True)
    self.assertEquals(model.MapreduceState.get_by_job_id(mr_id).result_status,
                      model.MapreduceState.RESULT_FAILED)
def _delete_blob(self, filename):
    """Best-effort delete of ``filename`` from the app's default bucket."""
    default_bucket = app_identity.get_default_gcs_bucket_name()
    # Cloud Storage file names are in the format /bucket/object.
    gcs_name = '/{}/{}'.format(default_bucket, filename)
    try:
        cloudstorage.delete(gcs_name)
    except cloudstorage.NotFoundError:
        logging.info("file not found {}".format(gcs_name))
def testComposeOne(self):
    """Test to ensure one file can be composed (the API supports it)."""
    # compose() takes object names relative to the bucket root.
    source = TESTFILE[len(BUCKET) + 1:]
    cloudstorage.compose([source], DESTFILE)
    with cloudstorage.open(DESTFILE, 'r') as handle:
        contents = handle.read()
    cloudstorage.delete(DESTFILE)
    self.assertEqual(DEFAULT_COMPOSE_CONTENT, contents)
def testComposeOne(self):
    """Test to ensure one file can be composed (the API supports it)."""
    relative_name = TESTFILE[len(BUCKET) + 1:]
    cloudstorage.compose([relative_name], DESTFILE)
    with cloudstorage.open(DESTFILE, 'r') as composed:
        actual = composed.read()
    cloudstorage.delete(DESTFILE)
    self.assertEqual(DEFAULT_COMPOSE_CONTENT, actual)
def remove(key):
    """Delete ``key`` from the bucket and the cache; True on success."""
    target = bucket + '/' + key
    try:
        gcs.delete(target, retry_params=None)
        if checkCache(key):
            removeCache(key)
    except Exception:
        return False
    return True
def testCompose32Files(self):
    """Test to 32 files are composed properly."""
    source = TESTFILE[len(BUCKET) + 1:]
    cloudstorage.compose([source] * 32, DESTFILE, content_type='text/plain')
    with cloudstorage.open(DESTFILE, 'r') as composed:
        actual = composed.read()
    cloudstorage.delete(DESTFILE)
    # The destination must be the source content repeated 32 times.
    self.assertEqual(''.join([DEFAULT_COMPOSE_CONTENT] * 32), actual)
def borrarImg_cloud(img):
    """Delete an image's GCS object and serving URL, then its entity."""
    if img.blobkeygs:
        gcs_path = '/' + BUCKET + '/' + str(img.key.parent().id()) + "/" + img.nombre
        # No error handling here: a failed GCS delete propagates.
        gcs.delete(gcs_path)
        images.delete_serving_url(img.blobkeygs)
    img.key.delete()
def __deleteBlob(cls, key):
    """ Private method to delete a blobstore file from key
    param @key is String
    """
    # Scan the bucket for the first entry whose name (minus the bucket
    # prefix) equals ``key`` and delete only that one.
    for entry in gcs.listbucket(cls.__bucket_name):
        if entry.filename[len(cls.__bucket_name):] == key:
            gcs.delete(entry.filename)
            break
def rmtree(self, path):
    """Recursively delete everything under ``path`` in this bucket."""
    if path != "":
        prefix = self.location + "/" + path + "/"
    else:
        prefix = self.location + "/"
    for entry in gcs.listbucket(path_prefix=prefix):
        gcs.delete(entry.filename)
def _execute(self):
    """Delete matched GCS files created before the expiration cutoff."""
    # 'expiration_days' is passed positionally, i.e. timedelta(days=...).
    cutoff = datetime.now() - timedelta(self._params['expiration_days'])
    cutoff_ts = time.mktime(cutoff.timetuple())
    for stat in self._get_matching_stats(self._params['file_uris']):
        if stat.st_ctime < cutoff_ts:
            gcs.delete(stat.filename)
            self.log_info('gs:/%s file deleted.', stat.filename)
def delete_thumbnail(thumbnail_key):
    """Remove a thumbnail: serving URL, reference entity, and GCS object."""
    # Drop the dynamic serving URL first.
    blob_name = '/gs/{}/{}'.format(THUMBNAIL_BUCKET, thumbnail_key)
    images.delete_serving_url(blobstore.create_gs_key(blob_name))
    # NOTE(review): assumes a ThumbnailReference always exists for the key;
    # a missing reference would raise AttributeError here -- confirm.
    reference = ThumbnailReference.query(
        ThumbnailReference.thumbnail_key == thumbnail_key).get()
    reference.key.delete()
    # Finally remove the stored object itself.
    cloudstorage.delete('/{}/{}'.format(THUMBNAIL_BUCKET, thumbnail_key))
def delete(self, name):
    """ Removes file from google cloud storage """
    target = self.location + "/" + name
    try:
        gcs.delete(target)
    except gcs.NotFoundError:
        # Surface a filesystem-style error to callers.
        raise OSError(
            u'File does not exist on Google Cloud Storage')