def copyStagingFilepathToGcs(staging_filepath, account_id, bill_id, bill_file_id):
    filename = os.path.basename(staging_filepath.data)
    filepath = getFilepath(account_id, bill_id, bill_file_id, filename)
    gcs.copy2(
        '/' + settings.STAGING_FILE_BUCKET + staging_filepath.data,
        '/' + settings.FILE_BUCKET + filepath)
    return filepath
def post(self): """Copy uploaded files to provided destination Returns: string: path to uploaded path """ if not self.get_file_infos(): self.abort(400, "No file has been uploaded") fileinfo = self.get_file_infos()[0] try: import cloudstorage as gcs except ImportError: self.abort( 500, 'GoogleAppEngineCloudStorageClient module is required') stat = gcs.stat(fileinfo.gs_object_name[3:]) destpath = "/".join(stat.filename.split("/")[:-1]) gcs.copy2(fileinfo.gs_object_name[3:], destpath) gcs.delete(fileinfo.gs_object_name[3:]) if spiner.env.is_local_env(): url = '/_ah/gcs{}'.format(destpath) else: url = 'https://storage.googleapis.com{}'.format(destpath) self.response.write(url)
def copy(self, path, new_path, raise_errors=False, prefix=None):
    """
    Copy a file to a new path.

    :param path: The path to the source file
    :type path: str
    :param new_path: The path to the new file
    :type new_path: str
    :param raise_errors: Re-raise storage errors instead of returning False
    :type raise_errors: bool
    :param prefix: Prefix applied to the source path
    :type prefix: str
    :return: Success
    :rtype: bool
    """
    path = self.path(path, prefix=prefix)
    new_path = self.path(new_path)
    try:
        gcs.copy2(path, new_path)
        return True
    except (gcs.NotFoundError, gcs.AuthorizationError):
        if raise_errors:
            raise
        return False
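A usage sketch for this wrapper; `storage` is a hypothetical instance of the owning class and the paths are made up:

# Returns True on success, False if the source is missing or access is
# denied (unless raise_errors=True, in which case the error propagates).
if storage.copy('reports/2016.csv', 'backups/2016.csv'):
    print('copied')
else:
    print('copy failed')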
def make_persistent(path):
    """Marks a file as persistent, which means it won't be deleted
    automatically."""
    path = _absolute(path)
    if not is_shortlived(path):
        return
    new_path = (config.STORAGE_PATH_PERSISTENT +
                path[len(config.STORAGE_PATH_SHORTLIVED):])
    gcs.copy2(path, new_path)
    return new_path
def cloudstorage_copy_objects(source, destination):
    """Copies all objects matching the source to the destination.

    Both source and destination use the following format: /bucket/prefix/
    """
    for source_file_stat in cloudstorage.listbucket(source):
        destination_filename = (
            destination + source_file_stat.filename[len(source):])
        if _should_copy_object(source_file_stat, destination_filename):
            cloudstorage.copy2(
                source_file_stat.filename, destination_filename)
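A hypothetical invocation, assuming both arguments use the /bucket/prefix/ form the docstring describes (`_should_copy_object` is a filter helper defined elsewhere in that codebase):

# Copies every object under /src-bucket/exports/ to /dst-bucket/archive/,
# keeping the part of each object name that follows the source prefix.
cloudstorage_copy_objects('/src-bucket/exports/', '/dst-bucket/archive/')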
def _do_move_logs(files):
    for filename in files:
        # Filenames look like .../protocol-logs-<YYYYMMDD>.<number>[.processed]
        split = filename.split('/protocol-logs-')[1].split('.')
        date_str, number = split[0], split[1]
        date = datetime.strptime(date_str, '%Y%m%d')
        # Skip files that were already processed or predate the cutoff.
        if split[-1] == 'processed' or date < datetime(2016, 3, 17):
            continue
        folder_name = _get_folder_name(date)
        new_filename = '%s/%s/rogerthat-server-%s.json' % (
            NEW_BUCKET, folder_name, number)
        cloudstorage.copy2(filename, new_filename)
def copyStagingFilepathsToGcs(request, account_id, bill_id, bill=None):
    filepaths = []
    for staging_filepath in request.staging_filepaths:
        filename = os.path.basename(staging_filepath.data)
        filepath = getFilepath(account_id, bill_id, filename)
        if bill and filepath in bill.filepaths:
            raise endpoints.InternalServerErrorException(
                "{} file already uploaded.".format(filename))
        filepaths.append(filepath)
        gcs.copy2(
            '/' + settings.STAGING_FILE_BUCKET + staging_filepath.data,
            '/' + settings.FILE_BUCKET + filepath)
    return filepaths
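A hedged call sketch; the identifiers are made up, and request is assumed to be an Endpoints message whose repeated staging_filepaths field the loop above iterates:

# Returns the destination paths, or raises InternalServerErrorException
# if one of the files was already attached to the bill.
filepaths = copyStagingFilepathsToGcs(request, 'account-1', 'bill-1', bill=bill)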
def copy(self, source_assets_path, filepath):
    """Copy a file from the source entity's assets folder.

    Args:
        source_assets_path: str. The path to the source entity's assets
            folder.
        filepath: str. The path to the relevant file within the entity's
            assets folder.
    """
    bucket_name = app_identity_services.get_gcs_resource_bucket_name()
    source_file_url = (
        '/%s/%s/%s' % (
            bucket_name, source_assets_path, filepath)).encode('utf-8')
    # cloudstorage.copy2 copies the file from the source URL to the
    # destination URL.
    cloudstorage.copy2(source_file_url, self._get_gcs_file_url(filepath))
def post(self):
    file_info = self.get_file_infos()[0]
    path = self.request.get('path') or '/'
    if path.startswith('/'):
        path = path[1:]
    new_path = os.path.join('/*CHANGEME*', path)
    if zipfile.is_zipfile(file_info.filename):
        # Zip uploads are expanded instead of copied; nothing below runs.
        return self.processUnzip(
            file_info.gs_object_name[3:], new_path, file_info.filename)
    # gs_object_name[3:] removes the leading '/gs' blobstore prefix.
    gcs.copy2(
        file_info.gs_object_name[3:],
        os.path.join(new_path, file_info.filename),
        {'content-type': file_info.content_type})
    gcs.delete(file_info.gs_object_name[3:])
    rtn_data = {
        "gs_object_name": new_path,
        "content_type": file_info.content_type,
        "size": file_info.size,
        "md5_hash": file_info.md5_hash,
        "filename": file_info.filename,
    }
    self.response.headers['cache-control'] = 'no-cache'
    self.response.headers['content-type'] = 'text/javascript'
    self.response.write(json.dumps(rtn_data))
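The three-argument copy2 call above passes a metadata mapping; as I read this client, copy2(src, dst, metadata=None, retry_params=None) preserves the source object's metadata when metadata is None and replaces it with the given mapping otherwise. A minimal sketch with made-up paths:

import cloudstorage

# Copy the object and overwrite its content type in the same call.
cloudstorage.copy2('/my-bucket/raw/upload.bin', '/my-bucket/docs/note.txt',
                   metadata={'content-type': 'text/plain'})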
logging.warning("Deadline exceeded error (not to worry) composing \ file: %s Error: %s\nVersion: %s" % (composed_filename, e, DOWNLOAD_VERSION) ) pass # logging.error("Error composing file: %s Error: %s\nVersion: %s" # % (composed_filename, e, DOWNLOAD_VERSION) ) # retry_count += 1 # Now, can we zip the final result? # Not directly with GCS. It would have to be done using gsutil in Google # Compute Engine # Copy the file from temporary storage bucket to the download bucket src = '/%s/%s' % (TEMP_BUCKET, composed_filename) dest = '/%s/%s' % (DOWNLOAD_BUCKET, composed_filename) try: gcs.copy2(src, dest) except Exception, e: s = 'Error copying %s to %s\n' % (src, dest) s += 'Error: %s Version: %s' % (e, DOWNLOAD_VERSION) logging.error(s) # Change the ACL so that the download file is publicly readable. mbody=acl_update_request() # logging.info('Requesting update for /%s/%s\nmbody%s \ # \nVersion: %s' % (DOWNLOAD_BUCKET,composed_filename, mbody, # DOWNLOAD_VERSION) ) req = service.objects().update( bucket=DOWNLOAD_BUCKET, object=composed_filename, predefinedAcl='publicRead', body=mbody)
def get(self):
    user_name = self.request.get('user_name', 'xxx')
    bucket_name = os.environ.get(
        'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
    bucket = '/' + bucket_name + '/Users'
    tempfilename = bucket + '/tempusername'
    write_retry_params = cloudstorage.RetryParams(backoff_factor=1.1)
    metadata_options = {'x-goog-meta-foo': 'foo', 'x-goog-meta-bar': 'bar'}
    index = 0
    if self.FileExists('/withingsapp.appspot.com/Users', 'username'):
        # Rewrite the user list via a temp file: copy the existing
        # /username contents, append the new user, then copy2 it back.
        gcs_file_write = cloudstorage.open(
            tempfilename, 'w', content_type='text/plain',
            options=metadata_options, retry_params=write_retry_params)
        with cloudstorage.open(bucket + '/username') as gcs_file_read:
            gcs_file_write.write(gcs_file_read.read())
        # Read the current index and assign index + 1 to this user.
        with cloudstorage.open(bucket + '/current_index') as index_file:
            index = int(index_file.readline()) + 1
        gcs_file_write.write('\n')
        gcs_file_write.write(user_name.encode('utf-8'))
        gcs_file_write.write('\n')
        gcs_file_write.write(str(index))
        gcs_file_write.close()
        try:
            cloudstorage.copy2(tempfilename, bucket + '/username')
        except cloudstorage.NotFoundError:
            self.response.write('NotFoundError')
    else:
        # This is the first user; create /username directly.
        with cloudstorage.open(
                bucket + '/username', 'w', content_type='text/plain',
                options=metadata_options,
                retry_params=write_retry_params) as user_name_file:
            user_name_file.write(user_name.encode('utf-8'))
            user_name_file.write('\n')
            user_name_file.write(str(index))
    # Write the current index into /current_index.
    with cloudstorage.open(
            bucket + '/current_index', 'w', content_type='text/plain',
            options=metadata_options,
            retry_params=write_retry_params) as index_file:
        index_file.write(str(index))
    # Create the participantID/ "folder" (an empty placeholder object).
    with cloudstorage.open(
            bucket + '/' + str(index) + '/', 'w',
            options=metadata_options, retry_params=write_retry_params):
        pass
    self.response.write("participantID")
    # Create the Dailyrecords/ folder inside the new participantID/ folder.
    with cloudstorage.open(
            bucket + '/' + str(index) + '/Dailyrecords/', 'w',
            options=metadata_options, retry_params=write_retry_params):
        pass
    self.response.write("dailyrecords")
    template_values = {'url': AUTH_URL_COMPLETE}
    template = JINJA_ENVIRONMENT.get_template('index.html')
    self.response.write(template.render(template_values))