Example #1
class GoogleCloudFile(File):
    def __init__(self, name, mode, storage):
        self.name = name
        self.mime_type = mimetypes.guess_type(name)[0]
        self._mode = mode
        self._storage = storage
        self.blob = storage.bucket.get_blob(name)
        if not self.blob and 'w' in mode:
            self.blob = Blob(self.name,
                             storage.bucket,
                             chunk_size=storage.blob_chunk_size)
        self._file = None
        self._is_dirty = False

    @property
    def size(self):
        return self.blob.size

    def _get_file(self):
        if self._file is None:
            self._file = SpooledTemporaryFile(
                max_size=self._storage.max_memory_size,
                suffix=".GSStorageFile",
                dir=setting("FILE_UPLOAD_TEMP_DIR"))
            if 'r' in self._mode:
                self._is_dirty = False
                self.blob.download_to_file(self._file)
                self._file.seek(0)
        return self._file

    def _set_file(self, value):
        self._file = value

    file = property(_get_file, _set_file)

    def read(self, num_bytes=None):
        if 'r' not in self._mode:
            raise AttributeError("File was not opened in read mode.")

        if num_bytes is None:
            num_bytes = -1

        return super().read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was not opened in write mode.")
        self._is_dirty = True
        return super().write(force_bytes(content))

    def close(self):
        if self._file is not None:
            if self._is_dirty:
                self.blob.upload_from_file(
                    self.file,
                    rewind=True,
                    content_type=self.mime_type,
                    predefined_acl=self._storage.default_acl)
            self._file.close()
            self._file = None
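
A minimal usage sketch for the class above, for illustration only: gcs_storage stands in for a configured django-storages GoogleCloudStorage instance exposing bucket, max_memory_size, blob_chunk_size, and default_acl; the file name is hypothetical.

# Hypothetical usage; not part of the original example.
f = GoogleCloudFile('reports/2021.csv', 'w', gcs_storage)
f.write(b'col_a,col_b\n1,2\n')   # marks the buffer dirty
f.close()                        # close() flushes the spooled buffer to GCS

f = GoogleCloudFile('reports/2021.csv', 'r', gcs_storage)
data = f.read()                  # first access downloads the blob to a temp file
f.close()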
Example #2
    def upload_image_to_gcs(self, image_file_path, bucket_name):
        """Uploads images to Google Cloud Storage.

        Arguments:
            file_name : Name of image to upload on local machine
            image_file_path: Path to image to upload on local machine.
            bucket_name: Name of the GCS bucket.
        """
        project = "<GCP_project_id>"
        credentials = service_account.Credentials.from_service_account_file(
            '~/gcp-service-account.json')
        client = storage.Client(project, credentials)

        try:
            bucket = client.get_bucket(bucket_name)
        except Exception as e:
            # Error out if we're unable to locate the GCS bucket.
            raise MotionAlertError("Error connecting to GCS bucket: "
                                   "{0}".format(e))

        try:
            temp_image = image_file_path.split('/')
            image_file = temp_image[3] + "/" + temp_image[4]

            blob = Blob(image_file, bucket)

            with open(image_file_path, 'rb') as my_file:
                blob.upload_from_file(my_file)
        except Exception as e:
            # Error out if we're unable to upload the image.
            raise MotionAlertError(
                "Error uploading file to GCS: {0}".format(e))
Example #3
    def _upload(self, payload: bytes, filename: str, bucket: str) -> None:
        """
        Upload a payload to GCS

        """

        client = Client(project=self.project_id)
        # Encrypt once, outside the retry loop, so a retry does not
        # re-encrypt an already-encrypted payload.
        if self.use_encryption:
            payload = self._encrypt(payload)
        count = 0
        while count < self.max_retries:
            try:
                bucket_obj = client.get_bucket(bucket)
                content = BytesIO(payload)
                blob = Blob(filename, bucket_obj)
                blob.upload_from_file(content)
                break
            except (
                InvalidResponse,
                GoogleAPICallError,
                InternalServerError,
                SSLError,
            ) as e:
                # Increment before the check so the final failure is raised
                # instead of being silently swallowed.
                count += 1
                if count >= self.max_retries:
                    raise StoqPluginException(
                        f'Failed to upload {bucket}/{filename} to GCS: {str(e)}'
                    )
                sleep(randrange(0, 4))
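
The flat sleep(randrange(0, 4)) waits a random interval between attempts; an exponential-backoff variant is sketched below as an alternative, not the plugin's actual behavior. backoff_sleep(count) would replace the flat sleep.

import random
import time

def backoff_sleep(attempt, base=1.0, cap=30.0):
    # Wait base * 2**attempt seconds plus jitter, capped at `cap`.
    time.sleep(min(cap, base * (2 ** attempt)) + random.random())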
Example #4
    def upload_image(self, location: str, label: str, image_bytes,
                     metadata: dict) -> typing.Optional[str]:
        blob = Blob(f"{location}/{label}", self.bucket)
        blob.metadata = metadata
        blob.upload_from_file(image_bytes, content_type="image/png")
        blob.make_public()
        return blob.public_url
Example #5
def download_bhavcopy(event, context):

    holiday_dict = create_holiday_dict()
    logging.info('Dictionary created for ' + str(len(holiday_dict)) +
                 ' holidays')

    base_url = 'https://www.bseindia.com/download/BhavCopy/Equity/'

    pubsub_message = base64.b64decode(event['data']).decode('utf-8')
    print(pubsub_message)
    print(event['attributes'])
    fname = event['attributes']['objectId']

    extracted_date = re.search(r'([eE][qQ])(\d\d\d\d\d\d)', fname).group(2)
    new_date = datetime.strptime(extracted_date, '%d%m%y') + timedelta(days=1)

    file_downloaded_locally, new_fname = check_and_download(
        new_date, holiday_dict, base_url)
    try:
        if file_downloaded_locally and (
                not check_if_already_stored(new_fname)):
            client = storage.Client(project='bhavcopy')
            bucket = client.get_bucket('bhavcopy-store')
            blob = Blob(new_fname, bucket)
            with open('/tmp/' + new_fname, 'rb') as my_file:
                blob.upload_from_file(my_file)
    except Exception as e:
        logging.info(
            'Not Downloaded: Cloud function exiting without storing file for date: '
            + str(new_date) + '. Received error: ' + str(e))
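
check_if_already_stored is defined elsewhere; a plausible sketch of such a helper using Blob.exists(), offered as an assumption rather than the original implementation:

def check_if_already_stored(fname):
    # Hypothetical helper: True if the object is already in the bucket.
    client = storage.Client(project='bhavcopy')
    bucket = client.get_bucket('bhavcopy-store')
    return Blob(fname, bucket).exists()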
Example #6
def upload_item_as_raw_file(path, client=None):
    """Set things up, convert the file, and upload it."""
    if client is None:
        client = get_storage_client()

    # Check that the bucket exists, make it if not.
    try:
        b = client.get_bucket(TRANSCRIPTS_BUCKET_NAME)
    except Forbidden as e:
        print("Received Forbidden (403) error while getting bucket. This could "
              "mean that you do not have billing set up for this "
              "account/project, or that somebody else has taken this bucket "
              "from the global namespace.")
        raise e
    except NotFound:
        b = client.bucket(TRANSCRIPTS_BUCKET_NAME)
        b.lifecycle_rules = [{
            'action': {'type': 'Delete'},
            'condition': {'age': 7},
        }]
        b.create()
        b.make_public(future=True)

    # Re-encode the file as a temp file and upload it. When we leave the context
    # manager, the temp file gets automatically deleted.
    with NamedTemporaryFile(prefix='transcode_', suffix='.raw') as tmp:
        encode_as_linear16(path, tmp)

        # Name it after a SHA2 hash of the item, to avoid collisions.
        file_name = 'transcripts-%s' % hashlib.sha256(tmp.read()).hexdigest()
        blob = Blob(file_name, b)
        blob.upload_from_file(tmp, rewind=True)

    return {'blob_name': blob.name, 'bucket_name': blob.bucket.name}
Example #7
def upload_arr(arr: np.ndarray, blob: storage.Blob):
    logging.info(f'uploading blob {blob.name}')
    with io.BytesIO() as stream:
        # noinspection PyTypeChecker
        np.save(stream, arr)
        stream.seek(0)
        blob.upload_from_file(stream)
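
A matching download helper for arrays saved this way might look like the sketch below; it is assumed, not part of the source:

def download_arr(blob: storage.Blob) -> np.ndarray:
    logging.info(f'downloading blob {blob.name}')
    with io.BytesIO() as stream:
        blob.download_to_file(stream)
        stream.seek(0)
        return np.load(stream)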
Example #8
    def write(self,
              file_path,
              data,
              num_retries=10,
              content_type=None,
              bucket=None):

        bucket = self.client.get_bucket(self.bucket or bucket)

        blob = Blob(file_path, bucket)

        # Normalize JSON payloads: parse strings that contain JSON, then
        # serialize dicts and lists back to a JSON string before encoding.
        try:
            data = json.loads(data)
        except (TypeError, ValueError):
            pass

        if isinstance(data, (dict, list)):
            data = json.dumps(data)

        data = _to_bytes(data, encoding="utf-8")
        string_buffer = BytesIO(data)

        blob.upload_from_file(
            file_obj=string_buffer,
            size=len(data),
            client=self.client,
            num_retries=num_retries or self.num_retries,
            content_type=_MEME_TYPES[self.content_type or content_type]
        )
        return
Example #9
    def put(self, source, name=None):
        """Upload source to the bucket under the instance's base address.

        Parameters
        ----------
        source : str, bytes, or io.BytesIO
            The payload to upload.
        name : str, optional
            Key appended to the base address; the base address alone is
            used when omitted.

        Returns
        -------
        bool
            True on success, False otherwise.
        """

        if isinstance(source, io.BytesIO):
            filebuff = io.BufferedReader(source)
        elif isinstance(source, bytes):
            filebuff = io.BufferedReader(io.BytesIO(source))
        elif isinstance(source, str):
            # str payloads must be encoded before wrapping in BytesIO
            filebuff = io.BufferedReader(io.BytesIO(source.encode('utf-8')))
        else:
            log.error(
                'Source should be either a string, bytes, or io.BytesIO, got {}'
                .format(type(source)))
            return False

        key = self.base_address if name is None else os.path.join(
            self.base_address, name)
        key = key.strip('/')
        try:
            blob = Blob(key, self.bucket)
            blob.upload_from_file(filebuff, rewind=True)
            log.info("Uploaded {} bytes to \tbucket={}\tkey={}".format(
                len(source), self.bucket_name, key))
            return True
        except Exception as e:
            log.error("{}\tbucket={}\tkey={}".format(e, self.bucket, key))
            return False
Example #10
def write_gcloud_blob(bucket_id: str, gpath: str, fpath: str):
    """ Write a blob to Google Cloud Storage.

    References:
      https://pypi.org/project/google-cloud-storage/

    :param bucket_id: id of the Google Cloud bucket
    :param gpath: destination path of the item within the bucket
    :param fpath: path of the item on the local disk
    :rtype: None
    """
    logger.info("Writing '{}' to '{}' at '{}'".\
                format(fpath, bucket_id, gpath))
    try:
        client = storage.Client()
        bucket = client.get_bucket(bucket_id)
        blob = Blob(gpath, bucket)
        with open(fpath, 'rb') as infile:
            blob.upload_from_file(infile)

        logger.info("SUCCESS -- uploaded '{}' to '{}' using '{}'".\
                    format(fpath, gpath, bucket_id))

    except Exception as exc:
        logger.error("Unable to upload '{}' to '{}' using '{}'".\
                     format(fpath, gpath, bucket_id))
        logger.exception(exc)
Example #11
def upload_static_files(bucket, file_name):
	# Uploader Form
	blob = Blob(name=file_name, bucket=bucket)
	with open(os.path.abspath('../static-files/%s' % file_name), 'rb') as tmp_file:
		blob.upload_from_file(tmp_file)
	# Resource
	print(u'Info: File %s uploaded' % file_name)
	print(blob)
Example #12
def file_upload(file, request_date):
    if file and allowed_file(file.filename):
        new_file_name = request_date.replace(" ", "-").replace(
            ":", "-") + "_" + file.filename
        safename = secure_filename(new_file_name)
        blob = Blob(safename, get_storage())
        blob.upload_from_file(file)
        # Report success only on the branch that actually uploaded.
        print('File uploaded successfully')
Example #13
def upload(filename):
    client = storage.Client(project=PROJECT_NAME)
    bucket = client.get_bucket(BUCKET_NAME)
    blob_name = "{}_{}".format(str(datetime.datetime.now()), filename)
    blob = Blob(blob_name, bucket)
    with open(filename, 'rb') as my_file:
        blob.upload_from_file(my_file)
        blob_q.put("gs://{}/{}".format(BUCKET_NAME, blob_name))
Example #14
def upload_public_file(client, bkt, file_name):
    # file_name in Blob constructor is the file name you want to have on GCS
    blob = Blob(file_name, bkt)
    # file_name in open function is the one that actually sits on your hard drive
    with open(file_name, 'rb') as my_file:
        blob.upload_from_file(my_file)
    # after uploading the blob, we set it to public, so that it's accessible with a simple link
    blob.make_public(client)
Example #15
def download_html_company_information(company_symbols, bucket_name):
    """Downloads HTML files with company information, stores them locally on the VM, and uploads them to Cloud Storage."""

    client = storage.Client()
    bucket = client.get_bucket(bucket_name)

    # Set the display for the VM to download the HTML code
    display = Display(visible=0, size=(800, 600))
    display.start()

    print "Downloading the Company Information HTML files for: " + str(
        len(company_symbols)) + " symbols"

    company_symbols = ["AXON"]

    i = 1
    for symbol in company_symbols:

        print "Downloading symbol " + str(i) + ": " + str(
            symbol) + " from the list"

        # Download locally in a vm specified folder
        url = "http://financials.morningstar.com/company-profile/c.action?t=" + symbol + "&region=GRC&culture=en_US"
        #htmlFile = urllib2.urlopen(url)
        htmlFilePath = "/home/project_investing/datasources/html_" + symbol + "_" + datetime.datetime.today(
        ).strftime('%Y%m%d') + ".html"

        driver = webdriver.Chrome()
        #options = webdriver.ChromeOptions()
        #options.binary_location = '/opt/google/chrome/google-chrome'
        #service_log_path = "{}/chromedriver.log".format("/home/project_investing")
        #service_args = ['--verbose']
        #driver = webdriver.Chrome('/usr/bin/chromedriver', chrome_options=options, service_args=service_args, service_log_path=service_log_path)

        driver.get(url)

        #waiting for the page to load - TODO: change
        #wait = WebDriverWait(driver, 10)
        #wait.until(EC.visibility_of_element_located((By.ID, "content")))

        data = driver.page_source

        # page_source is a str, so write in text mode rather than 'wb'.
        with open(htmlFilePath, 'w', encoding='utf-8') as output:
            output.write(data)

        driver.close()

        # Upload to Cloud Storage
        blob_fileName = "datasources/html_files/html_" + symbol + "_" + datetime.datetime.today(
        ).strftime('%Y%m%d') + ".html"
        blob = Blob(blob_fileName, bucket)
        with open(htmlFilePath, 'rb') as input:
            blob.upload_from_file(input)

        i += 1

    print "All HTML files have been succesfully stored in Cloud Storage in the following path: gs://athens_stock_exchange/datasources/html_files/"
Example #16
    def uploadNewPhoto(self, file, bucket):
        blob = Blob(file, bucket)
        # TODO: create csv/tuple for the input?
        with open(file, "rb") as my_file:
            blob.upload_from_file(my_file)

        blob.make_public()

        return r"gs://" + bucket.name + r"/" + file
Example #17
    def store_file_to_gcs(self, bucket_name, filename):
        bucket = self.gcs.get_bucket(bucket_name)
        blob = Blob(filename, bucket)
        try:
            with open(filename, 'rb') as input_file:
                blob.upload_from_file(input_file)
            return True
        except IOError:
            print('Error: Cannot find the file {}'.format(filename))
            return False
Example #18
    def copyFromLocal(self,
                      uri_source,
                      uri_target,
                      bucket_name_target=None,
                      **kwargs):
        gcp_bucket_target = self._gcp_bucket(bucket_name_target)
        blob = Blob(uri_target, gcp_bucket_target)
        with open(uri_source, 'rb') as file_pointer:
            blob.upload_from_file(file_pointer)
Example #19
def upload_file(module, client, src, dest):
    try:
        bucket = client.get_bucket(module.params['bucket'])
        blob = Blob(dest, bucket)
        with open(src, "r") as file_obj:
            blob.upload_from_file(file_obj)
        return blob_to_dict(blob)
    except google.cloud.exceptions.GoogleCloudError as e:
        module.fail_json(msg=str(e))
Example #20
def get_gcloud_url(stream):
    filename = ''.join(
        random.choice(string.ascii_lowercase + string.digits)
        for i in range(16)) + ".png"
    client = storage.Client()
    bucket = client.get_bucket('hackgt-catalyst2018-photostyle')
    blob = Blob(filename, bucket)
    blob.upload_from_file(stream, content_type="image/png", client=client)
    blob.make_public(client=client)
    return blob.public_url
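
If the object should not stay world-readable, a time-limited signed URL is an alternative to make_public(); a sketch, assuming the client's credentials can sign (for example, a service account key):

from datetime import timedelta

def get_signed_gcloud_url(blob):
    # URL valid for one hour instead of a permanently public blob.
    return blob.generate_signed_url(version="v4",
                                    expiration=timedelta(hours=1),
                                    method="GET")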
Example #21
def upload_data(data):
    formatted_version = '_'.join(factorio_version().split('.'))
    storage_filename = 'factorio-data/v{}.json.gz'.format(formatted_version)

    data_blob = Blob(storage_filename, storage.bucket(app=actuario_app))
    data_blob.content_encoding = 'gzip'

    data_gz_bytes = gzip.compress(json.dumps(data).encode())
    with io.BytesIO(data_gz_bytes) as data_stream:
        data_blob.upload_from_file(data_stream,
                                   content_type='application/json')
Example #22
def main():
    #
    # initial greeting...
    #
    print("Hello Google Cloud Storage!")
    #
    # create a client
    #
    print("creating client...")
    client = storage.Client()
    index = 0
    print("indexing over bucket list...")
    for bucket in client.list_buckets():
        print(bucket)
        print("index = " + str(index))
        if index == 0:
            defaultBucket = bucket
        index += 1
    print("")
    print("chosen bucket is: " + str(defaultBucket))
    blob = Blob("raw_image.jpg", defaultBucket)
    quit = False
    imageFilePath = "/home/shawn/Desktop/raw_image_download.jpg"
    while not quit:
        blobCount = 0
        for blobItem in defaultBucket.list_blobs():
            blobCount += 1
        if blobCount == 0:
            print("empty...")
        else:
            print("downloading...")
            with open(imageFilePath, "wb") as imageFile:
                blob.download_to_file(imageFile)
            with Image(filename=imageFilePath) as img:
                print(img.size)
                print("blurring...")
                img.gaussian_blur(9, 1)
                imageFilePath = "/home/shawn/Desktop/blurred_image.jpg"
                print("saving...")
                img.save(filename=imageFilePath)
            with Image(filename=imageFilePath) as img:
                blob = Blob("blurred_image.jpg", defaultBucket)
                print("uploading...")
                with open("/home/shawn/Desktop/blurred_image.jpg",
                          "rb") as imageFile:
                    blob.upload_from_file(imageFile)
                display(img)
        time.sleep(1.0)
    #
    # final greeting...
    #
    print("Goodbye Google Cloud Storage!")
Example #23
def upload_from_file(client, to_delete):
    # [START upload_from_file]
    from google.cloud.storage import Blob

    client = storage.Client(project='my-project')
    bucket = client.get_bucket('my-bucket')
    encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
    blob = Blob('secure-data', bucket, encryption_key=encryption_key)
    with open('my-file', 'rb') as my_file:
        blob.upload_from_file(my_file)
    # [END upload_from_file]

    to_delete.append(blob)
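
Customer-supplied encryption keys must be exactly 32 bytes; the hex string above happens to be 32 ASCII characters. A sketch of generating and transporting a random key, illustrative rather than part of the docs sample:

import base64
import os

encryption_key = os.urandom(32)             # 32 random bytes, as required
key_b64 = base64.b64encode(encryption_key)  # safe to store or transmit
# base64.b64decode(key_b64) later restores the key for Blob(..., encryption_key=...)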
Example #25
def upload_from_file(to_delete):
    # [START upload_from_file]
    from google.cloud.storage import Blob

    client = storage.Client(project="my-project")
    bucket = client.get_bucket("my-bucket")
    encryption_key = "aa426195405adee2c8081bb9e7e74b19"
    blob = Blob("secure-data", bucket, encryption_key=encryption_key)
    with open("my-file", "rb") as my_file:
        blob.upload_from_file(my_file)
    # [END upload_from_file]

    to_delete.append(blob)
Example #26
    def _save(self, name, content):
        name = os.path.basename(name)
        new_name = name
        count = 0
        while True:
            blob = Blob(new_name, self.bucket, chunk_size=1024 * 256)
            if not blob.exists():
                break
            count += 1
            new_name = name + '.%d' % count
        blob.upload_from_file(content)
        blob.make_public()
        return new_name
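
The exists-then-upload loop above can race with concurrent writers. Newer google-cloud-storage releases accept if_generation_match=0 on upload, which makes the write itself fail if the object already exists; a hedged sketch of the same naming scheme built on that:

from google.api_core.exceptions import PreconditionFailed

def save_unique(bucket, name, content):
    count = 0
    while True:
        new_name = name if count == 0 else '%s.%d' % (name, count)
        blob = Blob(new_name, bucket, chunk_size=1024 * 256)
        try:
            # Generation 0 means "only create"; existing objects raise.
            blob.upload_from_file(content, rewind=True,
                                  if_generation_match=0)
            return new_name
        except PreconditionFailed:
            count += 1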
Example #28
    def write(self, file_path, data, num_retries=10, content_type=None, bucket=None):

        """Writes data to Google Cloud Storage

        :type data: bytes
        :param data: The data that will be written to Google Cloud Storage

        :type file_path: str
        :param file_path: The path to the file where the data will be written

        :type num_retries: int
        :param num_retries: (Optional) The number of attempts to save to Cloud Storage

        :type content_type: str
        :param content_type: The type of content being uploaded. Default is 'json'. Options are txt, csv, json, png, jpg

        :type bucket: str
        :param bucket: The name of the bucket. (Optional) if given in class instantiation.

        :returns: None
        """

        bucket = self.client.get_bucket(self.bucket or bucket)

        blob = Blob(file_path, bucket)

        # Normalize JSON payloads: parse strings that contain JSON, then
        # serialize dicts and lists back to a JSON string before encoding.
        try:
            data = json.loads(data)
        except (TypeError, ValueError):
            pass

        if isinstance(data, (dict, list)):
            data = json.dumps(data)

        data = _to_bytes(data, encoding="utf-8")
        string_buffer = BytesIO(data)

        blob.upload_from_file(
            file_obj=string_buffer,
            size=len(data),
            client=self.client,
            num_retries=num_retries or self.num_retries,
            content_type=_MEME_TYPES[self.content_type or content_type],
        )
        return
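
_MEME_TYPES is defined elsewhere in the module; a minimal sketch of what such a lookup table presumably contains, matching the options named in the docstring (an assumption, not the original table):

# Hypothetical mapping; the real module may differ.
_MEME_TYPES = {
    'txt': 'text/plain',
    'csv': 'text/csv',
    'json': 'application/json',
    'png': 'image/png',
    'jpg': 'image/jpeg',
}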
Example #29
def upload():
    """Process the uploaded file and upload it to Google Cloud Storage."""
    uploaded_file = request.files['file']
    client = storage.Client()
    
    bucket = client.get_bucket(request.form['uploadbucketname'])
    
    encoded_key = base64.b64encode(key).decode('utf-8')
    
    encryption_key = base64.b64decode(encoded_key)
    blob = Blob(uploaded_file.filename, bucket, encryption_key=encryption_key)
    
    blob.upload_from_file(uploaded_file)
    
    return blob.public_url
Example #30
def upload_blob(file_path, input_file):
    """Uploads a file to the bucket."""
    ct = time.time()
    logging.info('Start uploading file')
    client = storage.Client(project=UPLOAD_GOOGLE_PROJECT)
    bucket = client.get_bucket(UPLOAD_GOOGLE_BUCKET_NAME)
    if is_dev_env:
        file_path = os.path.join(VERSION, UPLOAD_TEST_PATH, file_path)
    else:
        file_path = os.path.join(VERSION, file_path)
    blob = Blob(file_path, bucket)
    input_file.seek(0)
    blob.upload_from_file(input_file)
    duration = time.time() - ct
    logging.info('Finished uploading file %s', duration)
Example #31
def on_http_fetch_dailies(request):
    gcp_logger = init_gcp_logger()
    gcp_logger.info('on_http_fetch_dailies(), args=%s', request.args)
    storage_client = init_storage_client()
    fdb = init_firestore_client()

    columns = [
        'meter_consumptions_kwh', 'meter_generations_kwh',
        'solar_generations_kwh', 'solar_mean_powrs_kw',
        'solar_devices_reportings', 'capacities_kw', 'charge_quantities_kwh',
        'deterioration_states_pct', 'discharge_quantities_kwh',
        'power_at_charges_kw', 'residual_capacities_pct',
        'total_charge_quantities_kwh', 'total_discharge_quantities_kwh',
        'min_temperature_c', 'max_temperature_c'
    ]

    df_all_dailies = pd.DataFrame(index=pd.DatetimeIndex([]), columns=columns)
    missing_values = np.zeros(48)

    for doc in fdb.collection(f"sites/{NMI}/dailies").order_by(
            'interval_date', direction='ASCENDING').stream():
        doc_fields = list(doc.to_dict().keys())
        if 'interval_date' not in doc_fields:
            gcp_logger.info('Missing field interval_date')
            continue

        gcp_logger.info('Processing interval_date=%s',
                        doc.get('interval_date'))

        doc_dict = {}
        for column in columns:
            doc_dict[column] = np.array(
                doc.get(column)) if column in doc_fields else missing_values

        df_all_dailies.loc[doc.get('interval_date')] = doc_dict

    pkl_file_name = f"dailies_{NMI}.pkl"
    pkl_file_path = f"/tmp/{pkl_file_name}"
    df_all_dailies.to_pickle(f"/tmp/{pkl_file_name}")

    bucket = storage_client.get_bucket(GCP_STORAGE_BUCKET_ID)
    blob = Blob(pkl_file_name, bucket)
    with open(pkl_file_path, "rb") as pkl_file:
        blob.upload_from_file(pkl_file)

    os.remove(pkl_file_path)

    return ('', 200)
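
The pickle round-trip through /tmp can also be done in memory; a sketch using upload_from_string with pickle.dumps, illustrative rather than the function's original flow:

import pickle

def upload_dataframe(bucket, name, df):
    # Serialize in memory and upload without touching the filesystem.
    blob = Blob(name, bucket)
    blob.upload_from_string(pickle.dumps(df),
                            content_type='application/octet-stream')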
Example #32
def upload_file():
    if request.method == 'POST':
        # check if the post request has the file part
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # if the user does not select a file, the browser may also
        # submit an empty part without a filename
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            print('allowed_file')
            filename = secure_filename(file.filename)
            blob = Blob(filename, BUCKET)
            blob.upload_from_file(file)
            return redirect(url_for('uploaded_file', filename=filename))
    return '''