    def upload_object(self, bucket_name, object_name, read_path, subfolders=None, print_details=True):
        process_start_time = datetime.now(UTC)

        media = MediaFileUpload(read_path, chunksize=self._CHUNKSIZE, resumable=True)

        if not media.mimetype():
            media = MediaFileUpload(read_path, self._DEFAULT_MIMETYPE, resumable=True)
        
        request = self._objects.insert(
            bucket=bucket_name,
            name=self._parse_object_name(object_name, subfolders),
            media_body=media
        )

        progressless_iters = 0
        response = None
        while response is None:
            error = None
            try:
                progress, response = request.next_chunk()
            except HttpError as err:
                error = err
                if err.resp.status < 500:
                    raise
            except self._RETRYABLE_ERRORS as err:
                error = err

            if error:
                progressless_iters += 1
                self._handle_progressless_iter(error, progressless_iters)
            else:
                progressless_iters = 0
  def test_resumable_media_good_upload_from_execute(self):
    """Not a multipart upload."""
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
    request = zoo.animals().insert(media_body=media_upload, body=None)
    assertUrisEqual(self,
        'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=resumable&alt=json',
        request.uri)

    http = HttpMockSequence([
      ({'status': '200',
        'location': 'http://upload.example.com'}, ''),
      ({'status': '308',
        'location': 'http://upload.example.com/2',
        'range': '0-12'}, ''),
      ({'status': '308',
        'location': 'http://upload.example.com/3',
        'range': '0-%d' % media_upload.size()}, ''),
      ({'status': '200'}, '{"foo": "bar"}'),
      ])

    body = request.execute(http=http)
    self.assertEqual(body, {"foo": "bar"})
  def test_http_request_to_from_json(self):

    def _postproc(*kwargs):
      pass

    http = httplib2.Http()
    media_upload = MediaFileUpload(
        datafile('small.png'), chunksize=500, resumable=True)
    req = HttpRequest(
        http,
        _postproc,
        'http://example.com',
        method='POST',
        body='{}',
        headers={'content-type': 'multipart/related; boundary="---flubber"'},
        methodId='foo',
        resumable=media_upload)

    json = req.to_json()
    new_req = HttpRequest.from_json(json, http, _postproc)

    self.assertEqual({'content-type':
                       'multipart/related; boundary="---flubber"'},
                       new_req.headers)
    self.assertEqual('http://example.com', new_req.uri)
    self.assertEqual('{}', new_req.body)
    self.assertEqual(http, new_req.http)
    self.assertEqual(media_upload.to_json(), new_req.resumable.to_json())

    self.assertEqual(random.random, new_req._rand)
    self.assertEqual(time.sleep, new_req._sleep)
Example #4
    def upload_object(self, bucket_name, object_name, read_path, predefined_acl=None, projection=None, **object_resource):
        """
        Uploads object in chunks.

        Optional parameters and valid object resources are listed here [https://cloud.google.com/storage/docs/json_api/v1/objects/insert]

        :param bucket_name: Bucket identifier.
        :type bucket_name: string
        :param object_name: Can take string representation of object resource or list denoting path to object on GCS.
        :type object_name: list or string
        :param read_path: Local path of object to upload.
        :type read_path: string
        :param predefined_acl: Apply a predefined set of access controls to this object.
        :param projection: Set of properties to return.
        :param object_resource: Supply optional properties [https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request-body]
        :returns: GcsResponse object.
        :raises: HttpError if non-retryable errors are encountered.
        """
        resp_obj = GcsResponse('uploaded')

        media = MediaFileUpload(read_path, chunksize=self._chunksize, resumable=True)

        if not media.mimetype():
            media = MediaFileUpload(read_path, 'application/octet-stream', resumable=True)

        req = self._service.objects().insert(
            bucket=bucket_name,
            name=self._parse_object_name(object_name),
            media_body=media,
            predefinedAcl=predefined_acl,
            projection=projection,
            body=object_resource
        )

        progressless_iters = 0
        resp = None
        while resp is None:
            error = None
            try:
                progress, resp = req.next_chunk()
            except HttpError as e:
                error = e
                if e.resp.status < 500:
                    raise
            except self._RETRYABLE_ERRORS as e:
                error = e

            if error:
                progressless_iters += 1
                self._handle_progressless_iter(error, progressless_iters)
            else:
                progressless_iters = 0

        resp_obj.load_resp(
            resp,
            is_download=False
        )

        return resp_obj
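
For context, a minimal usage sketch of the method above. "GcsClient" is a hypothetical wrapper class assumed to own the authorized service handle and to expose upload_object as defined here; the bucket name and paths are placeholders.

# Hypothetical usage; GcsClient, the bucket name and the paths are assumptions.
client = GcsClient()
resp = client.upload_object(
    bucket_name='my-bucket',
    object_name=['backups', 'db.zip'],  # list form denotes a path, per the docstring
    read_path='/tmp/db.zip',
    predefined_acl='projectPrivate',
)
print(resp)  # GcsResponse describing the completed upload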
Example #5
    def put_multipart(self, local_path, destination_gcs_path, chunk_size=67108864, fallback_to_simple_put=True):
        """
        Put an object stored locally to an GCS path
        using using MediaFileUpload chunks(for files > 5GB,
        see https://developers.google.com/api-client-library/python/guide/media_upload).

        :param local_path: Path to source local file
        :param destination_gcs_path: URL for target GCS location
        :param chunk_size: Chunk size in bytes. Default: 67108864 (64MB).
            Chunk size restriction: There are some chunk size restrictions based on the size of the file
            you are uploading. Files larger than 256 KB (256 * 1024 B) must have chunk sizes that are
            multiples of 256 KB. For files smaller than 256 KB, there are no restrictions. In either case,
            the final chunk has no limitations; you can simply transfer the remaining bytes.
        """

        source_size = os.stat(local_path).st_size

        if fallback_to_simple_put and (source_size <= chunk_size or source_size < GcsFileSystem.MIN_CHUNK_SIZE):
            GcsFileSystem.logger.debug("File too small will upload as a single chunk")
            return self.put(local_path, destination_gcs_path)

        chunk_size = GcsFileSystem.correct_chunk_size(chunk_size)

        (bucket, key) = self.path_to_bucket_and_key(destination_gcs_path)

        media = MediaFileUpload(local_path, chunksize=chunk_size, resumable=True)

        if media.mimetype() is None:
            media = MediaFileUpload(local_path, chunksize=chunk_size, resumable=True,
                                    mimetype='application/octet-stream')

        request = self.gcs_service.objects().insert(media_body=media, name=key, bucket=bucket)

        def should_retry(exception):
            return isinstance(exception, HttpError) and exception.resp.status in [500, 502, 503, 504]

        @retry(stop_max_attempt_number=5, wait_exponential_multiplier=1000, retry_on_exception=should_retry)
        def load_chunk(r):
            self.logger.debug("Uploading chunk to {}/{}".format(bucket, key))
            return r.next_chunk()

        response = None
        while response is None:
            upload_status, response = load_chunk(request)
            self.logger.debug("Chunk uploaded to {}/{}".format(bucket, key))
            if upload_status:
                self.logger.debug(
                    "Overall uploaded to {}/{}: {}%".format(bucket, key, int(upload_status.progress() * 100)))

        return bucket, key
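
The correct_chunk_size helper is referenced above but not shown. A minimal sketch of what it might do, assuming only the 256 KB alignment rule quoted in the docstring; the function body is an assumption, not the original implementation:

MIN_CHUNK_SIZE = 256 * 1024  # 256 KB granularity for resumable chunks

def correct_chunk_size(chunk_size):
    # Round down to the nearest multiple of 256 KB, but never below it.
    aligned = (chunk_size // MIN_CHUNK_SIZE) * MIN_CHUNK_SIZE
    return max(aligned, MIN_CHUNK_SIZE)

# correct_chunk_size(67108864) == 67108864  (64 MB is already aligned)
# correct_chunk_size(300000)   == 262144    (rounded down to 256 KB)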
  def test_resumable_multipart_media_good_upload(self):
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
    request = zoo.animals().insert(media_body=media_upload, body={})
    self.assertTrue(request.headers['content-type'].startswith(
        'application/json'))
    self.assertEqual('{"data": {}}', request.body)
    self.assertEqual(media_upload, request.resumable)

    self.assertEqual('image/png', request.resumable.mimetype())

    self.assertNotEqual(request.body, None)
    self.assertEqual(request.resumable_uri, None)

    http = HttpMockSequence([
      ({'status': '200',
        'location': 'http://upload.example.com'}, ''),
      ({'status': '308',
        'location': 'http://upload.example.com/2',
        'range': '0-12'}, ''),
      ({'status': '308',
        'location': 'http://upload.example.com/3',
        'range': '0-%d' % (media_upload.size() - 2)}, ''),
      ({'status': '200'}, '{"foo": "bar"}'),
      ])

    status, body = request.next_chunk(http=http)
    self.assertEqual(None, body)
    self.assertTrue(isinstance(status, MediaUploadProgress))
    self.assertEqual(13, status.resumable_progress)

    # Two requests should have been made and the resumable_uri should have been
    # updated for each one.
    self.assertEqual(request.resumable_uri, 'http://upload.example.com/2')

    self.assertEqual(media_upload, request.resumable)
    self.assertEqual(13, request.resumable_progress)

    status, body = request.next_chunk(http=http)
    self.assertEqual(request.resumable_uri, 'http://upload.example.com/3')
    self.assertEqual(media_upload.size() - 1, request.resumable_progress)
    self.assertEqual('{"data": {}}', request.body)

    # Final call to next_chunk should complete the upload.
    status, body = request.next_chunk(http=http)
    self.assertEqual(body, {"foo": "bar"})
    self.assertEqual(status, None)
  def test_media_file_upload_to_from_json(self):
    upload = MediaFileUpload(
        datafile('small.png'), chunksize=500, resumable=True)
    self.assertEqual('image/png', upload.mimetype())
    self.assertEqual(190, upload.size())
    self.assertEqual(True, upload.resumable())
    self.assertEqual(500, upload.chunksize())
    self.assertEqual('PNG', upload.getbytes(1, 3))

    json = upload.to_json()
    new_upload = MediaUpload.new_from_json(json)

    self.assertEqual('image/png', new_upload.mimetype())
    self.assertEqual(190, new_upload.size())
    self.assertEqual(True, new_upload.resumable())
    self.assertEqual(500, new_upload.chunksize())
    self.assertEqual('PNG', new_upload.getbytes(1, 3))
    def method(self, **kwargs):
        # Don't bother with a docstring; it will be overwritten by createMethod.

        for name in six.iterkeys(kwargs):
            if name not in parameters.argmap:
                raise TypeError('Got an unexpected keyword argument "{0!s}"'.format(name))

        # Remove args that have a value of None.
        keys = list(kwargs.keys())
        for name in keys:
            if kwargs[name] is None:
                del kwargs[name]

        for name in parameters.required_params:
            if name not in kwargs:
                raise TypeError('Missing required parameter "{0!s}"'.format(name))

        for name, regex in six.iteritems(parameters.pattern_params):
            if name in kwargs:
                if isinstance(kwargs[name], six.string_types):
                    pvalues = [kwargs[name]]
                else:
                    pvalues = kwargs[name]
                for pvalue in pvalues:
                    if re.match(regex, pvalue) is None:
                        raise TypeError(
                            'Parameter "{0!s}" value "{1!s}" does not match the pattern "{2!s}"'.format(
                                name, pvalue, regex
                            )
                        )

        for name, enums in six.iteritems(parameters.enum_params):
            if name in kwargs:
                # We need to handle the case of a repeated enum
                # name differently, since we want to handle both
                # arg='value' and arg=['value1', 'value2']
                if name in parameters.repeated_params and not isinstance(kwargs[name], six.string_types):
                    values = kwargs[name]
                else:
                    values = [kwargs[name]]
                for value in values:
                    if value not in enums:
                        raise TypeError(
                            'Parameter "{0!s}" value "{1!s}" is not an allowed value in "{2!s}"'.format(
                                name, value, str(enums)
                            )
                        )

        actual_query_params = {}
        actual_path_params = {}
        for key, value in six.iteritems(kwargs):
            to_type = parameters.param_types.get(key, "string")
            # For repeated parameters we cast each member of the list.
            if key in parameters.repeated_params and type(value) == type([]):
                cast_value = [_cast(x, to_type) for x in value]
            else:
                cast_value = _cast(value, to_type)
            if key in parameters.query_params:
                actual_query_params[parameters.argmap[key]] = cast_value
            if key in parameters.path_params:
                actual_path_params[parameters.argmap[key]] = cast_value
        body_value = kwargs.get("body", None)
        media_filename = kwargs.get("media_body", None)

        if self._developerKey:
            actual_query_params["key"] = self._developerKey

        model = self._model
        if methodName.endswith("_media"):
            model = MediaModel()
        elif "response" not in methodDesc:
            model = RawModel()

        headers = {}
        headers, params, query, body = model.request(headers, actual_path_params, actual_query_params, body_value)

        expanded_url = uritemplate.expand(pathUrl, params)
        url = _urljoin(self._baseUrl, expanded_url + query)

        resumable = None
        multipart_boundary = ""

        if media_filename:
            # Ensure we end up with a valid MediaUpload object.
            if isinstance(media_filename, six.string_types):
                (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
                if media_mime_type is None:
                    raise UnknownFileType(media_filename)
                if not mimeparse.best_match([media_mime_type], ",".join(accept)):
                    raise UnacceptableMimeTypeError(media_mime_type)
                media_upload = MediaFileUpload(media_filename, mimetype=media_mime_type)
            elif isinstance(media_filename, MediaUpload):
                media_upload = media_filename
            else:
                raise TypeError("media_filename must be str or MediaUpload.")

            # Check the maxSize
            if media_upload.size() is not None and media_upload.size() > maxSize > 0:
                raise MediaUploadSizeError("Media larger than: {0!s}".format(maxSize))

            # Use the media path uri for media uploads
            expanded_url = uritemplate.expand(mediaPathUrl, params)
            url = _urljoin(self._baseUrl, expanded_url + query)
            if media_upload.resumable():
                url = _add_query_parameter(url, "uploadType", "resumable")

            if media_upload.resumable():
                # This is all we need to do for resumable, if the body exists it gets
                # sent in the first request, otherwise an empty body is sent.
                resumable = media_upload
            else:
                # A non-resumable upload
                if body is None:
                    # This is a simple media upload
                    headers["content-type"] = media_upload.mimetype()
                    body = media_upload.getbytes(0, media_upload.size())
                    url = _add_query_parameter(url, "uploadType", "media")
                else:
                    # This is a multipart/related upload.
                    msgRoot = MIMEMultipart("related")
                    # msgRoot should not write out its own headers
                    setattr(msgRoot, "_write_headers", lambda self: None)

                    # attach the body as one part
                    msg = MIMENonMultipart(*headers["content-type"].split("/"))
                    msg.set_payload(body)
                    msgRoot.attach(msg)

                    # attach the media as the second part
                    msg = MIMENonMultipart(*media_upload.mimetype().split("/"))
                    msg["Content-Transfer-Encoding"] = "binary"

                    payload = media_upload.getbytes(0, media_upload.size())
                    msg.set_payload(payload)
                    msgRoot.attach(msg)
                    # encode the body: note that we can't use `as_string`, because
                    # it plays games with `From ` lines.
                    fp = BytesIO()
                    g = _BytesGenerator(fp, mangle_from_=False)
                    g.flatten(msgRoot, unixfrom=False)
                    body = fp.getvalue()

                    multipart_boundary = msgRoot.get_boundary()
                    headers["content-type"] = ("multipart/related; " 'boundary="%s"') % multipart_boundary
                    url = _add_query_parameter(url, "uploadType", "multipart")

        logger.info("URL being requested: {0!s} {1!s}".format(httpMethod, url))
        return self._requestBuilder(
            self._http,
            model.response,
            url,
            method=httpMethod,
            body=body,
            headers=headers,
            methodId=methodId,
            resumable=resumable,
        )
def main():
    dataset = pd.read_csv("Graph/newData.csv")
    X = dataset.iloc[1:, 1].values
    y = dataset.iloc[1:, 2].values
    X1 = dataset['1.Time']
    X2 = dataset['2.Signal Value']
    Xf = np.array(list(zip(X1, X2)))
    yf = dataset['5.Health Status']
    X_train, X_test, y_train, y_test = train_test_split(Xf, yf, test_size=0.30)
    current_status = dataset.iloc[-1]["5.Health Status"]
    current_value = dataset.iloc[-1]["2.Signal Value"]
    current_time = dataset.iloc[-1]["1.Time"]
    current_date = dataset.iloc[-1]["6.Date"]
    data = pd.date_range(current_date, periods=50, freq='D')
    predicted_date = data[-1]
    print(predicted_date)
    pred_time = current_time + 50
    new_time = [[pred_time]]

    df = pd.read_csv('Graph/newData.csv', usecols=['2.Signal Value'], header=0)

    model = pm.auto_arima(
        df.values,
        start_p=1,
        start_q=1,
        test='adf',  # use adftest to find optimal 'd'
        max_p=3,
        max_q=3,  # maximum p and q
        m=1,  # frequency of series
        d=None,  # let model determine 'd'
        seasonal=False,  # No Seasonality
        start_P=0,
        D=0,
        trace=True,
        error_action='ignore',
        suppress_warnings=True,
        stepwise=True)

    # Forecast
    n_periods = 50
    fc, confint = model.predict(n_periods=n_periods, return_conf_int=True)
    index_of_fc = np.arange(len(df.values), len(df.values) + n_periods)

    # make series for plotting purpose
    fc_series = pd.Series(fc, index=index_of_fc)
    last_element = fc_series[-1:]
    print(last_element)
    pred_value = last_element[pred_time]
    lower_series = pd.Series(confint[:, 0], index=index_of_fc)
    upper_series = pd.Series(confint[:, 1], index=index_of_fc)

    # Plot
    plt.plot(df.values)
    plt.plot(fc_series, color='darkgreen')
    plt.fill_between(lower_series.index,
                     lower_series,
                     upper_series,
                     color='k',
                     alpha=.15)
    plt.title("Final Forecast")
    plt.savefig('Graph/graph.png')
    #plt.show()

    predict_val = [[pred_time, pred_value]]

    model = RandomForestClassifier()
    model.fit(X_train, y_train)
    predicted = model.predict(predict_val)
    y_pred = model.predict(X_test)
    print('Health Status : ', predicted)
    print('accuracy random forest: ', accuracy_score(y_test, y_pred))

    image = Image.open('background.png')
    draw = ImageDraw.Draw(image)
    font = ImageFont.truetype('Roboto-Black.ttf', size=15)
    message = str(predicted_date)
    color = 'rgb(0, 0, 0)'  # black color
    draw.text((10, 10), 'Prediction Date : ', fill=color, font=font)
    draw.text((130, 10), message, fill=color, font=font)
    draw.text((10, 40), 'Prediction Value : ', fill=color, font=font)
    pred_value = last_element[pred_time].astype(str)
    draw.text((130, 40), pred_value, fill=color, font=font)
    message = predicted[0]
    draw.text((10, 70), 'Machine Status : ', fill=color, font=font)
    draw.text((130, 70), message, fill=color, font=font)

    image.save('prognosis.png')

    image = Image.open('background.png')
    draw = ImageDraw.Draw(image)
    font = ImageFont.truetype('Roboto-Black.ttf', size=15)
    current_time = current_date
    current_value = current_value.astype(str)
    color = 'rgb(0, 0, 0)'  # black color
    draw.text((10, 10), 'Current Date : ', fill=color, font=font)
    draw.text((130, 10), current_time, fill=color, font=font)
    draw.text((10, 40), 'Current Value : ', fill=color, font=font)
    text = draw.text((130, 40), current_value, fill=color, font=font)
    draw.text((10, 70), 'Machine Status : ', fill=color, font=font)
    draw.text((130, 70), current_status, fill=color, font=font)

    image.save('diagnosis.png')

    data = [pred_time, pred_value, predicted[0]]
    with open('Graph/predicted.csv', 'a') as csvFile:
        writer = csv.writer(csvFile, delimiter=',')
        writer.writerow(data)

    creds = None
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server()
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('drive', 'v3', credentials=creds)

    file_id = '1BCVoi2yXL_seuaDBumEUpIw1vpc5yJGj'
    file_metadata = {'name': 'graph.png'}
    media = MediaFileUpload('Graph/graph.png', mimetype='image/png')
    file = service.files().update(fileId=file_id,
                                  body=file_metadata,
                                  media_body=media,
                                  fields='id').execute()
    print('File ID: %s' % file.get('id'))

    file_id = '1MMNdZNchkmDYBy3fpWpRP_wOxfil3y_p'
    file_metadata = {'name': 'prognosis.png'}
    media = MediaFileUpload('prognosis.png', mimetype='image/png')
    file = service.files().update(fileId=file_id,
                                  body=file_metadata,
                                  media_body=media,
                                  fields='id').execute()
    print('File ID: %s' % file.get('id'))

    file_id = '10IKDxJYrGvF2DX90Tbze_HQ_0tjh9z1f'
    file_metadata = {'name': 'diagnosis.png'}
    media = MediaFileUpload('diagnosis.png', mimetype='image/png')
    file = service.files().update(fileId=file_id,
                                  body=file_metadata,
                                  media_body=media,
                                  fields='id').execute()
    print('File ID: %s' % file.get('id'))
def main():
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('drive', 'v3', credentials=creds)

    LOG = setup_logger("app.log", "app.log", logging.INFO)

    page_token = None

    try:
        shelfFile = shelve.open('log_list')
    except Exception as e:
        LOG.error('error in opening the shelve file: %s', e)

    global file_already_logged

    # Make requests until there are no more files to process
    while True:
        # Call the Drive v3 API
        response = service.files().list(
            q="mimeType='application/vnd.oasis.opendocument.spreadsheet' and trashed=false",
            pageSize=1000,
            fields="nextPageToken, files(id,name,modifiedTime,lastModifyingUser)",
            pageToken=page_token).execute()
        items = response.get('files', [])

        current_datetime = datetime.datetime.utcnow()

        if not items:
            LOG.info("No files found using the provided query")
        else:
            for item in items:
                re_results = filename_regex.search(item["name"])
                if re_results is not None:
                    minutes = minutes_from_last_change(item["modifiedTime"], current_datetime)
                    if minutes < CRON_TIME:
                        my_file = File(service, item, LOG)
                        #my_file.download_file()

                        results = service.revisions().list(fileId=item["id"]).execute()
                        revisions = results.get("revisions", [])
                        my_file.setup_logger(level=logging.INFO)
                        my_file.set_revisions(revisions)

                        if len(revisions) > 1:
                            log_name = item['name'] + '.log'
                            log_keys = shelfFile.keys()
                            log_id = None
                            if log_name in log_keys:
                                log_id = shelfFile[log_name]
                                my_file.download_file(log_id, log_name)
                                file_already_logged = True

                            revision_index = get_revision_index(revisions, current_datetime)
                            #my_file.set_revision(revisions[revision_index])
                            #my_file.download_revision()
                            try:
                                my_file.compute_revisions(revision_index)
                                log_metadata = {'name' : log_name, 'parents' : [folder_id]}
                                media = MediaFileUpload(log_name, mimetype='text/plain', resumable=True)
                                if file_already_logged:
                                    # The 'parents' field is not writable in an update request;
                                    # use the addParents/removeParents parameters instead.
                                    log_metadata = {'name' : log_name}
                                    file = service.files().update(fileId=log_id, body=log_metadata, media_body=media, fields='id').execute()
                                    LOG.info('{} log updated successfully'.format(log_name))
                                else:
                                    file = service.files().create(body=log_metadata, media_body=media, fields='id').execute()
                                    shelfFile[log_name] = file.get('id')
                                    LOG.info('{} log created successfully'.format(log_name))
                                    permissions_metadata = {"type": "anyone", "role": "reader"}
                                    permissions = service.permissions().create(fileId=file.get('id'), body=permissions_metadata).execute()
                                remove_file(log_name, LOG)
                            except KeyError:
                                LOG.info("error in reading the files content")
                                traceback.print_exc()
                                remove_file(item["name"]+".log", LOG)
                            except Exception as e:
                                LOG.info("error : {0}".format(str(e)))
                                traceback.print_exc()

                                remove_file(item["name"]+".log", LOG)
                            #remove_file("revision_" + item["name"], LOG)
                        else:
                            my_file.file_created()
                            logname = item['name'] + '.log'
                            log_metadata = {'name' : logname, 'parents' : [folder_id]}
                            media = MediaFileUpload(logname, mimetype='text/plain', resumable=True)
                            file = service.files().create(body=log_metadata, media_body=media, fields='id').execute()
                            permissions_metadata = {"type": "anyone", "role": "reader"}
                            permissions = service.permissions().create(fileId=file.get('id'), body=permissions_metadata).execute()
                            shelfFile[logname] = file.get('id')
                            LOG.info("{} log created and uploaded".format(logname))
                            remove_file(logname, LOG)
                        name_last_revision = "revision" + str(len(revisions)-1) + "_" + item["name"]
                        remove_file(name_last_revision, LOG)
                        file_already_logged = False

                        del my_file

        page_token = response.get('nextPageToken', None)
        if page_token is None:
            break
Example #11
    def test_media_file_upload_mimetype_detection(self):
        upload = MediaFileUpload(datafile('small.png'))
        self.assertEqual('image/png', upload.mimetype())

        upload = MediaFileUpload(datafile('empty'))
        self.assertEqual('application/octet-stream', upload.mimetype())
Example #12
def main(title, description, videoPath, thumbnailPath):
    os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"

    API_SERVICE_NAME = "youtube"
    API_VERSION = "v3"
    CLIENT_SECRET_FILE = ""  #Youtube Data Api Client Secret file goes here
    SCOPES = ["https://www.googleapis.com/auth/youtube.upload"]

    cred = None

    pickle_file = f'token_{API_SERVICE_NAME}_{API_VERSION}.pickle'

    if os.path.exists(pickle_file):
        with open(pickle_file, 'rb') as token:
            cred = pickle.load(token)

    if not cred or not cred.valid:
        if cred and cred.expired and cred.refresh_token:
            cred.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                CLIENT_SECRET_FILE, SCOPES)
            cred = flow.run_local_server()

        with open(pickle_file, 'wb') as token:
            pickle.dump(cred, token)

    youtube = googleapiclient.discovery.build(API_SERVICE_NAME,
                                              API_VERSION,
                                              credentials=cred)

    request = youtube.videos().insert(
        part="snippet,status",
        body={
            "snippet": {
                "categoryId":
                "24",
                "description":
                description,
                "title":
                title,
                "tags": [
                    "ask reddit stories", "askreddit", "askreddit best posts",
                    "askreddit best questions", "askreddit funny",
                    "askreddit scary", "askreddit stories",
                    "askreddit top posts", "askreddit top questions",
                    "best of askreddit", "best of reddit", "best reddit posts",
                    "cowbelly", "dank memes", "dankmemes", "memes",
                    "r/askreddit", "radio", "radio tts", "reddit",
                    "reddit best posts", "reddit jar", "reddit memes",
                    "reddit post video", "reddit scary", "reddit stories",
                    "reddit top posts", "redditors", "top posts", "updoot",
                    "wholesome", "wholesome memes"
                ]
            },
            "status": {
                "privacyStatus": "public",
                'selfDeclaredMadeForKids': False
            },
            'notifySubscribers': True
        },
        media_body=MediaFileUpload(videoPath, resumable=True))
    response_upload = request.execute()
    print(response_upload)

    request = youtube.thumbnails().set(
        videoId=response_upload.get('id'),
        media_body=MediaFileUpload(thumbnailPath))
    response = request.execute()
    print(response)
Example #13
async def upload(gdrive, service, file_path, file_name, mimeType):
    try:
        await gdrive.edit("`Processing upload...`")
    except Exception:
        pass
    body = {
        "name": file_name,
        "description": "Uploaded from Telegram using ProjectBish userbot.",
        "mimeType": mimeType,
    }
    try:
        if parent_Id is not None:
            pass
    except NameError:
        """ - Fallback to G_DRIVE_FOLDER_ID else root dir - """
        if G_DRIVE_FOLDER_ID is not None:
            body['parents'] = [G_DRIVE_FOLDER_ID]
    else:
        """ - Override G_DRIVE_FOLDER_ID because parent_Id not empty - """
        body['parents'] = [parent_Id]
    media_body = MediaFileUpload(file_path, mimetype=mimeType, resumable=True)
    """ - Start upload process - """
    file = service.files().create(body=body,
                                  media_body=media_body,
                                  fields="id, size, webContentLink",
                                  supportsAllDrives=True)
    global is_cancelled
    current_time = time.time()
    response = None
    display_message = None
    is_cancelled = False
    while response is None:
        if is_cancelled:
            raise CancelProcess

        status, response = file.next_chunk()
        if status:
            file_size = status.total_size
            diff = time.time() - current_time
            uploaded = status.resumable_progress
            percentage = uploaded / file_size * 100
            speed = round(uploaded / diff, 2)
            eta = round((file_size - uploaded) / speed)
            prog_str = "`Uploading` | [{0}{1}] `{2}%`".format(
                "".join(["■" for i in range(math.floor(percentage / 10))]),
                "".join(["▨"
                         for i in range(10 - math.floor(percentage / 10))]),
                round(percentage, 2))
            current_message = (
                "`[FILE - UPLOAD]`\n\n"
                f"`{file_name}`\n"
                f"`Status`\n{prog_str}\n"
                f"`{humanbytes(uploaded)} of {humanbytes(file_size)} "
                f"@ {humanbytes(speed)}`\n"
                f"`ETA` -> {time_formatter(eta)}")
            if round(diff % 15.00) == 0 and (display_message != current_message
                                             ) or (uploaded == file_size):
                await gdrive.edit(current_message)
                display_message = current_message
    file_id = response.get("id")
    file_size = response.get("size")
    downloadURL = response.get("webContentLink")
    """ - Change permission - """
    await change_permission(service, file_id)
    return int(file_size), downloadURL
Example #14
def thumbnails_set(client, media_file, **kwargs):
    request = client.thumbnails().set(media_body=MediaFileUpload(
        media_file, chunksize=-1, resumable=True), **kwargs)

    # See full sample for function
    return resumable_upload_thumbnails(request)
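
The helper resumable_upload_thumbnails is only referenced here ("See full sample for function"). A minimal sketch of what such a helper might look like, assuming the next_chunk() polling-and-retry pattern used by the other resumable examples on this page; the retry bounds and backoff are assumptions:

import time
from googleapiclient.errors import HttpError

def resumable_upload_thumbnails(request, max_retries=5):
    # Hypothetical sketch, not the original sample's implementation.
    response = None
    retries = 0
    while response is None:
        try:
            status, response = request.next_chunk()
            if status:
                print('Uploaded %d%%.' % int(status.progress() * 100))
        except HttpError as e:
            # Retry only transient server errors, with exponential backoff.
            if e.resp.status not in (500, 502, 503, 504) or retries >= max_retries:
                raise
            retries += 1
            time.sleep(2 ** retries)
    return response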
Example #15
async def upload(gdrive, service, file_path, file_name, mimeType):
    try:
        await gdrive.edit("`Processing upload...`")
    except Exception:
        pass
    body = {
        "name": file_name,
        "description": "Uploaded from Telegram using UserBot.",
        "mimeType": mimeType,
    }
    try:
        if parent_Id is not None:
            pass
    except NameError:
        if G_DRIVE_FOLDER_ID is not None:
            body["parents"] = [G_DRIVE_FOLDER_ID]
    else:
        body["parents"] = [parent_Id]
    media_body = MediaFileUpload(file_path, mimetype=mimeType, resumable=True)
    file = service.files().create(
        body=body,
        media_body=media_body,
        fields="id, size, webContentLink",
        supportsAllDrives=True,
    )
    global is_cancelled
    current_time = time.time()
    response = None
    display_message = None
    is_cancelled = False
    while response is None:
        if is_cancelled:
            raise CancelProcess

        status, response = file.next_chunk()
        if status:
            file_size = status.total_size
            diff = time.time() - current_time
            uploaded = status.resumable_progress
            percentage = uploaded / file_size * 100
            speed = round(uploaded / diff, 2)
            eta = round((file_size - uploaded) / speed)
            prog_str = "[{0}{1}] `{2}%`".format(
                "".join("█" for i in range(math.floor(percentage / 10))),
                "".join("░" for i in range(10 - math.floor(percentage / 10))),
                round(percentage, 2),
            )
            current_message = (
                f"{file_name} - Uploading\n"
                f"{prog_str}\n"
                f"`Size: {humanbytes(uploaded)} of {humanbytes(file_size)}\n"
                f"`Speed:` {humanbytes(speed)}`\n"
                f"`ETA:` {time_formatter(eta)}")
            if (round(diff % 15.00) == 0 and
                (display_message != current_message)
                    or (uploaded == file_size)):
                await gdrive.edit(current_message)
                display_message = current_message
    file_id = response.get("id")
    file_size = response.get("size")
    downloadURL = response.get("webContentLink")
    await change_permission(service, file_id)
    return int(file_size), downloadURL
Example #16
    def upload_file(self,
                    source_path,
                    path,
                    server,
                    collection,
                    file,
                    representation,
                    site,
                    overwrite=False):
        """
            Uploads a single file from 'source_path' to destination 'path'.
            It creates all folders on the path if they do not exist.

        Args:
            source_path (string):
            path (string): absolute path with or without name of the file
            overwrite (boolean): replace existing file

            arguments for saving progress:
            server (SyncServer): server instance to call update_db on
            collection (str): name of collection
            file (dict): info about uploaded file (matches structure from db)
            representation (dict): complete repre containing 'file'
            site (str): site name

        Returns:
            (string) file_id of the created/modified file;
                raises FileExistsError or FileNotFoundError exceptions
        """
        if not os.path.isfile(source_path):
            raise FileNotFoundError(
                "Source file {} doesn't exist.".format(source_path))

        root, ext = os.path.splitext(path)
        if ext:
            # full path
            target_name = os.path.basename(path)
            path = os.path.dirname(path)
        else:
            target_name = os.path.basename(source_path)
        target_file = self.file_path_exists(path + "/" + target_name)
        if target_file and not overwrite:
            raise FileExistsError("File already exists, "
                                  "use 'overwrite' argument")

        folder_id = self.folder_path_exists(path)
        if not folder_id:
            raise NotADirectoryError("Folder {} doesn't exists".format(path))

        file_metadata = {'name': target_name}
        media = MediaFileUpload(source_path,
                                mimetype='application/octet-stream',
                                chunksize=self.CHUNK_SIZE,
                                resumable=True)

        try:
            if not target_file:
                # update doesn't accept 'parents'; set it only when creating
                file_metadata['parents'] = [folder_id]

                request = self.service.files().create(body=file_metadata,
                                                      supportsAllDrives=True,
                                                      media_body=media,
                                                      fields='id')
            else:
                request = self.service.files().update(fileId=target_file["id"],
                                                      body=file_metadata,
                                                      supportsAllDrives=True,
                                                      media_body=media,
                                                      fields='id')

            media.stream()
            log.debug("Start Upload! {}".format(source_path))
            last_tick = status = response = None
            status_val = 0
            while response is None:
                if server.is_representation_paused(representation['_id'],
                                                   check_parents=True,
                                                   project_name=collection):
                    raise ValueError("Paused during process, please redo.")
                if status:
                    status_val = float(status.progress())
                if not last_tick or \
                        time.time() - last_tick >= server.LOG_PROGRESS_SEC:
                    last_tick = time.time()
                    log.debug("Uploaded %d%%." % int(status_val * 100))
                    server.update_db(collection=collection,
                                     new_file_id=None,
                                     file=file,
                                     representation=representation,
                                     site=site,
                                     progress=status_val)
                status, response = request.next_chunk()

        except errors.HttpError as ex:
            if ex.resp['status'] == '404':
                return False
            if ex.resp['status'] == '403':
                # real permission issue
                if 'has not granted' in ex._get_reason().strip():
                    raise PermissionError(ex._get_reason().strip())

                log.warning("Forbidden received, hit quota. "
                            "Injecting 60s delay.")
                time.sleep(60)
                return False
            raise
        return response['id']
Example #17
def main():
    """Syncronizes computer folder with Google Drive folder.

    Checks files if they exist, uploads new files and subfolders,
    deletes old files from Google Drive and refreshes existing stuff.
    """
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('drive', 'v3', http=http)

    # Get id of Google Drive folder and its path (from other script)
    # folder_id, full_path = initial_upload.check_upload(service)
    folder_id, full_path = check_upload(service)
    folder_name = full_path.split(os.path.sep)[-1]
    tree_list = []
    root = ''
    parents_id = {}

    parents_id[folder_name] = folder_id
    get_tree(folder_name, tree_list, root, parents_id, service)
    os_tree_list = []
    root_len = len(full_path.split(os.path.sep)[0:-2])

    # Get list of folder tree paths on the computer
    for root, dirs, files in os.walk(full_path, topdown=True):
        for name in dirs:
            var_path = (os.path.sep).join(
                root.split(os.path.sep)[root_len + 1:])
            os_tree_list.append(os.path.join(var_path, name))

    # Folders that exist on Drive but no longer on the computer
    remove_folders = list(set(tree_list).difference(set(os_tree_list)))
    # Folders that exist on the computer but not yet on Drive
    upload_folders = list(set(os_tree_list).difference(set(tree_list)))
    # Folders that exist in both places
    exact_folders = list(set(os_tree_list).intersection(set(tree_list)))

    # Add starting directory
    exact_folders.append(folder_name)
    # Sort uploadable folders
    # so they can be uploaded from the top of the tree down
    upload_folders = sorted(upload_folders, key=by_lines)

    # Here we upload new (absent on Drive) folders
    for folder_dir in upload_folders:
        var = (os.path.sep).join(full_path.split(os.path.sep)[0:-1]) + os.path.sep
        variable = var + folder_dir
        last_dir = folder_dir.split(os.path.sep)[-1]
        pre_last_dir = folder_dir.split(os.path.sep)[-2]

        files = [
            f for f in os.listdir(variable)
            if os.path.isfile(os.path.join(variable, f))
        ]

        folder_metadata = {
            'name': last_dir,
            'parents': [parents_id[pre_last_dir]],
            'mimeType': 'application/vnd.google-apps.folder'
        }
        create_folder = service.files().create(  # pylint: disable=no-member
            body=folder_metadata, fields='id').execute()
        folder_id = create_folder.get('id', [])
        parents_id[last_dir] = folder_id

        for os_file in files:
            some_metadata = {'name': os_file, 'parents': [folder_id]}
            os_file_mimetype = mimetypes.MimeTypes().guess_type(
                os.path.join(variable, os_file))[0]
            media = MediaFileUpload(os.path.join(variable, os_file),
                                    mimetype=os_file_mimetype)
            upload_this = service.files().create(
                body=some_metadata,  # pylint: disable=no-member
                media_body=media,
                fields='id').execute()
            upload_this = upload_this.get('id', [])

    # Check files in existed folders and replace them
    # with newer versions if needed
    for folder_dir in exact_folders:

        var = (os.path.sep).join(full_path.split(
            os.path.sep)[0:-1]) + os.path.sep

        variable = var + folder_dir
        last_dir = folder_dir.split(os.path.sep)[-1]
        os_files = [
            f for f in os.listdir(variable)
            if os.path.isfile(os.path.join(variable, f))
        ]
        results = service.files().list(  # pylint: disable=no-member
            pageSize=1000,
            q=('%r in parents and \
            mimeType!="application/vnd.google-apps.folder" and \
            trashed != True' % parents_id[last_dir]),
            fields="files(id, name, mimeType, \
            modifiedTime, md5Checksum)").execute()

        items = results.get('files', [])

        refresh_files = [f for f in items if f['name'] in os_files]
        remove_files = [f for f in items if f['name'] not in os_files]  # pylint: disable=unused-variable
        upload_files = [
            f for f in os_files if f not in [j['name'] for j in items]
        ]

        # Check files that exist both on Drive and on PC
        for drive_file in refresh_files:
            file_dir = os.path.join(variable, drive_file['name'])
            file_time = os.path.getmtime(file_dir)
            mtime = [
                f['modifiedTime'] for f in items
                if f['name'] == drive_file['name']
            ][0]
            mtime = datetime.datetime.strptime(mtime[:-2],
                                               "%Y-%m-%dT%H:%M:%S.%f")
            drive_time = time.mktime(mtime.timetuple())
            os_file_md5 = hashlib.md5(open(file_dir, 'rb').read()).hexdigest()
            if 'md5Checksum' in drive_file.keys():
                drive_md5 = drive_file['md5Checksum']
            else:
                drive_md5 = None

            if (file_time > drive_time) or (drive_md5 != os_file_md5):
                file_id = [
                    f['id'] for f in items if f['name'] == drive_file['name']
                ][0]
                file_mime = [
                    f['mimeType'] for f in items
                    if f['name'] == drive_file['name']
                ][0]

                # File's new content.
                # file_mime = mimetypes.MimeTypes().guess_type(file_dir)[0]
                file_metadata = {
                    'name': drive_file['name'],
                    'parents': [parents_id[last_dir]]
                }
                # media_body = MediaFileUpload(file_dir, mimetype=filemime)
                media_body = MediaFileUpload(file_dir, mimetype=file_mime)
                service.files().update(
                    fileId=file_id,  # pylint: disable=no-member
                    media_body=media_body,
                    fields='id').execute()

        # # Remove old files from Drive
        # for drive_file in remove_files:

        #     file_id = [f['id'] for f in items
        #                if f['name'] == drive_file['name']][0]
        #     service.files().delete(fileId=file_id).execute()

        # Upload new files on Drive
        for os_file in upload_files:

            file_dir = os.path.join(variable, os_file)

            # File's new content.
            filemime = mimetypes.MimeTypes().guess_type(file_dir)[0]
            file_metadata = {
                'name': os_file,
                'parents': [parents_id[last_dir]]
            }
            media_body = MediaFileUpload(file_dir, mimetype=filemime)

            service.files().create(
                body=file_metadata,  # pylint: disable=no-member
                media_body=media_body,
                fields='id').execute()

    remove_folders = sorted(remove_folders, key=by_lines, reverse=True)

    # Delete old folders from Drive
    for folder_dir in remove_folders:
        var = (os.path.sep).join(full_path.split(
            os.path.sep)[0:-1]) + os.path.sep
        variable = var + folder_dir
        last_dir = folder_dir.split(os.path.sep)[-1]
        folder_id = parents_id[last_dir]
        service.files().delete(fileId=folder_id).execute()  # pylint: disable=no-member
Example #18
    youtube = discovery.build(
        api_service_name, api_version, credentials=delegated_credentials)
    # Code for S3
    
    if not file_in_s3(OBJECT_KEY, BUCKET_NAME):
        comment = "File %s not found" % OBJECT_KEY
        return {
            'statusCode': 404,
            'body': json.dumps(comment )
        }
    if OBJECT_KEY[-4:].lower() == ".mp4":
        s3_client.download_file(BUCKET_NAME, OBJECT_KEY, TEMP_FILE)
    # End of Code for S3
        try:
            media = MediaFileUpload(TEMP_FILE, resumable=True)
            request = youtube.videos().insert(
                part="snippet, status",
                #onBehalfOfContentOwnerChannel = "UCWuYgDOn2z66ZnUNmCTP0ig",
                #onBehalfOfContentOwner = "*****@*****.**",
                #onBehalfOfContentOwner = "WuYgDOn2z66ZnUNmCTP0ig",
                #onBehalfOfContentOwner = "c4rG0MP16xnAhMRJ4UWxTw",
                #onBehalfOfContentOwnerChannel = "UCc4rG0MP16xnAhMRJ4UWxTw",
                body={
                "snippet": {
                    "title": VIDEO_TITLE,
                    "description": VIDEO_DESCRIPTION,
                    "categoryId": "22",
                    "channelId": VIDEO_CHANNEL,
                    #"channelId": "UCc4rG0MP16xnAhMRJ4UWxTw",
                    "tags": TAGS
Example #19
async def upload(gdrive, service, file_path, file_name, mimeType):
    try:
        await gdrive.edit("`Processing upload...`")
    except Exception:
        pass
    body = {
        "name": file_name,
        "description": "Uploaded from Telegram using ProjectBish userbot.",
        "mimeType": mimeType,
    }
    try:
        if parent_Id is not None:
            pass
    except NameError:
        """ - Fallback to G_DRIVE_FOLDER_ID else root dir - """
        if G_DRIVE_FOLDER_ID is not None:
            body['parents'] = [G_DRIVE_FOLDER_ID]
    else:
        """ - Override G_DRIVE_FOLDER_ID because parent_Id not empty - """
        body['parents'] = [parent_Id]
    permission = {
        "role": "reader",
        "type": "anyone",
        "allowFileDiscovery": True,
        "value": None,
    }
    media_body = MediaFileUpload(
        file_path,
        mimetype=mimeType,
        resumable=True
    )
    """ - Start upload process - """
    file = service.files().create(body=body, media_body=media_body,
                                  fields="id, webContentLink, webViewLink")
    current_time = time.time()
    response = None
    display_message = None
    while response is None:
        status, response = file.next_chunk()
        await asyncio.sleep(0.3)
        if status:
            file_size = status.total_size
            diff = time.time() - current_time
            uploaded = status.resumable_progress
            percentage = uploaded / file_size * 100
            speed = round(uploaded / diff, 2)
            eta = round((file_size - uploaded) / speed)
            prog_str = "`Uploading...` | [{0}{1}] `{2}%`".format(
                "".join(["#" for i in range(math.floor(percentage / 5))]),
                "".join(["**-**"
                         for i in range(20 - math.floor(percentage / 5))]),
                round(percentage, 2))
            current_message = (
                "`[FILE - UPLOAD]`\n\n"
                f"`Name   :`\n`{file_name}`\n\n"
                "`Status :`\n"
                f"{prog_str}\n"
                f"`{humanbytes(uploaded)} of {humanbytes(file_size)} "
                f"@ {humanbytes(speed)}`\n"
                f"`ETA` -> {time_formatter(eta)}"
            )
            if display_message != current_message:
                try:
                    await gdrive.edit(current_message)
                    display_message = current_message
                except Exception:
                    pass
    file_id = response.get("id")
    viewURL = response.get("webViewLink")
    downloadURL = response.get("webContentLink")
    """ - Change permission - """
    try:
        service.permissions().create(fileId=file_id, body=permission).execute()
    except HttpError as e:
        return await gdrive.edit("`" + str(e) + "`")
    return viewURL, downloadURL
Example #20
SCOPES = ['https://www.googleapis.com/auth/drive']
credentials = ServiceAccountCredentials.from_json_keyfile_name(
    secretf_s, SCOPES)
http_auth = credentials.authorize(Http())
service = build('drive', 'v3', http=http_auth)

print('I should now be authenticated and authorized to use service:')
print(service)

# I should create /tmp/hello.txt on Linux:
with open('/tmp/hello.txt', 'w') as fh:
    fh.write("hello world\n")

# I should copy hello.txt to Google Drive.
file_metadata = {'name': 'hello.txt'}
media = MediaFileUpload('/tmp/hello.txt', mimetype='text/plain')
print('I will try to upload /tmp/hello.txt to google drive:')
create_response = service.files().create(body=file_metadata,
                                         media_body=media,
                                         fields='id').execute()
file_id = create_response.get('id')
print('new /tmp/hello.txt file_id:')
print(file_id)

# I should grant reader-role to anyone (who wants it):
newperm_d = {'role': 'reader', 'type': 'anyone'}
pc_response = service.permissions().create(fileId=file_id,
                                           body=newperm_d).execute()

print('pc_response:')
print(pc_response)
Example #21
from config import BASE_DIR, FOLDER_ID

compress.zip_files()

SCOPES = ['https://www.googleapis.com/auth/drive']

credentials = service_account.Credentials.from_service_account_file(
    BASE_DIR + '/service-credentials.json', scopes=SCOPES)

service = build('drive', 'v3', credentials=credentials)
try:
    for file_name in glob(BASE_DIR + '/database/*.zip'):
        print(f"uploading {file_name}...")

        now = datetime.now().strftime('%Y-%m-%d--%H:%M:%S')
        db_name = f'DB-{now}.zip'
        # The 'parents' value should be the folder ID of a folder shared
        # with your service account email.
        file_metadata = {
            'name': db_name,
            'parents': [FOLDER_ID]
        }
        media = MediaFileUpload(file_name, mimetype='application/zip')
        file_up = service.files().create(body=file_metadata,
                                         media_body=media,
                                         fields='id').execute()
        shutil.move(file_name, BASE_DIR + f"/uploaded/{db_name}")

    print("Uploaded!")
except FileNotFoundError:
    print("There's no file to upload")

clean.clean_uploaded()
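The create() call above sends each zip in a single multipart request; for database dumps that outgrow that, a resumable variant of the same call is the usual safeguard. A sketch reusing file_metadata and the service built above:

media = MediaFileUpload(file_name, mimetype='application/zip',
                        resumable=True, chunksize=10 * 1024 * 1024)
request = service.files().create(body=file_metadata, media_body=media,
                                 fields='id')
response = None
while response is None:
    status, response = request.next_chunk()   # status carries progress between chunks
print('uploaded file id:', response.get('id'))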
Exemple #22
def uploadFile(self, name, pathToFile, mimetype=None):
    file_metadata = {'name': name}
    media = MediaFileUpload(pathToFile, mimetype=mimetype)
    file = self.service.files().create(body=file_metadata, media_body=media, fields='id').execute()
    return file['id']
from Google import Create_Service
from googleapiclient.http import MediaFileUpload

CLIENT_SECRET_FILE = 'client_secret.json'
API_NAME = 'youtube'
API_VERSION = 'v3'
SCOPES = ['https://www.googleapis.com/auth/youtube.upload']

service = Create_Service(CLIENT_SECRET_FILE, API_NAME, API_VERSION, SCOPES)

request_body = {
    'snippet': {
        'categoryId': 19,
        'title': 'fairy',
    },
    'status': {
        'privacyStatus': 'private',
    }
}

mediaFile = MediaFileUpload('fairy.mp4')

response_upload = service.videos().insert(part='snippet,status',
                                          body=request_body,
                                          media_body=mediaFile).execute()
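The insert above uploads the whole video in one request; for files of any real size the same call is normally made resumable and drained chunk by chunk. A sketch against the service and request_body built above:

media = MediaFileUpload('fairy.mp4', chunksize=8 * 1024 * 1024, resumable=True)
request = service.videos().insert(part='snippet,status',
                                  body=request_body,
                                  media_body=media)
response = None
while response is None:
    status, response = request.next_chunk()
    if status:
        print('Uploaded %d%%' % int(status.progress() * 100))
print('Video id:', response.get('id'))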
Exemple #24
def push(self, path, name, tag=None):
    '''push an image to Google Drive, meaning uploading it

    path: should correspond to an absolute image path (or derive it)
    name: should be the complete uri that the user has requested to push.
    tag: should correspond with an image tag. This is provided to mirror Docker.
    '''
    # The root of the drive for containers (the parent folder)
    parent = self._get_or_create_folder(self._base)

    image = None
    path = os.path.abspath(path)
    bot.debug("PUSH %s" % path)

    if not os.path.exists(path):
        bot.exit('%s does not exist.' % path)

    names = parse_image_name(remove_uri(name), tag=tag)
    if names['version'] is None:
        version = get_file_hash(path, "sha256")
        names = parse_image_name(remove_uri(name), tag=tag, version=version)

    # Update metadata with names, flatten to only include labels
    metadata = self.get_metadata(path, names=names)

    file_metadata = {
        'name': names['storage'],
        'mimeType': 'application/octet-stream',
        'parents': [parent['id']],
        'properties': metadata
    }

    media = MediaFileUpload(path, resumable=True)
    try:
        bot.spinner.start()
        image = self._service.files().create(body=file_metadata,
                                             media_body=media,
                                             fields='id').execute()

        # Add a thumbnail!
        thumbnail = get_thumbnail()

        with open(thumbnail, "rb") as f:
            body = {
                "contentHints": {
                    "thumbnail": {
                        "image":
                        base64.urlsafe_b64encode(f.read()).decode('utf8'),
                        "mimeType": "image/png"
                    }
                }
            }
            image = self._service.files().update(fileId=image['id'],
                                                 body=body).execute()

        bot.spinner.stop()
        print(image['name'])

    except HttpError:
        bot.error('Error uploading %s' % path)

    return image
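The thumbnail set through contentHints is not echoed back in the update response; Drive exposes a short-lived rendering of it via the thumbnailLink field, which can be read back like this (a sketch, using the same service and uploaded image id):

meta = self._service.files().get(fileId=image['id'],
                                 fields='thumbnailLink').execute()
print(meta.get('thumbnailLink'))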
Exemple #25
def sync_folder(gdrive_folder_name):
    store = oauth2file.Storage(TOKEN_FILE)
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets(CREDENTIAL_FILE, SCOPES)
        creds = tools.run_flow(flow, store)
    # build the service once; credentials can be passed directly instead of
    # the older authorized-Http pattern
    service = build('drive', 'v3', credentials=creds)

    file_metadata = {
        'name': gdrive_folder_name,
        'mimeType': 'application/vnd.google-apps.folder'
    }

    # Call the Drive v3 API to check sync folder
    results = service.files().list(q="name='{name}' and mimeType='{mimeType}'"\
              .format(name=file_metadata['name'], mimeType=file_metadata['mimeType'])).execute()
    items = results.get('files', [])
    print(
        '\n \n_______________GDrive Status of Synced Folder_______________\n')
    print('Getting the info...\n')
    if not items:
        print("'{0}' not found on GDrive, creating a new folder.\n".format(
            file_metadata['name']))  #create a new folder if not found on drive
        file = service.files().create(body=file_metadata,
                                      fields='id').execute()
    else:
        print("'{0}' folder on GDrive\n".format(file_metadata['name'])
              )  #folder found and listing the files and id info
        file = items[0]

    folder_id = file.get('id')
    print("FolderId='{0}'\n".format(folder_id))

    # check files/folder with q query on gdrive
    response = service.files().list(q="'{folderId}' in parents".format(
        folderId=folder_id)).execute()

    print("Existing files in the folder:\n")
    for index, _file in enumerate(response.get('files', []), start=1):
        drive_filenames[_file.get('name')] = _file.get('id')
        print("{0}. {1}".format(index, _file['name']))

    print("\nCount: {0} files in '{1}' folder".format(len(drive_filenames),
                                                      file_metadata['name']))
    print('\n __________________________________________________________\n')

    # uploading new files only
    print('\n ______________________Upload Activity______________________\n')
    os.chdir(
        "C:/Users/kartik/Documents/DSync"
    )  #Replace the path in "" with your actual path of the local folder
    print('Inspecting the files to be uploaded from local folder...\n')
    for index, _file in enumerate(glob.glob('*.*'), start=1):

        filename = os.path.basename(_file)
        if filename not in drive_filenames:
            print("{0}. New File: {1}".format(index, filename))

            file_metadata = {
                'name': filename,
                'parents': [folder_id],
            }
            media = MediaFileUpload(_file, mimetype='application/octet-stream')
            file = service.files().create(body=file_metadata,
                                          media_body=media,
                                          fields='id,name').execute()

            print("\tUploaded: '{0}'".format(file.get('name')))
            continue

        else:

            print("{0}. Existing file on GDrive: {1}.Not uploaded!".format(
                index, filename))
            continue
    print('\n __________________________________________________________\n')

    # deleting files not present in local folder
    print('\n ______________________Delete Activity______________________\n')
    for index, (itemk, itemv) in enumerate(drive_filenames.items(), start=1):
        if itemk not in glob.glob('*.*'):
            print(" File deleted: {0}. '{1}'".format(index, itemk))
            service.files().delete(fileId=itemv).execute()
    print('\n __________________________________________________________\n')

    # updated files status in drive folder
    print(
        '\n \n___________GDrive Updated Status of Synced Folder___________\n')
    print('Getting the info...\n')
    print("'{0}' folder on GDrive\n".format(file_metadata['name']))
    print("FolderId='{0}'\n".format(folder_id))
    updated_response = service.files().list(q="'{folderId}' in parents".format(
        folderId=folder_id)).execute()

    print("Updated files in the folder:\n")
    for index, _file in enumerate(updated_response.get('files', []), start=1):
        updated_drive_filenames[_file.get('name')] = _file.get('id')
        print("{0}. {1}".format(index, _file['name']))

    print("\nCount: {0} files in '{1}' folder".format(
        len(updated_drive_filenames), file_metadata['name']))

    print('\n __________________________________________________________\n')
    def close(self):
        super().close()  # close the file so that it's readable for upload
        if self.parsedMode.writing:
            # google doesn't accept the fractional second part
            now = datetime.utcnow().replace(microsecond=0).isoformat() + "Z"
            onlineMetadata = {"modifiedTime": now}

            with open(self.localPath, "rb") as f:
                dataToWrite = f.read()
            debug(f"About to upload data: {dataToWrite}")

            if len(dataToWrite) > 0:
                upload = MediaFileUpload(self.localPath, resumable=True)
                if self.thisMetadata is None:
                    debug("Creating new file")
                    onlineMetadata.update({
                        "name":
                        basename(self.path),
                        "parents": [self.parentMetadata["id"]],
                        "createdTime":
                        now
                    })
                    request = self.fs.drive.files().create(body=onlineMetadata,
                                                           media_body=upload)
                else:
                    debug("Updating existing file")
                    request = self.fs.drive.files().update(
                        fileId=self.thisMetadata["id"],
                        body={},
                        media_body=upload)

                response = None
                while response is None:
                    status, response = request.next_chunk()
                    debug(f"{status}: {response}")
                # MediaFileUpload doesn't close its file handle, so we have to work around it (https://github.com/googleapis/google-api-python-client/issues/575)
                upload._fd.close()  # pylint: disable=protected-access
            else:
                fh = BytesIO(b"")
                media = MediaIoBaseUpload(fh,
                                          mimetype="application/octet-stream",
                                          chunksize=-1,
                                          resumable=False)
                if self.thisMetadata is None:
                    onlineMetadata.update({
                        "name":
                        basename(self.path),
                        "parents": [self.parentMetadata["id"]],
                        "createdTime":
                        now
                    })
                    createdFile = self.fs.drive.files().create(
                        body=onlineMetadata, media_body=media).execute()
                    debug(f"Created empty file: {createdFile}")
                else:
                    updatedFile = self.fs.drive.files().update(
                        fileId=self.thisMetadata["id"],
                        body={},
                        media_body=media).execute()
                    debug(f"Updated file to empty: {updatedFile}")
        remove(self.localPath)
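The upload._fd.close() line works around MediaFileUpload opening a file handle it never closes (googleapis/google-api-python-client#575). One way to avoid touching a private attribute is MediaIoBaseUpload over a handle you own; a sketch with stand-in names (localPath, drive, file_id):

from googleapiclient.http import MediaIoBaseUpload

with open(localPath, "rb") as fh:
    upload = MediaIoBaseUpload(fh, mimetype="application/octet-stream",
                               resumable=True)
    request = drive.files().update(fileId=file_id, body={}, media_body=upload)
    response = None
    while response is None:
        status, response = request.next_chunk()
# the with-block closes the handle; no private-attribute workaround needed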
Exemple #27
def uploadFolder(service, local_entry, target_folder_id):
    # base_path is assumed to be module-level state shared by the recursive calls;
    # without the global declaration the read below would raise UnboundLocalError
    global base_path
    # set base path on the outermost call
    if base_path is None:
        base_path = local_entry
    print("\n[LOC] " + local_entry.replace(base_path, '~/'))
    # print('target_folder_id: ', target_folder_id)
    for item in sorted(os.listdir(local_entry)):
        item_path = os.path.join(local_entry, item)

        # item type
        ftype = ''
        if os.path.islink(item_path):
            ftype = 'link'
        if os.path.isdir(item_path):
            ftype = 'folder'
        elif os.path.isfile(item_path):
            ftype = 'file'

        # check for ignore
        if item in ignore_config["ignore-files"]:
            print('[IGNORE] ' + str(item))
            continue
        if ftype == 'file' and item.find('.') != -1 and item.split('.')[-1] in ignore_config["ignore-extensions"]:
            print('[IGNORE] ' + str(item))
            continue

        # skip files that already exist on Drive
        if ftype == 'file' and existsInDrive(service, ftype, item, target_folder_id) != -1:
            print('[SKIP] {}: {}'.format(ftype, item))
            continue

        # if folder
        if ftype == 'folder':
            # check if the folder already exists in gDrive
            exists = existsInDrive(service, ftype, item, target_folder_id)
            if exists != -1:
                print('[SKIP] create {}: {}'.format(ftype, item))
                uploadFolder(service, item_path, exists)
            else:
                # create the folder, then recurse into it
                file_metadata = {
                    'name': item,
                    'mimeType': 'application/vnd.google-apps.folder',
                    'parents': [target_folder_id]
                }
                folder = service.files().create(body=file_metadata, fields='id, name').execute()
                folder_id = folder.get('id')
                folder_name = folder.get('name')
                print('[CREATE] folder: %s' % folder_name)
                uploadFolder(service, item_path, folder_id)

        # if file
        elif ftype == 'file':
            file_metadata = {
                'name': item,
                'parents': [target_folder_id]
            }
            media = MediaFileUpload(item_path, resumable=True)
            file = service.files().create(body=file_metadata,
                                          media_body=media,
                                          fields='id, name').execute()
            print('[UPLOAD] file: %s' % file.get('name'))

        elif ftype == 'link':
            print('*[SKIP] link: ' + str(item))

        else:
            print('*[SKIP] unknown: ' + str(item))
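existsInDrive is assumed above. A hypothetical minimal version, matching the calling convention (Drive id on a hit, -1 otherwise), could query the parent with the Drive v3 q syntax:

def existsInDrive(service, ftype, name, parent_id):
    # hypothetical helper; quote escaping in `name` is left out
    q = "name='{0}' and '{1}' in parents and trashed=false".format(name, parent_id)
    if ftype == 'folder':
        q += " and mimeType='application/vnd.google-apps.folder'"
    items = service.files().list(q=q, fields='files(id)').execute().get('files', [])
    return items[0]['id'] if items else -1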
Exemple #28
# assumes `results` already holds the first files().list page and
# `nextPageToken` was taken from it (setup not shown)
while nextPageToken:
    nextPage = service.files().list(
        pageSize=10,
        fields="nextPageToken, files(id, name, mimeType, parents)",
        pageToken=nextPageToken).execute()
    nextPageToken = nextPage.get('nextPageToken')
    results['files'] = results['files'] + nextPage['files']
print(len(results.get('files')))
pp.pprint(results)

file_id = '1dBfng4rgzcKUewn1WsmylitzgG9bXB0p'
request = service.files().get_media(fileId=file_id)
filename = 'File.pdf'
fh = io.FileIO(filename, 'wb')
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
    status, done = downloader.next_chunk()
    print("Download %d%%." % int(status.progress() * 100))

folder_id = '1dvBsNXzAjKwjNvkfmff5ZJ8Jmuipzvxq'
name = 'Downloaded metodychka.pdf'
file_path = 'File.pdf'
file_metadata = {'name': name, 'parents': [folder_id]}
media = MediaFileUpload(file_path, resumable=True)
r = service.files().create(body=file_metadata, media_body=media,
                           fields='id').execute()
pp.pprint(r)

# service.files().delete(fileId='18Wwvuye8dOjCZfJzGf45yQvB87Lazbzu').execute()
Exemple #29
def UploadFile(service, file_path):
    """Upload a file to the Drive Home folder."""

    filename = file_path.split('/').pop()
    extension = filename.split('.').pop()

    doc_mimeType = {
        'html': 'text/html',
        'docx':
        'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
        'rtf': 'application/rtf',
        'txt': 'text/plain',
        'odt': 'application/vnd.oasis.opendocument.text',
    }

    xls_mimeType = {
        'csv': 'text/csv',
        'xlsx':
        'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        'ods': 'application/x-vnd.oasis.opendocument.spreadsheet',
    }

    slide_mimeTypes = {
        'pdf': 'application/pdf',
        'odp': 'application/vnd.oasis.opendocument.presentation',
        'pptx':
        'application/vnd.openxmlformats-officedocument.presentationml.presentation'
    }

    image_mimeTypes = {
        'jpeg': 'image/jpeg',
        'png': 'image/png',
        'svg': 'image/svg+xml'
    }

    upload_mimeType = ''

    if extension in doc_mimeType.keys():
        mimeType = doc_mimeType[extension]
        upload_mimeType = 'application/vnd.google-apps.document'
    elif extension in xls_mimeType:
        mimeType = xls_mimeType[extension]
        upload_mimeType = 'application/vnd.google-apps.spreadsheet'
    elif extension in slide_mimeTypes:
        mimeType = slide_mimeTypes[extension]
        upload_mimeType = 'application/vnd.google-apps.presentation'
    elif extension in image_mimeTypes:
        mimeType = image_mimeTypes[extension]
        upload_mimeType = image_mimeTypes[extension]
    else:
        print("Unknown extension, can't upload")
        return

    media = MediaFileUpload(file_path, mimetype=mimeType, resumable=True)

    file_metadata = {'name': filename, 'mimeType': upload_mimeType}

    file = service.files().create(body=file_metadata,
                                  media_body=media,
                                  fields='id').execute()

    print("File Uploaded Successfully..")
    print('File ID: %s' % file.get('id'))
Exemple #30
def initialize_upload(youtube, options):
    path = options.get('--file')
    tags = None
    if options.get('--tags'):
        tags = options.get('--tags').split(',')

    category = None
    if options.get('--category'):
        category = utils.getCategory(options.get('--category'), 'youtube')

    language = None
    if options.get('--language'):
        language = utils.getLanguage(options.get('--language'), "youtube")

    license = None
    if options.get('--cca'):
        license = "creativeCommon"

    body = {
        "snippet": {
            "title": options.get('--name') or splitext(basename(path))[0],
            "description": options.get('--description')
            or "default description",
            "tags": tags,
            # if no category, set default to 1 (Films)
            "categoryId": str(category or 1),
            "defaultAudioLanguage": str(language or 'en')
        },
        "status": {
            "privacyStatus": str(options.get('--privacy') or "private"),
            "license": str(license or "youtube"),
        }
    }

    # If youtubeAt exists, use it instead of publishAt
    if options.get('--youtubeAt'):
        publishAt = options.get('--youtubeAt')
    elif options.get('--publishAt'):
        publishAt = options.get('--publishAt')

    if 'publishAt' in locals():
        # YouTube needs microseconds and the local timezone in the ISO 8601 timestamp
        publishAt = publishAt + ".000001"
        publishAt = datetime.datetime.strptime(publishAt,
                                               '%Y-%m-%dT%H:%M:%S.%f')
        tz = get_localzone()
        tz = pytz.timezone(str(tz))
        publishAt = tz.localize(publishAt).isoformat()
        body['status']['publishAt'] = str(publishAt)

    if options.get('--playlist'):
        playlist_id = get_playlist_by_name(youtube, options.get('--playlist'))
        if not playlist_id and options.get('--playlistCreate'):
            playlist_id = create_playlist(youtube, options.get('--playlist'))
        elif not playlist_id:
            logger.warning("Youtube: Playlist `" + options.get('--playlist') +
                           "` is unknown.")
            logger.warning(
                "Youtube: If you want to create it, set the --playlistCreate option."
            )
            playlist_id = ""
    else:
        playlist_id = ""

    # Call the API's videos.insert method to create and upload the video.
    insert_request = youtube.videos().insert(part=','.join(list(body.keys())),
                                             body=body,
                                             media_body=MediaFileUpload(
                                                 path,
                                                 chunksize=-1,
                                                 resumable=True))
    video_id = resumable_upload(insert_request, 'video', 'insert', options)

    # If we get a video_id, upload is successful and we are able to set thumbnail
    if video_id and options.get('--thumbnail'):
        set_thumbnail(options,
                      youtube,
                      options.get('--thumbnail'),
                      videoId=video_id)

    # If we get a video_id and a playlist_id, upload is successful and we are able to set playlist
    if video_id and playlist_id != "":
        set_playlist(youtube, playlist_id, video_id)
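resumable_upload is not shown here; in Google's YouTube samples that helper is a next_chunk loop retrying on transient HTTP errors. A hedged sketch (the retriable status codes and backoff policy are assumptions):

import random
import time

from googleapiclient.errors import HttpError

RETRIABLE_STATUS_CODES = (500, 502, 503, 504)

def resumable_upload(insert_request, resource, method, options, max_retries=10):
    response, retry = None, 0
    while response is None:
        try:
            status, response = insert_request.next_chunk()
        except HttpError as err:
            if err.resp.status not in RETRIABLE_STATUS_CODES:
                raise
            retry += 1
            if retry > max_retries:
                raise
            time.sleep(random.random() * (2 ** retry))  # randomized exponential backoff
    return response.get('id')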
Exemple #31
def upload(self, lpath, fid):
    dbg('Uploading local path "{}" for file ID "{}"'.format(lpath, fid))
    media = MediaFileUpload(lpath)
    return self.service.files().update(fileId=fid,
                                       media_body=media,
                                       fields=FIELDS).execute()
Exemple #32
    date, hour = asctime()[4:10], asctime()[11:13]
    drc = os.path.join(SCR_DIR, date, hour)
    if backlight_on():
        if not os.path.exists(drc):
            os.makedirs(drc)
        pic_file = "%d.jpg" % time()
        pic_path = os.path.join(drc, pic_file)
        subprocess.call(
            ["minicap", "-P", "1080x1920@240x426/0", "-s"],
            stdout=open(pic_path, "w"),
            stderr=open(os.devnull, "w"),
        )
        # Upload to GDrive
        status = ""
        if gdrive_init():
            try:
                service.files().create(
                    body={
                        "name": pic_file,
                        "parents": [FOLDER_ID]
                    },
                    media_body=MediaFileUpload(pic_path,
                                               mimetype="image/jpeg"),
                    fields="id",
                ).execute()
                status = "uploaded"
            except Exception:
                pass
        print(pic_path[14:], status)
    sleep(30)
Exemple #33
    def upload_file(self, file_path, file_name, mime_type, parent_id):
        # File body description
        file_metadata = {
            'name': file_name,
            'description': 'mirror',
            'mimeType': mime_type,
        }
        if parent_id is not None:
            file_metadata['parents'] = [parent_id]

        if os.path.getsize(file_path) == 0:
            media_body = MediaFileUpload(file_path,
                                         mimetype=mime_type,
                                         resumable=False)
            response = self.__service.files().create(
                supportsTeamDrives=True,
                body=file_metadata,
                media_body=media_body).execute()
            if not IS_TEAM_DRIVE:
                self.__set_permission(response['id'])

            drive_file = self.__service.files().get(
                supportsTeamDrives=True, fileId=response['id']).execute()
            download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(
                drive_file.get('id'))
            return download_url
        media_body = MediaFileUpload(file_path,
                                     mimetype=mime_type,
                                     resumable=True,
                                     chunksize=50 * 1024 * 1024)

        # Insert a file
        drive_file = self.__service.files().create(supportsTeamDrives=True,
                                                   body=file_metadata,
                                                   media_body=media_body)
        response = None
        while response is None:
            if self.is_cancelled:
                return None
            try:
                self.status, response = drive_file.next_chunk()
            except HttpError as err:
                if err.resp.get('content-type',
                                '').startswith('application/json'):
                    reason = json.loads(err.content).get('error').get(
                        'errors')[0].get('reason')
                    if reason == 'userRateLimitExceeded' or reason == 'dailyLimitExceeded':
                        if USE_SERVICE_ACCOUNTS:
                            self.switchServiceAccount()
                            LOGGER.info(f"Got: {reason}, Trying Again.")
                            return self.upload_file(file_path, file_name,
                                                    mime_type, parent_id)
                    else:
                        raise err
        self._file_uploaded_bytes = 0
        # Insert new permissions
        if not IS_TEAM_DRIVE:
            self.__set_permission(response['id'])
        # Define file instance and get url for download
        drive_file = self.__service.files().get(
            supportsTeamDrives=True, fileId=response['id']).execute()
        download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(
            drive_file.get('id'))
        return download_url
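__set_permission is assumed above; a hypothetical shape for it, granting anyone-with-the-link read access the way mirror bots usually do:

def __set_permission(self, file_id):
    # hypothetical helper: anyone-with-link reader, not discoverable by search
    permission = {'role': 'reader', 'type': 'anyone', 'allowFileDiscovery': False}
    self.__service.permissions().create(fileId=file_id, body=permission,
                                        supportsTeamDrives=True).execute()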
  def method(self, **kwargs):
    # Don't bother with doc string, it will be over-written by createMethod.

    for name in six.iterkeys(kwargs):
      if name not in parameters.argmap:
        raise TypeError('Got an unexpected keyword argument "%s"' % name)

    # Remove args that have a value of None.
    keys = list(kwargs.keys())
    for name in keys:
      if kwargs[name] is None:
        del kwargs[name]

    for name in parameters.required_params:
      if name not in kwargs:
        # temporary workaround for non-paging methods incorrectly requiring
        # page token parameter (cf. drive.changes.watch vs. drive.changes.list)
        if name not in _PAGE_TOKEN_NAMES or _findPageTokenName(
            _methodProperties(methodDesc, schema, 'response')):
          raise TypeError('Missing required parameter "%s"' % name)

    for name, regex in six.iteritems(parameters.pattern_params):
      if name in kwargs:
        if isinstance(kwargs[name], six.string_types):
          pvalues = [kwargs[name]]
        else:
          pvalues = kwargs[name]
        for pvalue in pvalues:
          if re.match(regex, pvalue) is None:
            raise TypeError(
                'Parameter "%s" value "%s" does not match the pattern "%s"' %
                (name, pvalue, regex))

    for name, enums in six.iteritems(parameters.enum_params):
      if name in kwargs:
        # We need to handle the case of a repeated enum
        # name differently, since we want to handle both
        # arg='value' and arg=['value1', 'value2']
        if (name in parameters.repeated_params and
            not isinstance(kwargs[name], six.string_types)):
          values = kwargs[name]
        else:
          values = [kwargs[name]]
        for value in values:
          if value not in enums:
            raise TypeError(
                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
                (name, value, str(enums)))

    actual_query_params = {}
    actual_path_params = {}
    for key, value in six.iteritems(kwargs):
      to_type = parameters.param_types.get(key, 'string')
      # For repeated parameters we cast each member of the list.
      if key in parameters.repeated_params and type(value) == type([]):
        cast_value = [_cast(x, to_type) for x in value]
      else:
        cast_value = _cast(value, to_type)
      if key in parameters.query_params:
        actual_query_params[parameters.argmap[key]] = cast_value
      if key in parameters.path_params:
        actual_path_params[parameters.argmap[key]] = cast_value
    body_value = kwargs.get('body', None)
    media_filename = kwargs.get('media_body', None)
    media_mime_type = kwargs.get('media_mime_type', None)

    if self._developerKey:
      actual_query_params['key'] = self._developerKey

    model = self._model
    if methodName.endswith('_media'):
      model = MediaModel()
    elif 'response' not in methodDesc:
      model = RawModel()

    headers = {}
    headers, params, query, body = model.request(headers,
        actual_path_params, actual_query_params, body_value)

    expanded_url = uritemplate.expand(pathUrl, params)
    url = _urljoin(self._baseUrl, expanded_url + query)

    resumable = None
    multipart_boundary = ''

    if media_filename:
      # Ensure we end up with a valid MediaUpload object.
      if isinstance(media_filename, six.string_types):
        if media_mime_type is None:
          logger.warning(
              'media_mime_type argument not specified: trying to auto-detect for %s',
              media_filename)
          media_mime_type, _ = mimetypes.guess_type(media_filename)
        if media_mime_type is None:
          raise UnknownFileType(media_filename)
        if not mimeparse.best_match([media_mime_type], ','.join(accept)):
          raise UnacceptableMimeTypeError(media_mime_type)
        media_upload = MediaFileUpload(media_filename,
                                       mimetype=media_mime_type)
      elif isinstance(media_filename, MediaUpload):
        media_upload = media_filename
      else:
        raise TypeError('media_filename must be str or MediaUpload.')

      # Check the maxSize
      if media_upload.size() is not None and media_upload.size() > maxSize > 0:
        raise MediaUploadSizeError("Media larger than: %s" % maxSize)

      # Use the media path uri for media uploads
      expanded_url = uritemplate.expand(mediaPathUrl, params)
      url = _urljoin(self._baseUrl, expanded_url + query)
      if media_upload.resumable():
        url = _add_query_parameter(url, 'uploadType', 'resumable')

      if media_upload.resumable():
        # This is all we need to do for resumable, if the body exists it gets
        # sent in the first request, otherwise an empty body is sent.
        resumable = media_upload
      else:
        # A non-resumable upload
        if body is None:
          # This is a simple media upload
          headers['content-type'] = media_upload.mimetype()
          body = media_upload.getbytes(0, media_upload.size())
          url = _add_query_parameter(url, 'uploadType', 'media')
        else:
          # This is a multipart/related upload.
          msgRoot = MIMEMultipart('related')
          # msgRoot should not write out its own headers
          setattr(msgRoot, '_write_headers', lambda self: None)

          # attach the body as one part
          msg = MIMENonMultipart(*headers['content-type'].split('/'))
          msg.set_payload(body)
          msgRoot.attach(msg)

          # attach the media as the second part
          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
          msg['Content-Transfer-Encoding'] = 'binary'

          payload = media_upload.getbytes(0, media_upload.size())
          msg.set_payload(payload)
          msgRoot.attach(msg)
          # encode the body: note that we can't use `as_string`, because
          # it plays games with `From ` lines.
          fp = BytesIO()
          g = _BytesGenerator(fp, mangle_from_=False)
          g.flatten(msgRoot, unixfrom=False)
          body = fp.getvalue()

          multipart_boundary = msgRoot.get_boundary()
          headers['content-type'] = ('multipart/related; '
                                     'boundary="%s"') % multipart_boundary
          url = _add_query_parameter(url, 'uploadType', 'multipart')

    logger.info('URL being requested: %s %s' % (httpMethod, url))
    return self._requestBuilder(self._http,
                                model.response,
                                url,
                                method=httpMethod,
                                body=body,
                                headers=headers,
                                methodId=methodId,
                                resumable=resumable)
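In practice the branch above means any generated method accepts either a filename or a MediaUpload instance for media_body, with media_mime_type available when auto-detection would fail. For example, against an authorized Drive v3 service:

# filename form: the library guesses the mimetype (or takes media_mime_type)
service.files().create(body={'name': 'notes.bin'},
                       media_body='notes.bin',
                       media_mime_type='application/octet-stream').execute()

# MediaUpload form: the caller controls chunking and resumability
media = MediaFileUpload('notes.bin', mimetype='application/octet-stream',
                        resumable=True)
service.files().create(body={'name': 'notes.bin'}, media_body=media).execute()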
Exemple #35
        file.write(line['text'])
    file.close()  # end of translate(): writes 'captions_<lang>.sbv' (setup not shown)


# Remove existing non-English translations
if removenonenglish:
    request = youtube.captions().list(videoId=youtubevideo, part="snippet")
    response = request.execute()
    for i in response['items']:
        if i['snippet']['language'] == 'en':
            print(i['snippet']['language'])
            print(i['id'])
        else:
            delrequest = youtube.captions().delete(id=i['id'])
            delrequest.execute()
# Create and upload translation files.
for l in outputlanguages:
    translate(l)
    request = youtube.captions().insert(
        part='snippet',
        body={
            'snippet': {
                'language': l,
                'name': '',
                'videoId': youtubevideo,
                'isDraft': False
            }
        },
        media_body=MediaFileUpload('captions_' + l + '.sbv'))
    response = request.execute()
    print(response)
Exemple #36
def update_file(drive_service, file_path, file_id):
    file_metadata = {"name": file_path}
    media = MediaFileUpload(file_path)
    drive_service.files().update(body=file_metadata,
                                 media_body=media,
                                 fileId=file_id).execute()
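Note that update_file reuses the full local path as the new Drive name; a variant that keeps only the basename might look like this (helper name is hypothetical):

import os
from googleapiclient.http import MediaFileUpload

def update_file_keep_basename(drive_service, file_path, file_id):
    # rename the Drive file after the local basename only
    media = MediaFileUpload(file_path)
    drive_service.files().update(body={"name": os.path.basename(file_path)},
                                 media_body=media,
                                 fileId=file_id).execute()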
  def test_media_file_upload_mimetype_detection(self):
    upload = MediaFileUpload(datafile('small.png'))
    self.assertEqual('image/png', upload.mimetype())

    upload = MediaFileUpload(datafile('empty'))
    self.assertEqual('application/octet-stream', upload.mimetype())
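When detection would otherwise fall back to the generic type, an explicit mimetype can be passed instead; e.g.:

upload = MediaFileUpload(datafile('empty'), mimetype='text/plain')
assert upload.mimetype() == 'text/plain'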
Exemple #38
    def __upload_file(self, file_path: str, parent_id: str) -> str:

        if self._is_canceled:
            raise ProcessCanceled

        mime_type = guess_type(file_path)[0] or "text/plain"
        file_name = os.path.basename(file_path)
        body = {
            "name": file_name,
            "mimeType": mime_type,
            "description": "Uploaded using Userge"
        }

        if parent_id:
            body["parents"] = [parent_id]

        if os.path.getsize(file_path) == 0:
            media_body = MediaFileUpload(file_path,
                                         mimetype=mime_type,
                                         resumable=False)

            u_file_obj = self.__service.files().create(
                body=body, media_body=media_body,
                supportsTeamDrives=True).execute()
            file_id = u_file_obj.get("id")

        else:
            media_body = MediaFileUpload(file_path,
                                         mimetype=mime_type,
                                         chunksize=100 * 1024 * 1024,
                                         resumable=True)

            u_file_obj = self.__service.files().create(body=body,
                                                       media_body=media_body,
                                                       supportsTeamDrives=True)

            c_time = time.time()
            response = None

            while response is None:
                status, response = u_file_obj.next_chunk()

                if self._is_canceled:
                    raise ProcessCanceled

                if status:
                    f_size = status.total_size
                    diff = time.time() - c_time
                    uploaded = status.resumable_progress
                    percentage = uploaded / f_size * 100
                    speed = round(uploaded / diff, 2)
                    eta = round((f_size - uploaded) / speed)

                    tmp = \
                        "__Uploading to GDrive...__\n" + \
                        "```[{}{}]({}%)```\n" + \
                        "**File Name** : `{}`\n" + \
                        "**File Size** : `{}`\n" + \
                        "**Uploaded** : `{}`\n" + \
                        "**Completed** : `{}/{}`\n" + \
                        "**Speed** : `{}/s`\n" + \
                        "**ETA** : `{}`"

                    self.__progress = tmp.format(
                        "".join(
                            ["█" for i in range(math.floor(percentage / 5))]),
                        "".join([
                            "░" for i in range(20 - math.floor(percentage / 5))
                        ]), round(percentage,
                                  2), file_name, humanbytes(f_size),
                        humanbytes(uploaded), self.__completed, self.__list,
                        humanbytes(speed), time_formatter(eta))

            file_id = response.get("id")

        if not Config.G_DRIVE_IS_TD:
            self.__set_permission(file_id)

        self.__completed += 1

        drive_file = self.__service.files().get(
            fileId=file_id, fields='id, name, size',
            supportsTeamDrives=True).execute()

        file_id = drive_file.get('id')
        file_name = drive_file.get("name")
        file_size = humanbytes(int(drive_file.get('size', 0)))

        LOG.info(
            "Created Google-Drive File => Name: {} ID: {} Size: {}".format(
                file_name, file_id, file_size))

        return G_DRIVE_FILE_LINK.format(file_id, file_name, file_size)
  def method(self, **kwargs):
    # Don't bother with doc string, it will be over-written by createMethod.

    for name in kwargs.iterkeys():
      if name not in parameters.argmap:
        raise TypeError('Got an unexpected keyword argument "%s"' % name)

    # Remove args that have a value of None.
    keys = kwargs.keys()
    for name in keys:
      if kwargs[name] is None:
        del kwargs[name]

    for name in parameters.required_params:
      if name not in kwargs:
        raise TypeError('Missing required parameter "%s"' % name)

    for name, regex in parameters.pattern_params.iteritems():
      if name in kwargs:
        if isinstance(kwargs[name], basestring):
          pvalues = [kwargs[name]]
        else:
          pvalues = kwargs[name]
        for pvalue in pvalues:
          if re.match(regex, pvalue) is None:
            raise TypeError(
                'Parameter "%s" value "%s" does not match the pattern "%s"' %
                (name, pvalue, regex))

    for name, enums in parameters.enum_params.iteritems():
      if name in kwargs:
        # We need to handle the case of a repeated enum
        # name differently, since we want to handle both
        # arg='value' and arg=['value1', 'value2']
        if (name in parameters.repeated_params and
            not isinstance(kwargs[name], basestring)):
          values = kwargs[name]
        else:
          values = [kwargs[name]]
        for value in values:
          if value not in enums:
            raise TypeError(
                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
                (name, value, str(enums)))

    actual_query_params = {}
    actual_path_params = {}
    for key, value in kwargs.iteritems():
      to_type = parameters.param_types.get(key, 'string')
      # For repeated parameters we cast each member of the list.
      if key in parameters.repeated_params and type(value) == type([]):
        cast_value = [_cast(x, to_type) for x in value]
      else:
        cast_value = _cast(value, to_type)
      if key in parameters.query_params:
        actual_query_params[parameters.argmap[key]] = cast_value
      if key in parameters.path_params:
        actual_path_params[parameters.argmap[key]] = cast_value
    body_value = kwargs.get('body', None)
    media_filename = kwargs.get('media_body', None)

    if self._developerKey:
      actual_query_params['key'] = self._developerKey

    model = self._model
    if methodName.endswith('_media'):
      model = MediaModel()
    elif 'response' not in methodDesc:
      model = RawModel()

    headers = {}
    headers, params, query, body = model.request(headers,
        actual_path_params, actual_query_params, body_value)

    expanded_url = uritemplate.expand(pathUrl, params)
    url = urlparse.urljoin(self._baseUrl, expanded_url + query)

    resumable = None
    multipart_boundary = ''

    if media_filename:
      # Ensure we end up with a valid MediaUpload object.
      if isinstance(media_filename, basestring):
        (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
        if media_mime_type is None:
          raise UnknownFileType(media_filename)
        if not mimeparse.best_match([media_mime_type], ','.join(accept)):
          raise UnacceptableMimeTypeError(media_mime_type)
        media_upload = MediaFileUpload(media_filename,
                                       mimetype=media_mime_type)
      elif isinstance(media_filename, MediaUpload):
        media_upload = media_filename
      else:
        raise TypeError('media_filename must be str or MediaUpload.')

      # Check the maxSize
      if maxSize > 0 and media_upload.size() > maxSize:
        raise MediaUploadSizeError("Media larger than: %s" % maxSize)

      # Use the media path uri for media uploads
      expanded_url = uritemplate.expand(mediaPathUrl, params)
      url = urlparse.urljoin(self._baseUrl, expanded_url + query)
      if media_upload.resumable():
        url = _add_query_parameter(url, 'uploadType', 'resumable')

      if media_upload.resumable():
        # This is all we need to do for resumable, if the body exists it gets
        # sent in the first request, otherwise an empty body is sent.
        resumable = media_upload
      else:
        # A non-resumable upload
        if body is None:
          # This is a simple media upload
          headers['content-type'] = media_upload.mimetype()
          body = media_upload.getbytes(0, media_upload.size())
          url = _add_query_parameter(url, 'uploadType', 'media')
        else:
          # This is a multipart/related upload.
          msgRoot = MIMEMultipart('related')
          # msgRoot should not write out its own headers
          setattr(msgRoot, '_write_headers', lambda self: None)

          # attach the body as one part
          msg = MIMENonMultipart(*headers['content-type'].split('/'))
          msg.set_payload(body)
          msgRoot.attach(msg)

          # attach the media as the second part
          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
          msg['Content-Transfer-Encoding'] = 'binary'

          payload = media_upload.getbytes(0, media_upload.size())
          msg.set_payload(payload)
          msgRoot.attach(msg)
          body = msgRoot.as_string()

          multipart_boundary = msgRoot.get_boundary()
          headers['content-type'] = ('multipart/related; '
                                     'boundary="%s"') % multipart_boundary
          url = _add_query_parameter(url, 'uploadType', 'multipart')

    logger.info('URL being requested: %s %s' % (httpMethod, url))
    return self._requestBuilder(self._http,
                                model.response,
                                url,
                                method=httpMethod,
                                body=body,
                                headers=headers,
                                methodId=methodId,
                                resumable=resumable)
Exemple #40
            raise Exception("Archivo ingresado no existe")

        new_name = GetParams("new_name")
        folder = GetParams("folder")
        var = GetParams("var")

        service = build('drive', 'v3', credentials=creds)

        file_mime = magic.from_file(file_path, mime=True)
        if file_path.endswith('.csv'):
            file_mime = 'text/csv'

        name = new_name or os.path.basename(file_path)

        file_metadata = {'name': name}
        media = MediaFileUpload(file_path, mimetype=file_mime)

        if folder:
            file_metadata['parents'] = [folder]

        file = service.files().create(body=file_metadata,
                                      media_body=media,
                                      fields='id').execute()
        if var:
            SetVar(var, file)

        print(file)
    except Exception as e:
        print("\x1B[" + "31;40mAn error occurred\x1B[" + "0m")
        PrintException()
        raise e