def _parse_api_response(response: ResponseDict) -> _DriveFileArgs:
    """Translate a raw Drive API ``files`` resource dict into ``_DriveFileArgs``.

    Args:
        response: Decoded JSON of a Drive ``files`` resource; must contain
            ``id``, ``name``, ``mimeType``, ``webViewLink``, ``createdTime``
            and ``modifiedTime``; the remaining fields are optional.

    Returns:
        A populated ``_DriveFileArgs``.

    Raises:
        KeyError: If one of the required keys is missing from *response*.
    """
    parents = response.get("parents", [])
    # Bug fix: the original checked `> 2`, which never matched the case the
    # message describes; "more than 1 parent" means len(parents) > 1.
    if len(parents) > 1:
        print("File has more than 1 parent")
    # `or [{}]` also covers the key being present but an *empty* list,
    # which would make `[0]` raise IndexError with a plain .get() default.
    content_restrictions = response.get("contentRestrictions") or [{}]
    restriction = content_restrictions[0]
    is_locked = restriction.get("readOnly", False)
    locking_reason = restriction.get("reason", None)
    return _DriveFileArgs(
        id=response["id"],
        title=response["name"],
        description=response.get("description"),
        mime_type=response["mimeType"],
        # A file with no parents is treated as living directly under "root".
        parent_id="root" if not parents else parents[0],
        is_trashed=response.get("trashed", False),
        is_starred=response.get("starred", False),
        is_locked=is_locked,
        locking_reason=locking_reason,
        url=response["webViewLink"],
        created_time=from_rfc3339(response["createdTime"]),
        modified_time=from_rfc3339(response["modifiedTime"]),
        bytes_used=response.get("quotaBytesUsed", 0),
    )
def P(timestamp, value):
    """Build a ``monitoring_v3.Point`` holding *value* at *timestamp*.

    The RFC 3339 *timestamp* is parsed once, stripped of its tzinfo
    (the API expects naive datetimes here), and used for both ends of
    the point's interval.
    """
    moment = datetime_helpers.from_rfc3339(timestamp).replace(tzinfo=None)
    interval = monitoring_v3.TimeInterval()
    interval.start_time = moment
    interval.end_time = moment
    return monitoring_v3.Point(interval=interval, value={"double_value": value})
def parse_timestamps():
    """Parse each RFC 3339 string in ``TIMESTAMPS`` into a naive datetime."""
    from google.api_core import datetime_helpers

    parse = datetime_helpers.from_rfc3339
    return [parse(raw).replace(tzinfo=None) for raw in TIMESTAMPS]
def test_from_rfc3339_nanos_is_deprecated():
    """The deprecated ``from_rfc3339_nanos`` must agree with ``from_rfc3339``."""
    stamp = "2009-12-17T12:44:32.123456Z"
    assert datetime_helpers.from_rfc3339(stamp) == (
        datetime_helpers.from_rfc3339_nanos(stamp)
    )
def test_from_rfc3339_with_bad_tz():
    """A malformed timezone suffix must be rejected with ValueError."""
    bad_stamp = "2009-12-17T12:44:32.123456BAD"
    with pytest.raises(ValueError):
        datetime_helpers.from_rfc3339(bad_stamp)
def test_from_rfc3339():
    """A microsecond-precision UTC stamp parses to the matching aware datetime."""
    expected = datetime.datetime(2009, 12, 17, 12, 44, 32, 123456, pytz.utc)
    assert datetime_helpers.from_rfc3339("2009-12-17T12:44:32.123456Z") == expected
def parse_timestamps():
    """Return the module's ``TIMESTAMPS`` as naive (tz-stripped) datetimes."""
    from google.api_core import datetime_helpers

    parsed = []
    for raw in TIMESTAMPS:
        parsed.append(datetime_helpers.from_rfc3339(raw).replace(tzinfo=None))
    return parsed
def test_from_rfc3339_with_nanos():
    """Nanosecond (9-digit) fractions are rejected by ``from_rfc3339``."""
    nano_stamp = '2009-12-17T12:44:32.123456789Z'
    with pytest.raises(ValueError):
        datetime_helpers.from_rfc3339(nano_stamp)
def test_from_rfc3339_with_nanos():
    """More than six fractional digits must raise ValueError."""
    stamp_with_nanos = "2009-12-17T12:44:32.123456789Z"
    with pytest.raises(ValueError):
        datetime_helpers.from_rfc3339(stamp_with_nanos)
def test_from_rfc3339():
    """``from_rfc3339`` yields a pytz.utc-aware datetime with microseconds."""
    parsed = datetime_helpers.from_rfc3339("2009-12-17T12:44:32.123456Z")
    assert parsed == datetime.datetime(2009, 12, 17, 12, 44, 32, 123456, pytz.utc)
def test_from_rfc3339_with_bad_tz():
    """An unparseable timezone designator raises ValueError."""
    garbage_tz = '2009-12-17T12:44:32.123456BAD'
    with pytest.raises(ValueError):
        datetime_helpers.from_rfc3339(garbage_tz)
def main(event, context):
    """Cloud Function entry point: fetch recent stock prices from Firestore,
    run them through an AI Platform prediction model, and print the results.

    Args:
        event: Trigger payload (unused here).
        context: Trigger metadata (unused here).
    """
    # Authenticate with Application Default Credentials and open Firestore.
    cred = credentials.ApplicationDefault()
    firebase_admin.initialize_app(cred, {'projectId': PROJECT_NAME})
    db: Client = firestore.client()
    # Fetch every stock_price document dated on/after the query date.
    query_date = _get_query_date()
    price_ref = db.collection('stock_price')
    # NOTE(review): `[DocumentSnapshot]` is a list literal, not a valid type
    # annotation; `List[DocumentSnapshot]` was presumably intended.
    prices: [DocumentSnapshot] = price_ref.where('date', '>=', query_date).get()

    def build_data(price: DocumentSnapshot) -> dict:
        # Project each snapshot down to the fields the model consumes.
        data = price.to_dict()
        return {
            'date': data['date'],
            'code': data['code'],
            'opening_quotation': data['opening_quotation'],
            'high': data['high'],
            'turnover': data['turnover'],
            'closing_quotation': data['closing_quotation'],
            'low': data['low']
        }

    stock_prices = list(map(build_data, prices))
    predictable_stock_codes = []
    data_sets = []
    # Group prices per stock code; codes with no rows are skipped.
    # NOTE(review): scanning 1001..9998 and filtering the full list each time
    # is O(codes * rows); grouping by code once would be linear.
    for code in range(1001, 9999):
        filtered = list(filter(lambda x: x['code'] == code, stock_prices))
        if len(filtered) == 0:
            continue
        # Order each code's rows chronologically before building its dataset.
        data = sorted(filtered, key=lambda x: x['date'])

        def build_dataset(d: dict) -> list:
            # Feature vector order expected by the model (5 numeric fields).
            return [
                d['opening_quotation'], d['high'], d['turnover'],
                d['closing_quotation'], d['low']
            ]

        data_sets.append(list(map(build_dataset, data)))
        predictable_stock_codes.append(data[0]['code'])
    # NOTE(review): `input` shadows the builtin; `instances` would be clearer.
    input = list(map(lambda d: {'input': d}, data_sets))
    # Call the deployed AI Platform model version for predictions.
    ml = discovery.build('ml', 'v1')
    name = 'projects/{}/models/{}'.format(PROJECT_NAME, 'stock_predictor')
    name += '/versions/{}'.format('stock_price_predictor')
    response = ml.projects().predict(name=name, body={
        'instances': input
    }).execute()
    if 'error' in response:
        raise RuntimeError(response['error'])
    print(response['predictions'])
    prediction_results = response['predictions']
    # TODO save data at next day or week day
    # NOTE(review): `code` here is the *last* value left over from the loop
    # above (9998, or the last code iterated) — likely not what was intended.
    l = list(filter(lambda x: x['code'] == code, stock_prices))
    # FIXME get from now date
    s = sorted(l, key=lambda x: x['date'])
    print(s)
    predict_datetime = from_rfc3339(
        s[-1]['date'].rfc3339())  # Get latest date in predicted dataset
    original_datetime = predict_datetime
    # NOTE(review): this bare `return` makes the loop below unreachable dead
    # code; either the return or the loop appears to be leftover work-in-progress.
    return
    while True:
        delta = timedelta(days=1)
        predict_datetime = predict_datetime + delta
        # Skip Japan holidays and JPX holidays (Dec 31, Jun 1, 2 and 3)
        if not _is_jpx_holiday(predict_datetime):
            break
def test_from_rfc3339_without_nanos():
    """A stamp with no fractional part parses with microsecond == 0."""
    parsed = datetime_helpers.from_rfc3339("2009-12-17T12:44:32Z")
    assert parsed == datetime.datetime(
        2009, 12, 17, 12, 44, 32, 0, datetime.timezone.utc
    )
def test_from_rfc3339_w_nanos_raise_exception():
    """A stamp missing its timezone designator must be rejected."""
    no_tz_stamp = "2009-12-17T12:44:32.123456"
    with pytest.raises(ValueError):
        datetime_helpers.from_rfc3339(no_tz_stamp)
def test_from_rfc3339_with_truncated_nanos(truncated, micros):
    """Truncated fractional digits parse to the expected microsecond value."""
    stamp = "2009-12-17T12:44:32.{}Z".format(truncated)
    expected = datetime.datetime(
        2009, 12, 17, 12, 44, 32, micros, datetime.timezone.utc
    )
    assert datetime_helpers.from_rfc3339(stamp) == expected
def expiration_time(self) -> Optional[datetime]:
    """Return this entity's expiration as a datetime, or None when unset."""
    # TODO only works for google suite ? then distinguish between files
    # TODO can be set when creating a permission ?
    if not self._expiration_time:
        return None
    return from_rfc3339(self._expiration_time)