def saveShipmentItems(self):
    """Persist the first shipment item from ``self.data``.

    Parses the date fields, attaches the optional pick-up point and the
    shipment id, then validates and saves through
    ``ShipmentsItemsSerializer``.

    Returns:
        bool: True when the item was saved.

    Raises:
        Exception: carrying ``serializer.errors`` when validation fails.
    """
    shipmentItem = self.data["shipmentItems"][0]
    available_keys = list(shipmentItem.keys())
    if "pickUpPoint" in self.data.keys():
        available_keys.append('pickUpPoint')
        shipmentItem['pickUpPoint'] = str(self.data['pickUpPoint'])
    shipmentItem["orderDate"] = maya.parse(
        shipmentItem["orderDate"]).datetime()
    # Bug fix: the parsed value was previously stored under the typo key
    # "latestDeliveryD", which is not in available_keys, so the serializer
    # kept receiving the raw "latestDeliveryDate" string.
    shipmentItem["latestDeliveryDate"] = maya.parse(
        shipmentItem["latestDeliveryDate"]).datetime()
    available_keys.append('shipmentId')
    shipmentItem['shipmentId'] = self.shipmentId
    data = {key: shipmentItem[key] for key in available_keys}
    print(f"saveShipmentItems {data}")
    serializer = ShipmentsItemsSerializer(data=data)
    if serializer.is_valid():
        serializer.save()
        print("shipment detail saved")
    else:
        raise Exception(serializer.errors)
    return True
def to_representation(self, obj):
    """Extend the base representation with availability metadata.

    Reads optional ``arrive``/``depart`` query parameters (defaulting to
    today and today + 13 days) and attaches the object's daily
    availabilities for that window, its future DRFT capacity flag, and the
    location's max booking days.
    """
    representation = super().to_representation(obj)
    request = self.context['request']
    try:
        params = request.query_params.dict()
    except AttributeError:
        # this is a django request and not a REST request
        params = request.GET.dict()
    try:
        # Bug fix: the old code used dt.datetime.today() as the default for
        # params.get() and then passed it to maya.parse(), which accepts
        # only strings — the fallback path raised TypeError instead of the
        # ParserError being caught. Only parse when a param is present.
        if 'arrive' in params:
            arrive = maya.parse(params['arrive']).date
        else:
            arrive = dt.date.today()
        if 'depart' in params:
            depart = maya.parse(params['depart']).date
        else:
            depart = arrive + timedelta(days=13)
    except ParserError:
        arrive = dt.date.today()
        depart = arrive + timedelta(days=13)
    availabilities = [{
        'date': date,
        'quantity': quantity
    } for (date, quantity) in obj.daily_availabilities_within(arrive, depart)]
    representation['availabilities'] = availabilities
    representation['hasFutureDrftCapacity'] = obj.has_future_drft_capacity()
    representation['maxBookingDays'] = obj.location.max_booking_days
    return representation
def update_project(project):
    """Accumulate the total logged time for *project* and deactivate it."""
    total = None
    # Resolve the machine's local timezone name once, up front.
    timezone = get_localzone()
    local_tz = timezone.zone
    # Fetch every log row belonging to this project.
    project_logs = Log.select().where(Log.project_id == project.id)
    # Sum the span of each log as a timedelta.
    for log in project_logs:
        begin = parse(log.start_time).datetime(to_timezone=local_tz,
                                               naive=True)
        end = parse(log.stop_time).datetime(to_timezone=local_tz, naive=True)
        span = MayaInterval.from_datetime(begin, end).timedelta
        total = span if total is None else total + span
    # Persist the accumulated duration and mark the project inactive.
    project.duration = total
    project.status = 0
    project.save()
    print('Deactivating: {} with total time of {}'.format(
        project.name, project.duration))
def filter_queryset(self, queryset):
    """Restrict the queryset to the location slug and, when the request
    carries ``arrive``/``depart`` query params, to rooms that are available
    on every day of that period.
    """
    # TODO make sure that capacity_change and uses are prefetched somehow
    # ideally a method on Resource manager

    def room_available_during_period(room, arrive, depart):
        # A room qualifies only if no day in the window has zero quantity.
        availabilities = room.daily_availabilities_within(arrive, depart)
        return all(avail[1] != 0 for avail in availabilities)

    qs = queryset.filter(location__slug=self.kwargs['location_slug'])
    params = self.request.query_params.dict()
    # Robustness fix: the old code indexed params['arrive'] whenever *any*
    # query param was present, raising KeyError for unrelated params.
    if 'arrive' in params and 'depart' in params:
        arrive = maya.parse(params['arrive']).date
        depart = maya.parse(params['depart']).date
        room_ids = [
            room.pk for room in qs
            if room_available_during_period(room, arrive, depart)
        ]
        qs = qs.filter(id__in=room_ids)
    return qs
def post(self):
    """Create an inventory item from a multipart request.

    Expects a JSON payload under form field ``data`` and an image file
    under ``Image``. Returns 201 with the stored image path on success,
    404 with a generic message on any failure.
    """
    try:
        logger.info('getting data from user request.')
        data = json.loads(request.form['data'])
        manufacturing_date = maya.parse(
            data['manufacturing_date']).datetime(to_timezone='US/Central')
        file = request.files['Image']
        expiry_date = data.get('expiry_date', None)
        path = InventoryModel.image_save(file)
        logger.info('initialize data in InventoryModel')
        # De-duplicated: both branches previously built the exact same
        # InventoryModel; only the expiry date needs conditional parsing.
        if expiry_date:
            expiry_date = maya.parse(
                data['expiry_date']).datetime(to_timezone='US/Central')
        inventory_item = InventoryModel(data['inventory_name'],
                                        data['inventory_category'],
                                        data['quantity'], manufacturing_date,
                                        expiry_date, path)
        inventory_item.insert()
        return {"message": "insert successfully", "image_path": path}, 201
    except Exception as e:
        logging.error(e)
        return {"message": "!!oops something went wrong"}, 404
def init_next_race(self):
    """Sets the Race object's metadata according to the upcoming race.

    Queries the Ergast API for the current season and selects the first
    race whose date is today or later.

    Raises:
        ValueError: when the season contains no remaining race.
    """
    now = maya.now()
    r = requests.get("http://ergast.com/api/f1/current.json")
    races = r.json()["MRData"]["RaceTable"]
    next_race = None
    for race in races["Races"]:
        race_date = maya.parse(race["date"])
        if now.date <= race_date.date:
            # Break out of the loop when we find the first 'future' race.
            next_race = race
            break
    # Robustness fix: previously a season with no remaining races fell
    # through to a TypeError on ``next_race["round"]``.
    if next_race is None:
        raise ValueError("No upcoming race found in the current season")
    # Setting some pieces of information we will want for later
    self.round = next_race["round"]
    self.name = next_race["raceName"]
    self.location = next_race["Circuit"]["Location"]["locality"]
    self.circuit_name = next_race["Circuit"]["circuitName"]
    # Converting returned url to https because it's 2019
    self.circuit_url = next_race["Circuit"]["url"].replace("http", "https")
    # Obtaining a 'raw' maya datetime object for the race start datetime
    race_datetime = maya.parse(f"{next_race['date']} {next_race['time']}",
                               timezone="ETC/Zulu")
    self.race_datetime = race_datetime
def before_june_8_1995(patent):
    """Returns a string representation for patents was filed before June 8,
    1995.

    Takes a Patent named tuple, which is what is returned by the
    GetPatentByNumber function.
    """
    issued = maya.parse(patent.issue_date)
    filed = maya.parse(patent.filing_date)
    issue_plus_seventeen = issued.add(years=17)
    filing_plus_twenty = filed.add(years=20)
    boilerplate1 = (
        f'You asked me to calculate the expiration date for patent number '
        f'{pretty_number(patent.number)}, with patent title, "{patent.title}". '
    )
    boilerplate2 = (
        f'Filing was on {month_name[filed.month]} '
        f'{filed.day}, {filed.year}. Twenty years '
        f'after was {filing_plus_twenty.year}. The patent was issued on '
        f'{month_name[issued.month]} {issued.day}, '
        f'{issued.year}. Seventeen years later was '
        f'{issue_plus_seventeen.year}. Patent term expires the later of those dates. '
        f'Patent term adjustment does not apply to patents filed before 1999.')
    # The patent expires on the later of the two candidate dates.
    expiry = (issue_plus_seventeen
              if issue_plus_seventeen > filing_plus_twenty
              else filing_plus_twenty)
    expires_sentence = (f'The patent expires on {month_name[expiry.month]} '
                        f'{expiry.day}, {expiry.year}. ')
    return boilerplate1 + expires_sentence + boilerplate2
def get_tournaments(soup):
    """Scrape Tournament entries from the parsed tournament-listing page."""
    tournaments = []
    divs = soup.findAll(
        'div', {'class': 'media media--padded has-icons-on-hover'})
    for div in divs:
        name = div.h4.a.text
        extension = div.a['href']
        url = config['urls']['main'] + extension[1:]
        time_tags = div.findAll('time')
        # Entries without any <time> tag carry no dates; skip them.
        if len(time_tags) == 0:
            continue
        start_date = maya.parse(time_tags[0].text)
        # A single <time> tag means a one-day tournament.
        end_date = (start_date if len(time_tags) == 1
                    else maya.parse(time_tags[1].text))
        tournaments.append(
            Tournament(name=name,
                       url=url,
                       start_date=start_date,
                       end_date=end_date))
    return tournaments
def __validate_timestamp(self):
    """Validate that ``self.timestamp`` parses as "dd/mm/yyyy hh:mm".

    Raises:
        GraphQLError: when the timestamp cannot be parsed.
    """
    try:
        maya.parse(self.timestamp, day_first=True, year_first=False)
    except Exception as exc:
        # Chain the original parse error so the root cause stays visible
        # in tracebacks instead of being silently discarded.
        raise GraphQLError(
            'The timestamp you provided is not within the format: "dd/mm/yyyy hh:mm"'
        ) from exc
def check_and_format_logs_params(start, end, tail):
    """Helper to read the params for the logs command"""

    def _decode_duration_type(duration_type):
        durations = {'m': 'minutes', 'h': 'hours', 'd': 'days', 'w': 'weeks'}
        return durations[duration_type]

    suffixes = ('m', 'h', 'd', 'w')
    if not start:
        # Default window: last 5 minutes when tailing, otherwise last day.
        default = {'seconds': 300} if tail else {'days': 1}
        start_dt = maya.now().subtract(**default).datetime(naive=True)
    elif start[-1] in suffixes:
        # Relative form, e.g. "15m" or "2d".
        amount = int(start[:-1])
        start_dt = maya.now().subtract(**{
            _decode_duration_type(start[-1]): amount
        }).datetime(naive=True)
    else:
        # Absolute timestamp.
        start_dt = maya.parse(start).datetime(naive=True)
    if end and end[-1] in suffixes:
        amount = int(end[:-1])
        end_dt = maya.now().subtract(**{
            _decode_duration_type(end[-1]): amount
        }).datetime(naive=True)
    elif end:
        end_dt = maya.parse(end).datetime(naive=True)
    else:
        end_dt = None
    return start_dt, end_dt
def test_convert_datetime_to_iso8601():
    """Nested dicts/lists of datetimes are rendered as ISO-8601 strings."""
    source = {
        'key1': {
            'key11': {'key21': maya.parse('2018-02-01').datetime()},
            'key12': maya.parse('2018-01-02').datetime(),
        },
        'key2': [
            {'key111': maya.parse('2018-01-03').datetime()},
            {'key112': maya.parse('2018-01-04').datetime()},
        ],
    }
    expected = {
        'key1': {
            'key11': {'key21': '2018-02-01T00:00:00Z'},
            'key12': '2018-01-02T00:00:00Z',
        },
        'key2': [
            {'key111': '2018-01-03T00:00:00Z'},
            {'key112': '2018-01-04T00:00:00Z'},
        ],
    }
    assert convert_datetime_to_iso8601(source) == expected
def opensky_data(date, until, output_file, settings, bounds, **kwargs):
    """Download OpenSky history for the requested window and save it in the
    format implied by the output file's suffix."""
    if bounds is not None:
        # name_request yields south/north/west/east; ImpalaWrapper expects
        # west/south/east/north.
        south, north, west, east = name_request(bounds)
        bounds = west, south, east, north
    before = datetime.fromtimestamp(maya.parse(date).epoch)
    # Default to a one-day window when no end is given.
    if until is None:
        until = before + timedelta(days=1)
    else:
        until = datetime.fromtimestamp(maya.parse(until).epoch)
    # Credentials come from the settings file, empty when absent.
    config = configparser.ConfigParser()
    config.read(settings.as_posix())
    username = config.get("global", "opensky_username", fallback="")
    password = config.get("global", "opensky_password", fallback="")
    opensky = ImpalaWrapper(username, password)
    data = opensky.history(before, until, bounds=bounds, **kwargs)
    # Dispatch on the requested output format; unknown suffixes are ignored.
    writers = {
        '.pkl': data.to_pickle,
        '.csv': data.to_csv,
        '.h5': data.to_hdf,
        '.xlsx': data.to_excel,
    }
    writer = writers.get(output_file.suffix)
    if writer is not None:
        writer(output_file.as_posix())
def get_issue_time_in_status(issue, changelog):
    """Compute how long a Jira issue spent in each workflow status.

    Walks the issue's changelog chronologically (histories arrive
    newest-first, hence the backwards index) and attributes the interval
    between consecutive status-change events to the status the issue was
    leaving.

    :param issue: Jira issue object; must expose ``created``.
    :param changelog: changelog with ``histories``, each holding ``items``.
    :return: dict mapping upper-cased status name to duration expressed in
        units of ``config.default_time_scale``.
    """
    statuses = {}
    previous_status = config.jira_initial_status.upper()
    now = maya.now()
    created = maya.parse(issue.created)
    # Seed the configured initial status with the issue's full lifetime;
    # statuses discovered below accumulate their own slices separately.
    event = MayaInterval(start=created, end=now)
    statuses[previous_status] = event.duration / config.default_time_scale
    previous_status_change_date = created
    # Iterate histories from the end so changes are visited oldest-first.
    i = len(changelog.histories)
    while i > 0:
        history = changelog.histories[i - 1]
        j = 0
        while j < len(history.items):
            item = history.items[j]
            if item.field == 'status':
                # ``fromString`` is the status being left; the time elapsed
                # since the previous change belongs to it.
                status = item.fromString.upper()
                if not status in statuses:
                    statuses[status] = 0
                status_change_date = maya.parse(history.created)
                interval = MayaInterval(start=previous_status_change_date,
                                        end=status_change_date)
                duration = interval.duration / config.default_time_scale
                statuses[status] += duration
                previous_status_change_date = status_change_date
                # NOTE(review): previous_status is updated here but never
                # read again — confirm whether the initial-status seeding
                # above was meant to use it instead of the full lifetime.
                previous_status = item.toString
            j += 1
        i -= 1
    return statuses
def fields_for_measurement(measurement):
    # For a half-hour slot with `valid_from`=16:00 and `valid_to`=16:30 the
    # InfluxDB timestamp is 16:30 (`valid_to`), consistent with the
    # `interval_end` convention used in the consumption code. Two fields
    # are stored:
    #   `agile_rate_prev`: the Agile rate for 16:00-16:30 — "the previous
    #                      period" relative to the timestamp
    #   `agile_rate_next`: the Agile rate for 16:30-17:00 — "the next
    #                      period" relative to the timestamp
    # Storing both allows easy retrieval from InfluxDB: "prev" matches the
    # stored consumption data for easier calculations, while "next" is more
    # useful for graphing the Agile rate. Flux's experimental subDuration()
    # would make this less necessary, but Flux support in Grafana is not
    # yet stable, so this workaround stays until Flux is widely supported.
    from_iso = maya.parse(measurement['valid_from']).iso8601()  # e.g. 16:00
    to_iso = maya.parse(measurement['valid_to']).iso8601()  # e.g. 16:30
    fields = {
        # e.g. rate for 16:00-16:30
        'agile_rate_prev': agile_rates_with_from_dates.get(from_iso, None),
        # e.g. rate for 16:30-17:00
        'agile_rate_next': agile_rates_with_from_dates.get(to_iso, None),
    }
    return fields
def getWeather(self):
    """Fetch the current State College weather via OWM and print a summary
    list of temperatures, clouds, wind speed, status, sunrise and sunset."""
    # NOTE(review): the OWM API key is empty here — presumably supplied
    # elsewhere before this runs; confirm.
    owm = OWM('')
    mgr = owm.weather_manager()
    observation = mgr.weather_at_place('State College')
    w = observation.weather
    results = []
    # All temperature dict values (fahrenheit), in iteration order.
    temps = w.temperature('fahrenheit')
    for key in temps:
        results.append(temps.get(key))
    results.append(w.clouds)
    results.append(round(w.wind(unit='miles_hour')['speed']))
    results.append(w.status)
    # Sunrise/sunset rendered as US/Eastern wall-clock strings.
    for moment in (w.sunrise_time(timeformat='date'),
                   w.sunset_time(timeformat='date')):
        local = maya.parse(moment).datetime(to_timezone='US/Eastern',
                                            naive=True)
        results.append(datetime.datetime.strftime(local, "%I:%M%p"))
    print(results)
def get_data(self, request):
    """Parse start/stop times from the JSON request body.

    Returns a tuple of (start_hour, stop_time, week_day_indx) where the
    weekday index is zero-based.
    """
    payload = JSONParser().parse(BytesIO(request.data))
    start_time = maya.parse(payload['start_time'])
    stop_time = maya.parse(payload['stop_time'])
    week_day_indx = start_time.weekday - 1  # MayaDT is 1-based for weekday
    start_hour = start_time.hour
    return start_hour, stop_time, week_day_indx
def test_check_and_format_logs_params(mocked_maya_now, start, end, tail,
                                      exp_start_ts, exp_end_ts):
    """start/end/tail params resolve to the expected naive datetimes."""
    actual_start, actual_end = check_and_format_logs_params(start, end, tail)
    expected_start = maya.parse(exp_start_ts).datetime(naive=True)
    assert actual_start == expected_start
    if exp_end_ts is None:
        assert actual_end is None
    else:
        assert actual_end == maya.parse(exp_end_ts).datetime(naive=True)
def get_record_by_date(db,
                       start_datetime=None,
                       end_datetime=None,
                       timez='America/Sao_Paulo'):
    """
    This routine returns all records or archives that has records performed
    in a time range.
    :param start_datetime: Format YYYY-MM-DD HH:MM, where HH:MM is optional
    :param end_datetime: Format YYYY-MM-DD HH:MM, where HH:MM is optional
    :param timez: The timezone used to the query. Default is SAO_PAULO, Brazil
    :param db: DB Connection
    :return: A dict with the Record_ID and the Path (Local or Remote) where
        you can find the record
    """
    ret_dict = dict()
    # Nothing to query when neither bound is given.
    if start_datetime is None and end_datetime is None:
        return ret_dict
    # Parse each supplied bound into an aware datetime in the requested zone;
    # a missing bound stays None and selects the open-ended query below.
    td_query_start = parse(start_datetime, timezone=timez).datetime(
        to_timezone=timez, naive=False) if start_datetime is not None \
        else start_datetime
    td_query_end = parse(end_datetime, timezone=timez).datetime(
        to_timezone=timez, naive=False) if end_datetime is not None \
        else end_datetime
    if td_query_start and td_query_end is None:
        # Only a lower bound: everything starting at or after it.
        records_list = select(il for il in db.Record
                              if il.start >= td_query_start)
    elif td_query_end and td_query_start is None:
        # Only an upper bound: everything starting at or before it.
        records_list = select(il for il in db.Record
                              if il.start <= td_query_end)
    else:
        # Both bounds: records starting inside the window, or records that
        # span the window's start.
        # NOTE(review): the second clause tests overlap against the start
        # bound only — confirm records spanning the *end* bound should be
        # excluded.
        records_list = select(
            il for il in db.Record
            if (il.start >= td_query_start and il.start <= td_query_end) or (
                il.end >= td_query_start and il.start <= td_query_start))
    if len(records_list) == 0:
        return ret_dict
    for rec in records_list:
        if rec.status == 'archived':
            # Prefer the remote path once the archive has been uploaded.
            ret_dict[str(rec.id)] = {
                'Archive':
                rec.archive.remote_path
                if rec.archive.uploaded else rec.archive.local_path,
                'Start': str(rec.start),
                'End': str(rec.end)
            }
        else:
            ret_dict[str(rec.id)] = {
                'Path': rec.path,
                'Start': str(rec.start),
                'End': str(rec.end)
            }
    return ret_dict
def test_parse():
    """maya.parse handles natural-language dates and honours day_first."""
    parsed = maya.parse('February 21, 1994')
    assert format(parsed) == '1994-02-21 00:00:00+00:00'
    # Ambiguous numeric dates default to month-first...
    parsed = maya.parse('01/05/2016')
    assert format(parsed) == '2016-01-05 00:00:00+00:00'
    # ...and flip with day_first=True.
    parsed = maya.parse('01/05/2016', day_first=True)
    assert format(parsed) == '2016-05-01 00:00:00+00:00'
def test_sat_am(self):
    """A Saturday 7am-12pm reservation is priced at 2000."""
    url = 'http://127.0.0.1:8000/find/'
    start = maya.parse("2015-07-04T07:00:00Z").datetime()
    stop = maya.parse("2015-07-04T12:00:00Z").datetime()
    resvr = Reservation(start_time=start, stop_time=stop)
    # Serialize the reservation and post it to the pricing endpoint.
    payload = JSONRenderer().render(ReservationSerializr(resvr).data)
    response = requests.post(url=url, json=payload)
    self.assertEqual(int(response.text), 2000)
def deltaSeconds(action, sent):
    """Categorize the latency between *sent* and *action* timestamps.

    :param action: parseable timestamp of when the action happened
    :param sent: parseable timestamp of when the message was sent
    :return: "Fast" (<5 min), "Medium" (<1 h), "Slow" (<24 h), or
        "Very Slow" otherwise.
    """
    delta = maya.parse(action) - maya.parse(sent)
    # Bug fix: timedelta.seconds ignores the days component (it wraps every
    # 24 h), so multi-day gaps were misclassified; use total_seconds().
    minutes = int(delta.total_seconds() / 60)
    if (minutes < 5):
        response = "Fast"
    elif (minutes < 60):
        response = "Medium"
    elif (minutes < 1440):
        response = "Slow"
    else:
        response = "Very Slow"
    return response
def test_sat_pm(self):
    """A reservation running into Saturday evening is unavailable."""
    url = 'http://127.0.0.1:8000/find/'
    start = maya.parse("2015-07-04T07:00:00Z").datetime()
    stop = maya.parse("2015-07-04T20:00:00Z").datetime()
    resvr = Reservation(start_time=start, stop_time=stop)
    # Serialize the reservation and post it to the pricing endpoint.
    payload = JSONRenderer().render(ReservationSerializr(resvr).data)
    response = requests.post(url=url, json=payload)
    self.assertTrue(response.content == '"unavailable"')
def get_adjustment_by_adjustment_date_range(self,
                                            supplier_id,
                                            min_adjustment_date,
                                            max_adjustment_date=None):
    """Fetch active, latest adjustment-inventory items in a date range.

    :param supplier_id: partition key for the query.
    :param min_adjustment_date: ISO date(-time) string; inclusive lower bound.
    :param max_adjustment_date: optional ISO date(-time) string; when given
        the range is bounded, otherwise it is open-ended above the minimum.
    :return: list of adjustment dicts with ``adjustment_date`` rendered
        back to a YYYY-MM-DD string.
    """
    table = 'brewoptix-adjustment-inventory'
    # Dates are stored as epoch seconds of the date portion only.
    min_epoch = maya.parse(min_adjustment_date.split('T')[0]).epoch
    # De-duplicated: the two branches previously repeated the whole query
    # dict and differed only in the adjustment_date condition.
    key_condition = Key('supplier_id').eq(supplier_id)
    if max_adjustment_date:
        max_epoch = maya.parse(max_adjustment_date.split('T')[0]).epoch
        key_condition = key_condition & Key('adjustment_date').between(
            min_epoch, max_epoch)
    else:
        key_condition = key_condition & Key('adjustment_date').gt(min_epoch)
    query = {
        'KeyConditionExpression': key_condition,
        'FilterExpression': Attr('latest').eq(True) & Attr('active').eq(True),
        'IndexName': 'by_supplier_id_and_adjustment_date'
    }
    response = self._storage.get_items(table, query)
    adjustments_obj = []
    for item in response['Items']:
        # The 4 lines below can be uncommented if we move
        # from ALL to KEYS_ONLY for the table
        # entity_id = item['EntityID']
        # adjustment_resp = self._storage.get(table, entity_id)
        # adjustment = adjustment_resp['Items'][0]
        # adjustment = clean(adjustment)
        adjustment = json_util.loads(clean(item))
        adjustment['adjustment_date'] = maya.to_iso8601(
            datetime.utcfromtimestamp(
                adjustment['adjustment_date'])).split('T')[0]
        adjustments_obj.append(adjustment)
    return adjustments_obj
def test_get_all_matches_international(self, mocker):
    """Matches parsed from an international tournament page equal mocks."""
    mocker.patch(
        'requests.get',
        return_value=ResponseMock(
            'tests/unit_tests/mocks/tournament/international.html'))
    tournament = self.get_example_tournament(
        name='Gothenburg Open 2018',
        url=
        'http://badmintonsweden.tournamentsoftware.com/sport/tournament.aspx?id=55DA6622-FF9B-4E89-ADC4-13FCF07A839C',  # NOQA
        start_date=maya.parse('2018-05-11'),
        end_date=maya.parse('2018-05-13'),
    )
    matches = tournament.get_all_matches()

    def unplayed_match(team1_names, team2_names):
        # The checked matches share time/seed/score shape; only players vary.
        return get_match_mock(
            scheduled_time=maya.parse('2018-05-11 12:00'),
            team1_players=[get_player_mock(name=n) for n in team1_names],
            team2_players=[get_player_mock(name=n) for n in team2_names],
            team1_seed='',
            team2_seed='',
            score=get_score_mock(sets=[]),
            is_played=False)

    # men singles
    to_test = [matches[0], matches[2]]
    mocks = [
        unplayed_match(['Viktor Chen', 'Irma Hansson'],
                       ['Ludvig Edholm', 'Greta Ribokas']),
        unplayed_match(['Rasmus Gross', 'Hanna Malm'],
                       ['Olliver Kauffman', 'Amalie Kjalke']),
    ]
    assert_mocks_equals_objects(mocks, to_test)
def _check_new_incidents_and_send(
    self,
    compiled_query: object,
    maya_initial_incident_date: maya.MayaDT,
    new_incident_json: dict,
) -> None:
    """
    This will check an interval's worth of incidents, see if we have any new
    ones, and return those if they match the subject query (if available).
    This will also set the target time to the most recent incident if any
    are found, regardless of if they are returned.

    :param compiled_query: a compiled regex query (re.compile("blah"))
    :param maya_initial_incident_date: MayaDT - last time checked for incidents
    :param new_incident_json: dict - Payload from the last get_new_incidents
        call in descending order by time
    :return:
    """
    # Entries are ordered newest-first, so entry 0 carries the most recent
    # Submit Date in this batch.
    new_incident_date = new_incident_json.get("entries")[0].get(
        "values").get("Submit Date")
    self.logger.info(
        f"Checking for incidents newer than: {new_incident_date}")
    maya_new_incident_date = maya.parse(new_incident_date)
    if maya_new_incident_date > maya_initial_incident_date:
        # New incident was found
        new_date = maya_new_incident_date
        # Reset the time as fast as possible, so we don't miss any new inidents while we're processing this set.
        for incident in new_incident_json.get(
                "entries"
        ):  # Check each incident and see if we have more than one new incident
            incident_to_check = incident.get("values")
            maya_new_date = maya.parse(
                incident_to_check.get("Submit Date"))
            if maya_new_date > maya_initial_incident_date:
                # We have a new incident, see if we need to send it
                new_incident_description = incident_to_check.get(
                    "Description")
                self.logger.info(
                    f"New incident found with description: {new_incident_description}"
                )
                # Emit when there is no query, or the description matches.
                if not compiled_query or compiled_query.match(
                        new_incident_description):
                    self.logger.info(
                        f"Returning incident with description: {new_incident_description}"
                    )
                    self.send(
                        {Output.INCIDENT: komand.helper.clean(incident)})
                else:
                    self.logger.info(
                        f"Incident did not match query: {compiled_query}")
        # Advance the stored watermark to the newest Submit Date only after
        # the whole batch has been examined.
        self.maya_initial_incident_date = new_date
def get_sunrise_and_sunset_seconds() -> Tuple[int, int]:
    """Return today's sunrise and sunset as seconds past local midnight."""
    query_url = SUNRISE_SUNSET_QUERY_URL.format(
        date_string=datetime.date.today().isoformat())
    results = requests.get(query_url).json()['results']
    midnight = datetime.datetime.now().replace(hour=0,
                                               minute=0,
                                               second=0,
                                               microsecond=0)

    def seconds_since_midnight(timestamp):
        # API timestamps are converted to naive US/Pacific wall-clock time
        # before subtracting local midnight.
        local_dt = maya.parse(timestamp).datetime(to_timezone='US/Pacific',
                                                  naive=True)
        return int((local_dt - midnight).total_seconds())

    return (seconds_since_midnight(results['sunrise']),
            seconds_since_midnight(results['sunset']))
def get_example_tournament(
        self,
        name='Yonex Askim SGP 2018',
        url='http://badmintonsweden.tournamentsoftware.com/sport/tournament.aspx?id=889C8871-C659-4B7E-A630-325459E4EA87',  # NOQA
        start_date=maya.parse('2018-04-28'),
        end_date=maya.parse('2018-04-29')):
    """Build a Tournament with sensible test defaults, all overridable."""
    return Tournament(
        name=name,
        url=url,  # NOQA
        start_date=start_date,
        end_date=end_date)
def get_production_by_production_date_range(self,
                                            supplier_id,
                                            min_production_date,
                                            max_production_date=None):
    """Fetch active, latest production items in a production-date range.

    :param supplier_id: partition key for the query.
    :param min_production_date: ISO date(-time) string; inclusive lower bound.
    :param max_production_date: optional ISO date(-time) string; when given
        the range is bounded, otherwise it is open-ended above the minimum.
    :return: list of production dicts with ``production_date`` rendered
        back to a YYYY-MM-DD string.
    """
    table = 'brewoptix-production'
    # Dates are stored as epoch seconds of the date portion only.
    min_epoch = maya.parse(min_production_date.split('T')[0]).epoch
    # De-duplicated: the two branches previously repeated the whole query
    # dict and differed only in the production_date condition.
    key_condition = Key('supplier_id').eq(supplier_id)
    if max_production_date:
        max_epoch = maya.parse(max_production_date.split('T')[0]).epoch
        key_condition = key_condition & Key('production_date').between(
            min_epoch, max_epoch)
    else:
        key_condition = key_condition & Key('production_date').gt(min_epoch)
    query = {
        'KeyConditionExpression': key_condition,
        'FilterExpression': Attr('latest').eq(True) & Attr('active').eq(True),
        'IndexName': 'by_supplier_id_and_production_date'
    }
    response = self._storage.get_items(table, query)
    production_obj = []
    for item in response['Items']:
        # The 4 lines below can be uncommented if we move
        # from ALL to KEYS_ONLY for the table
        # entity_id = item['EntityID']
        # production = self._storage.get(table, entity_id)
        # production = clean(production)
        production = json_util.loads(clean(item))
        production['production_date'] = datetime.utcfromtimestamp(
            production['production_date']).isoformat().split('T')[0]
        production_obj.append(production)
    return production_obj
def is_date(self, value):
    """
    Check value is datetime format
    :param value: candidate value; only strings can qualify
    :return: True when the string parses as a date/datetime, else False
    """
    try:
        if not isinstance(value, str):
            return False
        # NOTE(review): both a bare parse() and maya.parse() are invoked —
        # the value must satisfy both parsers; confirm whether one of the
        # calls is redundant.
        parse(value)
        maya.parse(value)
        return True
    except ValueError:
        # NOTE(review): only ValueError is caught — other parser failures
        # (e.g. TypeError/OverflowError) would propagate; confirm intended.
        return False
def get_details_by_date_range(self, supplier_id, start_date, end_date=None):
    """Return non-zero on-hand detail rows for a supplier in a date range,
    paging through Aurora results 1000 rows at a time.

    :param supplier_id: supplier whose rows are fetched.
    :param start_date: parseable date string; inclusive lower bound.
    :param end_date: parseable date string; inclusive upper bound.
        NOTE(review): despite the None default, maya.parse(None) below will
        fail — confirm callers always supply it.
    :return: list of dicts keyed by created_on/brand_id/package_type_id/
        quantity/actual.
    """
    # query on_hand between start date minus 1 and end date
    # NOTE(review): the comment above says "start date minus 1" but no
    # subtraction happens — confirm which behavior is intended.
    start_date = maya.parse(start_date).iso8601().split('T')[0]
    end_date = maya.parse(end_date).iso8601().split('T')[0]
    table = 'on_hand'
    offset = 0
    count = 1000
    details = []
    while True:
        # SECURITY: the SQL is assembled with str.format rather than bound
        # parameters — safe only if supplier_id and the dates are trusted
        # or sanitized upstream; prefer a parameterized query.
        query = """
            SELECT created_on, brand_id, package_type_id, quantity, actual
            FROM {TABLE}
            WHERE supplier_id='{SUPPLIER_ID}'
            AND (created_on BETWEEN '{START_DATE}' AND '{END_DATE}')
            AND (quantity<>0)
            ORDER BY brand_id, package_type_id, created_on, actual
            LIMIT {OFFSET}, {COUNT}
            """.format(TABLE=table,
                       SUPPLIER_ID=supplier_id,
                       START_DATE=start_date,
                       END_DATE=end_date,
                       OFFSET=offset,
                       COUNT=count)
        results = self._aurora_storage.get_items(query)
        # convert from response
        keys = [
            "created_on", "brand_id", "package_type_id", "quantity", "actual"
        ]
        for result in results:
            record = {}
            for i, val in enumerate(result):
                record[keys[i]] = val
            if record['actual']:
                # NOTE(review): every truthy 'actual' is forced to False
                # here — looks suspicious; confirm the intended mapping.
                record['actual'] = False
            details.append(record)
        # Fewer than a full page means we've consumed the last page.
        if len(results) < 1000:
            break
        else:
            offset += 1000
    return details
def deconvert(self, value):
    '''Converts a value from the datastore to a native python object

    :param value: ISO 8601 datetime string, or None
    :return: the corresponding ``datetime.date``, or None for None input
    :raises BadValueError: when the value cannot be parsed
    '''
    if value is None:
        return None
    try:
        value = maya.parse(value).datetime().date()
        return value
    except Exception as e:
        # Chain the original failure (previously the caught exception was
        # bound but silently discarded) so the root cause stays visible.
        raise BadValueError(
            "Expected an ISO 8601 DateTime string from deconversion") from e
def deserialize(self, value):
    """Parse an ISO 8601 datetime string into a ``datetime.date``.

    :param value: ISO 8601 formatted datetime string
    :raises BadValueError: when *value* is not a string
    """
    # Bug fix: ``basestring`` exists only in Python 2 and raises NameError
    # on Python 3 (this file already uses f-strings elsewhere); check str.
    if not isinstance(value, str):
        raise BadValueError("We expect an ISO 8601 formatted datetime string here")
    val = maya.parse(value).datetime().date()
    return val
def serialize(self, value):
    """We can serialize basic types by calling str on their value"""
    # Reject anything that is not an instance of the field's declared type.
    if not isinstance(value, self.type):
        raise BadValueError("You cannot serialize a non datetime object with this serializer")
    # NOTE(review): maya.parse() expects a string; passing a datetime/time
    # instance here looks suspicious — confirm self.type is str-like or
    # that maya accepts these objects.
    val = maya.parse(value).datetime().time()
    return val.isoformat()