def extract_time(self) -> PartialPost:
    # Try to extract the timestamp from the page insights
    page_insights = self.data_ft.get('page_insights', {})
    for page in page_insights.values():
        try:
            timestamp = page['post_context']['publish_time']
            return {
                'time': datetime.fromtimestamp(timestamp),
            }
        except (KeyError, ValueError):
            continue

    # Try to extract from the abbr element
    date_element = self.element.find('abbr', first=True)
    if date_element is not None:
        date = utils.parse_datetime(date_element.text, search=False)
        if date:
            return {'time': date}
        logger.debug("Could not parse date: %s", date_element.text)
    else:
        logger.warning("Could not find the abbr element for the date")

    # Try to look in the entire text
    date = utils.parse_datetime(self.element.text)
    if date:
        return {'time': date}

    return None
def update_weather(self, weather):
    zip_code = int(self.config.location)  # renamed from `zip` to avoid shadowing the builtin
    if not (1000 <= zip_code <= 9999):
        raise ValueError("Invalid Swiss ZIP code")
    date = datetime.utcnow()
    app.log("Fetching weather data...")
    for ix in range(60):
        url = (
            "https://www.meteoschweiz.admin.ch/product/output/forecast-chart/"
            f"version__{(date - timedelta(minutes=ix)).strftime('%Y%m%d_%H%M')}"
            f"/de/{zip_code}00.json"
        )
        response = requests.get(url)
        if response.status_code != 404:
            break
    else:
        raise ValueError("No JSON data found within an hour")
    response.raise_for_status()
    data = response.json()
    for key, instant, value in itertools.chain(
            (('rain', dump_datetime(parse_datetime(hour[0])), hour[1])
             for day in data for hour in day['rainfall']),
            (('sun', dump_datetime(parse_datetime(hour[0])), hour[1] / 100)
             for day in data for hour in day['sunshine']),
            (('temperature', dump_datetime(parse_datetime(hour[0])), hour[1])
             for day in data for hour in day['temperature']),
            (('wind', dump_datetime(parse_datetime(hour[0])), hour[1])
             for day in data for hour in day['wind']['data'])):
        hour_weather = weather.get(instant)
        if not hour_weather:
            hour_weather = HourlyWeather()
            weather[instant] = hour_weather
        setattr(hour_weather, key, value)
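# A probe sketch for the endpoint above (illustrative values only): the loop
# walks back one minute at a time, so with date = 2024-01-01 12:00 UTC and
# zip_code = 8000 the first URL tried would be
# https://www.meteoschweiz.admin.ch/product/output/forecast-chart/version__20240101_1200/de/800000.json
# and it keeps stepping back until a published forecast version answers with
# something other than 404, giving up after an hour's worth of attempts.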
def create_task():
    if request.args.get('key') != ADMIN_TOKEN:
        abort(401)
    # repeat_times was previously read without being validated; include it here
    required = ('title', 'event_owner', 'details', 'event_start',
                'event_end', 'repeat_times')
    if not request.json or any(field not in request.json for field in required):
        abort(400)
    title = request.json['title']
    details = request.json['details']
    start_dt = parse_datetime(request.json['event_start'])
    end_dt = parse_datetime(request.json['event_end'])
    owner = request.json['event_owner']
    repeat_times = int(request.json['repeat_times'])
    events_created = []
    for week in range(repeat_times):
        evt = Event(event_start=start_dt + timedelta(days=7 * week),
                    event_end=end_dt + timedelta(days=7 * week),
                    created_date=datetime.now(),
                    last_modified_date=datetime.now(),
                    event_owner=owner, title=title, details=details)
        events_created.append(evt.as_dict())
        db_alchemy.session.add(evt)
    db_alchemy.session.commit()
    return jsonify({'Success': 'True', 'Number': repeat_times,
                    'events': events_created}), 201
def get_dates(sdate, edate):
    sdt = utils.parse_datetime(sdate, form='%Y-%m-%d')
    edt = utils.parse_datetime(edate, form='%Y-%m-%d')
    delta = edt - sdt
    return [utils.format_datetime(sdt + timedelta(days=i), form='%Y-%m-%d')
            for i in range(delta.days + 1)]
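# Usage sketch for get_dates (hypothetical values), assuming utils wraps the
# strptime/strftime pattern passed via `form`:
# get_dates('2024-01-30', '2024-02-02')
# -> ['2024-01-30', '2024-01-31', '2024-02-01', '2024-02-02']
# Both endpoints are included because the range runs to delta.days + 1.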
async def stream_user_comments(connection, data):
    idx = 1
    async for item in api.get_user_comments(
            connection,
            user_token=data["user_token"],
            timestamp_from=parse_datetime(data.get("timestamp_from")),
            timestamp_to=parse_datetime(data.get("timestamp_to"))):
        yield {"record{}".format(idx): item}
        idx += 1
async def stream_entity_replies(connection, data):
    idx = 1
    async for item in api.get_entity_comments(
            connection,
            entity_type=data["type"],
            entity_token=data["entity"],
            limit=0,
            offset=0,
            timestamp_from=parse_datetime(data.get("timestamp_from")),
            timestamp_to=parse_datetime(data.get("timestamp_to")),
            with_replies=True):
        yield {"record{}".format(idx): item}
        idx += 1
def test_parse_datetime(self):
    from utils import parse_datetime, DatetimeParseError

    # epoch milliseconds
    r = parse_datetime('1285041600000')
    self.assertEqual(r.year, 2010)
    # epoch seconds
    r = parse_datetime('1283140800')
    self.assertEqual(r.year, 2010)
    # epoch seconds as a float string
    r = parse_datetime('1286744467.0')
    self.assertEqual(r.year, 2010)
    self.assertRaises(DatetimeParseError, parse_datetime, 'junk')
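# A minimal sketch of an epoch parser consistent with the tests above (an
# assumption about utils.parse_datetime, not its actual implementation):
# accept epoch milliseconds, epoch seconds, or float-second strings, and
# raise DatetimeParseError (imported from utils above) for anything else.
def _parse_epoch_sketch(value):
    from datetime import datetime
    try:
        number = float(value)
    except ValueError:
        raise DatetimeParseError(value)
    if number > 1e11:   # values this large are implausible as seconds,
        number /= 1000.0  # so treat them as milliseconds
    return datetime.fromtimestamp(number)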
def parse(cls, api, json):
    status = cls(api)
    for k, v in json.items():
        if k == 'user':
            user = User.parse(api, v)
            setattr(status, 'author', user)
            setattr(status, 'user', user)  # DEPRECATED
        elif k == 'screen_name':
            setattr(status, k, v)
        elif k == 'created_at':
            # FIXME: created_at may be an empty string
            if v:
                setattr(status, k, parse_datetime(v))
            else:
                print('created_at is an empty string')
                setattr(status, k, None)
        elif k == 'source':
            if '<' in v:
                setattr(status, k, parse_html_value(v))
                setattr(status, 'source_url', parse_a_href(v))
            else:
                setattr(status, k, v)
        elif k == 'retweeted_status':
            setattr(status, k, Status.parse(api, v))
        elif k == 'geo':
            setattr(status, k, Geo.parse(api, v))
        else:
            setattr(status, k, v)
    return status
async def patch_task(id):
    """Route that patches a task given its ID."""
    if not id.isdigit():
        return '', HTTPCode.BADREQUEST
    tasks = current_app.config['task_manager']
    current = await tasks.get(id=int(id))
    if not current:
        return '', HTTPCode.NOTFOUND

    data = await request.form
    title = data.get("title") or None
    description = data.get("description") or None
    if title:
        current.title = title
    if description:
        current.description = description

    max_score = data.get("max_score")
    if max_score and max_score.isdigit():
        current.max_score = int(max_score)

    date_due = data.get("date_due")
    if date_due:
        try:
            current.date_due = parse_datetime(date_due)
        except DateTimeParserError:
            return '', HTTPCode.BADREQUEST  # Datetime formatted incorrectly

    await tasks.update(current)
    return '', HTTPCode.OK
async def make_new_task(id):
    """Route that creates a new task for a given group. When providing the
    date due, it must be in UTC and in the format: dd/mm/yyyy|hh:mm"""
    if not id.isdigit():
        return '', HTTPCode.BADREQUEST
    groups = current_app.config['group_manager']
    group = await groups.get(group_id=int(id))
    if not group:
        return '', HTTPCode.NOTFOUND

    data = await request.form
    title = data.get("title") or None
    description = data.get("description") or None
    max_score = data.get("max_score") or None
    to_parse = data.get("date_due") or None
    try:
        date_due = parse_datetime(to_parse)
    except Exception:
        return '', HTTPCode.BADREQUEST
    if (not title or not description or not max_score
            or not max_score.isdigit() or not date_due):
        # Not all necessary args given, return BADREQUEST
        return '', HTTPCode.BADREQUEST

    tasks = current_app.config['task_manager']
    try:
        await tasks.create(int(id), title, description, date_due, int(max_score))
        all_tasks = await tasks.get(group_id=int(id))
        new_task = max(all_tasks, key=lambda x: x.id)
        return '', HTTPCode.CREATED, {"Location": "/task/" + str(new_task.id)}
    except Exception:
        return '', HTTPCode.BADREQUEST
def create_request():
    form = RequestForm(request.form)
    if request.method == 'POST' and form.validate():
        geolocation = getGeocodeLocation(form.location_string.data)
        newrequest = Request(user_id=login_session['id'],
                             meal_type=form.meal_type.data,
                             location_string=form.location_string.data,
                             latitude=geolocation[0],
                             longitude=geolocation[1])
        session.add(newrequest)
        session.commit()
        date_request = parse_datetime(year=form.year.data,
                                      month=form.month.data,
                                      day=form.day.data)
        if date_request is None:
            flash('Date not valid...')
            return redirect(url_for('users.index'))
        newrequestdate = DateTimeRequest(request=newrequest.id,
                                         mealtime=form.meal_time.data,
                                         date=date_request)
        session.add(newrequestdate)
        session.commit()
        flash('Successfully created!')
        return redirect(url_for('users.index'))
    flash_errors(form)
    return redirect(url_for('users.index'))
def test_datetime_formats():
    datetime_samples = [
        '2022-01-31T00:00:00.000Z',
        '2022-01-31T00:00:00Z',
        '2022-01-31',
        '01/22',
        '01-22',
        '0122'
    ]
    for sample in datetime_samples:
        assert isinstance(parse_datetime(sample), datetime)
def parse(cls, api, json):
    status = cls(api)
    for k, v in json.items():
        if k == 'user':
            user = User.parse(api, v)
            setattr(status, 'author', user)
            setattr(status, 'user', user)  # DEPRECATED
        elif k == 'screen_name':
            setattr(status, k, v)
        elif k == 'created_at':
            if v == '':
                # When the retweeted tweet is deleted, v is ''
                setattr(status, k, None)
            else:
                # the else was missing, so parse_datetime('') ran anyway
                setattr(status, k, parse_datetime(v))
        elif k == 'source':
            if '<' in v:
                setattr(status, k, parse_html_value(v))
                setattr(status, 'source_url', parse_a_href(v))
            else:
                setattr(status, k, v)
        elif k == 'retweeted_status':
            setattr(status, k, Status.parse(api, v))
        elif k == 'geo':
            setattr(status, k, Geo.parse(api, v))
        else:
            setattr(status, k, v)
    return status
def parse(cls, api, json):
    status = cls(api)
    for k, v in json.items():
        if k == 'user':
            user_model = getattr(api.parser.model_factory, 'user')
            user = user_model.parse(api, v)
            setattr(status, 'author', user)
            setattr(status, 'user', user)  # DEPRECATED
        elif k == 'created_at':
            setattr(status, k, parse_datetime(v))
        elif k == 'source':
            if '<' in v:
                setattr(status, k, parse_html_value(v))
                setattr(status, 'source_url', parse_a_href(v))
            else:
                setattr(status, k, v)
                setattr(status, 'source_url', None)
        elif k == 'retweeted_status':
            setattr(status, k, Status.parse(api, v))
        elif k == 'place':
            if v is not None:
                setattr(status, k, Place.parse(api, v))
            else:
                setattr(status, k, None)
        else:
            setattr(status, k, v)
    return status
def create_job(dbsession, data):
    job = Job()
    # service_id and service_provider_id pass through `data` unchanged;
    # the original pop-and-reassign of those keys was a no-op
    data['appointment_time'] = parse_datetime(data['appointment_time'])
    user_id = data.pop('user_id', 0)
    if user_id:
        user = get_user(dbsession, user_id)
    elif data.get('user'):
        user_data = data.pop('user')
        user = create_user(dbsession, user_data)
    else:
        raise AppException('User id or user data required to create job')
    data['user'] = user
    update_model_from_dict(job, data)
    dbsession.add(job)
    dbsession.commit()
    tasks.create_job.apply_async(
        (job.id,),
        queue=config.JOB_QUEUE
    )
    return job
async def put_task(id):
    """Route that puts a task given its ID."""
    if not id.isdigit():
        return '', HTTPCode.BADREQUEST

    data = await request.form
    title = data.get("title") or None
    description = data.get("description") or None
    max_score = data.get("max_score") or None
    date_due = data.get("date_due") or None
    if not (title and description and date_due and max_score and max_score.isdigit()):
        return '', HTTPCode.BADREQUEST

    tasks = current_app.config['task_manager']
    current = await tasks.get(id=int(id))
    if not current:
        return '', HTTPCode.NOTFOUND

    current.title = title
    current.description = description
    current.max_score = int(max_score)  # store as int, matching patch_task
    try:
        current.date_due = parse_datetime(date_due)
    except DateTimeParserError:
        return '', HTTPCode.BADREQUEST
    await tasks.update(current)
    return '', HTTPCode.OK
def get_from_kwargs(**kwargs):
    if kwargs['PaymentMethod'] == 'credit_card':
        return CreditCard(kwargs['CreditCardNumber'],
                          kwargs['CardHolder'],
                          utils.parse_datetime(kwargs['ExpirationDate']),
                          kwargs.get('SecurityCode'))
    else:
        return OtherPaymentMethod()
def process_list(self, api_version, data):  # pylint: disable=unused-argument
    """
    This method returns details for a given set of errata.

    :param data: data obtained from the API; we're interested in data["errata_list"]

    :returns: dictionary containing detailed information for the given errata list
    """
    validate(data, JSON_SCHEMA)

    modified_since = data.get("modified_since", None)
    modified_since_dt = parse_datetime(modified_since)
    errata_to_process = data.get("errata_list", None)
    page = data.get("page", None)
    page_size = data.get("page_size", None)

    response = {"errata_list": {}}
    if modified_since:
        response["modified_since"] = modified_since

    if not errata_to_process:
        return response

    if len(errata_to_process) == 1:
        # treat a single label like a regex, get all matching names
        errata_to_process = self.find_errata_by_regex(errata_to_process[0])

    filters = []
    # if we have information about modified/published dates and receive
    # "modified_since" in the request, compare the dates
    if modified_since:
        filters.append((self._filter_modified_since, [modified_since_dt]))

    errata_list = {}
    errata_page_to_process, pagination_response = paginate(
        errata_to_process, page, page_size, filters=filters)
    for errata in errata_page_to_process:
        errata_detail = self.cache.errata_detail.get(errata, None)
        if not errata_detail:
            continue
        errata_list[errata] = {
            "synopsis": none2empty(errata_detail[ERRATA_SYNOPSIS]),
            "summary": none2empty(errata_detail[ERRATA_SUMMARY]),
            "type": none2empty(errata_detail[ERRATA_TYPE]),
            "severity": none2empty(errata_detail[ERRATA_SEVERITY]),
            "description": none2empty(errata_detail[ERRATA_DESCRIPTION]),
            "solution": none2empty(errata_detail[ERRATA_SOLUTION]),
            "issued": none2empty(format_datetime(errata_detail[ERRATA_ISSUED])),
            "updated": none2empty(format_datetime(errata_detail[ERRATA_UPDATED])),
            "cve_list": errata_detail[ERRATA_CVE],
            "package_list": pkgidlist2packages(self.cache, errata_detail[ERRATA_PKGIDS]),
            "bugzilla_list": errata_detail[ERRATA_BUGZILLA],
            "reference_list": errata_detail[ERRATA_REFERENCE],
            "url": none2empty(errata_detail[ERRATA_URL])
        }
    response["errata_list"] = errata_list
    response.update(pagination_response)
    return response
def parse(cls, api, json):
    ss = cls(api)
    for k, v in json.items():
        if k == 'created_at':
            setattr(ss, k, parse_datetime(v))
        else:
            setattr(ss, k, v)
    return ss
def read(self, start=None, stop=None, q=None):
    """Read a meter.

    :param start: Start date and time.
    :type start: datetime
    :param stop: Stop date and time.
    :type stop: datetime
    :param q: List of filters excluding timestamp filters
    :type q: List
    :return: Value of reading
    :rtype: Float
    """
    # Default times to month to date
    default_start, default_stop = utils.mtd_range()
    if not start:
        start = default_start
    if not stop:
        stop = default_stop

    logger.info("Start: {}".format(start))
    logger.info("Stop: {}".format(stop))
    logger.info("Meter name: {}".format(self.name))

    if start > stop:
        raise InvalidTimeRangeError(start, stop)

    # Add times to query. Times are +/- the extra time.
    q = q or []
    q.append(query.query(
        'timestamp', 'gt', start - self._extra_time, 'datetime'
    ))
    q.append(query.query(
        'timestamp', 'le', stop + self._extra_time, 'datetime'
    ))

    # Count of samples:
    count = self.count(q)
    logger.debug("{} samples according to statistics.".format(count))
    if not count:
        return []

    # Get samples
    samples = self.client.samples.list(
        meter_name=self.name, q=q, limit=count
    )
    logger.debug(
        "{} samples according to sample-list.".format(len(samples))
    )

    # Convert timestamps from strings to datetime objects
    for s in samples:
        s.timestamp = utils.normalize_time(
            utils.parse_datetime(s.timestamp)
        )

    # Sort by resource id and then timestamps in ascending order
    # (sort(cmp=...) is Python 2 only; use cmp_to_key for Python 3)
    samples.sort(key=functools.cmp_to_key(_cmp_sample))

    # Return generator
    return self._reading_generator(samples, start, stop)
def read(self, start=None, stop=None, q=None):
    """Read a meter.

    :param start: Start date and time.
    :type start: datetime
    :param stop: Stop date and time.
    :type stop: datetime
    :param q: List of filters excluding timestamp filters
    :type q: List
    :return: Value of reading
    :rtype: Float
    """
    # Default times to month to date
    default_start, default_stop = utils.mtd_range()
    if not start:
        start = default_start
    if not stop:
        stop = default_stop

    logger.info("Start: {}".format(start))
    logger.info("Stop: {}".format(stop))
    logger.info("Meter name: {}".format(self.name))

    if start > stop:
        raise InvalidTimeRangeError(start, stop)

    # Add times to query. Times are +/- the extra time.
    q = q or []
    q.append(
        query.query('timestamp', 'gt', start - self._extra_time, 'datetime'))
    q.append(
        query.query('timestamp', 'le', stop + self._extra_time, 'datetime'))

    schedule = query.Scheduler(self.client,
                               self.name,
                               start - self._extra_time,
                               stop + self._extra_time,
                               q=[],
                               max_samples=self.max_samples)
    for s_start, s_stop, s_query, s_count in schedule:
        logger.debug("{} - {} - {}".format(s_start, s_stop, s_count))
    logger.debug("Count of scheduled samples {}".format(schedule.count()))

    # Get samples
    samples = schedule.list()
    logger.debug("{} samples according to sample-list.".format(len(samples)))

    # Convert timestamps from strings to datetime objects
    for s in samples:
        s.timestamp = utils.normalize_time(
            utils.parse_datetime(s.timestamp))

    # Sort by resource id and then timestamps in ascending order
    # (sort(cmp=...) is Python 2 only; use cmp_to_key for Python 3)
    samples.sort(key=functools.cmp_to_key(_cmp_sample))

    # Return generator
    return self._reading_generator(samples, start, stop)
def fetch_remote_question(question) -> None:
    now = datetime.now(timezone.utc)
    dt = parse_datetime(question.closed or question.endTime)
    minutes = int((dt - now).total_seconds() / 60)
    if minutes > 0:
        # Only push the task if the poll has not ended yet
        p.push(question.id, "/task/fetch_remote_question", delay=minutes)  # XXX: delay expects minutes
def get_keys_from_page(query, date, pagenum):
    root = html.parse(listurl % (query, date, date, pagenum))
    items = root.xpath('//ul[@class="list_type_1 search_list"]')[0]
    blog_ids = items.xpath('./input[@name="blogId"]/@value')
    log_nos = items.xpath('./input[@name="logNo"]/@value')
    times = [utils.format_datetime(utils.parse_datetime(time))
             for time in items.xpath(
                 './li/div[@class="list_data"]/span[@class="date"]/text()')]
    return {(b, l): t for b, l, t in zip(blog_ids, log_nos, times)}
def parse(cls, api, json):
    lst = List(api)
    for k, v in json.items():
        if k == 'user':
            setattr(lst, k, User.parse(api, v))
        elif k == 'created_at':
            setattr(lst, k, parse_datetime(v))
        else:
            setattr(lst, k, v)
    return lst
def parse(cls, api, json):
    dm = cls(api)
    for k, v in json.items():
        if k == 'sender' or k == 'recipient':
            setattr(dm, k, User.parse(api, v))
        elif k == 'created_at':
            setattr(dm, k, parse_datetime(v))
        else:
            setattr(dm, k, v)
    return dm
def parse(cls, api, json):
    checkin = cls(api)
    for key, value in json.items():
        if key == 'venue':
            setattr(checkin, key, Venue.parse(api, value))
        elif key == 'createdAt':
            setattr(checkin, key, parse_datetime(value))
        else:
            setattr(checkin, key, value)
    return checkin
def root():  # pragma: no cover
    today = datetime.date.today()
    content = "<div>"
    for post in posts:
        content += '<br />{age}<a href="{url}">{title}</a>'.format(
            age=get_age(today, parse_datetime(post["date_gmt"])),
            url=post["link"],
            title=post["title"]["rendered"])
    content += "</div>"
    return """<html><a href="{}">Atom Feed</a><br />{}</html>""".format(
        url_for("recent_feed"), content)
def __init__(self, video_path="peopleCounter.avi"):
    """
    Subtract the background from the video frames and find contours on the
    original frame which could be a person in the queue.

    :param video_path: path to the video to process
    :type video_path: str
    """
    self.stream = cv2.VideoCapture(video_path)
    self.video_time = parse_datetime(video_path)
    self.background_subtractor = cv2.bgsegm.createBackgroundSubtractorMOG(history=1000)
    self.min_area = min_contour_area_to_be_a_person
    self.max_area = max_contour_area_to_be_a_person
def _filter_modified_since(self, repos_to_process, modified_since_dt):
    filtered_repos_to_process = []
    for label in repos_to_process:
        for repo_id in self.cache.repolabel2ids.get(label, []):
            repo_detail = self.cache.repo_detail[repo_id]
            if not modified_since_dt or (
                    repo_detail[REPO_REVISION] != 'None'
                    and parse_datetime(repo_detail[REPO_REVISION]) > modified_since_dt):
                filtered_repos_to_process.append(label)
    return filtered_repos_to_process
def get_feed_item(post: dict) -> FeedEntry:
    """
    Returns a FeedEntry object for adding to an AtomFeed item from the
    werkzeug.contrib.atom module

    Args:
        post (dict): post dict from the parser

    Returns:
        FeedEntry: item for the atom feed
    """
    return FeedEntry(
        id=post["guid"]["rendered"],
        title=post["title"]["rendered"],
        content=remove_macro_tags(post["content"]["rendered"]),
        summary=remove_macro_tags(post["excerpt"]["rendered"]),
        url=post["link"],
        updated=parse_datetime(post["modified_gmt"]),
        published=parse_datetime(post["date_gmt"]),
    )
def __init__(self, point_type: Type[TPoint], query: Union[str, Dict[str, Any]]):
    factory = get_factory(point_type)
    props: List[str] = factory.__self__.get_props()
    if isinstance(query, str):
        query = literal_eval_checked(query, dict)
    self.predicates = []
    self.filter = {
        'pointer_type': get_pointer_type(point_type),
        'meta': {}
    }
    for key, value in query.items():
        if key.startswith('!'):
            negate = True
            key = key[1:]
        else:
            negate = False
        ismeta = (key in ('meta', 'id')) or (key not in props)
        get = (lambda v, key=key: v['meta'][key]) if ismeta else (lambda v, key=key: v[key])
        if isinstance(value, str):
            # special date handling
            match = PointQuery.__rxdate.fullmatch(value.strip())
            if match:
                date = parse_datetime(match.group(2))
                if not match.group(1):
                    # exact date match, no local filtering required,
                    # but normalize the date format
                    value = dump_datetime(date)
                else:
                    if match.group(1) == 'before':
                        op = operator.lt if not negate else operator.ge
                    else:  # match.group(1) == 'after'
                        op = operator.gt if not negate else operator.le
                    self.predicates.append(
                        lambda v, get=get, op=op, date=date: op(parse_datetime(get(v)), date))
                    continue
            # special number handling
            match = PointQuery.__rxnum.fullmatch(value.strip())
            if match:
                number = float(match.group(2))
                if match.group(1) == 'least':
                    op = operator.ge if not negate else operator.lt
                else:  # match.group(1) == 'most'
                    op = operator.le if not negate else operator.gt
                self.predicates.append(
                    lambda v, get=get, op=op, number=number: op(float(get(v)), number))
                continue
        if negate:
            self.predicates.append(lambda v, get=get, value=value: get(v) != value)
        elif ismeta:
            self.filter['meta'][key] = value
        else:
            self.filter[key] = value
    if not self.filter['meta']:
        del self.filter['meta']
    device.log(f"Built remote filter {json.dumps(self.filter)} and "
               f"{len(self.predicates)} local predicates", message_type='debug')
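# Query-string sketch for PointQuery (names and values illustrative): exact
# matches go into the remote filter, while 'before'/'after' dates and
# 'least'/'most' numbers become local predicates, and a '!' prefix negates:
# PointQuery(MyPoint, "{'name': 'probe', '!state': 'retired',
#                       'created': 'after 2021-01-01', 'value': 'least 3'}")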
def info(self):
    url = '%s/%s/id/%s.json' % (get_coverstore_url(), self.category, self.id)
    try:
        d = simplejson.loads(urllib2.urlopen(url).read())
        d['created'] = parse_datetime(d['created'])
        if d['author'] == 'None':
            d['author'] = None
        d['author'] = d['author'] and web.ctx.site.get(d['author'])
        return web.storage(d)
    except IOError:
        # coverstore is down
        return None
def check_datetime(date_str):
    """Parse iso8601 datetime strings.

    :param date_str: String from a datetime argument.
    :type date_str: String
    :returns: Normalized datetime object
    :rtype: datetime.datetime
    """
    try:
        dt = utils.parse_datetime(date_str)
        dt = utils.normalize_time(dt)
    except Exception:
        raise argparse.ArgumentTypeError(
            "{} is an invalid iso8601 datetime string.".format(date_str))
    return dt
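# check_datetime is shaped for argparse's `type=` hook; a usage sketch with
# hypothetical option names:
# parser = argparse.ArgumentParser()
# parser.add_argument('--start', type=check_datetime)
# args = parser.parse_args(['--start', '2020-01-01T00:00:00Z'])
# A malformed value then surfaces as a clean argparse error message instead
# of a traceback, because ArgumentTypeError is raised inside the hook.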
def handle_tasklet(self):
    # 'action' and 'when' are argument keys; the original referenced them as
    # bare (undefined) names and missed `self.` on one parsed_args access.
    if self.parsed_args['action'] == 'create':
        self.parsed_args['when'] = utils.parse_datetime(self.parsed_args['when'])
        self.handle_create()
    elif self.parsed_args['action'] == 'edit':
        pass
    elif self.parsed_args['action'] == 'list':
        pass
    elif self.parsed_args['action'] == 'delete':
        pass
    else:
        exit('tasklet: %s is not supported. Actions supported: (create, edit, list, delete)'
             % self.parsed_args['action'])
def parse(cls, api, json):
    user = cls(api)
    for k, v in json.items():
        if k == 'created_at':
            setattr(user, k, parse_datetime(v))
        elif k == 'status':
            setattr(user, k, Status.parse(api, v))
        elif k == 'following':
            # twitter sets this to null if it is false
            if v is True:
                setattr(user, k, True)
            else:
                setattr(user, k, False)
        else:
            setattr(user, k, v)
    return user
def parse(cls, api, json):
    comments = cls(api)
    for k, v in json.items():
        if k == 'user':
            user = User.parse(api, v)
            setattr(comments, 'author', user)
            setattr(comments, 'user', user)
        elif k == 'status':
            status = Status.parse(api, v)
            # was setattr(comments, 'user', status), which clobbered the
            # user set above; storing under k looks like the intent
            setattr(comments, k, status)
        elif k == 'created_at':
            setattr(comments, k, parse_datetime(v))
        elif k == 'reply_comment':
            setattr(comments, k, User.parse(api, v))
        else:
            setattr(comments, k, v)
    return comments
def parse(cls, api, json):
    status = cls(api)
    for k, v in json.items():
        if k == 'user':
            user = User.parse(api, v)
            setattr(status, 'author', user)
            setattr(status, 'user', user)  # DEPRECATED
        elif k == 'created_at':
            setattr(status, k, parse_datetime(v))
        elif k == 'source':
            if '<' in v:
                setattr(status, k, parse_html_value(v))
                setattr(status, 'source_url', parse_a_href(v))
            else:
                setattr(status, k, v)
        elif k == 'retweeted_status':
            setattr(status, k, Status.parse(api, v))
        else:
            setattr(status, k, v)
    return status
def assign_slot_to_sp(redis, service, slot_datetime, block=False):
    """Assigns slot to sp for service; returns sp id if assigned,
    raises AssignmentException otherwise."""
    slot_datetime = parse_datetime(slot_datetime)
    now = datetime.datetime.now()
    now = constants.IST_TIMEZONE.localize(now)
    if slot_datetime < now + datetime.timedelta(hours=3):
        raise AssignmentException('Cannot assign slot in past')
    # use timedelta arithmetic; the original `now.day + 3` breaks at month ends
    in3days = now + datetime.timedelta(days=3)
    now3 = datetime.datetime(in3days.year, in3days.month, in3days.day)
    now3 = constants.IST_TIMEZONE.localize(now3)
    if slot_datetime > now3:
        raise AssignmentException('Cannot assign slot 3 days into future')
    if slot_datetime.day == now.day:
        sp_list = redis.zrangebyscore(
            "{0}:availability:sps".format(service), 1, 1
        )
    else:
        sp_list = redis.zrangebyscore(
            "{0}:availability:sps".format(service), 0, 1
        )
    md = slot_datetime.strftime('%m%d')
    # 5-minute slot indices; floor division (plain / is float in Python 3)
    slot = (slot_datetime.hour * 60 + slot_datetime.minute) // 5
    duration = constants.SLOT_DEFAULT_DURATION[service] // 5
    slot_end = slot + duration - 1
    for sp in sp_list:
        slots = redis.zrangebyscore(
            'schedule:{0}:{1}'.format(sp, md), slot, slot_end
        )
        if slots:
            if len(slots) == duration and not block:
                redis.zremrangebyscore(
                    'schedule:{0}:{1}'.format(sp, md), slot, slot_end
                )
            return sp
    raise AssignmentException('slot not available')
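# Slot-index arithmetic sketch for the schedule keys above (illustrative
# values): the day is divided into 5-minute slots, so 14:35 maps to index
# (14 * 60 + 35) // 5 == 175, and a 30-minute service (duration 30 // 5 == 6)
# spans indices 175..180 inclusive (slot_end = 175 + 6 - 1).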
def create_order(dbsession, redis, data, uid):
    order = Orders()
    service_provider_id = assign_slot_to_sp(
        redis, data['service'], data['scheduled']
    )
    data['service_provider_id'] = service_provider_id
    data['scheduled'] = parse_datetime(data['scheduled'])
    update_model_from_dict(order, data)
    order.service_user_id = uid
    order.status = 'assigned'
    dbsession.add(order)
    # assumption: flush so order.id is populated before it is read below;
    # the original read order.id immediately after add(), where it may be None
    dbsession.flush()
    order_rating = OrderRating()
    order_rating.order_id = order.id
    dbsession.add(order_rating)
    dbsession.commit()
    tasks.post_order_creation.apply_async(
        (service_provider_id, data['scheduled'], order.id),
        queue=config.ORDER_QUEUE
    )
    return order
def get(self):
    # this should perhaps use a message queue instead
    now_utc = datetime.datetime.utcnow()
    dry_run = niceboolean(self.get_argument("dry_run", False))
    if self.get_argument("now_utc", None):
        now_utc = parse_datetime(self.get_argument("now_utc"))

    search = dict(_next_send_date={"$lte": now_utc})
    for email_reminder in self.db.EmailReminder.find(search):
        self.write("TO: %s\n" % email_reminder.user.email)
        # the reason we're including the now date is so that we can
        # A) make a dry run on a specific date and B) override what "now"
        # is for unit tests.
        self._send_reminder(email_reminder, now_utc, dry_run=dry_run)
        if not dry_run:
            email_reminder.set_next_send_date(now_utc)
            email_reminder.save()
            email_reminder_log = self.db.EmailReminderLog()
            email_reminder_log.email_reminder = email_reminder
            email_reminder_log.save()
    self.write("Done\n")
def __init__(self, model, ids, view, domain=None, context=None, options=None):
    # None defaults instead of mutable [] / {} defaults; the `or` fallbacks
    # below preserve the original behavior
    super(ICalendar, self).__init__()

    self.info_fields = []
    self.fields = {}
    self.events = []
    self.colors = {}
    self.color_values = []
    self.calendar_fields = {}
    self.concurrency_info = None

    self.ids = ids
    self.model = model
    self.domain = domain or []
    self.context = context or {}
    self.options = options

    self.date_format = format.get_datetime_format("date")
    self.use_search = (options or None) and options.use_search

    try:
        dt = parse_datetime(options.selected_day)
        self.selected_day = Day(dt.year, dt.month, dt.day)
    except Exception:  # a bare except: would also swallow KeyboardInterrupt
        pass

    view_id = view.get("view_id", False)

    dom = xml.dom.minidom.parseString(view["arch"].encode("utf-8"))
    root = dom.childNodes[0]
    attrs = node_attributes(root)

    self.string = attrs.get("string", "")
    self.date_start = attrs.get("date_start")
    self.date_delay = attrs.get("date_delay")
    self.date_stop = attrs.get("date_stop")
    self.color_field = attrs.get("color")
    self.day_length = int(attrs.get("day_length", 8))

    if options and options.mode:
        self.mode = options.mode
    else:
        self.mode = attrs.get("mode") or self.mode or "month"

    self.info_fields = self.parse(root, view["fields"])

    fields = view["fields"]
    # list(...) keeps this working on Python 3, where keys() is a view
    fields = list(fields.keys()) + [self.date_start, self.date_stop,
                                    self.date_delay, self.color_field, "state"]
    fields = list(set([x for x in fields if x]))

    self.fields = cache.fields_get(model, fields, rpc.get_session().context)

    if self.color_field and options and options.colors:
        self.colors = options.colors

    if self.color_field and options and options.color_values:
        self.color_values = options.color_values

    self.calendar_fields["date_start"] = dict(
        name=self.date_start, kind=self.fields[self.date_start]["type"])
    if self.date_delay:
        self.calendar_fields["date_delay"] = dict(
            name=self.date_delay, kind=self.fields[self.date_delay]["type"])
    if self.date_stop:
        self.calendar_fields["date_stop"] = dict(
            name=self.date_stop, kind=self.fields[self.date_stop]["type"])
    self.calendar_fields["day_length"] = self.day_length
def block_slot(redis, service, slot_datetime):
    """Blocks the given slot for a service; returns True/False."""
    slot_datetime = parse_datetime(slot_datetime)
    md = slot_datetime.strftime('%m%d')
    slot = (slot_datetime.hour * 60 + slot_datetime.minute) // 5
    count = redis.hmget("schedule:block",
                        "{0}:{1}:{2}".format(service, md, slot))
    # The original snippet ends here without the promised boolean; a hedged
    # completion (an assumption, not the source's code) reports whether a
    # block entry already exists for this service/day/slot key:
    return bool(count and count[0])
def __init__(self, model, ids, view, domain=None, context=None, options=None):
    # None defaults instead of mutable [] / {} defaults, as in the variant above
    super(ICalendar, self).__init__()

    self.info_fields = []
    self.fields = []
    self.events = {}
    self.colors = {}
    self.color_values = []
    self.calendar_fields = {}
    self.concurrency_info = None

    self.ids = ids
    self.model = model
    self.domain = domain or []
    self.context = context or {}
    self.options = options

    self.date_format = format.get_datetime_format('date')
    self.use_search = (options or None) and options.use_search

    try:
        dt = parse_datetime(options.selected_day)
        self.selected_day = Day(dt.year, dt.month, dt.day)
    except Exception:
        pass

    proxy = rpc.RPCProxy(model)

    view_id = view.get('view_id', False)

    dom = xml.dom.minidom.parseString(view['arch'].encode('utf-8'))
    root = dom.childNodes[0]
    attrs = tools.node_attributes(root)

    self.string = attrs.get('string', '')
    self.date_start = attrs.get('date_start')
    self.date_delay = attrs.get('date_delay')
    self.date_stop = attrs.get('date_stop')
    self.color_field = attrs.get('color')
    self.day_length = int(attrs.get('day_length', 8))

    if options and options.mode:
        self.mode = options.mode
    else:
        self.mode = attrs.get('mode') or self.mode or 'month'

    self.info_fields = self.parse(root, view['fields'])

    fields = view['fields']
    fields = list(fields.keys()) + [self.date_start, self.date_stop,
                                    self.date_delay, self.color_field]
    fields = list(set([x for x in fields if x]))

    self.fields = proxy.fields_get(fields)

    if self.color_field and options and options.colors:
        self.colors = options.colors

    if self.color_field and options and options.color_values:
        self.color_values = options.color_values

    self.calendar_fields['date_start'] = dict(
        name=self.date_start, kind=self.fields[self.date_start]['type'])
    if self.date_delay:
        self.calendar_fields['date_delay'] = dict(
            name=self.date_delay, kind=self.fields[self.date_delay]['type'])
    if self.date_stop:
        self.calendar_fields['date_stop'] = dict(
            name=self.date_stop, kind=self.fields[self.date_stop]['type'])
    self.calendar_fields['day_length'] = self.day_length