def get_metric_filter(rate_datetime, window_size):
    """Build an OData $filter clause for the metrics window ending at rate_datetime.

    The window is [rate_datetime - window_size, rate_datetime): the lower bound
    is inclusive ('ge') and the upper bound exclusive ('lt').
    """
    lower_bound = datetimetostr(rate_datetime - window_size)
    upper_bound = datetimetostr(rate_datetime)
    return ("$filter=Timestamp ge datetime'" + lower_bound +
            "' and Timestamp lt datetime'" + upper_bound + "'")
def set_dates(self, event_data):
    """Localize the event's naive start/end datetimes to the local timezone
    and store them back as RFC 3339 strings.

    Mutates event_data in place and returns it.
    """
    local_tz = get_localzone()
    # Localize both endpoints first, then write back, matching the original
    # compute-then-assign order.
    aware = {
        key: local_tz.localize(event_data[key]['dateTime'])
        for key in ('start', 'end')
    }
    for key in ('start', 'end'):
        event_data[key]['dateTime'] = rfc3339.datetimetostr(aware[key])
    return event_data
def set_dates(self, event_data):
    """Replace the event's naive start/end datetimes with localized RFC 3339
    strings.  Mutates and returns event_data."""
    zone = get_localzone()
    to_string = rfc3339.datetimetostr
    starts, ends = event_data['start'], event_data['end']
    aware_start = zone.localize(starts['dateTime'])
    aware_end = zone.localize(ends['dateTime'])
    starts['dateTime'] = to_string(aware_start)
    ends['dateTime'] = to_string(aware_end)
    return event_data
def set_dates(self, event_data):
    """Normalize the 'start'/'end' sections of a calendar event body in place.

    Timed events ('dateTime' key) are localized and encoded as RFC 3339
    strings; all-day events ('date' key) are encoded with date.isoformat().
    Returns the mutated event_data dict.
    """
    local_tz = get_localzone()
    for endpoint in ('start', 'end'):
        fields = event_data[endpoint]
        if 'dateTime' in fields:
            aware = local_tz.localize(fields['dateTime'])
            fields['dateTime'] = rfc3339.datetimetostr(aware)
        elif 'date' in fields:
            fields['date'] = fields['date'].isoformat()
    return event_data
def mkevent(self, vals, calid, system):
    """Insert a calendar event for `vals` into calendar `calid`.

    The summary is '<system> - <name>'; start/end come from vals['start'] and
    vals['end'].  The final character of each RFC 3339 timestamp is stripped
    (presumably a trailing zone designator, so that the explicit timeZone
    field takes effect — TODO confirm datetimetostr's output format).
    """
    zone = 'America/Los_Angeles'
    body = {
        'summary': '{} - {}'.format(system, vals['name']),
        'description': system,
        'start': {
            'dateTime': rfc3339.datetimetostr(vals['start'])[:-1],
            'timeZone': zone,
        },
        'end': {
            'dateTime': rfc3339.datetimetostr(vals['end'])[:-1],
            'timeZone': zone,
        },
    }
    # Hand the assembled event body to the Calendar API.
    self.service.events().insert(calendarId=calid, body=body).execute()
def get_event_list(self, connection=None, calendar_id=None, processor=None,
                   last_retrieved=None, post_retrieval=None):
    """Iterate every event in `calendar_id`, feeding each one to `processor`.

    Args:
        connection: object whose get_service() returns a Google Calendar
            service client.
        calendar_id: id of the calendar to read.
        processor: callable invoked once per event dict.
        last_retrieved: optional datetime; when given, only events modified
            at/after it are listed (passed as updatedMin).
        post_retrieval: callable invoked once after the final page.
    """
    if last_retrieved:
        updated_min = rfc3339.datetimetostr(last_retrieved)
    else:
        updated_min = None
    page_token = None
    while True:
        events = connection.get_service().events().list(
            calendarId=calendar_id,
            pageToken=page_token,
            updatedMin=updated_min).execute()
        # BUG FIX: the API omits 'items' entirely on an empty page; the
        # original indexed events['items'] and raised KeyError there.
        for event in events.get('items', []):
            processor(event)
        page_token = events.get('nextPageToken')
        if not page_token:
            post_retrieval()
            break
def get_event_list(self, connection=None, calendar_id=None, processor=None,
                   last_retrieved=None, post_retrieval=None):
    """Iterate every event in `calendar_id`, feeding each one to `processor`.

    Args:
        connection: object whose get_service() returns a Google Calendar
            service client.
        calendar_id: id of the calendar to read.
        processor: callable invoked once per event dict.
        last_retrieved: optional datetime; when given, only events modified
            at/after it are listed (passed as updatedMin).
        post_retrieval: callable invoked once after the final page.
    """
    if last_retrieved:
        updated_min = rfc3339.datetimetostr(last_retrieved)
    else:
        updated_min = None
    page_token = None
    _events = connection.get_service().events()
    while True:
        if updated_min:
            request = _events.list(calendarId=calendar_id,
                                   pageToken=page_token,
                                   updatedMin=updated_min)
        else:
            # BUG FIX: the original omitted pageToken in this branch, so a
            # multi-page result re-fetched the first page forever.
            request = _events.list(calendarId=calendar_id,
                                   pageToken=page_token,
                                   showDeleted=False)
        events = request.execute()
        # BUG FIX: the API omits 'items' on an empty page; indexing raised
        # KeyError in the original.
        items = events.get('items', [])
        for event in items:
            processor(event)
        page_token = events.get('nextPageToken')
        if not page_token:
            post_retrieval()
            break
        logger.info(u'Get %i events last modification time %s from calendar %s'
                    % (len(items), updated_min, str(calendar_id)))
def __init__(self, timezone=None, format='json'):
    """Configure the serializer.

    Args:
        timezone: tzinfo used when rendering datetimes in csv mode.
        format: 'json' (default) keeps the class-level serializers; 'csv'
            swaps in a datetime serializer that localizes to `timezone` and
            drops the UTC-offset suffix.
    """
    self.tz = timezone
    if format == 'csv':
        # BUG FIX: copy before mutating.  `serializers` is a class attribute,
        # and the original in-place assignment leaked the csv datetime
        # behaviour into every other instance of the class.
        self.serializers = dict(self.serializers)
        self.serializers['datetime'] = dict(
            serialize=lambda x: datetimetostr(
                pytz.utc.localize(x).astimezone(self.tz)).split('+')[0],
            deserialize=lambda x: dateutil.parser.parse(x)
        )
def get_gcal_events(): """ Authenticates with google apis and uses the v3 calendar api to grab your events 24 hours before and after the current time. This is done ahead of time so Rodney wouldn't have to check every message. """ import httplib2 from apiclient.discovery import build from oauth2client.file import Storage from oauth2client.client import AccessTokenRefreshError from oauth2client.client import OAuth2WebServerFlow from oauth2client.tools import run FLOW = OAuth2WebServerFlow( client_id="FILL_THIS_IN", client_secret="FILL_THIS_IN", scope="https://www.googleapis.com/auth/calendar.readonly", user_agent="rodney-gcal/1.0", ) storage = Storage("gcal.dat") credentials = storage.get() if credentials is None or credentials.invalid == True: credentials = run(FLOW, storage) http = httplib2.Http(cache=".cache") http = credentials.authorize(http) service = build("calendar", "v3", http=http) yesterday = rfc.datetimetostr(rfc.now() - timedelta(1)) tomorrow = rfc.datetimetostr(rfc.now() + timedelta(1)) events = ( service.events() .list(calendarId="primary", timeMin=yesterday, timeMax=tomorrow, orderBy="startTime", singleEvents=True) .execute() ) try: print "Found {0} events between {1} and {2}".format(len(events["items"]), yesterday, tomorrow) except KeyError: print "no events" return [] return events["items"]
def pack_kafka_payload(svc, item, refs):
    """Wrap `item` in a history envelope and return it as a JSON string.

    The 'hist' section records the producing service, the current RFC 3339
    timestamp, and any back-references (an empty list when refs is falsy).
    """
    history = {
        'svc': svc,
        'dt': rfc3339.datetimetostr(rfc3339.now()),
        'refs': refs if refs else [],
    }
    return json.dumps({'item': item, 'hist': history})
def check_gcal(event_items): """ Given a bunch of events from gcal, It looks at the time now and finds if it intersects any current events If it does it says you're free at the end of the event, so it will not be accurate if you have 2 back-to-back events. input: 'event_items' : list = events grabbed by rodney at the beginning. output : string = the string that gets sent by rodney over fbchat """ now = int(time.time()) yesterday = rfc.datetimetostr(rfc.now() - timedelta(1)) tomorrow = rfc.datetimetostr(rfc.now() + timedelta(1)) busy = False times = [] for event in event_items: estartts = rfc.strtotimestamp(event["start"]["dateTime"]) eendts = rfc.strtotimestamp(event["end"]["dateTime"]) if estartts < now < eendts: busy = True times.append(estartts) times.append(eendts) print times print busy print now if not busy: return "{0} should be free right now (according to gcal)" else: msg = "{0} is busy right now. He will be free..." free = zip(times[::2], times[1::2]) freestr = [] for (s, e) in free: if s < now < e: estr = time.strftime("%I:%M%p", time.localtime(e)) freestr.append("after {0}.\n".format(estr)) if len(freestr) == 0: msg += "Never :( try sending a text" else: msg += " ".join(freestr) return msg
def get_busy_intervals(credentials, start, end, *, calendars=('primary',)):
    '''
    Gets time intervals in which the user is busy.

    Calendars are for future expansion, when the users may have more than 1
    calendars.

    Args:
        credentials: Google API credentials used to build the Calendar client.
        start, end: datetimes bounding the free/busy query window.
        calendars: iterable of calendar ids (keyword-only; defaults to the
            primary calendar).  BUG FIX: the default was a mutable list.

    Returns:
        A list of time intervals (via to_time_interval) during which any of
        the calendars reports the user busy.
    '''
    calendars = list(calendars)
    calendar = build('calendar', 'v3', credentials=credentials)
    request = {
        "items": [{"id": name} for name in calendars],
        "timeMin": datetimetostr(start),
        "timeMax": datetimetostr(end)
    }
    response = calendar.freebusy().query(body=request).execute()
    busy_lists = [
        response['calendars'][calendar_name]['busy']
        for calendar_name in calendars
    ]
    # NOTE(review): relies on each busy dict's values() iterating as
    # (start, end) in that order — confirm against the API response shape.
    return [
        to_time_interval(*interval.values())
        for interval in chain(*busy_lists)
    ]
def get_event_list(self, connection=None, calendar_id=None, processor=None,
                   last_retrieved=None, post_retrieval=None):
    """Walk every page of events in `calendar_id`, calling `processor` on each.

    Args:
        connection: object whose get_service() returns a Google Calendar
            service client.
        calendar_id: id of the calendar to read.
        processor: callable invoked once per event dict.
        last_retrieved: optional datetime lower bound on modification time
            (sent as updatedMin when given).
        post_retrieval: callable invoked once after the last page.
    """
    updated_min = rfc3339.datetimetostr(last_retrieved) if last_retrieved else None
    page_token = None
    while True:
        events = connection.get_service().events().list(
            calendarId=calendar_id,
            pageToken=page_token,
            updatedMin=updated_min).execute()
        # BUG FIX: empty result pages have no 'items' key at all; indexing
        # raised KeyError in the original.
        for event in events.get('items', []):
            processor(event)
        page_token = events.get('nextPageToken')
        if not page_token:
            post_retrieval()
            break
def get_now_dts():
    """Return the current time as an RFC 3339 string.

    rfc3339.now() carries no fractional seconds.  That lower precision is
    deliberate: whole-second timestamps keep datetime range searches simple
    (e.g. 23:59:59.001 would sort after 23:59:59 but before 00:00:00).
    """
    current = rfc3339.now()
    return rfc3339.datetimetostr(current)
def date_to_utc_timestamp(date):
    """Convert `date` to a UTC unix timestamp.

    set_tz() presumably attaches the configured timezone (confirm its
    semantics); the aware datetime is then round-tripped through its
    RFC 3339 string form into a timestamp.
    """
    return strtotimestamp(datetimetostr(set_tz(date)))
def iterate_folder(service, id=None, fpath = None): """Iterate a folder and check which files/folders are missing. For the local files it checks the modification time and update (local or remote) apropriatedly. """ if STOP_THREAD: sys.exit() global files global queue fpath = fpath or os.getcwd() if not id: #we are dealing with the root directory. try: about = service.about().get().execute() except: print "Can't get the root folder id" return id = about["rootFolderId"] dirs_and_files = os.listdir(fpath) get_files_in_directory(service, id, fpath) for dirfile in dirs_and_files: path = os.path.join(fpath, dirfile) if os.path.split(path)[-1].startswith(".") and \ options.skip_hidden_files: print "Ignoring %s because it starts with dot"%path continue gitem = get_item(dirfile, id) if not gitem: if STOP_THREAD: sys.exit() while queue.full(): time.sleep(0.2) if os.path.isfile(path): stat = os.stat(path) if stat.st_size > MAXSIZE: print "Ignoring %s because is bigger than %d bytes"%( path, MAXSIZE) continue args = (service, path, id) thr = threading.Thread(target = worker) thr.start() queue.put(args) # This item is not in Google Drive. else: if options.force_local_timestamp: print "Checking timestamp for %s"%path stat = os.stat(path) updatetime = stat.st_mtime itemtime = tf_from_timestamp(gitem["modifiedDate"]) if itemtime == updatetime: continue import pdb pdb.set_trace() dat = datetime.datetime.utcfromtimestamp(updatetime) + datetime.timedelta(microseconds=1) updatetime = stat.st_mtime + datetime.timedelta(days=0).total_seconds() gitem["modifiedDate"] = rfc3339.datetimetostr(dat) result = update_file(service, gitem, path, new_revision=False) if result: gitem = result if os.path.isdir(path): while True: #Wait until we have the gitem of the path. gitem = get_item(dirfile, id) if gitem: break time.sleep(0.1) # Get the id of this directory iterate_folder(service, gitem["id"], path) continue
class JsonSerializer(object):
    """A serializer that provides methods to serialize and deserialize JSON
    dictionaries.

    Note, one of the assumptions this serializer makes is that all objects
    that it is used to deserialize have a constructor that can take all of
    the attribute arguments. I.e. If you have an object with 3 attributes,
    the constructor needs to take those three attributes as keyword
    arguments.
    """

    # The attributes to be serialized by the serializer.
    # The implementor needs to provide these.
    __attributes__ = None

    # The attributes that are required when deserializing.
    # The implementor needs to provide these.
    __required__ = None

    # Maps attribute name -> key into `serializers`.  Attributes absent from
    # this mapping are copied through untouched.
    # The implementor needs to provide these.
    __attribute_serializer__ = None

    # The class that the deserializer should generate.
    # The implementor needs to provide these.
    __object_class__ = None

    serializers = dict(
        id=dict(
            serialize=lambda x: uuid.UUID(bytes=x).hex,
            # BUG FIX: this key was misspelled 'deserialiez', so every
            # deserialize() of an id attribute raised KeyError.
            deserialize=lambda x: uuid.UUID(hex=x).bytes
        ),
        datetime=dict(
            serialize=lambda x: datetimetostr(x),
            deserialize=lambda x: dateutil.parser.parse(x)
        ),
        date=dict(
            serialize=lambda x: x.isoformat(),
            deserialize=lambda x: dateutil.parser.parse(x)
        )
    )

    def __init__(self, timezone=None, format='json'):
        """Configure the serializer.

        Args:
            timezone: tzinfo used when rendering datetimes in csv mode.
            format: 'json' (default) or 'csv'; csv swaps in a localized,
                offset-free datetime serializer.
        """
        self.tz = timezone
        if format == 'csv':
            # BUG FIX: copy first.  `serializers` is a class attribute, and
            # the original mutated it in place, leaking the csv behaviour
            # into every other instance of the class.
            self.serializers = dict(self.serializers)
            self.serializers['datetime'] = dict(
                serialize=lambda x: datetimetostr(
                    pytz.utc.localize(x).astimezone(self.tz)).split('+')[0],
                deserialize=lambda x: dateutil.parser.parse(x)
            )

    def deserialize(self, json, **kwargs):
        """Deserialize a JSON dictionary and return a populated object.

        This takes the JSON data, and deserializes it appropriately and then
        calls the constructor of the object to be created with all of the
        attributes.

        Args:
            json: The JSON dict with all of the data
            **kwargs: Optional values that can be used as defaults if they
                are not present in the JSON data

        Returns:
            The deserialized object.

        Raises:
            ValueError: If any of the required attributes are not present
        """
        d = dict()
        for attr in self.__attributes__:
            if attr in json:
                val = json[attr]
            elif attr in kwargs:
                # Caller-supplied default.
                val = kwargs[attr]
            elif attr in self.__required__:
                raise ValueError("{} must be set".format(attr))
            else:
                # BUG FIX: the original fell through here with `val` unbound
                # (or stale from the previous attribute); missing optional
                # attributes are now simply skipped.
                continue
            serializer = self.__attribute_serializer__.get(attr)
            if serializer:
                d[attr] = self.serializers[serializer]['deserialize'](val)
            else:
                d[attr] = val
        return self.__object_class__(**d)

    def serialize(self, obj):
        """Serialize an object to a dictionary.

        Take all of the attributes defined in self.__attributes__ and create
        a dictionary containing those values.

        Args:
            obj: The object to serialize

        Returns:
            A dictionary containing all of the serialized data from the
            object.  Attributes whose value is None are omitted entirely.
        """
        d = dict()
        for attr in self.__attributes__:
            val = getattr(obj, attr)
            if val is None:
                continue
            serializer = self.__attribute_serializer__.get(attr)
            if serializer:
                d[attr] = self.serializers[serializer]['serialize'](val)
            else:
                d[attr] = val
        return d