def getChannelVideos(channelId, dateToStartFrom, dateToGoBackTo, timeInterval):
    """Collect all videos published on a channel between two dates.

    Walks backwards from dateToStartFrom to dateToGoBackTo in windows of
    timeInterval, querying each sub-interval separately and concatenating
    the results.

    :param channelId: channel id to query.
    :param dateToStartFrom: datetime, most recent boundary.
    :param dateToGoBackTo: datetime, oldest boundary.
    :param timeInterval: timedelta, size of each query window.
    :return: list of all videos found across the windows.
    :raises ValueError: if dateToStartFrom is earlier than dateToGoBackTo.
    """
    if dateToStartFrom < dateToGoBackTo:
        # ValueError is more precise than the generic Exception raised
        # previously; existing `except Exception` handlers still match.
        raise ValueError('The date to start from cannot be before the date to go back to!')
    retVal = []
    # initialization
    startFrom = dateToStartFrom
    goBackTo = startFrom - timeInterval
    done = False
    while not done:
        if goBackTo < dateToGoBackTo:
            # clamp the window so we never query past the requested range
            goBackTo = dateToGoBackTo
        if goBackTo == dateToGoBackTo:
            done = True
        goBackTo_rfc3339 = rfc3339(goBackTo, utc=True)
        startFrom_rfc3339 = rfc3339(startFrom, utc=True)
        videosPublishedInInterval = getChannelVideosPublishedInInterval(
            channelId, startFrom_rfc3339, goBackTo_rfc3339)
        retVal.extend(videosPublishedInInterval)
        if not done:
            # we simply continue from where we are
            startFrom = goBackTo
            # calculate the next date to go back to based on the given interval
            nextDate = goBackTo - timeInterval
            goBackTo = nextDate
    return retVal
def _add_task(self, tasklist_id, title, notes=None, iscompleted=False, due=None, data_completed=None, deleted=False):
    """Create a new task in the given task list via the Google Tasks API.

    Returns a Task wrapping the parsed server response (tagged with its
    tasklist_id), or None if the request or parsing failed.
    """
    url = 'https://www.googleapis.com/tasks/v1/lists/%s/tasks' % (tasklist_id)
    params = {'tasklist': tasklist_id}
    payload = {
        'kind': 'tasks#task',
        'title': title,
        'deleted': deleted,
    }
    if notes is not None:
        payload['notes'] = notes
    if iscompleted:
        payload['status'] = 'completed'
        # fall back to "now" when no completion timestamp was supplied
        completed_at = data_completed if data_completed is not None else datetime.datetime.now()
        payload['completed'] = rfc3339.rfc3339(completed_at)
    else:
        payload['status'] = 'needsAction'
        payload['completed'] = None
    if due is not None:
        payload['due'] = rfc3339.rfc3339(due)
    body = json.dumps(payload).encode('utf-8')
    addheaders = {'Content-type': 'application/json'}
    response = self.__do_request('POST', url, addheaders=addheaders, params=params, data=body)
    if response and response.text:
        try:
            atask = Task(json.loads(response.text))
            atask['tasklist_id'] = tasklist_id
            return atask
        except Exception as e:
            print(e)
    return None
def upload_file(self, google_folder_id, path, file_id=None):
    """Upload (or replace) a local file into the given Drive folder.

    When file_id is provided, that existing Drive file is deleted first so
    the upload effectively replaces it. Returns the created file resource
    (id, name, md5Checksum).
    """
    print("Uploading file %s" % path)
    (dt_taken, dt_modified) = self.get_taken_and_modified_time(path)
    file_name = os.path.basename(path)
    metadata = {
        'name': file_name,
        'parents': [google_folder_id],
        'createdTime': rfc3339.rfc3339(dt_taken),
        'modifiedTime': rfc3339.rfc3339(dt_modified),
    }
    # only JPEGs get an explicit mime type; everything else is auto-detected
    mimetype = 'image/jpeg' if file_name.lower().endswith(".jpg") else None
    media = MediaFileUpload(path, mimetype=mimetype, resumable=True)
    if file_id is not None:
        self.service.files().delete(fileId=file_id).execute()
    return self.service.files().create(
        body=metadata,
        media_body=media,
        fields='id, name, md5Checksum').execute()
def page2element(page, baseurl):
    """Build an Atom <entry> element for a page.

    Falls back to the page title when the summary is missing and to now()
    when the date is missing, logging a warning in either case.

    :param page: object with author/title/url/description/date/keywords.
    :param baseurl: str, used as the category scheme for keywords.
    :return: the assembled <entry> Element.
    """
    root = ET.Element("entry")
    if page.author:
        author = ET.SubElement(root, "author")
        name = ET.SubElement(author, "name")
        name.text = page.author
    title = ET.SubElement(root, "title")
    title.text = page.title
    ET.SubElement(root, "link", attrib={"rel": "alternate", "type": "text/html", "href": page.url})
    id = ET.SubElement(root, "id")
    # TODO generate id
    id.text = page.url
    summary = ET.SubElement(root, "summary")
    if page.description:
        summary.text = page.description
    else:
        # logging.warn is a deprecated alias; logging.warning is the real API
        logging.warning("Missing summary for element %s using title instead" % page.url)
        summary.text = page.title
    date = ET.SubElement(root, "updated")
    if page.date:
        # TODO check if right format
        date.text = rfc3339(page.date)
    else:
        logging.warning("Missing date for element %s using now() instead" % page.url)
        date.text = rfc3339(datetime.datetime.now())
    if page.keywords:
        for keyword in page.keywords:
            ET.SubElement(root, "category", attrib={"scheme": baseurl, "term": keyword})
    return root
def printEvents(self, calID='primary', orderBy='startTime', singleEvents=True,
                timeMin=datetime.datetime(2010,11,10), timeMax=datetime.datetime(2010,11,28)):
    # Print all events from a Google Calendar between timeMin and timeMax,
    # following nextPageToken pagination until the listing is exhausted.
    # NOTE(review): singleEvents is passed as str(True)/str(False); confirm
    # the client serializes this the way the API expects a boolean.
    page_token = None
    while True:
        events = self.service.events().list(
            calendarId=calID,
            orderBy=orderBy,
            singleEvents=str(singleEvents),
            timeMin=rfc3339(timeMin),
            timeMax=rfc3339(timeMax),
            pageToken=page_token).execute()
        for event in events['items']:
            # Timed events carry 'dateTime'; all-day events carry 'date'.
            if ('dateTime' in event['start']):
                print event['summary'], ": From %s to %s" % ((parser.parse(event['start']['dateTime'])).strftime("%A, %d. %B %Y %I:%M%p"), (parser.parse(event['end']['dateTime'])).strftime("%A, %d. %B %Y %I:%M%p"))
            elif ('date' in event['start']):
                print event['summary'], ": On %s" % parser.parse(event['start']['date']).strftime("%A, %d. %B %Y")
            else:
                print event['summary'], "is a weird event..."
        page_token = events.get('nextPageToken')
        if not page_token:
            break
def _get_raw_indoor_temperatures(building, zone, pymortar_client, start, end, window_size, aggregation):
    """Fetch indoor temperature timeseries for one zone via pymortar.

    :param building: str, site name used in the Brick query.
    :param zone: str, HVAC zone identifier within the building.
    :param pymortar_client: pymortar client used to execute the fetch.
    :param start: datetime, timezone aware, rfc3339
    :param end: datetime, timezone aware, rfc3339
    :param window_size: aggregation window passed to pymortar (e.g. "15m").
    :param aggregation: pymortar aggregation constant (e.g. pymortar.MEAN).
    :return: (dataframe, None) on success, (None, error string) on failure.
    """
    # Brick query: temperature sensors on thermostats feeding this zone.
    temperature_query = """SELECT ?tstat ?temp WHERE { ?tstat rdf:type brick:Thermostat . ?tstat bf:controls/bf:feeds <http://xbos.io/ontologies/%s#%s> . ?tstat bf:hasPoint ?temp . ?temp rdf:type brick:Temperature_Sensor . };""" % (building, zone)
    #resp = pymortar_client.qualify([temperature_query]) Need to get list of all sites
    temperature_view = pymortar.View(
        name="temperature_view",
        sites=[building],
        definition=temperature_query,
    )
    temperature_stream = pymortar.DataFrame(
        name="temperature",
        aggregation=aggregation,
        window=window_size,
        timeseries=[
            pymortar.Timeseries(
                view="temperature_view",
                dataVars=["?temp"],
            )
        ]
    )
    request = pymortar.FetchRequest(
        sites=[building],
        views=[
            temperature_view
        ],
        dataFrames=[
            temperature_stream
        ],
        time=pymortar.TimeParams(
            start=rfc3339(start),
            end=rfc3339(end),
        )
    )
    # fetch() returns a dict-like result keyed by dataframe name.
    temperature_data = pymortar_client.fetch(request)["temperature"]
    if temperature_data is None:
        return None, "did not fetch data from pymortar with query: %s" % temperature_query
    return temperature_data, None
def price(year, month, day, entityId):
    """Return all stock_detail points for one entity on the given day.

    NOTE(review): the query is built by string interpolation. entityId is
    formatted with %d so it must be an int (which limits injection), but
    consider the client's bind-parameter support for the date strings too.
    """
    # inclusive day window: 00:00:00 .. 23:59:59 local time
    start_date = datetime.datetime(year, month, day, 0, 0, 0)
    end_date = datetime.datetime(year, month, day, 23, 59, 59)
    result = list(
        influx.query(
            "SELECT symbol, volume, title, high, low, open, close FROM stock_detail WHERE time > '%s' AND time < '%s' AND entityId = '%d'"
            % (rfc3339.rfc3339(start_date), rfc3339.rfc3339(end_date), entityId)).get_points())
    return result
def open_list(year, month, day):
    """Return the ids of all stock_detail points recorded on the given day.

    NOTE(review): query built by string interpolation; the interpolated
    values are rfc3339-formatted datetimes built locally, so the injection
    surface is limited, but bind parameters would still be safer.
    """
    # inclusive day window: 00:00:00 .. 23:59:59 local time
    start_date = datetime.datetime(year, month, day, 0, 0, 0)
    end_date = datetime.datetime(year, month, day, 23, 59, 59)
    result = list(
        influx.query(
            "SELECT id FROM stock_detail WHERE time > '%s' AND time < '%s'"
            % (rfc3339.rfc3339(start_date), rfc3339.rfc3339(end_date))).get_points())
    return result
def load(client, src):
    """Load ATT&CK for ICS attack-pattern (technique) objects into Dgraph.

    Queries the STIX source for attack-pattern items, maps each to an
    AttackTechnique node (mitre-ics-attack external ids become the node
    name/url; other external references become embedded Reference nodes),
    then commits the whole batch in a single mutation.

    :param client: pydgraph client.
    :param src: STIX data source supporting query(filters).
    """
    filt = [
        Filter('type', '=', 'attack-pattern'),
    ]
    results = src.query(filt)
    data = []
    for attackItem in results:
        tmpResult = {
            "name": "",
            "created_date": rfc3339.rfc3339(attackItem["created"]),
            "last_modified_date": rfc3339.rfc3339(attackItem["modified"]),
            "name_full": attackItem["name"],
            "reference": [],
            "kill_chain_phase": [],
            "dgraph.type": "AttackTechnique"
        }
        # `in dict` is the idiomatic membership test (was `.keys()`)
        if "description" in attackItem:
            tmpResult["description"] = [attackItem["description"]]
        if "x_mitre_platforms" in attackItem:
            tmpResult["platform"] = attackItem["x_mitre_platforms"]
        if "kill_chain_phases" in attackItem:
            for kill_chain_phase in attackItem["kill_chain_phases"]:
                tmpResult["kill_chain_phase"].append(kill_chain_phase["phase_name"])
        for external_reference in attackItem["external_references"]:
            if external_reference["source_name"] == "mitre-ics-attack":
                tmpResult["name"] = external_reference["external_id"]
                tmpResult["url"] = external_reference["url"]
            else:
                # no link with capec
                tmpExternalReference = {
                    "description": external_reference["description"],
                    "refsource": external_reference["source_name"],
                    "dgraph.type": "Reference"
                }
                if "url" in external_reference:
                    tmpExternalReference["url"] = external_reference["url"]
                tmpResult["reference"].append(tmpExternalReference)
        data.append(tmpResult)
    txn = client.txn()
    try:
        # json.dumps(data) directly; the previous data[:] copy was redundant
        mu = pydgraph.Mutation(set_json=json.dumps(data).encode('utf8'))
        txn.mutate(mu)
        txn.commit()
    except pydgraph.AbortedError as e:
        # surface the abort reason instead of a bare "error"
        print("transaction aborted: %s" % e)
    finally:
        txn.discard()
    print("ics att&ck technique data without relations loaded")
def process_dates():
    """Create recurring calendar events from the GUI's date/time widgets.

    Builds daily recurrence start/stop datetimes between the selected start
    and end dates (restricted to the checked weekdays), inserts one calendar
    event per occurrence, then emails a summary notification.
    """
    global calendar_events_keep_alive
    global event_id_list
    start_connection()
    # All occurrence start datetimes: a daily rule filtered by weekday.
    start_date_list = list(
        rrule(DAILY, byweekday=tuple(check_button_list),
              dtstart=parse(start_date_selector.get() + " " + start_time_input.get()),
              until=parse(end_date_selector.get() + " " + start_time_input.get())))
    # Matching stop datetimes: same rule shifted by the duration in hours.
    stop_date_list = list(
        rrule(
            DAILY,
            byweekday=tuple(check_button_list),
            dtstart=parse(start_date_selector.get() + " " + start_time_input.get()) + datetime.timedelta(hours=float(end_time_entry.get())),
            until=parse(end_date_selector.get() + " " + start_time_input.get()) + datetime.timedelta(hours=float(end_time_entry.get()))))
    date_list = zip(start_date_list, stop_date_list)
    hours = 0.0
    days = 0
    event_id_list = []
    try:
        for start, stop in date_list:
            # keep-alive flag lets the GUI cancel event creation mid-way
            if not calendar_events_keep_alive:
                break
            else:
                event_id_list.append(
                    insert_event_into_cal(rfc3339.rfc3339(start),
                                          rfc3339.rfc3339(stop)))
                days += 1
                hours += float(end_time_entry.get())
    except googleapiclient.errors.HttpError:
        # API failure is fatal: hide the window, tell the user, exit.
        root_window.withdraw()
        tkinter.messagebox.showerror(
            "Error Adding Event",
            "Error when attempting to create events,\n does the calendar still exist?"
        )
        raise SystemExit
    message_text = "Calendar event(s) created for \"{0}\" event," \
        " for {1} hours, over the course of {2} days." \
        " The event days are between {3} and {4}".\
        format(event_input.get(), str(hours), str(days),
               str(start_date_selector.get()), str(end_date_selector.get()))
    notify_message = create_message(
        'me', notification_input.get(),
        '{} Calendar Event Created'.format(event_input.get()), message_text)
    send_message(email_service, 'me', notify_message)
    print(message_text)
def __getitem__(self, item):
    """Index 0 -> formatted start time, 1 -> formatted end time, else None.

    Times are rendered as RFC3339 with the colon stripped from the UTC
    offset.
    """
    if item == 0:
        value = self.start_time
    elif item == 1:
        value = self.end_time
    else:
        return None
    return removo_colon_from_rfc3339_time_format(rfc3339.rfc3339(value))
def insert_ajx(nombre, email, tlf, inicio, fin):
    """AJAX endpoint: create a Google Calendar event for an appointment.

    Builds an event from the given name and start/end datetimes, runs the
    oauth2client installed-app flow when stored credentials are missing or
    invalid, and inserts the event into the configured calendar.
    NOTE(review): `rest` and `tlf` are unused; oauth2client's run() flow is
    long deprecated -- confirm before modernizing.
    """
    rest = "POST https://www.googleapis.com/calendar/v3/calendars/%s/events" % settings.google_calid
    from rfc3339 import rfc3339
    # Google Calendar expects RFC3339 dateTime strings.
    inicio = rfc3339(inicio)
    fin = rfc3339(fin)
    event = {
        'summary': nombre,
        'location': 'Caracas',
        'start': {
            'dateTime': inicio,
            'timeZone': "America/Caracas",
        },
        'end': {
            'dateTime': fin,
            'timeZone': "America/Caracas"
        },
        'attendees': [
            {
                'email': email,
                # Other attendee's data...
            },
        ],
    }
    from oauth2client import gce
    import os
    import httplib2
    from apiclient.discovery import build
    from oauth2client.client import flow_from_clientsecrets
    from oauth2client.file import Storage
    from oauth2client.tools import run
    jsonfile = os.path.join(request.folder, 'static', 'client_secrets.json')
    storage = Storage('credentials.dat')
    credentials = storage.get()
    # Out-of-band redirect: the user pastes the auth code manually.
    redirecturl = 'urn:ietf:wg:oauth:2.0:oob'
    flow = flow_from_clientsecrets(
        jsonfile,
        scope='https://www.googleapis.com/auth/calendar',
        redirect_uri=redirecturl)
    if credentials is None or credentials.invalid:
        credentials = run(flow, storage)
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = build('calendar', 'v3', http=http)
    created_event = service.events().insert(calendarId=settings.google_calid,
                                            body=event).execute()
    #response.flash = created_event['id']
    script = ''
    return dict(script=script)
def get_indoor_temp_data(building, zone, window):
    """Fetch mean indoor temperature for a zone over a fixed test interval.

    Queries pymortar for temperature sensors on thermostats feeding the
    given zone, prints the resulting dataframe, and returns the raw fetch
    result.

    :param building: str, site name used in the Brick query.
    :param zone: str, HVAC zone identifier.
    :param window: aggregation window (e.g. "15m").
    :return: pymortar fetch result containing the "temperature" dataframe.
    """
    # Parse the local-time boundary strings and convert straight to aware
    # UTC datetimes. The previous version round-tripped through nanoseconds
    # (mktime * 1e9 then / 1e9), adding float noise for no benefit;
    # fromtimestamp(ts, tz=utc) is equivalent and direct.
    start = datetime.datetime.fromtimestamp(
        time.mktime(datetime.datetime.strptime("30/09/2018 0:00:00", "%d/%m/%Y %H:%M:%S").timetuple()),
        tz=pytz.utc)
    end = datetime.datetime.fromtimestamp(
        time.mktime(datetime.datetime.strptime("1/10/2018 0:00:00", "%d/%m/%Y %H:%M:%S").timetuple()),
        tz=pytz.utc)
    temperature_query = """SELECT ?tstat ?temp WHERE { ?tstat rdf:type brick:Thermostat . ?tstat bf:controls/bf:feeds <http://xbos.io/ontologies/%s#%s> . ?tstat bf:hasPoint ?temp . ?temp rdf:type brick:Temperature_Sensor . };""" % (building, zone)
    #resp = pymortar_client.qualify([temperature_query]) Need to get list of all sites
    temperature_view = pymortar.View(
        name="temperature_view",
        sites=[building],
        definition=temperature_query,
    )
    temperature_stream = pymortar.DataFrame(
        name="temperature",
        aggregation=pymortar.MEAN,
        window=window,
        timeseries=[
            pymortar.Timeseries(
                view="temperature_view",
                dataVars=["?temp"],
            )
        ])
    request = pymortar.FetchRequest(
        sites=[building],
        views=[temperature_view],
        dataFrames=[temperature_stream],
        time=pymortar.TimeParams(
            start=rfc3339(start),
            end=rfc3339(end),
        ))
    temperature_data = pymortar_client.fetch(request)
    print(temperature_data["temperature"])
    return temperature_data
def add_event(summary, date, start_time, end_time, email_list, description=None, location=None):
    """Create a Google Calendar event and return a confirmation string.

    :param summary: event title.
    :param date: unused (kept for interface compatibility).
    :param start_time: free-text string containing the start datetime.
    :param end_time: free-text string containing the end datetime.
    :param email_list: list of attendee dicts for the Calendar API.
    :raises ValueError: if start_time or end_time contains no parseable
        date. (Previously the code fell through with `start`/`end`
        undefined and crashed later with a NameError.)
    """
    matches = list(datefinder.find_dates(start_time))
    if not matches:
        raise ValueError("Could not parse a date from start_time: %r" % start_time)
    start = matches[0]
    matches = list(datefinder.find_dates(end_time))
    if not matches:
        raise ValueError("Could not parse a date from end_time: %r" % end_time)
    end = matches[0]
    start = rfc3339(start)
    end = rfc3339(end)
    event = {
        'summary': summary,
        'location': location,
        'description': description,
        'start': {
            'dateTime': start,  #_time.strftime("%Y-%m-%dT%H:%M:%S"),
            'timeZone': timezone,
        },
        'end': {
            'dateTime': end,  #_time.strftime("%Y-%m-%dT%H:%M:%S"),
            'timeZone': timezone,
        },
        'attendees': email_list,
        'reminders': {
            'useDefault': False,
            'overrides': [
                {
                    'method': 'email',
                    'minutes': 24 * 60
                },
                {
                    'method': 'popup',
                    'minutes': 10
                },
            ],
        },
    }
    event = service.events().insert(calendarId='primary', body=event,
                                    sendNotifications=True).execute()
    return ('Event created: %s' % (event.get('summary')) +
            ' for %s' % (event.get('description')))
def insert_ajx(nombre,email,tlf,inicio,fin):
    """AJAX endpoint: create a Google Calendar event for an appointment.

    Formats the start/end datetimes as RFC3339, authorizes via the
    oauth2client installed-app flow when stored credentials are invalid,
    and inserts the event into the configured calendar.
    NOTE(review): `rest` and `tlf` are unused; oauth2client's run() flow is
    long deprecated -- confirm before modernizing.
    """
    rest="POST https://www.googleapis.com/calendar/v3/calendars/%s/events" % settings.google_calid
    from rfc3339 import rfc3339
    # Google Calendar expects RFC3339 dateTime strings.
    inicio=rfc3339(inicio)
    fin=rfc3339(fin)
    event = {
        'summary': nombre,
        'location': 'Caracas',
        'start': {
            'dateTime': inicio,
            'timeZone':"America/Caracas",
        },
        'end': {
            'dateTime': fin,
            'timeZone':"America/Caracas"
        },
        'attendees': [
            {
                'email': email,
                # Other attendee's data...
            },
        ],
    }
    from oauth2client import gce
    import os
    import httplib2
    from apiclient.discovery import build
    from oauth2client.client import flow_from_clientsecrets
    from oauth2client.file import Storage
    from oauth2client.tools import run
    jsonfile=os.path.join(request.folder,'static','client_secrets.json')
    storage = Storage('credentials.dat')
    credentials = storage.get()
    # Out-of-band redirect: the user pastes the auth code manually.
    redirecturl='urn:ietf:wg:oauth:2.0:oob'
    flow = flow_from_clientsecrets(jsonfile,
                                   scope='https://www.googleapis.com/auth/calendar',
                                   redirect_uri=redirecturl)
    if credentials is None or credentials.invalid:
        credentials = run(flow, storage)
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = build('calendar', 'v3',http=http)
    created_event = service.events().insert(calendarId=settings.google_calid, body=event).execute()
    #response.flash = created_event['id']
    script=''
    return dict(script=script)
def bid(date, currency):
    """Return one minute of bid-side M1 candles for `currency` at `date`."""
    window_end = date + datetime.timedelta(minutes=1)
    params = {
        "granularity": "M1",
        "price": "B",
        "from": rfc3339(date, utc=True, use_system_timezone=True),
        "to": rfc3339(window_end, utc=True, use_system_timezone=True),
    }
    req = instruments.InstrumentsCandles(instrument=currency, params=params)
    reply = api.request(req)
    return reply.get("candles")
def getChannelVideos(channelId, dateToStartFrom, dateToGoBackTo, timeInterval):
    """Collect all videos published on a channel between two dates.

    Walks backwards from dateToStartFrom to dateToGoBackTo in windows of
    timeInterval, querying and logging each sub-interval separately.

    :raises ValueError: if dateToStartFrom is earlier than dateToGoBackTo.
    """
    log.info('Searching for videos published in channel between %s and %s',
             dateToStartFrom, dateToGoBackTo)
    if (dateToStartFrom < dateToGoBackTo):
        # ValueError is more precise than the generic Exception raised
        # previously; existing `except Exception` handlers still match.
        raise ValueError(
            'The date to start from cannot be before the date to go back to!')
    retVal = []
    # initialization
    startFrom = dateToStartFrom
    goBackTo = startFrom - timeInterval
    done = False
    while not done:
        if (goBackTo < dateToGoBackTo):
            log.debug(
                'The interval is now larger than the remaining time span to retrieve videos for. Using the date to go back to as next boundary'
            )
            goBackTo = dateToGoBackTo
        if (goBackTo == dateToGoBackTo):
            log.debug('Last round-trip')
            done = True
        log.debug('Converting timestamps to RFC3339 format')
        goBackTo_rfc3339 = rfc3339(goBackTo, utc=True)
        startFrom_rfc3339 = rfc3339(startFrom, utc=True)
        videosPublishedInInterval = getChannelVideosPublishedInInterval(
            channelId, startFrom_rfc3339, goBackTo_rfc3339)
        log.debug('Adding videos found in the interval to the results list')
        retVal.extend(videosPublishedInInterval)
        log.debug('Total video(s) found so far: %d', len(retVal))
        if (not done):
            # we simply continue from where we are
            startFrom = goBackTo
            # calculate the next date to go back to based on the given interval
            nextDate = goBackTo - timeInterval
            log.debug(
                'Calculating the next date to go back to based on the interval: %s - %s => %s',
                goBackTo, timeInterval, nextDate)
            goBackTo = nextDate
    log.info('Found %d video(s) in total', len(retVal))
    return retVal
def eventBeginandEnd(sentence, year=2017, month=9):
    """Extract begin/end RFC3339 timestamps for an event from a sentence.

    Assumes the sentence mentions two times and one date; the day-of-month
    comes from the sentence, while year and month were previously
    hard-coded to September 2017 and are now parameters with those same
    defaults (backward compatible).

    :param sentence: natural-language event description.
    :param year: calendar year for the timestamps (default 2017).
    :param month: calendar month for the timestamps (default 9).
    :return: (rfcBegin, rfcEnd) RFC3339 strings.
    """
    times, dates, _ = ner.find_cd(TextBlob(sentence))
    beginTime = parse(times[0], fuzzy_with_tokens=True)[0]
    # Assume only 2 time frames
    endTime = parse(times[1], fuzzy_with_tokens=True)[0]
    dayOf = parse(dates[0], fuzzy_with_tokens=True)[0].day
    correctedBeginTime = datetime.datetime(year, month, dayOf,
                                           beginTime.hour, beginTime.minute)
    correctedEndTime = datetime.datetime(year, month, dayOf,
                                         endTime.hour, endTime.minute)
    rfcBegin = rfc3339.rfc3339(correctedBeginTime)
    rfcEnd = rfc3339.rfc3339(correctedEndTime)
    return rfcBegin, rfcEnd
def getCalendarData(self, cur_time):
    """Fetch today's events and the next few Canadian holidays.

    Both queries are deliberately best-effort: on any error the
    corresponding list is simply returned as-is (possibly empty).

    :param cur_time: datetime used as the lower bound for today's events.
    :return: (events, holidays) where events is a list of
        (title, start, end) tuples and holidays a list of (title, date).
    """
    start_of_day = datetime.date.today()
    end_of_day = datetime.date.today() + datetime.timedelta(days=1)
    # Today's events:
    events = []
    start_date = rfc3339(cur_time)
    end_date = rfc3339(end_of_day + datetime.timedelta(days=1))
    try:
        query = gdata.calendar.client.CalendarEventQuery(
            start_min=start_date, start_max=end_date,
            ctz="America/Vancouver", orderby="starttime",
            sortorder="ascending", singleevents="true")
        feed = self.cal_client.GetCalendarEventFeed(q=query)
        for an_event in feed.entry:
            start = dateutil.parser.parse(an_event.when[0].start)
            end = dateutil.parser.parse(an_event.when[0].end)
            events.append((an_event.title.text, start, end))
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed while keeping best-effort behaviour
        pass
    # Next Holidays
    holidays = []
    hol_uri = "https://www.google.com/calendar/feeds/en.canadian%23holiday%40group.v.calendar.google.com/private/full"
    start_date = rfc3339(start_of_day)
    try:
        query = gdata.calendar.client.CalendarEventQuery(
            start_min=start_date, ctz="America/Vancouver",
            orderby="starttime", singleevents="true",
            sortorder="ascending", max_results=4)
        feed = self.cal_client.GetCalendarEventFeed(q=query, uri=hol_uri)
        for an_event in feed.entry:
            start = dateutil.parser.parse(an_event.when[0].start)
            holidays.append((an_event.title.text, start.date()))
    except Exception:
        pass
    return (events, holidays)
def atom(self):
    """Render this entry as an Atom XML fragment via ATOM_TEMPLATE."""
    full_url = ''.join(['http://', settings.HOSTNAME, self.link])
    return ATOM_TEMPLATE.format(
        title=self.title,
        url=full_url,
        id=self.slug,
        summary=self.render_summary(),
        updated=rfc3339(self.updated, utc=True),
    )
def template_render(template, _posts, metadata=None):
    """Render a pystache template for a list of posts.

    Decodes each post to unicode, runs its content through the registered
    Format, merges blog-wide config variables, and adds pagination
    variables when metadata is supplied.
    """
    posts = []
    for post in _posts:
        key = str(post.key())
        posts.append({
            'key': key,
            'title': unicode(post.title.encode("utf-8"), "utf-8"),
            'content': unicode(
                Format.by_name(post.format)(post.content.encode("utf-8")),
                "utf-8"),
            'date': post.date,
            'updated': rfc3339.rfc3339(post.date),
            'url': path.join(config.base_url, 'posts', key)
        })
    variables = {
        'base_url': config.base_url,
        'blog_name': config.blog_name,
        'author': config.author,
        'email': config.email,
        'license': config.license,
        'plog_version': config.plog_version,
        'posts': posts,
        # feed-level timestamp falls back to 0 when there are no posts
        'updated': posts[0]['updated'] if posts else 0
    }
    if metadata:
        page = metadata["page"]
        # internal page numbers are 0-based, displayed 1-based
        variables['page'] = page + 1
        variables['pages'] = metadata["pages"]
        variables['prev_page'] = page - 1 if page > 0 else 0
        variables['next_page'] = page + 1
        variables['title'] = posts[0]['title'] if len(posts) == 1 else ""
    return pystache.render(template, variables)
def marshalled(self, context):
    """suds plugin hook: inject the Marketo authentication header into the
    outgoing SOAP envelope just before it is serialized.

    Signs timestamp+userid with the shared secret and rewrites the
    namespace prefixes the service expects. Statement order matters: the
    prefixes must be fixed before the header is appended.
    """
    userid = self.userid
    timestamp = rfc3339(datetime.datetime.now())
    secret = self.secret
    signature = sign(secret, timestamp+userid)
    # NOTE(review): interpolates from locals() -- fragile, relies on the
    # local variable names matching the template placeholders; `auth` is
    # not used below -- confirm whether the fragment is still needed.
    auth = self.authfragment % locals()
    envelope = context.envelope
    #Set the right ns prefixes
    envelope.nsprefixes[ 'ns1' ] = envelope.nsprefixes[ 'ns0' ]
    envelope.clearPrefix( 'ns0' )
    #Add our auth to the header element
    header = envelope.getChild('Header')
    authns = Element( 'ns1:AuthenticationHeader' )
    authns.append( Element( 'mktowsUserId' ).setText(self.userid) )
    authns.append( Element( 'requestSignature' ).setText(signature) )
    authns.append( Element( 'requestTimestamp' ).setText(timestamp) )
    header.append( authns )
    #Set the proper body prefixes
    body = envelope.getChild( 'Body' )
    body.prefix = 'SOAP-ENV'
    body.children[0].prefix = 'ns1'
    if self.debug:
        # dump the final envelope for offline inspection
        with open("/tmp/envelope.txt","w") as f:
            f.write(envelope.str())
def decodeBSONEntity(di, key):
    """
    Inspired by pymongo bson.json_util.default, but specially processing
    some value types:

        ObjectId --> hexvalue
        datetime --> rfc3339

    Also, while json_util.default creates a new dict in the form
    {$name: decodedvalue} we assign the decoded value, 'flattening' the
    value directly in the field. Fallback to other values using
    json_util.default, and flattening only those decoded entities that
    has only one key.
    """
    value = di[key]
    if isinstance(value, ObjectId):
        di[key] = str(value)
        return
    if isinstance(value, datetime):
        di[key] = rfc3339(value, utc=True, use_system_timezone=False)
        return
    try:
        decoded = json_util.default(di[key])
        if len(decoded) == 1:
            # next(iter(...)) works on both py2 lists and py3 view objects;
            # the previous decoded.keys()[0] raises TypeError on Python 3
            di[key] = decoded[next(iter(decoded))]
        else:
            di[key] = decoded
    except Exception:
        # best-effort: leave the value untouched if it cannot be decoded
        # (was a bare `except:`, which also trapped SystemExit)
        pass
def CreateSourceElement(ee, feed):
    """Create an atom:source element in the provided entry element, based
    on the provided feed metadata.

    :param ee: parent entry element.
    :param feed: feedparser-style feed metadata mapping (may be falsy).
    """
    if not feed:
        return
    root = SubElement(ee, "source")
    TextElement(root, "title", feed.get("title_detail"))
    # dict.has_key() was removed in Python 3; `in` works on both versions
    if "links" in feed:
        for link in feed.links:
            LinkElement(root, "link", link)
    TextElement(root, "subtitle", feed.get("subtitle_detail"))
    TextElement(root, "rights", feed.get("rights_detail"))
    SubElement(root, "generator").text = "feedarchive"
    SubElement(root, "updated").text = rfc3339(time.time())
    SubElementIf(root, "id", feed.get("id"))
    if "image" in feed:
        SubElement(root, "icon").text = feed.image.href
    if "tags" in feed:
        for tag in feed.tags:
            te = SubElement(root, "category")
            if tag.get("term"):
                te.attrib["term"] = tag.term
            if tag.get("scheme"):
                te.attrib["scheme"] = tag.scheme
            if tag.get("label"):
                te.attrib["label"] = tag.label
    PersonElement(root, "author", feed.get("author_detail"))
def put(self, date_time: datetime, value: dict) -> None:
    '''
    Import a single data point. Writes asynchronously, unless the write queue is full.
    :param date_time: Datetime of the data in UTC
    :param value: value to import
    :return: None
    '''
    message = json.dumps({
        "import_id": self.__import_id,
        "time": rfc3339(date_time, utc=True, use_system_timezone=False),
        "value": value,
    }, ensure_ascii=False)
    while True:
        try:
            self.__producer.produce(self.__kafka_topic,
                                    key=self.__import_id,
                                    value=message)
            return
        except (KafkaException, BufferError) as e:
            # queue full or transient error: flush, pause, then retry
            self.__logger.warning(
                "Could not queue kafka message, flushing and retrying in 1s. Error: "
                + str(e))
            self.__producer.flush()
            time.sleep(1)
def log_request(response):
    """Emit one colorized structured log line per non-static request."""
    if request.path == '/favicon.ico' or request.path.startswith('/static'):
        return response
    now = time.time()
    fields = [
        ('method', request.method, 'yellow'),
        ('path', request.path, 'yellow'),
        ('status', response.status_code, 'cyan'),
        ('duration', round(now - g.start, 2), 'green'),
        ('time', rfc3339(datetime.datetime.fromtimestamp(now), utc=True), 'magenta'),
        ('params', dict(request.args), 'yellow'),
    ]
    request_id = request.headers.get('X-Request-ID')
    if request_id:
        fields.append(('request_id', request_id, 'yellow'))
    line = " ".join(
        colors.color("{}={}".format(name, value), fg=color)
        for name, value, color in fields)
    app.logger.info(line)
    return response
def template_render(template, _posts, metadata=None):
    """Render a pystache template for a list of posts.

    Builds per-post dicts (decoded to unicode, content run through the
    post's registered Format), merges blog-wide config variables, and adds
    pagination variables when metadata is supplied.
    """
    posts = [{
        'key' : str(post.key()),
        'title' : unicode(post.title.encode("utf-8"), "utf-8"),
        'content' : unicode(Format.by_name(post.format)(post.content.encode("utf-8")), "utf-8"),
        'date' : post.date,
        'updated' : rfc3339.rfc3339(post.date),
        'url' : path.join(config.base_url, 'posts', str(post.key()))
    } for post in _posts]
    variables = {
        'base_url' : config.base_url,
        'blog_name' : config.blog_name,
        'author' : config.author,
        'email' : config.email,
        'license' : config.license,
        'plog_version': config.plog_version,
        'posts' : posts,
        # feed-level timestamp falls back to 0 when there are no posts
        'updated' : posts[0]['updated'] if len(posts) else 0
    }
    if metadata:
        # internal page numbers are 0-based, displayed 1-based
        variables['page'] = metadata["page"] + 1
        variables['pages'] = metadata["pages"]
        variables['prev_page'] = metadata["page"] - 1 if metadata["page"] > 0 else 0
        variables['next_page'] = metadata["page"] + 1
        variables['title'] = posts[0]['title'] if len(posts) == 1 else ""
    return pystache.render(template, variables)
def default(self, obj):
    """Serialize dates/datetimes as RFC3339 (or ISO 8601 when the rfc3339
    package is unavailable); defer everything else to the base encoder."""
    if not isinstance(obj, (datetime.date, datetime.datetime)):
        return builtin_json.JSONEncoder.default(self, obj)
    if __has_rfc3339__:
        return rfc3339(obj, utc=True, use_system_timezone=False)
    return obj.isoformat()
def set_completed(self, iscompleted=True):
    """Mark this task completed (stamping 'completed' with now), or reset
    it to needsAction and clear the completion timestamp."""
    if not iscompleted:
        self['status'] = 'needsAction'
        self['completed'] = None
        return
    self['status'] = 'completed'
    self['completed'] = rfc3339.rfc3339(datetime.datetime.now())
def search_vidid(startdate, enddate, query):
    """Search YouTube for up to 10 news videos published in a date range.

    Appends " english news" to the query and returns the matching video
    ids.
    """
    publishedAfter = rfc3339(startdate)
    publishedBefore = rfc3339(enddate)
    search_request = youtube.search().list(
        q=query + " english news",
        part='snippet',
        type='video',
        publishedAfter=publishedAfter,
        publishedBefore=publishedBefore,
        maxResults=10)
    res = search_request.execute()
    return [item['id']['videoId'] for item in res['items']]
def convert(
    self,
    value: ty.Any,
    path: Path,
    *args: ty.Any,
    entity: ty.Optional[ConvertibleEntity] = None,
    **context: ty.Any
) -> ty.Optional[ty.Union[str, datetime.datetime]]:
    """Convert between datetimes and RFC3339 strings.

    For RESPONSE entities, datetimes are serialized to RFC3339 strings and
    passed through the subtype; for everything else the subtype result is
    parsed back into a datetime.

    :raises SchemaError: when the value is not a valid RFC3339 string.
    """
    # isinstance() already excludes None, so the previous trailing
    # `and value is not None` clause was redundant and has been dropped.
    if isinstance(value, datetime.datetime) and entity == ConvertibleEntity.RESPONSE:
        value = rfc3339.rfc3339(value)
    result = self.subtype.convert(value, path, *args, entity=entity, **context)
    if entity == ConvertibleEntity.RESPONSE:
        return result
    if result is None:
        return None
    try:
        return datetime.datetime.fromtimestamp(rfc3339_to_timestamp(value))
    except InvalidRFC3339Error:
        raise SchemaError(Error(path, self.messages['format']))
def header(user_id, encryption_key):
    """Build the SOAP authentication header XML (signed timestamp + user id)."""
    timestamp = rfc3339(datetime.datetime.now())
    signature = sign(timestamp + user_id, encryption_key)
    parts = (
        '<env:Header><ns1:AuthenticationHeader>',
        '<mktowsUserId>', user_id, '</mktowsUserId>',
        '<requestSignature>', signature, '</requestSignature>',
        '<requestTimestamp>', timestamp, '</requestTimestamp>',
        '</ns1:AuthenticationHeader></env:Header>',
    )
    return ''.join(parts)
def add_event(event_times, description):
    """Insert an event spanning event_times into the primary calendar."""
    global service
    start_datetime, end_datetime = event_times
    body = {
        "summary": description,
        "start": {
            "timeZone": "Asia/Jerusalem",
            "dateTime": rfc3339(start_datetime),
        },
        "end": {
            "timeZone": "Asia/Jerusalem",
            "dateTime": rfc3339(end_datetime),
        },
    }
    service.events().insert(calendarId="primary", body=body).execute()
def log_request(response):
    """Hook that fires after any module view; logs structured
    request/response info to MongoDB."""
    now = time.time()
    duration = round(now - g.start, 2)
    dt = datetime.datetime.fromtimestamp(now)
    timestamp = rfc3339(dt, utc=True)
    ip = request.headers.get("X-Forwarded-For", request.remote_addr)
    host = request.host.split(":", 1)[0]
    args = request.json
    log_params = {
        "method": request.method,
        "path": request.path,
        "status": response.status_code,
        "duration": duration,
        "time": timestamp,
        "ip": ip,
        "host": host,
        "params": args,
        # NOTE(review): this guards on request.is_json but then reads
        # response.json -- likely one of the two should be the other;
        # confirm which object's JSON body is meant here.
        "response": response.json if request.is_json else None
    }
    # Log request params
    mongo_logger.info(log_params)
    return response
def _DateRangeQuery(self):
    """Print the user's Google Calendar events for the next 7 days.

    Timed events show a date plus a start-end time; the except branch
    handles all-day events whose start_time is a bare date (the timed
    strptime raises for those).
    """
    print "Tim's Google Calendar Events for the next 7 days."
    print ''
    start_date = str(rfc3339.rfc3339(datetime.datetime.now()))
    end_date = str(datetime.date.today() + (datetime.timedelta(7)))
    query = gdata.calendar.service.CalendarEventQuery('default')
    query.start_min = start_date
    query.start_max = end_date
    query.orderby = 'starttime'
    query.sortorder = 'ascending'
    feed = self.cal_client.CalendarQuery(query)
    for i, an_event in zip(xrange(len(feed.entry)), feed.entry):
        print '-- %s' % (an_event.title.text,)
        try:
            for a_when in an_event.when:
                # [:-10] strips the trailing UTC-offset before parsing
                print '\tDate:\t\t%s' % (time.strftime('%A, %B %d',(time.strptime(a_when.start_time[:-10],"%Y-%m-%dT%H:%M:%S"))))
                print '\tTime:\t\t%s - %s' % (time.strftime('%I:%M %p',(time.strptime(a_when.start_time[:-10],"%Y-%m-%dT%H:%M:%S"))), time.strftime('%I:%M %p',(time.strptime(a_when.end_time[:-10],"%Y-%m-%dT%H:%M:%S"))))
                for a_where in an_event.where:
                    if a_where.value_string != None:
                        print '\tLocation:\t%s' % (a_where.value_string,)
                        print ''
                    else:
                        print ''
        except:
            # all-day events: start_time is a bare YYYY-MM-DD date
            for a_when in an_event.when:
                print '\tDate:\t\t%s' % (time.strftime('%A, %B %d',(time.strptime(a_when.start_time,"%Y-%m-%d"))))
                for a_where in an_event.where:
                    print '\tTime:\t\tAll day'
                    if a_where.value_string != None:
                        print '\tLocation:\t%s' % (a_where.value_string,)
                        print ''
                    else:
                        print ''
def gen_atom(self):
    """Build the complete Atom feed element, appending one <entry> per page."""
    feed = ET.Element("feed", nsmap={None: 'http://www.w3.org/2005/Atom'})
    # TODO set namespace
    ET.SubElement(feed, "title").text = self.title
    # TODO generate id
    ET.SubElement(feed, "id").text = self.feedurl
    ET.SubElement(feed, "updated").text = rfc3339(datetime.datetime.now())
    ET.SubElement(feed, "link", attrib={"href": self.feedurl, "rel": "self"})
    ET.SubElement(feed, "link", attrib={"href": self.baseurl})
    author = ET.SubElement(feed, "author")
    ET.SubElement(author, "name").text = self.name
    if self.email:
        ET.SubElement(author, "email").text = self.email
    if self.summary:
        ET.SubElement(feed, "summary").text = self.summary
    for page in self.pages:
        feed.append(page2element(page, self.baseurl))
    return feed
def after_requests(response):
    """Log method/path/status/duration for each request when logging is on."""
    if not app.config['ENABLE_LOGGING']:
        return response
    if request.path == '/favicon.ico' or request.path.startswith('/static'):
        return response
    now = time_now()
    fields = [
        ('method', request.method),
        ('path', request.path),
        ('status', response.status_code),
        ('duration', round(now - g.start, 2)),
        ('time', rfc3339(datetime.datetime.fromtimestamp(now), utc=True)),
        ('ip', request.headers.get('X-Forwarded-For', request.remote_addr)),
        ('host', request.host.split(':', 1)[0]),
        ('params', dict(request.args)),
    ]
    line = " | ".join("{}={}".format(name, value) for name, value in fields)
    app.logger.info(line)
    return response
def log_request(response):
    """
    Colorful logging taken from
    https://dev.to/rhymes/logging-flask-requests-with-colors-and-structure--7g1
    """
    now = time.time()
    fields = [
        ("method", request.method, "blue"),
        ("path", request.path, "blue"),
        ("status", response.status_code, "yellow"),
        ("duration", round(now - g.start, 2), "green"),
        ("time", rfc3339(dt.datetime.fromtimestamp(now)), "magenta"),
        ("ip_address", request.headers.get("X-Forwarded-For", request.remote_addr), "red"),
        ("host", request.host.split(":", 1)[0], "red"),
        ("params", dict(request.args), "blue"),
    ]
    line = " ".join(
        f"{fg(color)}{name}={value}{attr('reset')}"
        for name, value, color in fields)
    app.logger.info(line)
    return response
def parse_file(self, directory):
    """Parse every metrics file under `directory` into MetricLog entries.

    Files that fail to parse are skipped with a warning. If the objective
    metric (self.metrics[0], always first) never appears, a single
    placeholder log carrying UNAVAILABLE_METRIC_VALUE is reported instead.
    """
    metric_logs = []
    for file_path in self.parser.find_all_files(directory):
        if os.path.isdir(file_path):
            continue
        try:
            self.logger.info(file_path + " will be parsed.")
            metric_logs.extend(self.parser.parse_summary(file_path, self.metrics))
        except Exception as e:
            self.logger.warning("Unexpected error: " + str(e))
    objective = self.metrics[0]
    has_objective = any(ml.metric.name == objective for ml in metric_logs)
    if not has_objective:
        metric_logs = [
            api_pb2.MetricLog(
                time_stamp=rfc3339.rfc3339(datetime.now()),
                metric=api_pb2.Metric(
                    name=objective,
                    value=const.UNAVAILABLE_METRIC_VALUE))
        ]
        self.logger.info(
            "Objective metric {} is not found in training logs, {} value is reported"
            .format(objective, const.UNAVAILABLE_METRIC_VALUE))
    return api_pb2.ObservationLog(metric_logs=metric_logs)
def format(self, value):
    """Serialize *value* to an RFC 3339 string for output.

    The database stores datetimes assuming the 'Asia/Shanghai' server
    timezone; naive values (no tzinfo) are localized with ``tz_server``
    before conversion.

    Raises:
        fields.MarshallingException: if *value* is not datetime-like
            (an AttributeError surfaced from localization/conversion).
    """
    try:
        # idiom fix: compare with None by identity (was `!= None`)
        dt = value if value.tzinfo is not None else tz_server.localize(value)
        return rfc3339(dt)
    except AttributeError as ae:
        raise fields.MarshallingException(ae)
def start_time():
    """ Returns the RFC3339 time stamp for the start of today. """
    today = datetime.datetime.today()
    # midnight at the start of the current day (time.min == 00:00:00.000000)
    midnight = datetime.datetime.combine(today.date(), datetime.time.min)
    return rfc3339(midnight)
def end_time():
    """ Returns the RFC3339 time stamp for the end of today. """
    today = datetime.datetime.today()
    # last representable instant of the day (time.max == 23:59:59.999999)
    last_tick = datetime.datetime.combine(today.date(), datetime.time.max)
    return rfc3339(last_tick)
def log_request(response):
    """Flask after-request hook: log structured request info, but only for
    the "/info" endpoint — every other path passes through untouched.
    Returns *response* unchanged either way.
    """
    if request.path != "/info":
        return response
    finished = time.time()
    elapsed = round(finished - g.start, 6)  # to the microsecond
    client_ip = request.headers.get("X-Forwarded-For", request.remote_addr)
    hostname = request.host.split(":", 1)[0]
    query_args = dict(request.args)
    stamp = rfc3339(datetime.datetime.fromtimestamp(finished), utc=False)
    req_id = request.headers.get("X-Request-ID", "")
    app.logger.info({
        "time": stamp,
        "method": request.method,
        "path": request.path,
        "status": response.status_code,
        "duration": elapsed,
        "ip": client_ip,
        "host": hostname,
        "params": query_args,
        "request_id": req_id,
    })
    return response
def json(self, request, include_issues=True, serialize=True):
    """Build a JSON representation of this batch.

    Absolute URLs are derived from *request*.  With *include_issues*,
    each related issue contributes a title/date_issued/url entry.
    Returns a JSON string when *serialize* is true, otherwise the dict.
    """
    batch = {
        "name": self.name,
        "ingested": rfc3339(self.created),
        "page_count": self.page_count,
        "lccns": self.lccns(),
        "awardee": {
            "name": self.awardee.name,
            "url": request.build_absolute_uri(self.awardee.json_url),
        },
        "url": request.build_absolute_uri(self.json_url),
    }
    if include_issues:
        batch["issues"] = [
            {
                "title": {
                    "name": issue.title.display_name,
                    "url": request.build_absolute_uri(issue.title.json_url),
                },
                "date_issued": strftime(issue.date_issued, "%Y-%m-%d"),
                "url": request.build_absolute_uri(issue.json_url),
            }
            for issue in self.issues.prefetch_related("title")
        ]
    if serialize:
        return json.dumps(batch)
    return batch
def json(self, include_issues=True, serialize=True, host="chroniclingamerica.loc.gov"):
    """Build a JSON representation of this batch with URLs rooted at
    ``http://<host>``.  Returns a JSON string when *serialize* is true,
    otherwise the dict.
    """
    prefix = "http://" + host
    batch = {
        'name': self.name,
        'ingested': rfc3339(self.created),
        'page_count': self.page_count,
        'lccns': self.lccns(),
        'awardee': {
            "name": self.awardee.name,
            "url": prefix + self.awardee.json_url,
        },
        'url': prefix + self.json_url,
    }
    if include_issues:
        batch['issues'] = [
            {
                "title": {
                    "name": issue.title.display_name,
                    "url": prefix + issue.title.json_url,
                },
                "date_issued": strftime(issue.date_issued, "%Y-%m-%d"),
                "url": prefix + issue.json_url,
            }
            for issue in self.issues.all()
        ]
    if serialize:
        return json.dumps(batch)
    return batch
def __init__(self, entry=None):
    """Initialize a task list with default fields (fresh UUID id, current
    'updated' timestamp, empty tasks map), then overlay values from
    *entry* via set_from_entry()."""
    defaults = (
        ('kind', "tasks#taskList"),
        ('id', str(uuid.uuid4())),
        ('title', None),
        ('updated', rfc3339.rfc3339(datetime.datetime.now())),
        ('selfLink', None),
        ('tasks', {}),
    )
    for field, default in defaults:
        self[field] = default
    self.set_from_entry(entry)
def date_to_atom_friendly(date):
    """
    Converts dates from our default representation ("%Y-%m-%d %H:%M:%S")
    to an Atom-friendly RFC-3339 format.

    Uses a third party library released under a free license (see rfc3339.py
    for details).

    Uses code from
    http://stackoverflow.com/questions/9637838/convert-string-date-to-timestamp-in-python
    """
    parsed_dt = datetime.datetime.strptime(date, "%Y-%m-%d %H:%M:%S")
    # rfc3339() accepts a POSIX timestamp as well as a datetime
    epoch_seconds = time.mktime(parsed_dt.timetuple())
    return rfc3339(epoch_seconds)
def get_value(self):
    """Return this datetime property in the Datastore REST wire format.

    Produces ``{'timestampValue': '<RFC 3339 UTC string>'}`` with
    microsecond precision, e.g. 2018-05-19T16:31:52.123456Z, per
    https://cloud.google.com/datastore/docs/concepts/entities#date_and_time
    """
    assert type(self.property_value) == self.get_type()
    # rfc3339() emits second precision ending in 'Z'; splice in the
    # microsecond field ourselves.
    base = rfc3339(self.property_value, utc=True, use_system_timezone=False)
    # BUG FIX: zero-pad microseconds to six digits. The original used
    # str(microsecond), so microsecond=123 produced '.123Z' (123 ms)
    # instead of the correct '.000123Z'.
    micros = '%06d' % self.property_value.microsecond
    return {'timestampValue': base[:-1] + '.' + micros + 'Z'}
def header(user_id, encryption_key):
    """Build the SOAP authentication header for an API request.

    The request signature is produced by sign() over timestamp + user_id
    with *encryption_key*.
    """
    stamp = rfc3339(datetime.datetime.now())
    request_signature = sign(stamp + user_id, encryption_key)
    pieces = [
        '<env:Header><ns1:AuthenticationHeader>',
        '<mktowsUserId>', user_id, '</mktowsUserId>',
        '<requestSignature>', request_signature, '</requestSignature>',
        '<requestTimestamp>', stamp, '</requestTimestamp>',
        '</ns1:AuthenticationHeader></env:Header>',
    ]
    return ''.join(pieces)
def batches_atom(request, page_number=1):
    # Render an Atom feed (25 batches per page) of publicly viewable
    # batches, newest release first.
    #
    # NOTE: dictionary=locals() hands every local name (batches, now,
    # paginator, page, request, page_number) to the template, so these
    # local names are part of the template contract — do not rename them.
    batches = models.Batch.viewable_batches()
    batches = batches.order_by('-released')
    # feed "updated" timestamp in RFC 3339 format
    now = rfc3339(datetime.datetime.now())
    paginator = Paginator(batches, 25)
    page = paginator.page(page_number)
    return render_to_response('reports/batches.xml', dictionary=locals(),
                              context_instance=RequestContext(request),
                              mimetype='application/atom+xml')
def header(user_id, encryption_key):
    """Build the SOAP authentication header for an API request.

    The request signature is produced by sign() over timestamp + user_id
    with *encryption_key*.
    """
    stamp = rfc3339.rfc3339(datetime.datetime.now())
    request_signature = sign(stamp + user_id, encryption_key)
    template = (
        u"<env:Header><ns1:AuthenticationHeader>"
        u"<mktowsUserId>{user_id}</mktowsUserId>"
        u"<requestSignature>{signature}</requestSignature>"
        u"<requestTimestamp>{timestamp}</requestTimestamp>"
        u"</ns1:AuthenticationHeader></env:Header>"
    )
    return template.format(user_id=user_id,
                           signature=request_signature,
                           timestamp=stamp)
def getChannelVideos(channelId, dateToStartFrom, dateToGoBackTo, timeInterval):
    """Collect all channel videos published between two dates.

    Walks backwards from *dateToStartFrom* in steps of *timeInterval*,
    querying each sub-interval, until *dateToGoBackTo* is reached.

    Raises:
        Exception: if dateToStartFrom is earlier than dateToGoBackTo.
    """
    log.info('Searching for videos published in channel between %s and %s', dateToStartFrom, dateToGoBackTo)
    if dateToStartFrom < dateToGoBackTo:
        raise Exception('The date to start from cannot be before the date to go back to!')

    videos = []
    startFrom = dateToStartFrom
    goBackTo = startFrom - timeInterval
    lastRound = False
    while True:
        if goBackTo < dateToGoBackTo:
            log.debug('The interval is now larger than the remaining time span to retrieve videos for. Using the date to go back to as next boundary')
            goBackTo = dateToGoBackTo
        if goBackTo == dateToGoBackTo:
            log.debug('Last round-trip')
            lastRound = True
        log.debug('Converting timestamps to RFC3339 format')
        goBackTo_rfc3339 = rfc3339(goBackTo, utc=True)
        startFrom_rfc3339 = rfc3339(startFrom, utc=True)
        intervalVideos = getChannelVideosPublishedInInterval(channelId, startFrom_rfc3339, goBackTo_rfc3339)
        log.debug('Adding videos found in the interval to the results list')
        videos.extend(intervalVideos)
        log.debug('Total video(s) found so far: %d', len(videos))
        if lastRound:
            break
        # we simply continue from where we are
        startFrom = goBackTo
        # calculate the next date to go back to based on the given interval
        nextDate = goBackTo - timeInterval
        log.debug('Calculating the next date to go back to based on the interval: %s - %s => %s', goBackTo, timeInterval, nextDate)
        goBackTo = nextDate
    log.info('Found %d video(s) in total', len(videos))
    return videos
def toDateTime(value):
    """Convert a pywin/Outlook COM date-like object to an RFC 3339 string.

    Outlook reports year 4501 when a task has no due date (the latest
    possible date according to Outlook); that sentinel is mapped to a
    fixed placeholder date.
    """
    if value.year == 4501:
        # Combination of pywin being old and Outlook COM being stupid:
        # year 4501 means "no due date".  Substitute a fixed placeholder
        # that rfc3339 will accept.
        # BUG FIX: the original did `return [key, value]` here, but `key`
        # is undefined in this function (NameError at runtime) and the
        # normal branch returns a bare string — return the converted
        # value in both branches.
        return rfc3339(datetime.datetime(2011, 9, 8, 17, 37, 0))
    return rfc3339(datetime.datetime(
        year=value.year,
        month=value.month,
        day=value.day,
        hour=value.hour,
        minute=value.minute,
        second=value.second,
    ))
def youtube_search(self, channel, since):
    """Search *channel* for up to 50 videos published after *since*
    (ordered by date) and return them as built resources."""
    search_response = self.youtube.search().list(
        channelId=channel,
        part="id,snippet",
        order='date',
        type='video',
        maxResults=50,
        publishedAfter=rfc3339(since),
    ).execute()
    return [self.build_resource(video) for video in search_response['items']]
def package_event(mt_event):
    """Serialize a migration-tool event record into the JSON payload for
    the events API.

    Dates/times are emitted as RFC 3339 strings; 'yes'/'no' fields become
    booleans; pricing fields are included only when non-empty.  The host
    encoding "<kind>_<id>" maps to department (0), college (1) or school
    group (2).  Returns the JSON string.
    """
    event = {}
    event['name'] = str(mt_event.event_name)
    event['start_date'] = rfc3339(datetime.strptime(mt_event.start_date, "%Y-%m-%d"), use_system_timezone=False)
    event['start_time'] = rfc3339(datetime.strptime(mt_event.start_date + " " + mt_event.start_time, "%Y-%m-%d %H:%M"), use_system_timezone=False)
    event['end_date'] = rfc3339(datetime.strptime(mt_event.end_date, "%Y-%m-%d"), use_system_timezone=False)
    event['end_time'] = rfc3339(datetime.strptime(mt_event.end_date + " " + mt_event.end_time, "%Y-%m-%d %H:%M"), use_system_timezone=False)
    event['all_day'] = (mt_event.all_day == 'yes')
    event['description'] = str(mt_event.description)
    event['registration_required'] = (mt_event.registration_req == 'yes')
    event['on_campus'] = (mt_event.on_campus == 'yes')
    event['registration_url'] = str(mt_event.registration_url)
    event['url'] = str(mt_event.event_page_url)
    event['virtual'] = (mt_event.virtual == 'yes')
    event['time_zone'] = str(mt_event.time_zone)
    event['google_location_id'] = str(mt_event.google_location_id)
    event['event_type_id'] = mt_event.event_type
    if bool(mt_event.general_pricing):
        event['general_cost'] = float(mt_event.general_pricing)
    if bool(mt_event.member_pricing):
        # BUG FIX: was float(mt_event.general_pricing) — a copy/paste
        # error that stored the general price as the member price.
        event['member_cost'] = float(mt_event.member_pricing)
    if bool(mt_event.non_member_pricing):
        event['non_member_cost'] = float(mt_event.non_member_pricing)
    x, hosts_dict = get_host_choices()
    event['host_name'] = str(hosts_dict[mt_event.host_name])
    host = mt_event.host_name.split("_")
    if host[0] == "0":
        event['department_id'] = int(host[1])
    elif host[0] == "1":
        event['college_id'] = int(host[1])
    elif host[0] == "2":
        event['school_group_id'] = int(host[1])
    h = models.Hit.query.get(mt_event.hit_id)
    event['school_id'] = h.school
    return json.dumps(event)
def getWeekOldTimestamp(self):
    """Return an RFC 3339 UTC timestamp 8736 hours in the past.

    NOTE(review): the name says "week old" and the original inline
    comments said both "1 year" and "24 hour old"; the actual offset is
    8736 hours (= 364 days). Confirm which interval is intended — the
    behavior is preserved here.
    """
    lookback = datetime.timedelta(hours=8736)
    past = datetime.datetime.now() - lookback
    return rfc3339(past, utc=True)