def _handle_url(self, params):
    """
    Write one visited URL as an Org-mode sub-item.

    @param params: dict with keys 'timestamp' (microseconds since the Unix
                   epoch — TODO confirm against the caller), 'url', 'title'
                   and 'visit_count'
    """
    # stored timestamp is in microseconds; convert to seconds first
    timestamp = datetime.datetime.fromtimestamp(
        int(params['timestamp'] / 1000000))
    if not self._args.omit_drawer:
        properties = OrgProperties()
        if (params['title'] == ""):
            # fall back to the URL itself when no page title is present
            params['title'] = params['url']
        properties.add('URL', params['url'])
        properties.add('VISIT_COUNT', params['visit_count'])
    # default output: an Org link "[[url][title]]"
    output = OrgFormat.link(params['url'], params['title'])
    try:
        # a user-supplied --output-format overrides the default link.
        # NOTE(review): .decode('utf-8') only exists on bytes — under
        # Python 3 a str argument raises AttributeError here, so the
        # default link above is silently kept; confirm intended.
        output = self._args.output_format.decode('utf-8').format(**params)
    except Exception:
        pass
    if self._args.omit_drawer:
        # no PROPERTIES drawer requested
        self._writer.write_org_subitem(timestamp=OrgFormat.date(
            timestamp, show_time=True),
                                       output=output,
                                       properties=None)
    else:
        self._writer.write_org_subitem(timestamp=OrgFormat.date(
            timestamp, show_time=True),
                                       output=output,
                                       properties=properties)
def _handle_message(self, msg):
    """parse a single message row"""
    # normalize the number: drop any '@host' suffix and prefix the
    # international dialing prefix '00'
    msg['number'] = '00' + msg['number'].split('@')[0]
    # resolve the number to a contact name; fall back to the number itself
    msg['name'] = self._numberdict.get(msg['number'], msg['number'])
    # msg['type'] is truthy for outgoing messages
    msg['verb'] = 'to' if msg['type'] else 'from'
    msg['type'] = 'OUTGOING' if msg['type'] else 'INCOMING'
    msg['handler'] = self._args.handler
    if msg['text']:
        if self._args.demojize:
            # replace emoji with their ':name:' aliases
            msg['text'] = emoji.demojize(msg['text'])
        if self._args.skip_emoji:
            # strip emoji entirely.
            # NOTE(review): emoji.get_emoji_regexp() was removed in
            # emoji >= 2.0 — confirm the pinned emoji version
            msg['text'] = re.sub(emoji.get_emoji_regexp(), '', msg['text'])
    # timestamp is stored in milliseconds
    timestamp = datetime.datetime.fromtimestamp(msg['timestamp'] / 1000)
    # hash over the whole (already enriched) row for dedup
    properties = OrgProperties(data_for_hashing=json.dumps(msg))
    properties.add('NUMBER', msg['number'])
    properties.add('TYPE', msg['type'])
    output = self._args.output_format.format(**msg)
    # skip empty messages and ignored senders/recipients
    if msg['text'] and not self._is_ignored(msg):
        self._writer.write_org_subitem(timestamp=OrgFormat.date(
            timestamp, show_time=True),
                                       output=output,
                                       properties=properties)
def __parse_sample(self, target, row):
    """
    parse a row of csv and write entry
    @param target: tag or category
    @param row: list of columns (tag, begin, end, duration)
    """
    tag, begin, end, duration = row
    # org time range built from begin/end columns
    timestamp = self.get_timerange(begin, end)
    duration = self.get_sec(duration)
    properties = OrgProperties(data_for_hashing=timestamp)
    properties.add('DURATION', duration)
    tags = []
    # remove colon from output
    # NOTE(review): only the trailing-colon branch adds `target` itself
    # as a tag — confirm this asymmetry is intended
    if target.endswith(':'):
        target = target[:-1]
        tags.append(target)
    elif ':' in target:
        # keep only the part before the first colon
        target = target.split(':')[0]
    output = target.capitalize()
    tags.append(tag)
    self._writer.write_org_subitem(timestamp=timestamp,
                                   output=output,
                                   tags=tags,
                                   properties=properties)
def _handle_url(self, params):
    """
    Write one visited URL as an Org-mode sub-item.

    @param params: dict with keys 'timestamp' (microseconds since the
                   Windows/WebKit epoch 1601-01-01 — as used by Chrome),
                   'url', 'title' and 'visit_count'
    """
    # shift from the 1601-01-01 epoch to the Unix epoch
    epoch = datetime.datetime(1970, 1, 1) - datetime.datetime(1601, 1, 1)
    url_time = params['timestamp'] / 1000000 - epoch.total_seconds()
    if (url_time > 0):
        timestamp = datetime.datetime.fromtimestamp(int(url_time))
    else:
        # pre-Unix-epoch (or zero) values clamp to 1970-01-01
        timestamp = datetime.datetime(1970, 1, 1)
    if not self._args.omit_drawer:
        properties = OrgProperties()
        if (params['title'] == ""):
            # fall back to the URL itself when no page title is present
            params['title'] = params['url']
        properties.add('URL', params['url'])
        properties.add('VISIT_COUNT', params['visit_count'])
    output = ""
    try:
        # NOTE(review): .decode('utf-8') only exists on bytes — under
        # Python 3 a str argument raises here and output stays "";
        # confirm intended.
        output = self._args.output_format.decode('utf-8').format(**params)
    except Exception:
        pass
    if self._args.omit_drawer:
        self._writer.write_org_subitem(
            timestamp=OrgFormat.datetime(timestamp),
            output=output,
            properties=None)
    else:
        self._writer.write_org_subitem(
            timestamp=OrgFormat.datetime(timestamp),
            output=output,
            properties=properties)
def write_point(self, p):
    """Emit a single track point as an Org sub-item, including reverse
    geocoding of its coordinates."""
    org_timestamp = OrgFormat.date(p.time, show_time=True)
    props = OrgProperties(data_for_hashing=org_timestamp)
    # resolve the coordinates to a human-readable place description
    place = self.reverse_geocode(p.latitude, p.longitude)
    entry_text = self._args.output_format.format(**place)
    entry_tags = []
    if p.latitude:
        props.add('LATITUDE', p.latitude)
    if p.longitude:
        props.add('LONGITUDE', p.longitude)
    if p.source:
        entry_tags.append(p.source.lower())
    # only points with a usable timestamp are written
    if org_timestamp:
        self._writer.write_org_subitem(timestamp=org_timestamp,
                                       output=entry_text,
                                       properties=props,
                                       tags=entry_tags)
def read_properties(self, row):
    """
    Build OrgProperties for one csv row.

    @param row: dict-like mapping of column name to value
    @return: OrgProperties hashed over the JSON-serialized row, with one
             property per column named in --properties
    """
    properties = OrgProperties(data_for_hashing=json.dumps(row))
    # fix: removed a dead local — the original also computed
    # self._args.output_format.format(**row) here, but the result was
    # never used or returned
    if self._args.properties:
        for prop in self._args.properties.split(','):
            properties.add(prop.upper().strip(), row[prop])
    return properties
def test_properties_with_own_created(self):
    """A user-supplied CREATED property must be rendered verbatim,
    followed by the deterministic ID and the drawer terminator."""
    props = OrgProperties()
    props.add("CREATED", OrgFormat.date(time.gmtime(0), show_time=True))
    rendered = str(props).splitlines()
    expected = [
        " :PROPERTIES:",
        " :CREATED: <1970-01-01 Thu 00:00>",
        " :ID: fede47e9f49e1b7f5c6599a6d607e9719ca98625",
        " :END:",
    ]
    for index, line in enumerate(expected):
        self.assertEqual(rendered[index], line)
def test_properties_with_own_created(self):
    # Verify that a user-supplied CREATED property is rendered verbatim,
    # followed by the deterministic ID and the drawer terminator.
    # NOTE(review): this is Python-2-only code — `unicode` and the u""
    # literals do not exist as such under Python 3; it appears to be the
    # py2 twin of the str()-based test elsewhere in the project.
    p = OrgProperties()
    p.add(u"CREATED", OrgFormat.datetime(time.gmtime(0)))
    properties = unicode(p).splitlines()
    self.assertEqual(properties[0], u" :PROPERTIES:")
    self.assertEqual(properties[1], u" :CREATED: <1970-01-0" + \
                     "1 Thu 00:00>")
    self.assertEqual(properties[2], u" :ID: fede47e9" + \
                     "f49e1b7f5c6599a6d607e9719ca98625")
    self.assertEqual(properties[3], u" :END:")
def __handle_vevent(self, component):
    """
    handles a VCALENDAR Component

    sets timezone to calendar's timezone

    @param component: icalendar component
    """
    logging.debug(component)
    summary = self.__vtext_to_unicode(component.get('summary'),
                                      nonetype="")
    location = self.__vtext_to_unicode(component.get('location'))
    description = self.__vtext_to_unicode(component.get('description'))
    # format: 20091207T180000Z or 20100122
    dtstart = self.__vtext_to_unicode(
        component.get('DTSTART').to_ical().decode('utf-8'))
    # format: 20091207T180000Z or 20100122
    if 'DTEND' in list(component.keys()):
        dtend = self.__vtext_to_unicode(
            component.get('DTEND').to_ical().decode('utf-8'))
    # format: 20091207T180000Z
    # not used: Datestamp created
    #dtstamp = self.__vtext_to_unicode(component.get('dtstamp'))
    # handle repeating events
    # not implemented due to org-mode datestime-range cannot be repeated
    # component.get('rrule')
    ## notice: end date/time is optional; no end date results in end date 9999-12-31
    if 'DTEND' in list(component.keys()):
        orgdate = self.__get_datetime_range(dtstart, dtend)
    else:
        orgdate = self.__get_datetime(dtstart) + "-<9999-12-31 Fri>"
    logging.debug(orgdate + " " + summary)
    # we need to set data_for_hashing=summary to really get a other sha1
    data_for_hashing = orgdate + summary
    org_properties = OrgProperties(data_for_hashing=data_for_hashing)
    # fix: identity comparison with None (PEP 8 "is not None", was "!= None")
    if location is not None:
        org_properties.add("LOCATION", location)
    if description is not None:
        org_properties.add("DESCRIPTION", description)
    self._writer.write_org_subitem(output=summary,
                                   properties=org_properties,
                                   timestamp=orgdate)
def startElement(self, name, attrs):
    """
    at every <sms> tag write to orgfile

    @param name: XML element name
    @param attrs: XML attribute mapping of the element
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "sms":
        #sms_subject = attrs['subject']
        sms_date = int(attrs['date']) / 1000  # unix epoch
        sms_body = attrs['body']
        sms_address = attrs['address']
        sms_time = attrs['time']
        sms_service_center = attrs['service_center']
        # type == 1 marks an incoming SMS
        sms_type_incoming = int(attrs['type']) == 1
        contact_name = attrs['name']

        skip = False

        # fix: truthiness test instead of "== True" (PEP 8)
        if sms_type_incoming:
            output = "SMS from "
            if self._ignore_incoming:
                skip = True
        else:
            output = "SMS to "
            if self._ignore_outgoing:
                skip = True

        if not skip:
            # link the contact name to the contact: Org link type
            name_string = ""
            if contact_name:
                name_string = '[[contact:' + contact_name + '][' + contact_name + ']]'
            else:
                name_string = "Unknown"
            output += name_string + ": "

            #if sms_subject != "null":
            #    # in case of MMS we have a subject
            #    output += sms_subject
            #    notes = sms_body
            #else:
            #    output += sms_body
            #    notes = ""
            notes = sms_body

            timestamp = OrgFormat.date(time.gmtime(sms_date), show_time=True)
            data_for_hashing = output + timestamp + notes
            properties = OrgProperties(data_for_hashing=data_for_hashing)
            properties.add("NUMBER", sms_address)
            properties.add("NAME", contact_name)
            properties.add("SMS_SERVICE_CENTER", sms_service_center)
            properties.add("TIME", sms_time)

            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           note=notes,
                                           properties=properties)
def _main(self):
    """
    get's automatically called from Memacs class
    fetches all mails out of mu database
    """
    command = self._query
    # command.extend(self._query)
    command = command + " --fields=t:#:d:#:f:#:g:#:s:#:i --format=plain"
    try:
        xml_mails = subprocess.check_output(command, shell=True)
    except subprocess.CalledProcessError:
        # fix: narrowed the former bare "except:" so that e.g.
        # KeyboardInterrupt/SystemExit are no longer swallowed
        print("something goes wrong")
        exit()
    messages = self.__parse_Plain(xml_mails)
    properties = OrgProperties()
    for message in messages:
        # fields are joined by the ":#:" separator requested above
        (an, datum, von, flags, betreff, msgid) = message.split(":#:")
        # square brackets would break the Org link syntax below
        betreff = betreff.replace("[", "<")
        betreff = betreff.replace("]", ">")
        properties.add('TO', an)
        if von != "":
            (sender, vname, vmail) = self.__create_mail_link(von)
            (an, aname, amail) = self.__create_mail_link(an)
            timestamp = self.__getTimestamp(datum)
            properties.add_data_for_hashing(timestamp + "_" + msgid)
            properties.add("FROM", sender)
            notes = ""
            # mails sent by one of the configured senders are "to do wait"
            if any(match in vmail for match in self._sender):
                # fix: removed the accidental double assignment
                # ("output = output = ...")
                output = "".join([
                    "T: ", an, ": [[mu4e:msgid:", msgid, "][", betreff, "]]"
                ])
                pre = 'WAITING '
            else:
                output = "".join([
                    "F: ", sender, ": [[mu4e:msgid:", msgid, "][", betreff, "]]"
                ])
                pre = 'NEXT '
            # flagged ('F') mails become scheduled todo entries
            if (flags.find('F') >= 0 and self._todo):
                date = self.__getTimestamp(datum, True)
                notes = "SCHEDULED: " + date
                timestamp = ""
            output = pre + output
            self._writer.write_org_subitem(timestamp, output, notes,
                                           properties)
def _handle_recent_tracks(self, tracks):
    """parse recent tracks"""
    logging.debug(tracks)
    for played in tracks:
        # scrobble timestamps are plain unix epoch seconds
        played_at = datetime.datetime.fromtimestamp(int(played.timestamp))
        entry = self._args.output_format.format(title=played.track.title,
                                                artist=played.track.artist,
                                                album=played.album)
        props = OrgProperties(data_for_hashing=played.timestamp)
        props.add('ARTIST', played.track.artist)
        props.add('ALBUM', played.album)
        self._writer.write_org_subitem(
            timestamp=OrgFormat.date(played_at, show_time=True),
            output=entry,
            properties=props)
def _handle_row(self, row):
    """
    handle a single row

    @param row: sequence of csv columns
    """
    try:
        # assume unix timestamp
        if not self._args.timestamp_format:
            timestamp = datetime.datetime.fromtimestamp(
                int(row[self._args.timestamp_field]))
        else:
            timestamp = time.strptime(row[self._args.timestamp_field],
                                      self._args.timestamp_format)

        # show time with the timestamp format, but only
        # if it contains at least hours and minutes
        if not self._args.timestamp_format or \
                any(x in self._args.timestamp_format for x in ['%H', '%M']):
            timestamp = OrgFormat.datetime(timestamp)
        else:
            timestamp = OrgFormat.date(timestamp)

    except ValueError as e:
        logging.error("timestamp-format does not match: %s", e)
        sys.exit(1)
    except IndexError as e:
        # fix: the original call passed "e" without a "%s" placeholder,
        # which itself triggers a logging formatting error
        logging.error("did you specify the right delimiter? %s", e)
        sys.exit(1)

    properties = OrgProperties(data_for_hashing=json.dumps(row))

    output = self._args.output_format.format(**row)

    if self._args.properties:
        for prop in self._args.properties.split(','):
            properties.add(prop.upper().strip(), row[prop])

    self._writer.write_org_subitem(timestamp=timestamp,
                                   output=output,
                                   properties=properties)
def __write(self):
    """
    write attributes to writer (make an org_sub_item)

    Uses the first non-empty line of the commit message as the subject
    and the remaining lines as notes.
    """
    logging.debug("msg:%s", self.__msg)
    self.__msg = self.__msg.splitlines()
    subject = ""
    notes = ""

    # idea: look for the first -nonempty- message
    if len(self.__msg) > 0:
        start_notes = 0
        for i in range(len(self.__msg)):
            if self.__msg[i].strip() != "":
                subject = self.__msg[i].strip()
                start_notes = i + 1
                break

        # everything after the subject line becomes the note body
        if len(self.__msg) > start_notes:
            for n in self.__msg[start_notes:]:
                if n != "":
                    notes += n + "\n"

    output = "%s (r%d): %s" % (self.__author, self.__rev, subject)

    properties = OrgProperties(data_for_hashing=self.__author + subject)
    timestamp = OrgFormat.date(OrgFormat.parse_basic_iso_datetime(
        self.__date), show_time=True)
    properties.add("REVISION", self.__rev)

    # fix: identity comparison with None (PEP 8 "is None", was "== None");
    # write only when no author filter is set or the author matches
    if self.__grepauthor is None or \
            (self.__author.strip() == self.__grepauthor.strip()):
        self._writer.write_org_subitem(output=output,
                                       timestamp=timestamp,
                                       note=notes,
                                       properties=properties)
def _handle_battery(self, bat):
    """
    handle single battery, e.g. BAT0
    """
    # calculate watt usage (current/voltage presumably in µA/µV — hence
    # the two divisions by 1e6)
    consumption = float(bat.current_now / 1000000.0 *
                        bat.voltage_now / 1000000.0)
    now_stamp = OrgFormat.date(datetime.datetime.now(), show_time=True)
    entry = self._args.output_format.format(battery=bat)
    props = OrgProperties(data_for_hashing=now_stamp)
    props.add("CYCLE_COUNT", bat.cycle_count)
    props.add("CAPACITY", '%s%%' % bat.capacity)
    props.add("STATUS", bat.status.lower())
    if consumption:
        props.add("CONSUMPTION", '%.1f W' % consumption)
    self._writer.write_org_subitem(timestamp=now_stamp,
                                   output=entry,
                                   properties=props)
def __get_item_data(self, item):
    """
    gets information out of <item>..</item>

    @return: output, note, properties, tags, timestamp
             variables for orgwriter.append_org_subitem
    """
    try:
        # logging.debug(item)
        properties = OrgProperties()
        guid = item['id']
        if not guid:
            logging.error("got no id")

        unformatted_link = item['link']
        short_link = OrgFormat.link(unformatted_link, "link")

        # if we found a url in title
        # then append the url in front of subject
        if re.search("http[s]?://", item['title']) is not None:
            output = short_link + ": " + item['title']
        else:
            output = OrgFormat.link(unformatted_link, item['title'])

        note = item['description']

        # converting updated_parsed UTC --> LOCALTIME
        # Karl 2018-09-22 this might be changed due to:
        # DeprecationWarning: To avoid breaking existing software
        # while fixing issue 310, a temporary mapping has been
        # created from `updated_parsed` to `published_parsed` if
        # `updated_parsed` doesn't exist. This fallback will be
        # removed in a future version of feedparser.
        timestamp = OrgFormat.date(time.localtime(
            calendar.timegm(item['updated_parsed'])), show_time=True)

        properties.add("guid", guid)

    except KeyError:
        logging.error("input is not a RSS 2.0")
        sys.exit(1)

    tags = []
    # Karl 2018-09-22 this might be changed due to:
    # DeprecationWarning: To avoid breaking existing software
    # while fixing issue 310, a temporary mapping has been created
    # from `updated_parsed` to `published_parsed` if
    # `updated_parsed` doesn't exist. This fallback will be
    # removed in a future version of feedparser.
    dont_parse = [
        'title', 'description', 'updated', 'summary', 'updated_parsed',
        'link', 'links'
    ]
    for i in item:
        logging.debug(i)
        if i not in dont_parse:
            # fix: the original condition read
            # "(type(i) == str or type(i) == str)" — a Python-2 leftover
            # (str or unicode) with a duplicated operand; isinstance()
            # is the idiomatic equivalent
            if isinstance(i, str) and isinstance(item[i], str) \
                    and item[i] != "":
                if i == "id":
                    i = "guid"
                properties.add(i, item[i])
            else:
                if i == "tags":
                    for tag in item[i]:
                        logging.debug("found tag: %s", tag['term'])
                        tags.append(tag['term'])

    return output, note, properties, tags, timestamp
class Commit(object):
    """
    class for representing one commit

    Collects header lines, a subject, a body and OrgProperties while a
    git log stream is parsed, and hands them back via get_output().
    """

    def __init__(self):
        """
        Ctor
        """
        # True until the first header/body line is added
        self.__empty = True
        self.__subject = ""
        self.__body = ""
        self.__timestamp = ""
        self.__author = ""
        self.__properties = OrgProperties()

    def __set_author_timestamp(self, line):
        """
        extracts the date + time from line:
        author Forename Lastname <mail> 1234567890 +0000
        @param line
        """
        self.__empty = False
        # the final 16 characters hold "<epoch seconds> <tz offset>"
        date_info = line[-16:]  # 1234567890 +0000
        seconds_since_epoch = float(date_info[:10])
        #timezone_info = date_info[11:]
        self.__timestamp = OrgFormat.date(
            time.localtime(seconds_since_epoch), show_time=True)
        # name is everything between "author " and the opening "<" of the mail
        self.__author = line[7:line.find("<")].strip()

    def add_header(self, line):
        """
        adds line to the header

        if line contains "author" this method
        calls self.__set_author_timestamp(line)
        for setting right author + datetime created

        every line will be added as property
        i.e: commit <hashtag>
        would then be following property:
        :COMMIT: <hashtag>

        @param line:
        """
        self.__empty = False
        if line != "":
            # first word becomes the property key, rest the value
            whitespace = line.find(" ")
            tag = line[:whitespace].upper()
            value = line[whitespace:]
            self.__properties.add(tag, value)
            if tag == "AUTHOR":
                self.__set_author_timestamp(line)

    def add_body(self, line):
        """
        adds a line to the body

        if line starts with Signed-off-by,
        also a property of that line is added
        """
        line = line.strip()
        if line != "":
            if line[:14] == "Signed-off-by:":
                # skip "Signed-off-by: " (14 chars + the following space)
                self.__properties.add("SIGNED-OFF-BY", line[15:])
            elif self.__subject == "":
                # first non-empty, non-sign-off line is the subject
                self.__subject = line
            else:
                self.__body += line + "\n"

    def is_empty(self):
        """
        @return: True  - empty commit
                 False - not empty commit
        """
        return self.__empty

    def get_output(self):
        """
        @return tuple: output, properties, body, author, timestamp
                for Orgwriter.write_sub_item()
        """
        output = self.__author + ": " + self.__subject
        return output, self.__properties, self.__body, self.__author, \
            self.__timestamp
def startElement(self, name, attrs):
    """
    at every <sms> tag write to orgfile

    @param name: XML element name
    @param attrs: XML attribute mapping of the element
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "sms":
        sms_subject = attrs.get('subject', '')
        sms_date = int(attrs['date']) / 1000  # unix epoch
        sms_body = attrs['body']
        # normalize the number: strip separators, map leading "+" to "00"
        sms_address = attrs['address'].strip().replace('-', '').replace(
            '/', '').replace(' ', '').replace('+', '00')
        # type == 1 marks an incoming SMS
        sms_type_incoming = int(attrs['type']) == 1
        contact_name = False
        if 'contact_name' in attrs:
            ## NOTE: older version of backup app did not insert contact_name into XML
            contact_name = attrs['contact_name']
        else:
            if self._numberdict:
                if sms_address in list(self._numberdict.keys()):
                    contact_name = self._numberdict[sms_address]

        skip = False

        # fix: truthiness test instead of "== True" (PEP 8)
        if sms_type_incoming:
            output = "SMS from "
            if self._ignore_incoming:
                skip = True
        else:
            output = "SMS to "
            if self._ignore_outgoing:
                skip = True

        if not skip:
            # link the contact name to the contact: Org link type
            name_string = ""
            if contact_name:
                name_string = '[[contact:' + contact_name + '][' + contact_name + ']]'
            else:
                name_string = "Unknown"
            output += name_string + ": "

            ## reverse encoding hack from just before:
            # fix: html.parser.HTMLParser.unescape() was deprecated and
            # removed in Python 3.9; html.unescape() (available since
            # 3.4) is the documented drop-in replacement
            sms_body = html.unescape(
                sms_body.replace('EnCoDiNgHaCk42', '&#'))
            for emoji in list(self.EMOJIS.keys()):
                ## FIXXME: this is a horrible dumb brute-force algorithm.
                ## In case of bad performance, this can be optimized dramtically
                sms_body = sms_body.replace(emoji, self.EMOJI_ENCLOSING_CHARACTER + \
                    self.EMOJIS[emoji] + self.EMOJI_ENCLOSING_CHARACTER).replace('\n', '⏎')

            if sms_subject != "null":
                # in case of MMS we have a subject
                output += sms_subject
                notes = sms_body
            else:
                output += sms_body
                notes = ""

            timestamp = OrgFormat.date(time.gmtime(sms_date), show_time=True)
            data_for_hashing = output + timestamp + notes
            properties = OrgProperties(data_for_hashing=data_for_hashing)
            properties.add("NUMBER", sms_address)
            properties.add("NAME", contact_name)

            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           note=notes,
                                           properties=properties)
def _generateOrgentry(self, e_time, e_name, e_batt, e_uptime,
                      e_last_opposite_occurrence, e_last_occurrence,
                      prev_office_sum, prev_office_first_begin,
                      office_lunchbreak, battery_percentage_when_booting):
    """
    takes the data from the parameters and generates an Org-mode entry.

    @param e_time: time-stamp of the entry
    @param e_name: entry name/description
    @param e_batt: battery level
    @param e_uptime: uptime in seconds
    @param e_last_opposite_occurrence: time-stamp of previous opposite
           occurrence (if not False)
    @param e_last_occurrence: time-stamp of previous occurrence
    @param additional_paren_string: string that gets appended to the
           parenthesis
    @param prev_office_sum: holds the sum of all previous working
           duration today
    @param prev_office_first_begin: holds the first time-stamp of
           wifi-office for today
    @param office_lunchbreak: array of begin- and end-time-stamp of
           lunch-break (if any)
    @param battery_percentage_when_booting: battery level of previous
           boot (only set if no charge event was in-between)
    """
    assert e_time.__class__ == datetime.datetime
    assert e_name.__class__ == str
    assert e_batt.__class__ == str
    assert e_uptime.__class__ == str
    assert (e_last_opposite_occurrence.__class__ == datetime.datetime or
            not e_last_opposite_occurrence)
    assert (e_last_occurrence.__class__ == datetime.datetime or
            not e_last_occurrence)
    assert (not battery_percentage_when_booting or
            battery_percentage_when_booting.__class__ == int)

    last_info = ''
    in_between_hms = ''
    in_between_s = ''
    ignore_occurrence = False

    # convert parameters to be writable:
    office_sum = prev_office_sum
    office_first_begin = prev_office_first_begin

    if e_last_opposite_occurrence:

        # whole-second duration since the opposite event
        # (.seconds + .days*86400 == total seconds of the timedelta)
        in_between_s = (e_time - e_last_opposite_occurrence).seconds + \
            (e_time - e_last_opposite_occurrence).days * 3600 * 24
        in_between_hms = str(OrgFormat.hms_from_sec(in_between_s))

        if e_name == 'boot':
            last_info = ' (off for '
        elif e_name == 'shutdown':
            last_info = ' (on for '
        elif e_name.endswith('-end'):
            # e.g. "wifi-office-end" -> " (office for "
            last_info = ' (' + \
                e_name[0:-4].replace('wifi-', '') + ' for '
        else:
            last_info = ' (not ' + e_name.replace('wifi-', '') + ' for '

        # handle special case: office hours
        additional_paren_string = ""
        if e_name == 'wifi-office-end':
            office_total = None

            # calculate office_sum and office_total
            if not office_sum:
                # first office interval of the day
                # NOTE(review): uses .seconds only (no .days component),
                # unlike in_between_s above — confirm office intervals
                # never span midnight
                office_sum = (e_time - e_last_opposite_occurrence).seconds
                office_total = office_sum
            else:
                assert (office_first_begin)
                assert (office_sum)
                office_sum = office_sum + (
                    e_time - e_last_opposite_occurrence).seconds
                # wall-clock span from first begin to now
                office_total = int(
                    time.mktime(e_time.timetuple()) -
                    time.mktime(office_first_begin.timetuple()))

            assert (type(office_total) == int)
            assert (type(office_sum) == int)
            assert (type(in_between_s) == int)

            # come up with the additional office-hours string:
            additional_paren_string = '; today ' + OrgFormat.hms_from_sec(office_sum) + \
                '; today total ' + OrgFormat.hms_from_sec(office_total)

        if additional_paren_string:
            last_info += str(OrgFormat.dhms_from_sec(
                in_between_s)) + additional_paren_string + ')'
        else:
            last_info += str(OrgFormat.dhms_from_sec(in_between_s)) + ')'

    elif e_last_occurrence:

        # no opposite event known: measure against the same-type event
        in_between_s = (e_time - e_last_occurrence).seconds + \
            (e_time - e_last_occurrence).days * 3600 * 24
        in_between_hms = str(OrgFormat.hms_from_sec(in_between_s))

        # handle special case: office hours
        if e_name == 'wifi-office':
            if not office_sum or not office_first_begin:
                # new day
                office_first_begin = e_time
            else:
                # check if we've found a lunch-break (first wifi-office
                # between 11:30-13:00 where not office for > 17min)
                # NOTE(review): inside this elif-branch
                # e_last_opposite_occurrence is necessarily falsy, so
                # this condition can never be True here — verify intent
                if e_time.time() > datetime.time(
                        11, 30) and e_time.time() < datetime.time(
                            13, 00) and e_last_opposite_occurrence:
                    if e_last_opposite_occurrence.date() == e_time.date(
                    ) and in_between_s > (17 * 60) and in_between_s < (80 * 60):
                        office_lunchbreak = [
                            e_last_opposite_occurrence.time(),
                            e_time.time()
                        ]

    # handle special case: boot without previous shutdown = crash
    if (e_name == 'boot') and \
       (e_last_occurrence and e_last_opposite_occurrence) and \
       (e_last_occurrence >
            e_last_opposite_occurrence):
        # last boot is more recent than last shutdown -> crash has happened
        last_info = ' after crash'
        in_between_hms = ''
        in_between_s = ''
        ignore_occurrence = True

    properties = OrgProperties()
    if in_between_s == 0:
        # omit in-between content of property when it is zero
        in_between_s = ''
        in_between_hms = ''
    properties.add("IN-BETWEEN", in_between_hms)
    properties.add("IN-BETWEEN-S", str(in_between_s))
    properties.add("BATT-LEVEL", e_batt)
    properties.add("UPTIME", OrgFormat.hms_from_sec(int(e_uptime)))
    properties.add("UPTIME-S", e_uptime)

    if e_name == 'wifi-office-end' and office_lunchbreak:
        # Org table row: date | day | begin | lunch start | lunch end | end
        properties.add(
            "OFFICE-SUMMARY",
            e_last_opposite_occurrence.strftime('| %Y-%m-%d | %a ') +
            prev_office_first_begin.strftime('| %H:%M ') +
            office_lunchbreak[0].strftime('| %H:%M ') +
            office_lunchbreak[1].strftime('| %H:%M ') +
            e_time.strftime('| %H:%M | | |'))
    elif e_name == 'wifi-office-end' and not office_lunchbreak:
        # no lunch-break detected: fill in the default 11:30-12:00 slot
        properties.add(
            "OFFICE-SUMMARY",
            e_last_opposite_occurrence.strftime('| %Y-%m-%d | %a ') +
            prev_office_first_begin.strftime('| %H:%M | 11:30 | 12:00 ') +
            e_time.strftime('| %H:%M | | |'))
    elif e_name == 'shutdown':
        if battery_percentage_when_booting:
            batt_diff_from_boot_to_shutdown = battery_percentage_when_booting - int(
                e_batt)
            if batt_diff_from_boot_to_shutdown >= 20:
                # hypothetical run-time (in hours; derived from boot to shutdown) of the device for 100% battery capacity
                # Note: battery_percentage_when_booting is set to False when a "charge-start"-event is recognized between boot and shutdown
                # Note: only calculated when at least 20 percent difference of battery level between boot and shutdown
                runtime_extrapolation = 100 * int(
                    e_uptime) // batt_diff_from_boot_to_shutdown // 3600
                properties.add("HOURS_RUNTIME_EXTRAPOLATION",
                               runtime_extrapolation)

    self._writer.write_org_subitem(
        timestamp=e_time.strftime('<%Y-%m-%d %a %H:%M>'),
        output=e_name + last_info,
        properties=properties)

    # also return the rendered entry text plus the updated office state
    # so the caller can carry it to the next event
    return '** ' + e_time.strftime('<%Y-%m-%d %a %H:%M>') + ' ' + \
        e_name + last_info + \
        '\n:PROPERTIES:\n:IN-BETWEEN: ' + in_between_hms + \
        '\n:IN-BETWEEN-S: ' + str(in_between_s) + \
        '\n:BATT-LEVEL: ' + e_batt + \
        '\n:UPTIME: ' + str(OrgFormat.hms_from_sec(int(e_uptime))) + \
        '\n:UPTIME-S: ' + str(e_uptime) + '\n:END:\n', \
        ignore_occurrence, office_sum, office_first_begin, office_lunchbreak
def startElement(self, name, attrs):
    """
    at every <log> write to orgfile

    @param name: XML element name
    @param attrs: XML attribute mapping of the element
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "log":
        call_number = attrs['number']
        call_duration = int(attrs['dur'])
        call_date = int(attrs['date']) / 1000  # unix epoch
        call_type = int(attrs['type'])
        call_incoming = call_type == 1
        call_outgoing = call_type == 2
        call_missed = call_type == 3
        call_voicemail = call_type == 4
        call_rejected = call_type == 5
        call_refused = call_type == 6
        call_name = attrs['name']

        output = "Phonecall "
        skip = False

        if call_incoming:
            output += "from "
            if self._ignore_incoming:
                skip = True
        elif call_outgoing:
            output += "to "
            if self._ignore_outgoing:
                skip = True
        elif call_missed:
            output += "missed "
            if self._ignore_missed:
                skip = True
        elif call_voicemail:
            output += "voicemail "
            if self._ignore_voicemail:
                skip = True
        elif call_rejected:
            output += "rejected "
            if self._ignore_rejected:
                skip = True
        elif call_refused:
            output += "refused "
            if self._ignore_refused:
                skip = True
        else:
            # fix: the original passed call_type as a second Exception()
            # argument instead of %-formatting it into the message, so
            # the "%d" was never substituted
            raise Exception("Invalid Phonecall Type: %d" % call_type)

        call_number_string = ""
        if call_number != "-1":
            call_number_string = call_number
        else:
            call_number_string = "Unknown Number"

        # link known contacts via the contact: Org link type
        name_string = ""
        if call_name != "(Unknown)":
            name_string = '[[contact:' + call_name + '][' + call_name + ']]'
        else:
            name_string = "Unknown"
        output += name_string

        # drop calls shorter than the configured minimum duration
        if call_duration < self._minimum_duration:
            skip = True

        timestamp = OrgFormat.date(time.gmtime(call_date), show_time=True)
        end_datetimestamp = datetime.datetime.utcfromtimestamp(
            call_date + call_duration)
        logging.debug(
            "timestamp[%s] duration[%s] end[%s]" %
            (str(timestamp), str(call_duration), str(end_datetimestamp)))
        end_timestamp_string = OrgFormat.date(end_datetimestamp,
                                              show_time=True)
        logging.debug("end_time [%s]" % end_timestamp_string)

        data_for_hashing = output + timestamp
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        properties.add("NUMBER", call_number_string)
        properties.add("DURATION", call_duration)
        properties.add("NAME", call_name)

        if not skip:
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp + '-' +
                                           end_timestamp_string,
                                           properties=properties)
def _main(self): """ get's automatically called from Memacs class """ # do all the stuff # if you need something from config: # attention: foo will be unicode # foo = self._get_config_option("foo") logging.info("foo started") # how to handle config files ? # sample config file: # ---------8<----------- # [memacs-example] # foo = 0 # bar = 1 # --------->8----------- # to read it out, just do following: # foo = self._get_config_option("foo") # bar = self._get_config_option("bar") # use logging.debug() for debug messages # use logging.error() for error messages # use logging.info() instead of print for informing user # # on an fatal error: # use logging.error() and sys.exit(1) timestamp = OrgFormat.datetime(time.gmtime(0)) # note: timestamp has to be a struct_time object # Orgproperties # Option 1: no properties given, specify argument for hashing data properties = OrgProperties("hashing data :ALKJ!@# should be unique") # Option 2: add properties which are all-together unique # properties.add("Category","fun") # properties.add("from","*****@*****.**") # properties.add("body","foo") self._writer.write_org_subitem(timestamp=timestamp, output="foo", properties=properties) # writes following: #** <1970-01-01 Thu 00:00> foo # :PROPERTIES: # :ID: da39a3ee5e6b4b0d3255bfef95601890afd80709 # :END: notes = "bar notes\nfoo notes" p = OrgProperties(data_for_hashing="read comment below") # if a hash is not unique only with its :PROPERTIES: , then # set data_for_hasing string additional information i.e. the output # , which then makes the hash really unique # # if you *really*, *really* have already a unique id, # then you can call following method: # p.set_id("unique id here") p.add("DESCRIPTION", "foooo") p.add("foo-property", "asdf") tags = ["tag1", "tag2"] self._writer.write_org_subitem(timestamp=timestamp, output="bar", note=notes, properties=p, tags=tags)
def _main(self):
    """
    Fetch the home timeline via the Twitter API (Twython) and write one
    Org sub-item per tweet.
    """
    APP_KEY = self._get_config_option("APP_KEY")
    APP_SECRET = self._get_config_option("APP_SECRET")
    OAUTH_TOKEN = self._get_config_option("OAUTH_TOKEN")
    OAUTH_TOKEN_SECRET = self._get_config_option("OAUTH_TOKEN_SECRET")
    screen_name = self._get_config_option("screen_name")
    count = self._get_config_option("count")
    twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)

    try:
        # NOTE(review): Twython's documented keyword is "screen_name";
        # confirm "screenname" is actually honored by the API wrapper
        home_timeline = twitter.get_home_timeline(screenname=screen_name,
                                                  count=count)
    except TwythonError as e:
        logging.error(e)
        sys.exit(1)

    for tweet in home_timeline:
        # strptime doesn't support timezone info, so we are using dateutils.
        date_object = parser.parse(tweet['created_at'])
        timestamp = OrgFormat.date(date_object, show_time=True)
        try:
            # Data is already Unicode, so don't try to re-encode it.
            output = tweet['text']
        except KeyError:
            # fix: the former bare "except:" swallowed everything and
            # then fell through with "output" still bound to the
            # PREVIOUS tweet's text (or unbound on the first
            # iteration); skip this tweet instead
            logging.error(sys.exc_info()[0])
            print("Error: ", sys.exc_info()[0])
            continue

        # NOTE(review): "output" appears twice in the hash input —
        # presumably intentional for uniqueness, but verify
        data_for_hashing = output + timestamp + output
        properties = OrgProperties(data_for_hashing=data_for_hashing)

        properties.add("name", tweet['user']['name'])
        properties.add("twitter_id", tweet['id'])
        properties.add("contributors", tweet['contributors'])
        properties.add("truncated", tweet['truncated'])
        properties.add("in_reply_to_status_id",
                       tweet['in_reply_to_status_id'])
        properties.add("favorite_count", tweet['favorite_count'])
        properties.add("source", tweet['source'])
        properties.add("retweeted", tweet['retweeted'])
        properties.add("coordinates", tweet['coordinates'])
        properties.add("entities", tweet['entities'])

        self._writer.write_org_subitem(timestamp=timestamp,
                                       output=output,
                                       properties=properties)