def __get_datetime(self, mydate):
    """
    Convert a date string into an Org-mode formatted date.

    @return string: Datetime - in Org Format
    """
    return OrgFormat.date(OrgFormat.datetupelutctimestamp(mydate))
def __handle_file(self, file, rootdir):
    """
    Handle a single file: extract a date (from the file name and/or
    mtime) and write an entry for it.

    @param file: file name
    @param rootdir: directory the file lives in
    """
    # don't handle emacs tmp files (file~)
    if file.endswith('~'):
        return
    link = os.path.join(rootdir, file)
    logging.debug(link)
    if self._args.force_filedate_extraction:
        file_datetime = time.localtime(os.path.getmtime(link))
        if self._args.skip_filetime_extraction:
            orgdate = OrgFormat.date(file_datetime)
        else:
            orgdate = OrgFormat.datetime(file_datetime)
        self.__write_file(file, link, orgdate)
    elif DATESTAMP_REGEX.match(file):
        try:
            # we put this in a try block because a bogus timestamp
            # (e.g. 2011-14-19, or a false time) cannot easily be
            # rejected by the regex; strptime raises
            # TimestampParseException for those
            self.__parse_file(file, link)
        except TimestampParseException:
            # was "except TimestampParseException, e:" -- Python-2-only
            # syntax with an unused binding
            logging.warning("False date(time) in file: %s", link)
def __handle_file(self, photo_file, filename):
    """
    Check if the file is an image, try to get its EXIF date and write
    an entry to the org file.

    @param photo_file: file name (used as link description / hash data)
    @param filename: full path to the file
    """
    logging.debug("handling file %s", filename)
    # only images can carry EXIF data
    if imghdr.what(filename) is not None:
        exif_datetime = get_exif_datetime(filename)
        if exif_datetime is None:
            logging.debug("skipping: %s has no EXIF information", filename)
        else:
            try:
                # EXIF dates look like "2014:11:06 21:22:17"
                parsed = time.strptime(exif_datetime, "%Y:%m:%d %H:%M:%S")
                timestamp = OrgFormat.datetime(parsed)
                output = OrgFormat.link(filename, photo_file)
                properties = OrgProperties(photo_file + timestamp)
                self._writer.write_org_subitem(timestamp=timestamp,
                                               output=output,
                                               properties=properties)
            except ValueError as e:
                logging.warning("skipping: Could not parse "
                                "timestamp for %s : %s", filename, e)
def _handle_url(self, params):
    """
    Write one URL entry.

    The incoming timestamp counts microseconds since 1601-01-01
    (WebKit/Chrome convention); convert it to the Unix epoch first.

    @param params: dict with 'timestamp', 'url', 'title', 'visit_count'
    """
    epoch = datetime.datetime(1970, 1, 1) - datetime.datetime(1601, 1, 1)
    url_time = params['timestamp'] / 1000000 - epoch.total_seconds()
    if url_time > 0:
        timestamp = datetime.datetime.fromtimestamp(int(url_time))
    else:
        # invalid or pre-epoch value: fall back to the Unix epoch
        timestamp = datetime.datetime(1970, 1, 1)
    properties = None
    if not self._args.omit_drawer:
        properties = OrgProperties()
        if params['title'] == "":
            params['title'] = params['url']
        properties.add('URL', params['url'])
        properties.add('VISIT_COUNT', params['visit_count'])
    output = ""
    try:
        output = self._args.output_format.decode('utf-8').format(**params)
    except Exception as e:
        # best effort: keep an empty output, but don't swallow the
        # problem silently (was "except Exception: pass")
        logging.warning("could not format output: %s", e)
    # properties is None exactly when --omit-drawer was requested
    self._writer.write_org_subitem(
        timestamp=OrgFormat.datetime(timestamp),
        output=output,
        properties=properties)
def _process_range(self, start, stop):
    """
    Write an Org entry for a stay at a place from *start* to *stop*.

    @param start: dict with 'time' (unix epoch) and 'place'
    @param stop: dict with 'time' (unix epoch) and 'place'
    """
    start_timestamp = OrgFormat.datetime(time.localtime(start['time']))
    stop_timestamp = OrgFormat.datetime(time.localtime(stop['time']))
    range_timestamp = '{}-{}'.format(start_timestamp, stop_timestamp)

    duration = stop['time'] - start['time']
    # express the duration as total-hours:minutes; the previous
    # divmod-by-days chain computed exactly this (and left an unused
    # seconds variable behind)
    hours, remainder = divmod(duration, 3600)
    minutes = remainder // 60
    duration_string = '{:d}:{:02d}'.format(hours, minutes)

    output = "Stayed at: {} for {}".format(start['place'], duration_string)
    data_for_hashing = output + range_timestamp
    properties = OrgProperties(data_for_hashing=data_for_hashing)
    properties.add("PLACE", start['place'])
    properties.add("DURATION", duration_string)
    self._writer.write_org_subitem(output=output,
                                   timestamp=range_timestamp,
                                   properties=properties,
                                   tags=[start['place']])
def _handle_row(self, row):
    """
    Handle a single row: parse its timestamp field and format it as an
    Org date or datetime.

    @param row: mapping/sequence indexed by self._args.timestamp_field
    """
    try:
        if not self._args.timestamp_format:
            # no format given: assume a unix timestamp
            timestamp = datetime.datetime.fromtimestamp(
                int(row[self._args.timestamp_field]))
        else:
            timestamp = time.strptime(row[self._args.timestamp_field],
                                      self._args.timestamp_format)
        # show the time of day only if the format carries at least
        # hours or minutes
        if not self._args.timestamp_format or \
                any(x in self._args.timestamp_format for x in ['%H', '%M']):
            timestamp = OrgFormat.datetime(timestamp)
        else:
            timestamp = OrgFormat.date(timestamp)
    except ValueError as e:
        # was "except ValueError, e:" -- Python-2-only syntax
        logging.error("timestamp-format does not match: %s", e)
        sys.exit(1)
def _handle_url(self, params):
    """
    Write one URL entry; the incoming timestamp counts microseconds
    since the Unix epoch.

    @param params: dict with 'timestamp', 'url', 'title', 'visit_count'
    """
    timestamp = datetime.datetime.fromtimestamp(
        int(params['timestamp'] / 1000000))
    properties = None
    if not self._args.omit_drawer:
        properties = OrgProperties()
        if params['title'] == "":
            params['title'] = params['url']
        properties.add('URL', params['url'])
        properties.add('VISIT_COUNT', params['visit_count'])
    output = ""
    try:
        output = self._args.output_format.decode('utf-8').format(**params)
    except Exception as e:
        # best effort: keep an empty output, but don't swallow the
        # problem silently (was "except Exception: pass")
        logging.warning("could not format output: %s", e)
    # properties is None exactly when --omit-drawer was requested
    self._writer.write_org_subitem(
        timestamp=OrgFormat.datetime(timestamp),
        output=output,
        properties=properties)
def __get_item_data(self, item):
    """
    Extract information from one <item>..</item> element.

    @return: output, note, properties, tags, timestamp
             variables for orgwriter.append_org_subitem
    """
    try:
        properties = OrgProperties()
        guid = item['id']
        if not guid:
            logging.error("got no id")
        unformatted_link = item['link']
        short_link = OrgFormat.link(unformatted_link, "link")
        # if the title already contains a url, prepend a short link
        # instead of wrapping the whole title in one
        if re.search("http[s]?://", item['title']) is not None:
            output = short_link + ": " + item['title']
        else:
            output = OrgFormat.link(unformatted_link, item['title'])
        note = item['description']
        # converting updated_parsed UTC --> LOCALTIME
        timestamp = OrgFormat.datetime(
            time.localtime(calendar.timegm(item['updated_parsed'])))
        properties.add("guid", guid)
    except KeyError:
        logging.error("input is not a RSS 2.0")
        sys.exit(1)
    tags = []
    # keys already handled above (or of no interest as properties)
    dont_parse = ['title', 'description', 'updated', 'summary',
                  'updated_parsed', 'link', 'links']
    for i in item:
        logging.debug(i)
        if i in dont_parse:
            continue
        if isinstance(i, (str, unicode)) and \
                isinstance(item[i], unicode) and item[i] != "":
            if i == "id":
                i = "guid"
            properties.add(i, item[i])
        elif i == "tags":
            for tag in item[i]:
                logging.debug("found tag: %s", tag['term'])
                tags.append(tag['term'])
    return output, note, properties, tags, timestamp
def __getTimestamp(self, time, onlyDate=False):
    """
    Convert an xml timestamp such as "Do 6 Nov 21:22:17 2014" into an
    org readable timestamp; with onlyDate=True only the date part is
    returned.
    """
    mail_date = datetime.strptime(time.strip().encode('utf-8'),
                                  "%a %d %b %H:%M:%S %Y")
    if onlyDate is False:
        return OrgFormat.datetime(mail_date)
    return OrgFormat.date(mail_date)
def __getTimestamp(self, time, onlyDate=False):
    """
    Convert an xml timestamp (locale's "%c" representation, e.g.
    "Do 6 Nov 21:22:17 2014") into an org readable timestamp; with
    onlyDate=True only the date part is returned.
    """
    mail_date = datetime.strptime(time.strip().encode('utf-8'), "%c")
    if onlyDate is False:
        return OrgFormat.datetime(mail_date)
    return OrgFormat.date(mail_date)
def __get_item_data(self, item):
    """
    Extract information from one <item>..</item> element.

    @return: output, note, properties, tags, timestamp
             variables for orgwriter.append_org_subitem
    """
    try:
        properties = OrgProperties()
        guid = item['id']
        if not guid:
            logging.error("got no id")
        unformatted_link = item['link']
        short_link = OrgFormat.link(unformatted_link, "link")
        # if the title already contains a url, prepend a short link
        # instead of wrapping the whole title in one
        if re.search("http[s]?://", item['title']) is not None:
            output = short_link + ": " + item['title']
        else:
            output = OrgFormat.link(unformatted_link, item['title'])
        note = item['description']
        # converting updated_parsed UTC --> LOCALTIME
        timestamp = OrgFormat.datetime(
            time.localtime(calendar.timegm(item['updated_parsed'])))
        properties.add("guid", guid)
    except KeyError:
        logging.error("input is not a RSS 2.0")
        sys.exit(1)
    tags = []
    # keys already handled above (or of no interest as properties)
    dont_parse = ['title', 'description', 'updated', 'summary',
                  'updated_parsed', 'link', 'links']
    for i in item:
        logging.debug(i)
        if i in dont_parse:
            continue
        if isinstance(i, (str, unicode)) and \
                isinstance(item[i], unicode) and item[i] != "":
            if i == "id":
                i = "guid"
            properties.add(i, item[i])
        elif i == "tags":
            for tag in item[i]:
                logging.debug("found tag: %s", tag['term'])
                tags.append(tag['term'])
    return output, note, properties, tags, timestamp
def _handle_message(self, msg):
    """parse a single message row"""
    msg['number'] = '00' + msg['number'].split('@')[0]
    # derive direction words from the original (truthy) type flag
    if msg['type']:
        msg['verb'], msg['type'] = 'to', 'OUTGOING'
    else:
        msg['verb'], msg['type'] = 'from', 'INCOMING'
    msg['handler'] = self._args.handler
    if msg['text']:
        if self._args.demojize:
            msg['text'] = emoji.demojize(msg['text'])
        if self._args.skip_emoji:
            msg['text'] = re.sub(emoji.get_emoji_regexp(), '', msg['text'])
    timestamp = datetime.datetime.fromtimestamp(msg['timestamp'] / 1000)
    properties = OrgProperties(data_for_hashing=json.dumps(msg))
    properties.add('NUMBER', msg['number'])
    properties.add('TYPE', msg['type'])
    output = self._args.output_format.decode('utf-8').format(**msg)
    if msg['text'] and not self._is_ignored(msg):
        self._writer.write_org_subitem(
            timestamp=OrgFormat.datetime(timestamp),
            output=output,
            properties=properties)
def write_point(self, p):
    """write a point (including geocoding)"""
    timestamp = OrgFormat.datetime(p.time)
    geocode = self.reverse_geocode(p.latitude, p.longitude)
    output = self._args.output_format.decode('utf-8').format(**geocode)
    properties = OrgProperties(data_for_hashing=timestamp)
    # only attach coordinates that are actually set
    for key, value in (('LATITUDE', p.latitude),
                       ('LONGITUDE', p.longitude)):
        if value:
            properties.add(key, value)
    tags = [p.source.lower()] if p.source else []
    if timestamp:
        self._writer.write_org_subitem(timestamp=timestamp,
                                       output=output,
                                       properties=properties,
                                       tags=tags)
def startElement(self, name, attrs):
    """
    Write an entry to the org file for every <sms> tag.
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "sms":
        sms_date = int(attrs['date']) / 1000  # unix epoch
        sms_body = attrs['body']
        sms_address = attrs['address']
        sms_time = attrs['time']
        sms_service_center = attrs['service_center']
        sms_type_incoming = int(attrs['type']) == 1
        contact_name = attrs['name']
        skip = False
        if sms_type_incoming:
            output = "SMS from "
            if self._ignore_incoming:
                skip = True
        else:
            output = "SMS to "
            if self._ignore_outgoing:
                skip = True
        if not skip:
            if contact_name:
                # link the name to its org-contacts entry
                name_string = '[[contact:' + contact_name + '][' + \
                    contact_name + ']]'
            else:
                name_string = "Unknown"
            output += name_string + ": "
            notes = sms_body
            timestamp = OrgFormat.datetime(time.gmtime(sms_date))
            data_for_hashing = output + timestamp + notes
            properties = OrgProperties(data_for_hashing=data_for_hashing)
            properties.add("NUMBER", sms_address)
            properties.add("NAME", contact_name)
            properties.add("SMS_SERVICE_CENTER", sms_service_center)
            properties.add("TIME", sms_time)
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           note=notes,
                                           properties=properties)
def __get_datetime_range(self, dtstart, dtend):
    """
    @return string: Datetime - Range in Org Format
    """
    begin_tupel = OrgFormat.datetupelutctimestamp(dtstart)
    end_tupel = OrgFormat.datetupelutctimestamp(dtend)

    # an "all-day" event has both endpoints exactly at midnight
    def at_midnight(t):
        return t.tm_hour == 0 and t.tm_min == 0 and t.tm_sec == 0

    if at_midnight(begin_tupel) and at_midnight(end_tupel):
        # we have to subtract 1 day to get the correct dates
        end_tupel = time.localtime(time.mktime(end_tupel) - 24 * 60 * 60)
    return OrgFormat.utcrange(begin_tupel, end_tupel)
def startElement(self, name, attrs):
    """
    Write an entry to the org file for every <sms> tag.
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "sms":
        sms_subject = attrs['subject']
        sms_date = int(attrs['date']) / 1000  # unix epoch
        sms_body = attrs['body']
        sms_address = attrs['address']
        sms_type_incoming = int(attrs['type']) == 1
        contact_name = False
        if 'contact_name' in attrs:
            ## NOTE: older version of backup app did not insert
            ## contact_name into XML
            contact_name = attrs['contact_name']
        skip = False
        if sms_type_incoming:
            output = "SMS from "
            if self._ignore_incoming:
                skip = True
        else:
            output = "SMS to "
            if self._ignore_outgoing:
                skip = True
        if not skip:
            if contact_name:
                # link the name to its org-contacts entry
                name_string = '[[contact:' + contact_name + '][' + \
                    contact_name + ']]'
            else:
                name_string = "Unknown"
            output += name_string + ": "
            if sms_subject != "null":
                # in case of MMS we have a subject
                output += sms_subject
                notes = sms_body
            else:
                output += sms_body
                notes = ""
            timestamp = OrgFormat.datetime(time.gmtime(sms_date))
            data_for_hashing = output + timestamp + notes
            properties = OrgProperties(data_for_hashing=data_for_hashing)
            properties.add("NUMBER", sms_address)
            properties.add("NAME", contact_name)
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           note=notes,
                                           properties=properties)
def _main(self): APP_KEY = self._get_config_option("APP_KEY") APP_SECRET = self._get_config_option("APP_SECRET") OAUTH_TOKEN = self._get_config_option("OAUTH_TOKEN") OAUTH_TOKEN_SECRET = self._get_config_option("OAUTH_TOKEN_SECRET") screen_name = self._get_config_option("screen_name") count = self._get_config_option("count") twitter = Twython( APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET ) try: home_timeline = twitter.get_home_timeline(screenname=screen_name, count=count) except TwythonError as e: logging.error(e) sys.exit(1) for tweet in home_timeline: # strptime doesn't support timezone info, so we are using dateutils. date_object = parser.parse(tweet['created_at']) timestamp = OrgFormat.datetime(date_object) try: # Data is already Unicode, so don't try to re-encode it. output = tweet['text'] except: logging.error(sys.exc_info()[0]) print "Error: ", sys.exc_info()[0] data_for_hashing = output + timestamp + output properties = OrgProperties(data_for_hashing=data_for_hashing) properties.add("name", tweet['user']['name']) properties.add("twitter_id", tweet['id']) properties.add("contributors", tweet['contributors']) properties.add("truncated", tweet['truncated']) properties.add("in_reply_to_status_id", tweet['in_reply_to_status_id']) properties.add("favorite_count", tweet['favorite_count']) properties.add("source", tweet['source']) properties.add("retweeted", tweet['retweeted']) properties.add("coordinates", tweet['coordinates']) properties.add("entities", tweet['entities']) self._writer.write_org_subitem(timestamp=timestamp, output = output, properties = properties)
def startElement(self, name, attrs):
    """
    Write an entry to the org file for every <call> tag.
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "call":
        call_number = attrs['number']
        call_duration = int(attrs['duration'])
        call_date = int(attrs['date']) / 1000  # unix epoch
        call_type = int(attrs['type'])
        call_incoming = call_type == 1
        call_outgoing = call_type == 2
        call_missed = call_type == 3
        output = "Phonecall "
        skip = False
        if call_incoming:
            output += "from "
            if self._ignore_incoming:
                skip = True
        elif call_outgoing:
            output += "to "
            if self._ignore_outgoing:
                skip = True
        elif call_missed:
            output += "missed "
            if self._ignore_missed:
                skip = True
        else:
            # was Exception("Invalid Phonecall Type: %d", call_type):
            # the placeholder was never expanded; format explicitly
            raise Exception("Invalid Phonecall Type: %d" % call_type)
        if call_number != "-1":
            output += call_number
        else:
            output += "Unknown Number"
        output += " Duration: %d sec" % call_duration
        if call_duration < self._minimum_duration:
            skip = True
        timestamp = OrgFormat.datetime(time.gmtime(call_date))
        data_for_hashing = output + timestamp
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        if not skip:
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           properties=properties)
def startElement(self, name, attrs):
    """
    Write an entry to the org file for every <call> tag.
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "call":
        call_number = attrs['number']
        call_duration = int(attrs['duration'])
        call_date = int(attrs['date']) / 1000  # unix epoch
        call_type = int(attrs['type'])
        call_incoming = call_type == 1
        call_outgoing = call_type == 2
        call_missed = call_type == 3
        output = "Phonecall "
        skip = False
        if call_incoming:
            output += "from "
            if self._ignore_incoming:
                skip = True
        elif call_outgoing:
            output += "to "
            if self._ignore_outgoing:
                skip = True
        elif call_missed:
            output += "missed "
            if self._ignore_missed:
                skip = True
        else:
            # was Exception("Invalid Phonecall Type: %d", call_type):
            # the placeholder was never expanded; format explicitly
            raise Exception("Invalid Phonecall Type: %d" % call_type)
        if call_number != "-1":
            output += call_number
        else:
            output += "Unknown Number"
        output += " Duration: %d sec" % call_duration
        if call_duration < self._minimum_duration:
            skip = True
        timestamp = OrgFormat.datetime(time.gmtime(call_date))
        data_for_hashing = output + timestamp
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        if not skip:
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           properties=properties)
def __set_author_timestamp(self, line):
    """
    Extract the date + time and the author from a line like:
    author Forename Lastname <mail> 1234567890 +0000
    @param line
    """
    self.__empty = False
    # the trailing 16 characters hold "1234567890 +0000"
    epoch_and_tz = line[-16:]
    seconds_since_epoch = float(epoch_and_tz[:10])
    # the timezone part (epoch_and_tz[11:]) is currently unused
    self.__timestamp = OrgFormat.datetime(
        time.localtime(seconds_since_epoch))
    self.__author = line[7:line.find("<")].strip()
def __parse_and_write_file(self, file, link):
    """
    Parse the date+time from the file name and write the entry to the
    output file.

    @param file: filename
    @param link: path
    """
    # match once and reuse (was matched twice)
    timestamp_match = TIMESTAMP_REGEX.match(file)
    if timestamp_match:
        # the filename carries a full timestamp: take hours, minutes
        # and optionally seconds from it
        orgdate = OrgFormat.strdatetimeiso8601(timestamp_match.group())
        logging.debug("found timestamp: %s", orgdate)
    else:
        datestamp = DATESTAMP_REGEX.match(file).group()
        orgdate = OrgFormat.strdate(datestamp)
        orgdate_time_tupel = OrgFormat.datetupeliso8601(datestamp)
        file_datetime = time.localtime(os.path.getmtime(link))
        # if the file's mtime matches year, month and day, its time
        # part is more precise than the bare datestamp: use it
        if file_datetime.tm_year == orgdate_time_tupel.tm_year and \
           file_datetime.tm_mon == orgdate_time_tupel.tm_mon and \
           file_datetime.tm_mday == orgdate_time_tupel.tm_mday:
            logging.debug("found a time in file.setting %s-->%s",
                          orgdate, OrgFormat.date(file_datetime, True))
            orgdate = OrgFormat.date(file_datetime, True)
    # write entry to org file
    output = OrgFormat.link(link=link, description=file)
    # we need optional data for hashing due it can be, that more
    # than one file have the same timestamp
    properties = OrgProperties(data_for_hashing=output)
    self._writer.write_org_subitem(timestamp=orgdate,
                                   output=output,
                                   properties=properties)
def _main(self): APP_KEY = self._get_config_option("APP_KEY") APP_SECRET = self._get_config_option("APP_SECRET") OAUTH_TOKEN = self._get_config_option("OAUTH_TOKEN") OAUTH_TOKEN_SECRET = self._get_config_option("OAUTH_TOKEN_SECRET") screen_name = self._get_config_option("screen_name") count = self._get_config_option("count") twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET) try: home_timeline = twitter.get_home_timeline(screenname=screen_name, count=count) except TwythonError as e: logging.error(e) sys.exit(1) for tweet in home_timeline: # strptime doesn't support timezone info, so we are using dateutils. date_object = parser.parse(tweet['created_at']) timestamp = OrgFormat.datetime(date_object) try: # Data is already Unicode, so don't try to re-encode it. output = tweet['text'] except: logging.error(sys.exc_info()[0]) print "Error: ", sys.exc_info()[0] data_for_hashing = output + timestamp + output properties = OrgProperties(data_for_hashing=data_for_hashing) properties.add("name", tweet['user']['name']) properties.add("twitter_id", tweet['id']) properties.add("contributors", tweet['contributors']) properties.add("truncated", tweet['truncated']) properties.add("in_reply_to_status_id", tweet['in_reply_to_status_id']) properties.add("favorite_count", tweet['favorite_count']) properties.add("source", tweet['source']) properties.add("retweeted", tweet['retweeted']) properties.add("coordinates", tweet['coordinates']) properties.add("entities", tweet['entities']) self._writer.write_org_subitem(timestamp=timestamp, output=output, properties=properties)
def __write_file(self, file, link, timestamp):
    """
    Write one entry to the org file; spaces in file names are kept
    as-is (no replacement).
    """
    output = OrgFormat.link(link=link, description=file,
                            replacespaces=False)
    # more than one file may share the same timestamp, so the link
    # itself serves as additional hashing data
    self._writer.write_org_subitem(
        timestamp=timestamp,
        output=output,
        properties=OrgProperties(data_for_hashing=output))
def __set_author_timestamp(self, line):
    """
    Extract the date + time and the author from a line like:
    author Forename Lastname <mail> 1234567890 +0000
    @param line
    """
    self.__empty = False
    tail = line[-16:]  # "1234567890 +0000"
    # tail[11:] would be the timezone offset; it is not used here
    self.__timestamp = OrgFormat.datetime(
        time.localtime(float(tail[:10])))
    self.__author = line[7:line.find("<")].strip()
def __parse_file(self, file, link):
    """
    Parse the date+time from the file name and write the entry to the
    output file.

    @param file: filename
    @param link: path
    """
    # match once and reuse (was matched twice)
    timestamp_match = TIMESTAMP_REGEX.match(file)
    if timestamp_match:
        # the filename carries a full timestamp: take hours, minutes
        # and optionally seconds from it
        orgdate = OrgFormat.strdatetimeiso8601(timestamp_match.group())
        logging.debug("found timestamp: %s", orgdate)
    else:
        datestamp = DATESTAMP_REGEX.match(file).group()
        orgdate = OrgFormat.strdate(datestamp)
        orgdate_time_tupel = OrgFormat.datetupeliso8601(datestamp)
        # was "!= True": use plain boolean logic
        if not self._args.skip_filetime_extraction:
            if os.path.exists(link):
                file_datetime = time.localtime(os.path.getmtime(link))
                # if the file's mtime matches year, month and day, its
                # time part is more precise than the bare datestamp
                if file_datetime.tm_year == orgdate_time_tupel.tm_year and \
                   file_datetime.tm_mon == orgdate_time_tupel.tm_mon and \
                   file_datetime.tm_mday == orgdate_time_tupel.tm_mday:
                    logging.debug("found a time in file.setting %s-->%s",
                                  orgdate,
                                  OrgFormat.date(file_datetime, True))
                    orgdate = OrgFormat.date(file_datetime, True)
            else:
                logging.debug("item [%s] not found and thus could not "
                              "determine mtime" % link)
    self.__write_file(file, link, orgdate)
def __write(self):
    """
    Write the collected attributes to the writer (one org_sub_item).
    """
    logging.debug("msg:%s", self.__msg)
    self.__msg = self.__msg.splitlines()
    subject = ""
    notes = ""
    # idea: look for the first -nonempty- message line and use it as
    # the subject; everything after it becomes the notes
    if len(self.__msg) > 0:
        start_notes = 0
        for i in range(len(self.__msg)):
            if self.__msg[i].strip() != "":
                subject = self.__msg[i].strip()
                start_notes = i + 1
                break
        if len(self.__msg) > start_notes:
            for n in self.__msg[start_notes:]:
                if n != "":
                    notes += n + "\n"
    output = "%s (r%d): %s" % (self.__author, self.__rev, subject)
    properties = OrgProperties(data_for_hashing=self.__author + subject)
    timestamp = OrgFormat.datetime(
        OrgFormat.datetupelutctimestamp(self.__date))
    properties.add("REVISION", self.__rev)
    # write only when no author filter is set or the author matches
    # (was "== None": identity check is the correct idiom for None)
    if self.__grepauthor is None or \
            (self.__author.strip() == self.__grepauthor.strip()):
        self._writer.write_org_subitem(output=output,
                                       timestamp=timestamp,
                                       note=notes,
                                       properties=properties)
def startElement(self, name, attrs):
    """
    Write an entry to the org file for every <sms> tag.
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "sms":
        sms_subject = attrs['subject']
        sms_date = int(attrs['date']) / 1000  # unix epoch
        sms_body = attrs['body']
        sms_address = attrs['address']
        sms_type_incoming = int(attrs['type']) == 1
        skip = False
        if sms_type_incoming:
            output = "SMS from "
            if self._ignore_incoming:
                skip = True
        else:
            output = "SMS to "
            if self._ignore_outgoing:
                skip = True
        if not skip:
            output += sms_address + ": "
            if sms_subject != "null":
                # in case of MMS we have a subject
                output += sms_subject
                notes = sms_body
            else:
                output += sms_body
                notes = ""
            timestamp = OrgFormat.datetime(time.gmtime(sms_date))
            data_for_hashing = output + timestamp + notes
            properties = OrgProperties(data_for_hashing=data_for_hashing)
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           note=notes,
                                           properties=properties)
def _handle_recent_tracks(self, tracks):
    """parse recent tracks"""
    logging.debug(tracks)
    for track in tracks:
        played_at = datetime.datetime.fromtimestamp(int(track.timestamp))
        output = self._args.output_format.format(title=track.track.title,
                                                 artist=track.track.artist,
                                                 album=track.album)
        properties = OrgProperties(data_for_hashing=track.timestamp)
        properties.add('ARTIST', track.track.artist)
        properties.add('ALBUM', track.album)
        self._writer.write_org_subitem(
            timestamp=OrgFormat.datetime(played_at),
            output=output,
            properties=properties)
def _main(self):
    """
    Gets automatically called from Memacs class; parses the csv file
    and writes one org sub-item per row.
    """
    with open(self._args.csvfile, 'rb') as f:
        for row in UnicodeCsvReader(f, encoding=self._args.encoding,
                                    delimiter=self._args.delimiter):
            logging.debug(row)
            try:
                tstamp = time.strptime(row[self._args.timestamp_index],
                                       self._args.timestamp_format)
            except ValueError as e:
                logging.error("timestamp-format does not match: %s", e)
                sys.exit(1)
            except IndexError as e:
                # was logging.error("...delimiter?", e): the message had
                # no placeholder, so the argument was never rendered
                logging.error("did you specify the right delimiter? %s",
                              e)
                sys.exit(1)
            timestamp = OrgFormat.datetime(tstamp)
            output = []
            for i in self._args.output_indices:
                output.append(row[i])
            output = " ".join(output)
            data_for_hashing = "".join(row)
            properties = OrgProperties(
                data_for_hashing=data_for_hashing)
            self._writer.write_org_subitem(
                timestamp=timestamp,
                output=output,
                properties=properties,
            )
def _main(self):
    """
    Gets automatically called from Memacs class; parses the csv file
    and writes one org sub-item per row.
    """
    with open(self._args.csvfile, 'rb') as f:
        for row in UnicodeCsvReader(f, encoding=self._args.encoding,
                                    delimiter=self._args.delimiter):
            logging.debug(row)
            try:
                tstamp = time.strptime(row[self._args.timestamp_index],
                                       self._args.timestamp_format)
            except ValueError as e:
                logging.error("timestamp-format does not match: %s", e)
                sys.exit(1)
            except IndexError as e:
                # was logging.error("...delimiter?", e): the message had
                # no placeholder, so the argument was never rendered
                logging.error("did you specify the right delimiter? %s",
                              e)
                sys.exit(1)
            timestamp = OrgFormat.datetime(tstamp)
            output = []
            for i in self._args.output_indices:
                output.append(row[i])
            output = " ".join(output)
            data_for_hashing = "".join(row)
            properties = OrgProperties(
                data_for_hashing=data_for_hashing)
            self._writer.write_org_subitem(timestamp=timestamp,
                                           output=output,
                                           properties=properties,
                                           )
def _handle_battery(self, bat):
    """
    handle a single battery, e.g. BAT0
    """
    # power draw in watts: microamps * microvolts / 1e12
    consumption = float(bat.current_now / 1000000.0 *
                        bat.voltage_now / 1000000.0)
    timestamp = OrgFormat.datetime(datetime.datetime.now())
    output = self._args.output_format.format(battery=bat)
    properties = OrgProperties(data_for_hashing=timestamp)
    for key, value in (("CYCLE_COUNT", bat.cycle_count),
                       ("CAPACITY", '%s%%' % bat.capacity),
                       ("STATUS", bat.status.lower())):
        properties.add(key, value)
    if consumption:
        properties.add("CONSUMPTION", '%.1f W' % consumption)
    self._writer.write_org_subitem(timestamp=timestamp,
                                   output=output,
                                   properties=properties)
def _generateOrgentry(self, e_time, e_name, e_batt, e_uptime,
                      e_last_opposite_occurrence, e_last_occurrence):
    """
    takes the data from the parameters and generates an Org-mode entry.

    @param e_time: time-stamp of the entry
    @param e_name: entry name/description
    @param e_batt: battery level
    @param e_uptime: uptime in seconds
    @param e_last_opposite_occurrence: time-stamp of previous opposite
           occurrence (if not False)
    @param e_last_occurrence: time-stamp of previous occurrence
    @return: (org entry string, ignore_occurrence flag)
    """
    assert e_time.__class__ == datetime.datetime
    assert e_name.__class__ == unicode
    assert e_batt.__class__ == str
    assert e_uptime.__class__ == str
    assert (e_last_opposite_occurrence.__class__ == datetime.datetime or
            not e_last_opposite_occurrence)
    assert (e_last_occurrence.__class__ == datetime.datetime or
            not e_last_occurrence)
    last_info = u''
    in_between_hms = u''
    in_between_s = u''
    ignore_occurrence = False
    if e_last_opposite_occurrence:
        # seconds between this event and its opposite; .days covers
        # gaps longer than one day
        in_between_s = (e_time - e_last_opposite_occurrence).seconds + \
            (e_time - e_last_opposite_occurrence).days * 3600 * 24
        in_between_hms = unicode(OrgFormat.get_hms_from_sec(in_between_s))
        # phrase the gap relative to the event type
        if e_name == u'boot':
            last_info = u' (off for '
        elif e_name == u'shutdown':
            last_info = u' (on for '
        elif e_name.endswith(u'-end'):
            last_info = u' (' + e_name[0:-4].replace('wifi-', '') + u' for '
        else:
            last_info = u' (not ' + e_name.replace('wifi-', '') + u' for '
        last_info += unicode(
            OrgFormat.get_dhms_from_sec(in_between_s)) + u')'
    if (e_name == u'boot') and \
            (e_last_occurrence and e_last_opposite_occurrence) and \
            (e_last_occurrence > e_last_opposite_occurrence):
        ## last boot is more recent than last shutdown -> crash has happened
        last_info = u' after crash'
        in_between_hms = u''
        in_between_s = u''
        ignore_occurrence = True
    properties = OrgProperties()
    properties.add("IN-BETWEEN", in_between_hms)
    properties.add("IN-BETWEEN-S", unicode(in_between_s))
    properties.add("BATT-LEVEL", e_batt)
    properties.add("UPTIME", OrgFormat.get_hms_from_sec(int(e_uptime)))
    properties.add("UPTIME-S", e_uptime)
    self._writer.write_org_subitem(
        timestamp=e_time.strftime('<%Y-%m-%d %a %H:%M>'),
        output=e_name + last_info,
        properties=properties)
    ## the programmer recommends you to read "memacs/tests/simplephonelogs_test.py"
    ## test_generateOrgentry_* for less cryptic examples on how this looks:
    return u'** ' + e_time.strftime('<%Y-%m-%d %a %H:%M>') + u' ' + e_name + last_info + \
        u'\n:PROPERTIES:\n:IN-BETWEEN: ' + in_between_hms + \
        u'\n:IN-BETWEEN-S: ' + unicode(in_between_s) + \
        u'\n:BATT-LEVEL: ' + e_batt + \
        u'\n:UPTIME: ' + unicode(OrgFormat.get_hms_from_sec(int(e_uptime))) + \
        u'\n:UPTIME-S: ' + unicode(e_uptime) + u'\n:END:\n', ignore_occurrence
def _generateOrgentry(self, e_time, e_name, e_batt, e_uptime,
                      e_last_opposite_occurrence, e_last_occurrence,
                      prev_office_sum, prev_office_first_begin):
    """
    takes the data from the parameters and generates an Org-mode entry.

    @param e_time: time-stamp of the entry
    @param e_name: entry name/description
    @param e_batt: battery level
    @param e_uptime: uptime in seconds
    @param e_last_opposite_occurrence: time-stamp of previous opposite
           occurrence (if not False)
    @param e_last_occurrence: time-stamp of previous occurrence
    @param prev_office_sum: holds the sum of all previous working
           duration today
    @param prev_office_first_begin: holds the first time-stamp of
           wifi-office for today
    @return: (org entry string, ignore_occurrence, office_sum,
              office_first_begin)
    """
    assert e_time.__class__ == datetime.datetime
    assert e_name.__class__ == unicode
    assert e_batt.__class__ == unicode
    assert e_uptime.__class__ == unicode
    assert (e_last_opposite_occurrence.__class__ == datetime.datetime or
            not e_last_opposite_occurrence)
    assert (e_last_occurrence.__class__ == datetime.datetime or
            not e_last_occurrence)
    last_info = u''
    in_between_hms = u''
    in_between_s = u''
    ignore_occurrence = False
    ## convert parameters to be writable:
    office_sum = prev_office_sum
    office_first_begin = prev_office_first_begin
    if e_last_opposite_occurrence:
        # seconds between this event and its opposite; .days covers
        # gaps longer than one day
        in_between_s = (e_time - e_last_opposite_occurrence).seconds + \
            (e_time - e_last_opposite_occurrence).days * 3600 * 24
        in_between_hms = unicode(OrgFormat.get_hms_from_sec(in_between_s))
        # phrase the gap relative to the event type
        if e_name == u'boot':
            last_info = u' (off for '
        elif e_name == u'shutdown':
            last_info = u' (on for '
        elif e_name.endswith(u'-end'):
            last_info = u' (' + e_name[0:-4].replace('wifi-', '') + u' for '
        else:
            last_info = u' (not ' + e_name.replace('wifi-', '') + u' for '
        ## handle special case: office hours
        additional_paren_string = ""
        if e_name == 'wifi-office-end':
            office_total = None
            ## calculate office_sum and office_total
            if not office_sum:
                # first office period of the day
                office_sum = (e_time -
                              e_last_opposite_occurrence).seconds
                office_total = office_sum
            else:
                assert (office_first_begin)
                assert (office_sum)
                office_sum = office_sum + (
                    e_time - e_last_opposite_occurrence).seconds
                # total = wall-clock span since the first office begin
                office_total = int(
                    time.mktime(e_time.timetuple()) -
                    time.mktime(office_first_begin.timetuple()))
            assert (type(office_total) == int)
            assert (type(office_sum) == int)
            assert (type(in_between_s) == int)
            ## come up with the additional office-hours string:
            additional_paren_string = u'; today ' + OrgFormat.get_hms_from_sec(office_sum) + \
                '; today total ' + OrgFormat.get_hms_from_sec(office_total)
        if additional_paren_string:
            last_info += unicode(OrgFormat.get_dhms_from_sec(
                in_between_s)) + additional_paren_string + u')'
        else:
            last_info += unicode(
                OrgFormat.get_dhms_from_sec(in_between_s)) + u')'
    ## handle special case: office hours
    if e_name == 'wifi-office':
        if not office_sum or not office_first_begin:
            ## new day
            office_first_begin = e_time
    ## handle special case: boot without previous shutdown = crash
    if (e_name == u'boot') and \
            (e_last_occurrence and e_last_opposite_occurrence) and \
            (e_last_occurrence > e_last_opposite_occurrence):
        ## last boot is more recent than last shutdown -> crash has happened
        last_info = u' after crash'
        in_between_hms = u''
        in_between_s = u''
        ignore_occurrence = True
    properties = OrgProperties()
    properties.add("IN-BETWEEN", in_between_hms)
    properties.add("IN-BETWEEN-S", unicode(in_between_s))
    properties.add("BATT-LEVEL", e_batt)
    properties.add("UPTIME", OrgFormat.get_hms_from_sec(int(e_uptime)))
    properties.add("UPTIME-S", e_uptime)
    self._writer.write_org_subitem(timestamp=e_time.strftime('<%Y-%m-%d %a %H:%M>'),
                                   output=e_name + last_info,
                                   properties=properties)
    return u'** ' + e_time.strftime('<%Y-%m-%d %a %H:%M>') + u' ' + e_name + last_info + \
        u'\n:PROPERTIES:\n:IN-BETWEEN: ' + in_between_hms + \
        u'\n:IN-BETWEEN-S: ' + unicode(in_between_s) + \
        u'\n:BATT-LEVEL: ' + e_batt + \
        u'\n:UPTIME: ' + unicode(OrgFormat.get_hms_from_sec(int(e_uptime))) + \
        u'\n:UPTIME-S: ' + unicode(e_uptime) + u'\n:END:\n', \
        ignore_occurrence, office_sum, office_first_begin
def _generateOrgentry(self, e_time, e_name, e_batt, e_uptime,
                      e_last_opposite_occurrence, e_last_occurrence,
                      prev_office_sum, prev_office_first_begin):
    """
    takes the data from the parameters and generates an Org-mode entry.

    @param e_time: time-stamp of the entry
    @param e_name: entry name/description
    @param e_batt: battery level
    @param e_uptime: uptime in seconds
    @param e_last_opposite_occurrence: time-stamp of previous opposite
           occurrence (if not False)
    @param e_last_occurrence: time-stamp of previous occurrence
    @param prev_office_sum: holds the sum of all previous working
           duration today
    @param prev_office_first_begin: holds the first time-stamp of
           wifi-office for today
    @return: tuple of (rendered Org-mode entry string,
             ignore_occurrence flag, updated office_sum,
             updated office_first_begin)
    """
    assert e_time.__class__ == datetime.datetime
    assert e_name.__class__ == unicode
    assert e_batt.__class__ == unicode
    assert e_uptime.__class__ == unicode
    assert (e_last_opposite_occurrence.__class__ == datetime.datetime or
            not e_last_opposite_occurrence)
    assert (e_last_occurrence.__class__ == datetime.datetime or
            not e_last_occurrence)

    last_info = u''
    in_between_hms = u''
    in_between_s = u''
    ignore_occurrence = False

    ## convert parameters to be writable:
    office_sum = prev_office_sum
    office_first_begin = prev_office_first_begin

    if e_last_opposite_occurrence:

        ## total seconds since the opposite event; .seconds alone ignores
        ## whole days, hence the explicit days * 3600 * 24 term
        in_between_s = (e_time - e_last_opposite_occurrence).seconds + \
            (e_time - e_last_opposite_occurrence).days * 3600 * 24
        in_between_hms = unicode(OrgFormat.get_hms_from_sec(in_between_s))

        ## choose the parenthesised description depending on event type
        if e_name == u'boot':
            last_info = u' (off for '
        elif e_name == u'shutdown':
            last_info = u' (on for '
        elif e_name.endswith(u'-end'):
            last_info = u' (' + e_name[0:-4].replace('wifi-', '') + u' for '
        else:
            last_info = u' (not ' + e_name.replace('wifi-', '') + u' for '

        ## handle special case: office hours
        additional_paren_string = ""
        if e_name == 'wifi-office-end':
            office_total = None
            ## calculate office_sum and office_total
            if not office_sum:
                ## first office session of the day
                office_sum = (e_time - e_last_opposite_occurrence).seconds
                office_total = office_sum
            else:
                assert (office_first_begin)
                assert (office_sum)
                ## office_sum: accumulated presence; office_total: wall-clock
                ## span from first begin until now
                office_sum = office_sum + (
                    e_time - e_last_opposite_occurrence).seconds
                office_total = int(
                    time.mktime(e_time.timetuple()) -
                    time.mktime(office_first_begin.timetuple()))

            assert (type(office_total) == int)
            assert (type(office_sum) == int)
            assert (type(in_between_s) == int)

            ## come up with the additional office-hours string:
            additional_paren_string = u'; today ' + OrgFormat.get_hms_from_sec(office_sum) + \
                '; today total ' + OrgFormat.get_hms_from_sec(office_total)

        if additional_paren_string:
            last_info += unicode(OrgFormat.get_dhms_from_sec(
                in_between_s)) + additional_paren_string + u')'
        else:
            last_info += unicode(
                OrgFormat.get_dhms_from_sec(in_between_s)) + u')'

    ## handle special case: office hours
    if e_name == 'wifi-office':
        if not office_sum or not office_first_begin:
            ## new day
            office_first_begin = e_time

    ## handle special case: boot without previous shutdown = crash
    if (e_name == u'boot') and \
       (e_last_occurrence and e_last_opposite_occurrence) and \
       (e_last_occurrence > e_last_opposite_occurrence):
        ## last boot is more recent than last shutdown -> crash has happened
        last_info = u' after crash'
        in_between_hms = u''
        in_between_s = u''
        ignore_occurrence = True

    properties = OrgProperties()
    properties.add("IN-BETWEEN", in_between_hms)
    properties.add("IN-BETWEEN-S", unicode(in_between_s))
    properties.add("BATT-LEVEL", e_batt)
    properties.add("UPTIME", OrgFormat.get_hms_from_sec(int(e_uptime)))
    properties.add("UPTIME-S", e_uptime)
    self._writer.write_org_subitem(
        timestamp=e_time.strftime('<%Y-%m-%d %a %H:%M>'),
        output=e_name + last_info,
        properties=properties)

    ## NOTE(review): the returned string duplicates what was just written
    ## via write_org_subitem — presumably consumed by tests; confirm callers
    return u'** ' + e_time.strftime('<%Y-%m-%d %a %H:%M>') + u' ' + e_name + last_info + \
        u'\n:PROPERTIES:\n:IN-BETWEEN: ' + in_between_hms + \
        u'\n:IN-BETWEEN-S: ' + unicode(in_between_s) + \
        u'\n:BATT-LEVEL: ' + e_batt + \
        u'\n:UPTIME: ' + unicode(OrgFormat.get_hms_from_sec(int(e_uptime))) + \
        u'\n:UPTIME-S: ' + unicode(e_uptime) + u'\n:END:\n', \
        ignore_occurrence, office_sum, office_first_begin
def startElement(self, name, attrs):
    """
    SAX callback: at every <call> tag, build an Org-mode entry for the
    phonecall and hand it to the writer (unless filtered out).

    @param name: XML tag name
    @param attrs: XML attributes of the tag (number, duration, date,
           type, optionally contact_name)
    @raise Exception: on an unknown call type
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)

    if name == "call":
        call_number = attrs['number']
        call_duration = int(attrs['duration'])
        call_date = int(attrs['date']) / 1000  # unix epoch
        call_type = int(attrs['type'])

        call_incoming = call_type == 1
        call_outgoing = call_type == 2
        call_missed = call_type == 3
        call_cancelled = call_type == 5

        call_name = call_number
        if 'contact_name' in attrs:
            ## NOTE: older version of backup app did not insert contact_name into XML
            call_name = attrs['contact_name']

        output = "Phonecall "
        skip = False

        if call_incoming:
            output += "from "
            if self._ignore_incoming:
                skip = True
        elif call_outgoing:
            output += "to "
            if self._ignore_outgoing:
                skip = True
        elif call_missed:
            output += "missed "
            if self._ignore_missed:
                skip = True
        elif call_cancelled:
            output += "cancelled "
            if self._ignore_cancelled:
                skip = True
        else:
            ## BUGFIX: message is now actually formatted; previously the
            ## format string and call_type were passed as two separate
            ## (unformatted) Exception arguments
            raise Exception("Invalid Phonecall Type: %d" % call_type)

        if call_number != "-1":
            call_number_string = call_number
        else:
            call_number_string = "Unknown Number"

        if call_name != "(Unknown)":
            name_string = '[[contact:' + call_name + '][' + call_name + ']]'
        else:
            name_string = "Unknown"
        output += name_string

        if call_duration < self._minimum_duration:
            skip = True

        timestamp = OrgFormat.datetime(time.gmtime(call_date))
        end_datetimestamp = datetime.datetime.utcfromtimestamp(
            call_date + call_duration)
        logging.debug(
            "timestamp[%s] duration[%s] end[%s]" %
            (str(timestamp), str(call_duration), str(end_datetimestamp)))
        end_timestamp_string = OrgFormat.datetime(end_datetimestamp)
        logging.debug("end_time [%s]" % end_timestamp_string)

        data_for_hashing = output + timestamp
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        properties.add("NUMBER", call_number_string)
        properties.add("DURATION", call_duration)
        properties.add("NAME", call_name)

        if not skip:
            self._writer.write_org_subitem(
                output=output,
                timestamp=timestamp + '-' + end_timestamp_string,
                properties=properties)
def __read_store_and_write(self, store_file): """ Reads needed infos of .tagstore/store.tgs, parse the infos, write to outputfile @param store_file: string contains the input from store.tgs """ parser = SafeConfigParser() parser.read(store_file) sections = parser.sections() options = parser.options(sections[1]) for i in range(0, len(options), 3): filename = options[i].split('\\') filename = filename[0] tags = parser.get(sections[1], options[i]) timestamp = parser.get(sections[1], options[i + 1]) category = parser.get(sections[1], options[i + 2]) tags = tags.replace('"', '') tags = tags.replace(' ', '_') tags = tags.replace(':', '_') category = category.replace('"', '') category = category.replace(' ', '_') category = category.replace(':', '_') tags = tags.split(",") category = category.split(",") timestamp = timestamp[0:16] tagstoring = [] tagstoring.extend(tags) tagstoring.extend(category) x = 0 while x < len(tagstoring): if tagstoring[x] == '': tagstoring.pop(x) else: y = x + 1 while y < len(tagstoring): if tagstoring[x] == tagstoring[y]: tagstoring.pop(y) else: y = y + 1 x = x + 1 unformatted_link = self.__path + "/" + filename short_link = OrgFormat.link(unformatted_link, "link") link = ":FILEPATH: " + short_link timestamp = OrgFormat.strdatetime(timestamp) output = filename.decode("utf-8", "replace") data_for_hashing = output.decode("utf-8", "replace") properties = OrgProperties(data_for_hashing=data_for_hashing) self._writer.write_org_subitem(timestamp=timestamp, output=output, note=link, tags=tagstoring, properties=properties)
def _generateOrgentry(self, e_time, e_name, e_batt, e_uptime,
                      e_last_opposite_occurrence, e_last_occurrence):
    """
    takes the data from the parameters and generates an Org-mode entry.

    @param e_time: time-stamp of the entry
    @param e_name: entry name/description
    @param e_batt: battery level
    @param e_uptime: uptime in seconds
    @param e_last_opposite_occurrence: time-stamp of previous opposite
           occurrence (if not False)
    @param e_last_occurrence: time-stamp of previous occurrence
    @return: tuple of (rendered Org-mode entry string, ignore_occurrence)
    """
    assert e_time.__class__ == datetime.datetime
    assert e_name.__class__ == unicode
    assert e_batt.__class__ == str
    assert e_uptime.__class__ == str
    assert (e_last_opposite_occurrence.__class__ == datetime.datetime or
            not e_last_opposite_occurrence)
    assert (e_last_occurrence.__class__ == datetime.datetime or
            not e_last_occurrence)

    last_info = u''
    in_between_hms = u''
    in_between_s = u''
    ignore_occurrence = False

    if e_last_opposite_occurrence:

        ## total seconds since the opposite event; .seconds alone ignores
        ## whole days, hence the explicit days * 3600 * 24 term
        in_between_s = (e_time - e_last_opposite_occurrence).seconds + \
            (e_time - e_last_opposite_occurrence).days * 3600 * 24
        in_between_hms = unicode(OrgFormat.get_hms_from_sec(in_between_s))

        ## choose the parenthesised description depending on event type
        if e_name == u'boot':
            last_info = u' (off for '
        elif e_name == u'shutdown':
            last_info = u' (on for '
        elif e_name.endswith(u'-end'):
            last_info = u' (' + e_name[0:-4].replace('wifi-','') + u' for '
        else:
            last_info = u' (not ' + e_name.replace('wifi-','') + u' for '
        last_info += unicode(OrgFormat.get_dhms_from_sec(in_between_s)) + u')'

    ## handle special case: boot without previous shutdown = crash
    if (e_name == u'boot') and \
       (e_last_occurrence and e_last_opposite_occurrence) and \
       (e_last_occurrence > e_last_opposite_occurrence):
        ## last boot is more recent than last shutdown -> crash has happened
        last_info = u' after crash'
        in_between_hms = u''
        in_between_s = u''
        ignore_occurrence = True

    properties = OrgProperties()
    properties.add("IN-BETWEEN", in_between_hms)
    properties.add("IN-BETWEEN-S", unicode(in_between_s))
    properties.add("BATT-LEVEL", e_batt)
    properties.add("UPTIME", OrgFormat.get_hms_from_sec(int(e_uptime)))
    properties.add("UPTIME-S", e_uptime)
    self._writer.write_org_subitem(timestamp = e_time.strftime('<%Y-%m-%d %a %H:%M>'),
                                   output = e_name + last_info,
                                   properties = properties)

    ## the programmer recommends you to read "memacs/tests/simplephonelogs_test.py"
    ## test_generateOrgentry_* for less cryptic examples on how this looks:
    return u'** ' + e_time.strftime('<%Y-%m-%d %a %H:%M>') + u' ' + e_name + last_info + \
        u'\n:PROPERTIES:\n:IN-BETWEEN: ' + in_between_hms + \
        u'\n:IN-BETWEEN-S: ' + unicode(in_between_s) + \
        u'\n:BATT-LEVEL: ' + e_batt + \
        u'\n:UPTIME: ' + unicode(OrgFormat.get_hms_from_sec(int(e_uptime))) + \
        u'\n:UPTIME-S: ' + unicode(e_uptime) + u'\n:END:\n', ignore_occurrence
def _generateOrgentry(self, e_time, e_name, e_batt, e_uptime,
                      e_last_opposite_occurrence, e_last_occurrence,
                      prev_office_sum, prev_office_first_begin,
                      office_lunchbreak, battery_percentage_when_booting):
    """
    takes the data from the parameters and generates an Org-mode entry.

    @param e_time: time-stamp of the entry
    @param e_name: entry name/description
    @param e_batt: battery level
    @param e_uptime: uptime in seconds
    @param e_last_opposite_occurrence: time-stamp of previous opposite
           occurrence (if not False)
    @param e_last_occurrence: time-stamp of previous occurrence
    @param prev_office_sum: holds the sum of all previous working
           duration today
    @param prev_office_first_begin: holds the first time-stamp of
           wifi-office for today
    @param office_lunchbreak: array of begin- and end-time-stamp of
           lunch-break (if any)
    @param battery_percentage_when_booting: battery level of previous boot
           (only set if no charge event was in-between)
    @return: tuple of (rendered Org-mode entry string,
             ignore_occurrence, office_sum, office_first_begin,
             office_lunchbreak)
    """
    assert e_time.__class__ == datetime.datetime
    assert e_name.__class__ == unicode
    assert e_batt.__class__ == unicode
    assert e_uptime.__class__ == unicode
    assert (e_last_opposite_occurrence.__class__ == datetime.datetime or
            not e_last_opposite_occurrence)
    assert (e_last_occurrence.__class__ == datetime.datetime or
            not e_last_occurrence)
    assert (not battery_percentage_when_booting or
            battery_percentage_when_booting.__class__ == int)

    last_info = u''
    in_between_hms = u''
    in_between_s = u''
    ignore_occurrence = False

    # convert parameters to be writable:
    office_sum = prev_office_sum
    office_first_begin = prev_office_first_begin

    if e_last_opposite_occurrence:

        # total seconds since the opposite event; .seconds alone ignores
        # whole days, hence the explicit days * 3600 * 24 term
        in_between_s = (e_time - e_last_opposite_occurrence).seconds + \
            (e_time - e_last_opposite_occurrence).days * 3600 * 24
        in_between_hms = unicode(OrgFormat.get_hms_from_sec(in_between_s))

        # choose the parenthesised description depending on event type
        if e_name == u'boot':
            last_info = u' (off for '
        elif e_name == u'shutdown':
            last_info = u' (on for '
        elif e_name.endswith(u'-end'):
            last_info = u' (' + e_name[0:-4].replace('wifi-', '') + u' for '
        else:
            last_info = u' (not ' + e_name.replace('wifi-', '') + u' for '

        # handle special case: office hours
        additional_paren_string = ""
        if e_name == 'wifi-office-end':
            office_total = None
            # calculate office_sum and office_total
            if not office_sum:
                # first office session of the day
                office_sum = (e_time - e_last_opposite_occurrence).seconds
                office_total = office_sum
            else:
                assert (office_first_begin)
                assert (office_sum)
                # office_sum: accumulated presence; office_total:
                # wall-clock span from first begin until now
                office_sum = office_sum + (
                    e_time - e_last_opposite_occurrence).seconds
                office_total = int(
                    time.mktime(e_time.timetuple()) -
                    time.mktime(office_first_begin.timetuple()))

            assert (type(office_total) == int)
            assert (type(office_sum) == int)
            assert (type(in_between_s) == int)

            # come up with the additional office-hours string:
            additional_paren_string = u'; today ' + OrgFormat.get_hms_from_sec(office_sum) + \
                '; today total ' + OrgFormat.get_hms_from_sec(office_total)

        if additional_paren_string:
            last_info += unicode(OrgFormat.get_dhms_from_sec(
                in_between_s)) + additional_paren_string + u')'
        else:
            last_info += unicode(
                OrgFormat.get_dhms_from_sec(in_between_s)) + u')'

    elif e_last_occurrence:
        # no opposite event known: measure distance to the previous
        # occurrence of the same event instead
        in_between_s = (e_time - e_last_occurrence).seconds + \
            (e_time - e_last_occurrence).days * 3600 * 24
        in_between_hms = unicode(OrgFormat.get_hms_from_sec(in_between_s))

    # handle special case: office hours
    if e_name == 'wifi-office':
        if not office_sum or not office_first_begin:
            # new day
            office_first_begin = e_time
        else:
            # check if we've found a lunch-break (first wifi-office between
            # 11:30-13:00 where not office for > 17min)
            if e_time.time() > datetime.time(11, 30) and \
               e_time.time() < datetime.time(13, 00) and \
               e_last_opposite_occurrence:
                if e_last_opposite_occurrence.date() == e_time.date() and \
                   in_between_s > (17 * 60) and in_between_s < (80 * 60):
                    office_lunchbreak = [e_last_opposite_occurrence.time(),
                                         e_time.time()]

    # handle special case: boot without previous shutdown = crash
    if (e_name == u'boot') and \
       (e_last_occurrence and e_last_opposite_occurrence) and \
       (e_last_occurrence > e_last_opposite_occurrence):
        # last boot is more recent than last shutdown -> crash has happened
        last_info = u' after crash'
        in_between_hms = u''
        in_between_s = u''
        ignore_occurrence = True

    properties = OrgProperties()
    properties.add("IN-BETWEEN", in_between_hms)
    properties.add("IN-BETWEEN-S", unicode(in_between_s))
    properties.add("BATT-LEVEL", e_batt)
    properties.add("UPTIME", OrgFormat.get_hms_from_sec(int(e_uptime)))
    properties.add("UPTIME-S", e_uptime)

    # OFFICE-SUMMARY: one org-table row per office day; with a detected
    # lunch-break its real times are used, otherwise 11:30/12:00 are
    # inserted as placeholder lunch times
    if e_name == 'wifi-office-end' and office_lunchbreak:
        properties.add(
            "OFFICE-SUMMARY",
            e_last_opposite_occurrence.strftime('| %Y-%m-%d | %a ') +
            prev_office_first_begin.strftime('| %H:%M ') +
            office_lunchbreak[0].strftime('| %H:%M ') +
            office_lunchbreak[1].strftime('| %H:%M ') +
            e_time.strftime('| %H:%M | | |'))
    elif e_name == 'wifi-office-end' and not office_lunchbreak:
        properties.add(
            "OFFICE-SUMMARY",
            e_last_opposite_occurrence.strftime('| %Y-%m-%d | %a ') +
            prev_office_first_begin.strftime('| %H:%M | 11:30 | 12:00 ') +
            e_time.strftime('| %H:%M | | |'))
    elif e_name == 'shutdown':
        if battery_percentage_when_booting:
            batt_diff_from_boot_to_shutdown = \
                battery_percentage_when_booting - int(e_batt)
            if batt_diff_from_boot_to_shutdown >= 20:
                # hypothetical run-time (in hours; derived from boot to shutdown) of the device for 100% battery capacity
                # Note: battery_percentage_when_booting is set to False when a "charge-start"-event is recognized between boot and shutdown
                # Note: only calculated when at least 20 percent difference of battery level between boot and shutdown
                runtime_extrapolation = 100 * int(
                    e_uptime) / batt_diff_from_boot_to_shutdown / 3600
                properties.add("HOURS_RUNTIME_EXTRAPOLATION",
                               runtime_extrapolation)

    self._writer.write_org_subitem(
        timestamp=e_time.strftime('<%Y-%m-%d %a %H:%M>'),
        output=e_name + last_info,
        properties=properties)

    # NOTE(review): the returned string duplicates what was just written
    # via write_org_subitem — presumably consumed by tests; confirm callers
    return u'** ' + e_time.strftime('<%Y-%m-%d %a %H:%M>') + u' ' + e_name + last_info + \
        u'\n:PROPERTIES:\n:IN-BETWEEN: ' + in_between_hms + \
        u'\n:IN-BETWEEN-S: ' + unicode(in_between_s) + \
        u'\n:BATT-LEVEL: ' + e_batt + \
        u'\n:UPTIME: ' + unicode(OrgFormat.get_hms_from_sec(int(e_uptime))) + \
        u'\n:UPTIME-S: ' + unicode(e_uptime) + u'\n:END:\n', \
        ignore_occurrence, office_sum, office_first_begin, office_lunchbreak
def startElement(self, name, attrs): """ at every <sms> tag write to orgfile """ logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs) htmlparser = HTMLParser.HTMLParser() if name == "sms": sms_subject = attrs['subject'] sms_date = int(attrs['date']) / 1000 # unix epoch sms_body = attrs['body'] sms_address = attrs['address'].strip().replace('-',u'').replace('/',u'').replace(' ',u'').replace('+',u'00') sms_type_incoming = int(attrs['type']) == 1 contact_name = False if 'contact_name' in attrs: ## NOTE: older version of backup app did not insert contact_name into XML contact_name = attrs['contact_name'] else: if self._numberdict: if sms_address in self._numberdict.keys(): contact_name = self._numberdict[sms_address] skip = False if sms_type_incoming == True: output = "SMS from " if self._ignore_incoming: skip = True else: output = "SMS to " if self._ignore_outgoing: skip = True if not skip: name_string = "" if contact_name: name_string = '[[contact:' + contact_name + '][' + contact_name + ']]' else: name_string = "Unknown" output += name_string + ": " ## reverse encoding hack from just before: sms_body = htmlparser.unescape(sms_body.replace(u'EnCoDiNgHaCk42', u'&#')) for emoji in self.EMOJIS.keys(): ## FIXXME: this is a horrible dumb brute-force algorithm. ## In case of bad performance, this can be optimized dramtically sms_body = sms_body.replace(emoji, self.EMOJI_ENCLOSING_CHARACTER + \ self.EMOJIS[emoji] + self.EMOJI_ENCLOSING_CHARACTER).replace(u'\n', u'⏎') if sms_subject != "null": # in case of MMS we have a subject output += sms_subject notes = sms_body else: output += sms_body notes = "" timestamp = OrgFormat.datetime(time.gmtime(sms_date)) data_for_hashing = output + timestamp + notes properties = OrgProperties(data_for_hashing=data_for_hashing) properties.add("NUMBER", sms_address) properties.add("NAME", contact_name) self._writer.write_org_subitem(output=output, timestamp=timestamp, note=notes, properties=properties)
def __write(self):
    """
    write attributes to writer (make an org_sub_item)

    Collects tags, output text, properties, notes and the timestamp from
    the previously parsed attributes, linkifies URLs, and hands
    everything to the writer. Exits the program on a timestamp-parsing
    problem.
    """
    logging.debug("msg:%s", self.__msg)

    # getting tags
    if self.__attrtags:
        tags = self.__attrtags
        if self.__split:
            tags = tags.split(self.__split)
        else:
            tags = tags.split(' ')
        tags = tags[1:]
    elif self.__taging:
        tags = self.__taging
        if self.__split:
            tags = tags.split(self.__split)
        else:
            tags = tags.split(' ')
    else:
        tags = []

    # BUGFIX: filtering into a new list instead of removing elements
    # while iterating (which skips the element after each removal)
    tags = [item for item in tags if item != '']

    # getting output
    if not self.__attroutput:
        output = "%s: %s" % (self.__author, self.__msg)
    else:
        output = self.__attroutput
    # replace each URL in the output by an org-mode short link
    part = output.split(" ")
    output = ""
    for item in part:
        if re.search("http[s]?://", item) != None:
            unformatted_link = item
            short_link = OrgFormat.link(unformatted_link, "link")
            output = output + " " + short_link + ": " + item
        else:
            output = output + " " + item
    output = output[1:]

    # getting properties
    if not self.__attrproperties:
        properties = OrgProperties(data_for_hashing=self.__author \
                                   + self.__msg + self.__date)
    else:
        properties = OrgProperties(data_for_hashing=self.__attrproperties)

    # getting notes
    if self.__attrnote:
        notes = self.__attrnote
    elif self.__notes:
        notes = self.__notes
    else:
        notes = ""
    if notes:
        # replace each URL in the notes by an org-mode short link
        parts = notes.split(" ")
        notes = ""
        for item in parts:
            if re.search("http[s]?://", item) != None:
                unformatted_link = item
                short_link = OrgFormat.link(unformatted_link, "link")
                notes = notes + " " + short_link + ": " + item
            else:
                notes = notes + " " + item
        notes = notes[1:]

    # prepare for most time formats + getting timestamp
    if self.__attrtime:
        # BUGFIX: was "self.attrtime", a non-existing (unmangled)
        # attribute which raised AttributeError
        self.__date = self.__attrtime
    try:
        if (self.__time == 'YYYYMMDD' or
                self.__time == 'YYYY' or
                self.__time == 'YYYYMMDDTHHMMSSZ' or
                self.__time == 'YYYYMMDDTHHMMSST'):
            timestamp = OrgFormat.datetime(
                OrgFormat.datetupelutctimestamp(self.__date))
        elif (self.__time == ('YYYY-MM-DD')):
            timestamp = OrgFormat.datetime(
                OrgFormat.datetupeliso8601(self.__date))
        elif (self.__time == 'YYYY-MM-DDTHH.MM.SS' or
                self.__time == 'YYYY-MM-DDTHH.MM'):
            timestamp = OrgFormat.datetime(
                OrgFormat.datetimetupeliso8601(self.__date))
        elif (self.__time == 'timetuple'):
            time_tupel = time.localtime(time.mktime(
                parsedate(self.__date)))
            timestamp = OrgFormat.datetime(time_tupel)
        else:
            # BUGFIX: previously an unknown time format left `timestamp`
            # unbound and crashed with NameError at the write call below;
            # route it through the existing error path instead
            raise ValueError("unknown time format: %s" % self.__time)
    except Exception:
        # narrowed from a bare except: so SystemExit/KeyboardInterrupt
        # are not swallowed
        logging.debug("Write functione @timestamp timestamp=%s",
                      self.__date)
        logging.error("A timestamp problem occured")
        sys.exit(2)

    self._writer.write_org_subitem(output=output,
                                   timestamp=timestamp,
                                   note=notes,
                                   tags=tags,
                                   properties=properties)
def _main(self): """ get's automatically called from Memacs class """ # do all the stuff # if you need something from config: # attention: foo will be unicode # foo = self._get_config_option("foo") logging.info("foo started") # how to handle config files ? # sample config file: # ---------8<----------- # [memacs-example] # foo = 0 # bar = 1 # --------->8----------- # to read it out, just do following: # foo = self._get_config_option("foo") # bar = self._get_config_option("bar") # use logging.debug() for debug messages # use logging.error() for error messages # use logging.info() instead of print for informing user # # on an fatal error: # use logging.error() and sys.exit(1) timestamp = OrgFormat.datetime(time.gmtime(0)) # note: timestamp has to be a struct_time object # Orgproperties # Option 1: no properties given, specify argument for hashing data properties = OrgProperties("hashing data :ALKJ!@# should be unique") # Option 2: add properties which are all-together unique # properties.add("Category","fun") # properties.add("from","*****@*****.**") # properties.add("body","foo") self._writer.write_org_subitem(timestamp=timestamp, output="foo", properties=properties) # writes following: # ** <1970-01-01 Thu 00:00> foo # :PROPERTIES: # :ID: da39a3ee5e6b4b0d3255bfef95601890afd80709 # :END: notes = "bar notes\nfoo notes" p = OrgProperties(data_for_hashing="read comment below") # if a hash is not unique only with its :PROPERTIES: , then # set data_for_hasing string additional information i.e. the output # , which then makes the hash really unique # # if you *really*, *really* have already a unique id, # then you can call following method: # p.set_id("unique id here") p.add("DESCRIPTION", "foooo") p.add("foo-property", "asdf") tags = [u"tag1", u"tag2"] self._writer.write_org_subitem(timestamp=timestamp, output="bar", note=notes, properties=p, tags=tags)
def _main(self): """ get's automatically called from Memacs class """ # do all the stuff # if you need something from config: # attention: foo will be unicode # foo = self._get_config_option("foo") logging.info("foo started") # how to handle config files ? # sample config file: # ---------8<----------- # [memacs-example] # foo = 0 # bar = 1 # --------->8----------- # to read it out, just do following: # foo = self._get_config_option("foo") # bar = self._get_config_option("bar") # use logging.debug() for debug messages # use logging.error() for error messages # use logging.info() instead of print for informing user # # on an fatal error: # use logging.error() and sys.exit(1) timestamp = OrgFormat.datetime(time.gmtime(0)) # note: timestamp has to be a struct_time object # Orgproperties # Option 1: no properties given, specify argument for hashing data properties = OrgProperties("hashing data :ALKJ!@# should be unique") # Option 2: add properties which are all-together unique # properties.add("Category","fun") # properties.add("from","*****@*****.**") # properties.add("body","foo") self._writer.write_org_subitem(timestamp=timestamp, output="foo", properties=properties) # writes following: #** <1970-01-01 Thu 00:00> foo # :PROPERTIES: # :ID: da39a3ee5e6b4b0d3255bfef95601890afd80709 # :END: notes = "bar notes\nfoo notes" p = OrgProperties(data_for_hashing="read comment below") # if a hash is not unique only with its :PROPERTIES: , then # set data_for_hasing string additional information i.e. the output # , which then makes the hash really unique # # if you *really*, *really* have already a unique id, # then you can call following method: # p.set_id("unique id here") p.add("DESCRIPTION", "foooo") p.add("foo-property", "asdf") tags = [u"tag1", u"tag2"] self._writer.write_org_subitem(timestamp=timestamp, output="bar", note=notes, properties=p, tags=tags)
def startElement(self, name, attrs):
    """
    SAX callback: at every <call> tag, build an Org-mode entry for the
    phonecall and hand it to the writer (unless filtered out).

    @param name: XML tag name
    @param attrs: XML attributes of the tag (number, duration, date,
           type, optionally contact_name)
    @raise Exception: on an unknown call type
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)

    if name == "call":
        call_number = attrs['number']
        call_duration = int(attrs['duration'])
        call_date = int(attrs['date']) / 1000  # unix epoch
        call_type = int(attrs['type'])

        call_incoming = call_type == 1
        call_outgoing = call_type == 2
        call_missed = call_type == 3
        call_cancelled = call_type == 5

        call_name = call_number
        if 'contact_name' in attrs:
            ## NOTE: older version of backup app did not insert contact_name into XML
            call_name = attrs['contact_name']

        output = "Phonecall "
        skip = False

        if call_incoming:
            output += "from "
            if self._ignore_incoming:
                skip = True
        elif call_outgoing:
            output += "to "
            if self._ignore_outgoing:
                skip = True
        elif call_missed:
            output += "missed "
            if self._ignore_missed:
                skip = True
        elif call_cancelled:
            output += "cancelled "
            if self._ignore_cancelled:
                skip = True
        else:
            ## BUGFIX: message is now actually formatted; previously the
            ## format string and call_type were passed as two separate
            ## (unformatted) Exception arguments
            raise Exception("Invalid Phonecall Type: %d" % call_type)

        if call_number != "-1":
            call_number_string = call_number
        else:
            call_number_string = "Unknown Number"

        if call_name != "(Unknown)":
            name_string = '[[contact:' + call_name + '][' + call_name + ']]'
        else:
            name_string = "Unknown"
        output += name_string

        if call_duration < self._minimum_duration:
            skip = True

        timestamp = OrgFormat.datetime(time.gmtime(call_date))
        end_datetimestamp = datetime.datetime.utcfromtimestamp(
            call_date + call_duration)
        logging.debug("timestamp[%s] duration[%s] end[%s]" %
                      (str(timestamp), str(call_duration),
                       str(end_datetimestamp)))
        end_timestamp_string = OrgFormat.datetime(end_datetimestamp)
        logging.debug("end_time [%s]" % end_timestamp_string)

        data_for_hashing = output + timestamp
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        properties.add("NUMBER", call_number_string)
        properties.add("DURATION", call_duration)
        properties.add("NAME", call_name)

        if not skip:
            self._writer.write_org_subitem(
                output=output,
                timestamp=timestamp + '-' + end_timestamp_string,
                properties=properties)