def _process_range(self, start, stop):
    """Write one Org subitem covering a stay between two samples.

    start/stop are dicts with 'time' (epoch seconds) and 'place'
    keys -- assumed from usage here; TODO confirm against caller.
    """
    start_dt = time.localtime(start['time'])
    start_timestamp = OrgFormat.datetime(start_dt)
    stop_dt = time.localtime(stop['time'])
    stop_timestamp = OrgFormat.datetime(stop_dt)
    # org-mode range: <start>-<stop>
    range_timestamp = '{}-{}'.format(start_timestamp, stop_timestamp)
    duration = stop['time'] - start['time']
    # break the duration (seconds) down into days/hours/minutes
    d = divmod(duration, 86400)  # days
    h = divmod(d[1], 3600)  # hours
    m = divmod(h[1], 60)  # minutes
    s = m[1]  # seconds
    # render as H:MM with days folded into the hour count
    duration_string = '{:d}:{:02d}'.format(24 * d[0] + h[0], m[0])
    output = "Stayed at: {} for {}".format(start['place'], duration_string)
    # heading + range makes the entry hash unique
    data_for_hashing = output + range_timestamp
    properties = OrgProperties(data_for_hashing=data_for_hashing)
    properties.add("PLACE", start['place'])
    properties.add("DURATION", duration_string)
    self._writer.write_org_subitem(output=output,
                                   timestamp=range_timestamp,
                                   properties=properties,
                                   tags=[start['place']])
def _handle_url(self, params):
    """Write a single visited URL as an Org subitem.

    params['timestamp'] is in microseconds since 1601-01-01
    (WebKit/Chrome epoch); converted to the Unix epoch below.
    """
    # offset between the Unix epoch (1970) and the WebKit epoch (1601)
    epoch = datetime.datetime(1970, 1, 1)-datetime.datetime(1601, 1, 1)
    url_time = params['timestamp']/1000000-epoch.total_seconds()
    if (url_time > 0):
        timestamp = datetime.datetime.fromtimestamp(int(url_time))
    else:
        # pre-epoch or bogus values fall back to 1970-01-01
        timestamp = datetime.datetime(1970, 1, 1)
    if not self._args.omit_drawer:
        properties = OrgProperties()
        # fall back to the URL when the page has no title
        if (params['title'] == ""):
            params['title'] = params['url']
        properties.add('URL', params['url'])
        properties.add('VISIT_COUNT', params['visit_count'])
    output = ""
    try:
        output = self._args.output_format.decode('utf-8').format(**params)
    except Exception:
        # best-effort formatting: a bad user format string leaves output empty
        pass
    if self._args.omit_drawer:
        self._writer.write_org_subitem(
            timestamp=OrgFormat.datetime(timestamp),
            output=output,
            properties=None)
    else:
        self._writer.write_org_subitem(
            timestamp=OrgFormat.datetime(timestamp),
            output=output,
            properties=properties)
def _handle_url(self, params):
    """Write a single visited URL as an Org subitem.

    params['timestamp'] is in microseconds since the Unix epoch
    -- assumed from the plain division here; TODO confirm.
    """
    timestamp = datetime.datetime.fromtimestamp(
        int(params['timestamp'] / 1000000))
    if not self._args.omit_drawer:
        properties = OrgProperties()
        # fall back to the URL when the page has no title
        if (params['title'] == ""):
            params['title'] = params['url']
        properties.add('URL', params['url'])
        properties.add('VISIT_COUNT', params['visit_count'])
    output = ""
    try:
        output = self._args.output_format.decode('utf-8').format(**params)
    except Exception:
        # best-effort formatting: a bad user format string leaves output empty
        pass
    if self._args.omit_drawer:
        self._writer.write_org_subitem(
            timestamp=OrgFormat.datetime(timestamp),
            output=output,
            properties=None)
    else:
        self._writer.write_org_subitem(
            timestamp=OrgFormat.datetime(timestamp),
            output=output,
            properties=properties)
def _handle_row(self, row):
    """Handle a single row: parse its timestamp field into an
    Org-formatted date or datetime string.

    Exits the program with status 1 when the configured
    timestamp format does not match the row's value.
    """
    try:
        if not self._args.timestamp_format:
            # no explicit format given: assume a unix timestamp
            timestamp = datetime.datetime.fromtimestamp(
                int(row[self._args.timestamp_field]))
        else:
            timestamp = time.strptime(row[self._args.timestamp_field],
                                      self._args.timestamp_format)
        # show time with the timestamp format, but only
        # if it contains at least hours and minutes
        if not self._args.timestamp_format or \
           any(x in self._args.timestamp_format for x in ['%H', '%M']):
            timestamp = OrgFormat.datetime(timestamp)
        else:
            timestamp = OrgFormat.date(timestamp)
    except ValueError as e:
        # `except X as e` replaces the removed-in-Py3 comma syntax
        logging.error("timestamp-format does not match: %s", e)
        sys.exit(1)
def __handle_file(self, photo_file, filename):
    """
    checks if file is an image, try to get exif data and write to org file
    """
    logging.debug("handling file %s", filename)
    # check if file is an image:
    if imghdr.what(filename) is not None:
        # renamed local (was `datetime`) so it no longer shadows the module
        exif_datetime = get_exif_datetime(filename)
        if exif_datetime is None:
            logging.debug("skipping: %s has no EXIF information", filename)
        else:
            try:
                parsed = time.strptime(exif_datetime, "%Y:%m:%d %H:%M:%S")
                timestamp = OrgFormat.datetime(parsed)
                output = OrgFormat.link(filename, photo_file)
                properties = OrgProperties(photo_file + timestamp)
                self._writer.write_org_subitem(timestamp=timestamp,
                                               output=output,
                                               properties=properties)
            except ValueError as e:
                # EXIF date present but malformed -> skip this file
                logging.warning("skipping: Could not parse " +
                                "timestamp for %s : %s", filename, e)
def write_point(self, p):
    """Emit one (reverse-geocoded) track point as an Org subitem."""
    org_time = OrgFormat.datetime(p.time)
    place = self.reverse_geocode(p.latitude, p.longitude)
    heading = self._args.output_format.decode('utf-8').format(**place)

    props = OrgProperties(data_for_hashing=org_time)
    # only record coordinates that are actually set
    for key, value in (('LATITUDE', p.latitude),
                       ('LONGITUDE', p.longitude)):
        if value:
            props.add(key, value)

    point_tags = [p.source.lower()] if p.source else []

    if org_time:
        self._writer.write_org_subitem(timestamp=org_time,
                                       output=heading,
                                       properties=props,
                                       tags=point_tags)
def __handle_file(self, file, rootdir):
    """
    handles a file: emit it with either its mtime (forced mode)
    or the date(time)-stamp embedded in its name
    """
    # don't handle emacs tmp files (file~)
    if file.endswith('~'):
        return
    link = os.path.join(rootdir, file)
    logging.debug(link)
    if self._args.force_filedate_extraction:
        file_datetime = time.localtime(os.path.getmtime(link))
        if self._args.skip_filetime_extraction:
            orgdate = OrgFormat.date(file_datetime)
        else:
            orgdate = OrgFormat.datetime(file_datetime)
        self.__write_file(file, link, orgdate)
    elif DATESTAMP_REGEX.match(file):
        try:
            # the regex cannot reject impossible dates like 2011-14-19;
            # strptime raises TimestampParseException for those
            self.__parse_file(file, link)
        except TimestampParseException as e:
            # include the exception so the log explains why it failed
            # (the old code captured `e` but never used it)
            logging.warning("False date(time) in file: %s (%s)", link, e)
def _handle_message(self, msg):
    """parse a single message row"""
    msg['number'] = '00' + msg['number'].split('@')[0]
    outgoing = bool(msg['type'])
    msg['verb'] = 'to' if outgoing else 'from'
    msg['type'] = 'OUTGOING' if outgoing else 'INCOMING'
    msg['handler'] = self._args.handler

    if msg['text']:
        # emoji handling is optional and applied in sequence
        if self._args.demojize:
            msg['text'] = emoji.demojize(msg['text'])
        if self._args.skip_emoji:
            msg['text'] = re.sub(emoji.get_emoji_regexp(), '', msg['text'])

    when = datetime.datetime.fromtimestamp(msg['timestamp'] / 1000)
    props = OrgProperties(data_for_hashing=json.dumps(msg))
    props.add('NUMBER', msg['number'])
    props.add('TYPE', msg['type'])
    heading = self._args.output_format.decode('utf-8').format(**msg)

    if msg['text'] and not self._is_ignored(msg):
        self._writer.write_org_subitem(timestamp=OrgFormat.datetime(when),
                                       output=heading,
                                       properties=props)
def startElement(self, name, attrs):
    """
    at every <sms> tag write to orgfile
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "sms":
        #sms_subject = attrs['subject']
        sms_date = int(attrs['date']) / 1000  # unix epoch
        sms_body = attrs['body']
        sms_address = attrs['address']
        sms_time = attrs['time']
        sms_service_center = attrs['service_center']
        # type == 1 marks an incoming SMS
        sms_type_incoming = int(attrs['type']) == 1
        contact_name = attrs['name']
        skip = False
        if sms_type_incoming == True:
            output = "SMS from "
            if self._ignore_incoming:
                skip = True
        else:
            output = "SMS to "
            if self._ignore_outgoing:
                skip = True
        if not skip:
            # render a known contact as an Org "contact:" link
            name_string = ""
            if contact_name:
                name_string = '[[contact:' + contact_name + '][' + contact_name + ']]'
            else:
                name_string = "Unknown"
            output += name_string + ": "
            # MMS subject handling kept disabled:
            #if sms_subject != "null":
            #    # in case of MMS we have a subject
            #    output += sms_subject
            #    notes = sms_body
            #else:
            #    output += sms_body
            #    notes = ""
            notes = sms_body
            timestamp = OrgFormat.datetime(time.gmtime(sms_date))
            # heading + timestamp + body makes the entry hash unique
            data_for_hashing = output + timestamp + notes
            properties = OrgProperties(data_for_hashing=data_for_hashing)
            properties.add("NUMBER", sms_address)
            properties.add("NAME", contact_name)
            properties.add("SMS_SERVICE_CENTER", sms_service_center)
            properties.add("TIME", sms_time)
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           note=notes,
                                           properties=properties)
def __get_item_data(self, item):
    """
    gets information out of <item>..</item>

    @return: output, note, properties, tags
             variables for orgwriter.append_org_subitem
    """
    try:
        #logging.debug(item)
        properties = OrgProperties()
        guid = item['id']
        if not guid:
            logging.error("got no id")
        unformatted_link = item['link']
        short_link = OrgFormat.link(unformatted_link, "link")
        # if we found a url in title
        # then append the url in front of subject
        if re.search("http[s]?://", item['title']) is not None:
            output = short_link + ": " + item['title']
        else:
            output = OrgFormat.link(unformatted_link, item['title'])
        note = item['description']
        # converting updated_parsed UTC --> LOCALTIME
        timestamp = OrgFormat.datetime(
            time.localtime(calendar.timegm(item['updated_parsed'])))
        properties.add("guid", guid)
    except KeyError:
        logging.error("input is not a RSS 2.0")
        sys.exit(1)
    tags = []
    # fields handled above must not become generic properties
    dont_parse = [
        'title', 'description', 'updated', 'summary',
        'updated_parsed', 'link', 'links'
    ]
    for i in item:
        logging.debug(i)
        if i not in dont_parse:
            # isinstance() replaces the old `type(x) == T` comparisons
            if isinstance(i, (str, unicode)) and \
               isinstance(item[i], unicode) and item[i] != "":
                if i == "id":
                    i = "guid"
                properties.add(i, item[i])
            else:
                if i == "tags":
                    for tag in item[i]:
                        logging.debug("found tag: %s", tag['term'])
                        tags.append(tag['term'])
    return output, note, properties, tags, timestamp
def startElement(self, name, attrs):
    """
    at every <sms> tag write to orgfile
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "sms":
        sms_subject = attrs['subject']
        sms_date = int(attrs['date']) / 1000  # unix epoch
        sms_body = attrs['body']
        sms_address = attrs['address']
        # type == 1 marks an incoming SMS
        sms_type_incoming = int(attrs['type']) == 1
        contact_name = False
        if 'contact_name' in attrs:
            ## NOTE: older version of backup app did not insert contact_name into XML
            contact_name = attrs['contact_name']
        skip = False
        if sms_type_incoming == True:
            output = "SMS from "
            if self._ignore_incoming:
                skip = True
        else:
            output = "SMS to "
            if self._ignore_outgoing:
                skip = True
        if not skip:
            # render a known contact as an Org "contact:" link
            name_string = ""
            if contact_name:
                name_string = '[[contact:' + contact_name + '][' + contact_name + ']]'
            else:
                name_string = "Unknown"
            output += name_string + ": "
            if sms_subject != "null":
                # in case of MMS we have a subject
                output += sms_subject
                notes = sms_body
            else:
                output += sms_body
                notes = ""
            timestamp = OrgFormat.datetime(time.gmtime(sms_date))
            # heading + timestamp + body makes the entry hash unique
            data_for_hashing = output + timestamp + notes
            properties = OrgProperties(data_for_hashing=data_for_hashing)
            properties.add("NUMBER", sms_address)
            properties.add("NAME", contact_name)
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           note=notes,
                                           properties=properties)
def _main(self):
    """Fetch the authenticated user's home timeline via Twython and
    write each tweet as an Org subitem.

    Exits with status 1 when the Twitter API call fails.
    """
    APP_KEY = self._get_config_option("APP_KEY")
    APP_SECRET = self._get_config_option("APP_SECRET")
    OAUTH_TOKEN = self._get_config_option("OAUTH_TOKEN")
    OAUTH_TOKEN_SECRET = self._get_config_option("OAUTH_TOKEN_SECRET")
    screen_name = self._get_config_option("screen_name")
    count = self._get_config_option("count")
    twitter = Twython(
        APP_KEY,
        APP_SECRET,
        OAUTH_TOKEN,
        OAUTH_TOKEN_SECRET
    )
    try:
        home_timeline = twitter.get_home_timeline(screenname=screen_name,
                                                  count=count)
    except TwythonError as e:
        logging.error(e)
        sys.exit(1)
    for tweet in home_timeline:
        # strptime doesn't support timezone info, so we are using dateutils.
        date_object = parser.parse(tweet['created_at'])
        timestamp = OrgFormat.datetime(date_object)
        # pre-bind so the hash below never hits an unbound name
        output = ""
        try:
            # Data is already Unicode, so don't try to re-encode it.
            output = tweet['text']
        except KeyError as e:
            # was a bare `except:` plus a Python-2 print statement;
            # log the specific failure instead
            logging.error("tweet without text: %s", e)
        # NOTE(review): `output` is folded in twice here — looks odd,
        # but kept to preserve existing entry hashes
        data_for_hashing = output + timestamp + output
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        properties.add("name", tweet['user']['name'])
        properties.add("twitter_id", tweet['id'])
        properties.add("contributors", tweet['contributors'])
        properties.add("truncated", tweet['truncated'])
        properties.add("in_reply_to_status_id", tweet['in_reply_to_status_id'])
        properties.add("favorite_count", tweet['favorite_count'])
        properties.add("source", tweet['source'])
        properties.add("retweeted", tweet['retweeted'])
        properties.add("coordinates", tweet['coordinates'])
        properties.add("entities", tweet['entities'])
        self._writer.write_org_subitem(timestamp=timestamp,
                                       output=output,
                                       properties=properties)
def __getTimestamp(self, time, onlyDate=False):
    """
    converts xml timestamp into org readable timestamp
    Do 6 Nov 21:22:17 2014
    """
    raw = time.strip().encode('utf-8')
    mail_date = datetime.strptime(raw, "%c")
    # keep the original `is False` test: only an explicit False (the
    # default) yields the full datetime form
    formatter = OrgFormat.datetime if onlyDate is False else OrgFormat.date
    return formatter(mail_date)
def __get_item_data(self, item):
    """
    gets information out of <item>..</item>

    @return: output, note, properties, tags
             variables for orgwriter.append_org_subitem
    """
    try:
        #logging.debug(item)
        properties = OrgProperties()
        guid = item['id']
        if not guid:
            logging.error("got no id")
        unformatted_link = item['link']
        short_link = OrgFormat.link(unformatted_link, "link")
        # if we found a url in title
        # then append the url in front of subject
        if re.search("http[s]?://", item['title']) is not None:
            output = short_link + ": " + item['title']
        else:
            output = OrgFormat.link(unformatted_link, item['title'])
        note = item['description']
        # converting updated_parsed UTC --> LOCALTIME
        timestamp = OrgFormat.datetime(
            time.localtime(calendar.timegm(item['updated_parsed'])))
        properties.add("guid", guid)
    except KeyError:
        logging.error("input is not a RSS 2.0")
        sys.exit(1)
    tags = []
    # fields handled above must not become generic properties
    dont_parse = ['title', 'description', 'updated', 'summary',
                  'updated_parsed', 'link', 'links']
    for i in item:
        logging.debug(i)
        if i not in dont_parse:
            # isinstance() replaces the old `type(x) == T` comparisons
            if isinstance(i, (str, unicode)) and \
               isinstance(item[i], unicode) and item[i] != "":
                if i == "id":
                    i = "guid"
                properties.add(i, item[i])
            else:
                if i == "tags":
                    for tag in item[i]:
                        logging.debug("found tag: %s", tag['term'])
                        tags.append(tag['term'])
    return output, note, properties, tags, timestamp
def startElement(self, name, attrs):
    """
    at every <call> write to orgfile
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "call":
        call_number = attrs['number']
        call_duration = int(attrs['duration'])
        call_date = int(attrs['date']) / 1000  # unix epoch
        # backup-app call types: 1=incoming, 2=outgoing, 3=missed
        call_type = int(attrs['type'])
        call_incoming = call_type == 1
        call_outgoing = call_type == 2
        call_missed = call_type == 3
        output = "Phonecall "
        skip = False
        if call_incoming:
            output += "from "
            if self._ignore_incoming:
                skip = True
        elif call_outgoing:
            output += "to "
            if self._ignore_outgoing:
                skip = True
        elif call_missed:
            output += "missed "
            if self._ignore_missed:
                skip = True
        else:
            raise Exception("Invalid Phonecall Type: %d", call_type)
        # "-1" is the backup app's marker for an unknown number
        if call_number != "-1":
            output += call_number
        else:
            output += "Unknown Number"
        output += " Duration: %d sec" % call_duration
        # drop calls shorter than the configured minimum
        if call_duration < self._minimum_duration:
            skip = True
        timestamp = OrgFormat.datetime(time.gmtime(call_date))
        data_for_hashing = output + timestamp
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        if not skip:
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           properties=properties
                                           )
def __getTimestamp(self, time, onlyDate=False):
    """
    converts xml timestamp into org readable timestamp
    Do 6 Nov 21:22:17 2014
    """
    raw = time.strip().encode('utf-8')
    mail_date = datetime.strptime(raw, "%a %d %b %H:%M:%S %Y")
    # keep the original `is False` test: only an explicit False (the
    # default) yields the full datetime form
    formatter = OrgFormat.datetime if onlyDate is False else OrgFormat.date
    return formatter(mail_date)
def startElement(self, name, attrs):
    """
    at every <call> write to orgfile
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "call":
        call_number = attrs['number']
        call_duration = int(attrs['duration'])
        call_date = int(attrs['date']) / 1000  # unix epoch
        # backup-app call types: 1=incoming, 2=outgoing, 3=missed
        call_type = int(attrs['type'])
        call_incoming = call_type == 1
        call_outgoing = call_type == 2
        call_missed = call_type == 3
        output = "Phonecall "
        skip = False
        if call_incoming:
            output += "from "
            if self._ignore_incoming:
                skip = True
        elif call_outgoing:
            output += "to "
            if self._ignore_outgoing:
                skip = True
        elif call_missed:
            output += "missed "
            if self._ignore_missed:
                skip = True
        else:
            raise Exception("Invalid Phonecall Type: %d", call_type)
        # "-1" is the backup app's marker for an unknown number
        if call_number != "-1":
            output += call_number
        else:
            output += "Unknown Number"
        output += " Duration: %d sec" % call_duration
        # drop calls shorter than the configured minimum
        if call_duration < self._minimum_duration:
            skip = True
        timestamp = OrgFormat.datetime(time.gmtime(call_date))
        data_for_hashing = output + timestamp
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        if not skip:
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           properties=properties)
def __set_author_timestamp(self, line):
    """
    extracts the date + time from line:
    author Forename Lastname <mail> 1234567890 +0000
    @param line
    """
    self.__empty = False
    tail = line[-16:]  # "1234567890 +0000"
    epoch_seconds = float(tail[:10])
    # the timezone offset (tail[11:]) is deliberately ignored
    self.__timestamp = OrgFormat.datetime(time.localtime(epoch_seconds))
    self.__author = line[7:line.find("<")].strip()
def _main(self):
    """Fetch the authenticated user's home timeline via Twython and
    write each tweet as an Org subitem.

    Exits with status 1 when the Twitter API call fails.
    """
    APP_KEY = self._get_config_option("APP_KEY")
    APP_SECRET = self._get_config_option("APP_SECRET")
    OAUTH_TOKEN = self._get_config_option("OAUTH_TOKEN")
    OAUTH_TOKEN_SECRET = self._get_config_option("OAUTH_TOKEN_SECRET")
    screen_name = self._get_config_option("screen_name")
    count = self._get_config_option("count")
    twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
    try:
        home_timeline = twitter.get_home_timeline(screenname=screen_name,
                                                  count=count)
    except TwythonError as e:
        logging.error(e)
        sys.exit(1)
    for tweet in home_timeline:
        # strptime doesn't support timezone info, so we are using dateutils.
        date_object = parser.parse(tweet['created_at'])
        timestamp = OrgFormat.datetime(date_object)
        # pre-bind so the hash below never hits an unbound name
        output = ""
        try:
            # Data is already Unicode, so don't try to re-encode it.
            output = tweet['text']
        except KeyError as e:
            # was a bare `except:` plus a Python-2 print statement;
            # log the specific failure instead
            logging.error("tweet without text: %s", e)
        # NOTE(review): `output` is folded in twice here — looks odd,
        # but kept to preserve existing entry hashes
        data_for_hashing = output + timestamp + output
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        properties.add("name", tweet['user']['name'])
        properties.add("twitter_id", tweet['id'])
        properties.add("contributors", tweet['contributors'])
        properties.add("truncated", tweet['truncated'])
        properties.add("in_reply_to_status_id", tweet['in_reply_to_status_id'])
        properties.add("favorite_count", tweet['favorite_count'])
        properties.add("source", tweet['source'])
        properties.add("retweeted", tweet['retweeted'])
        properties.add("coordinates", tweet['coordinates'])
        properties.add("entities", tweet['entities'])
        self._writer.write_org_subitem(timestamp=timestamp,
                                       output=output,
                                       properties=properties)
def __set_author_timestamp(self, line):
    """
    extracts the date + time from line:
    author Forename Lastname <mail> 1234567890 +0000
    @param line
    """
    self.__empty = False
    tail = line[-16:]  # "1234567890 +0000"
    epoch_seconds = float(tail[:10])
    # the timezone offset (tail[11:]) is deliberately ignored
    self.__timestamp = OrgFormat.datetime(time.localtime(epoch_seconds))
    self.__author = line[7:line.find("<")].strip()
def _handle_recent_tracks(self, tracks):
    """parse recent tracks"""
    logging.debug(tracks)
    for played in tracks:
        when = datetime.datetime.fromtimestamp(int(played.timestamp))
        heading = self._args.output_format.format(title=played.track.title,
                                                  artist=played.track.artist,
                                                  album=played.album)
        # the raw scrobble timestamp is unique enough for hashing
        props = OrgProperties(data_for_hashing=played.timestamp)
        props.add('ARTIST', played.track.artist)
        props.add('ALBUM', played.album)
        self._writer.write_org_subitem(
            timestamp=OrgFormat.datetime(when),
            output=heading,
            properties=props)
def startElement(self, name, attrs):
    """
    at every <sms> tag write to orgfile
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name != "sms":
        return
    sms_subject = attrs['subject']
    sms_date = int(attrs['date']) / 1000  # unix epoch
    sms_body = attrs['body']
    sms_address = attrs['address']
    incoming = int(attrs['type']) == 1

    # direction decides both the heading prefix and the ignore flag
    if incoming:
        output = "SMS from "
        skip = self._ignore_incoming
    else:
        output = "SMS to "
        skip = self._ignore_outgoing
    if skip:
        return

    output += sms_address + ": "
    if sms_subject != "null":
        # in case of MMS we have a subject
        output += sms_subject
        notes = sms_body
    else:
        output += sms_body
        notes = ""
    timestamp = OrgFormat.datetime(time.gmtime(sms_date))
    properties = OrgProperties(data_for_hashing=output + timestamp + notes)
    self._writer.write_org_subitem(output=output,
                                   timestamp=timestamp,
                                   note=notes,
                                   properties=properties)
def _main(self):
    """
    get's automatically called from Memacs class

    Reads the configured CSV file and writes one Org subitem per row.
    Exits with status 1 on a timestamp or delimiter problem.
    """
    with open(self._args.csvfile, 'rb') as f:
        for row in UnicodeCsvReader(f, encoding=self._args.encoding,
                                    delimiter=self._args.delimiter):
            logging.debug(row)
            try:
                tstamp = time.strptime(row[self._args.timestamp_index],
                                       self._args.timestamp_format)
            except ValueError as e:
                logging.error("timestamp-format does not match: %s", e)
                sys.exit(1)
            except IndexError as e:
                # bug fix: the old call passed `e` without a %s
                # placeholder, which broke the log formatting
                logging.error("did you specify the right delimiter? %s", e)
                sys.exit(1)
            timestamp = OrgFormat.datetime(tstamp)
            output = []
            for i in self._args.output_indices:
                output.append(row[i])
            output = " ".join(output)
            # the whole row is the uniqueness key
            data_for_hashing = "".join(row)
            properties = OrgProperties(
                data_for_hashing=data_for_hashing)
            self._writer.write_org_subitem(
                timestamp=timestamp,
                output=output,
                properties=properties,
            )
def _main(self):
    """
    get's automatically called from Memacs class

    Reads the configured CSV file and writes one Org subitem per row.
    Exits with status 1 on a timestamp or delimiter problem.
    """
    with open(self._args.csvfile, 'rb') as f:
        for row in UnicodeCsvReader(f, encoding=self._args.encoding,
                                    delimiter=self._args.delimiter):
            logging.debug(row)
            try:
                tstamp = time.strptime(row[self._args.timestamp_index],
                                       self._args.timestamp_format)
            except ValueError as e:
                logging.error("timestamp-format does not match: %s", e)
                sys.exit(1)
            except IndexError as e:
                # bug fix: the old call passed `e` without a %s
                # placeholder, which broke the log formatting
                logging.error("did you specify the right delimiter? %s", e)
                sys.exit(1)
            timestamp = OrgFormat.datetime(tstamp)
            output = []
            for i in self._args.output_indices:
                output.append(row[i])
            output = " ".join(output)
            # the whole row is the uniqueness key
            data_for_hashing = "".join(row)
            properties = OrgProperties(
                data_for_hashing=data_for_hashing)
            self._writer.write_org_subitem(timestamp=timestamp,
                                           output=output,
                                           properties=properties,
                                           )
def _handle_battery(self, bat):
    """
    handle single battery, e.g. BAT0
    """
    # scale the raw /sys micro-unit readings down to watts
    # (presumably current_now is in uA and voltage_now in uV — confirm)
    watts = float(bat.current_now / 1000000.0 * bat.voltage_now / 1000000.0)
    now = OrgFormat.datetime(datetime.datetime.now())
    heading = self._args.output_format.format(battery=bat)

    props = OrgProperties(data_for_hashing=now)
    props.add("CYCLE_COUNT", bat.cycle_count)
    props.add("CAPACITY", '%s%%' % bat.capacity)
    props.add("STATUS", bat.status.lower())
    if watts:
        props.add("CONSUMPTION", '%.1f W' % watts)

    self._writer.write_org_subitem(timestamp=now,
                                   output=heading,
                                   properties=props)
def __write(self):
    """
    write attributes to writer (make an org_sub_item)

    The first non-empty line of the commit message becomes the
    subject; all following non-empty lines become the note body.
    """
    logging.debug("msg:%s", self.__msg)
    self.__msg = self.__msg.splitlines()
    subject = ""
    notes = ""
    # idea: look for the first -nonempty- message
    if len(self.__msg) > 0:
        start_notes = 0
        for i in range(len(self.__msg)):
            if self.__msg[i].strip() != "":
                subject = self.__msg[i].strip()
                start_notes = i + 1
                break
        if len(self.__msg) > start_notes:
            for n in self.__msg[start_notes:]:
                if n != "":
                    notes += n + "\n"
    output = "%s (r%d): %s" % (self.__author, self.__rev, subject)
    # author + subject is the uniqueness key for the entry hash
    properties = OrgProperties(data_for_hashing=self.__author + subject)
    timestamp = OrgFormat.datetime(
        OrgFormat.datetupelutctimestamp(self.__date))
    properties.add("REVISION", self.__rev)
    # emit only when no author filter is set or the author matches
    if self.__grepauthor == None or \
       (self.__author.strip() == self.__grepauthor.strip()):
        self._writer.write_org_subitem(output=output,
                                       timestamp=timestamp,
                                       note=notes,
                                       properties=properties)
def startElement(self, name, attrs):
    """
    at every <sms> tag write to orgfile
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    htmlparser = HTMLParser.HTMLParser()
    if name == "sms":
        sms_subject = attrs['subject']
        sms_date = int(attrs['date']) / 1000  # unix epoch
        sms_body = attrs['body']
        # normalize the number: strip separators, map "+" to "00" prefix
        sms_address = attrs['address'].strip().replace('-',u'').replace('/',u'').replace(' ',u'').replace('+',u'00')
        # type == 1 marks an incoming SMS
        sms_type_incoming = int(attrs['type']) == 1
        contact_name = False
        if 'contact_name' in attrs:
            ## NOTE: older version of backup app did not insert contact_name into XML
            contact_name = attrs['contact_name']
        else:
            # fall back to the user-supplied number->name lookup table
            if self._numberdict:
                if sms_address in self._numberdict.keys():
                    contact_name = self._numberdict[sms_address]
        skip = False
        if sms_type_incoming == True:
            output = "SMS from "
            if self._ignore_incoming:
                skip = True
        else:
            output = "SMS to "
            if self._ignore_outgoing:
                skip = True
        if not skip:
            # render a known contact as an Org "contact:" link
            name_string = ""
            if contact_name:
                name_string = '[[contact:' + contact_name + '][' + contact_name + ']]'
            else:
                name_string = "Unknown"
            output += name_string + ": "
            ## reverse encoding hack from just before:
            sms_body = htmlparser.unescape(sms_body.replace(u'EnCoDiNgHaCk42', u'&#'))
            for emoji in self.EMOJIS.keys():
                ## FIXXME: this is a horrible dumb brute-force algorithm.
                ## In case of bad performance, this can be optimized dramtically
                sms_body = sms_body.replace(emoji, self.EMOJI_ENCLOSING_CHARACTER + \
                                            self.EMOJIS[emoji] + self.EMOJI_ENCLOSING_CHARACTER).replace(u'\n', u'⏎')
            if sms_subject != "null":
                # in case of MMS we have a subject
                output += sms_subject
                notes = sms_body
            else:
                output += sms_body
                notes = ""
            timestamp = OrgFormat.datetime(time.gmtime(sms_date))
            # heading + timestamp + body makes the entry hash unique
            data_for_hashing = output + timestamp + notes
            properties = OrgProperties(data_for_hashing=data_for_hashing)
            properties.add("NUMBER", sms_address)
            properties.add("NAME", contact_name)
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp,
                                           note=notes,
                                           properties=properties)
def _main(self): """ get's automatically called from Memacs class """ # do all the stuff # if you need something from config: # attention: foo will be unicode # foo = self._get_config_option("foo") logging.info("foo started") # how to handle config files ? # sample config file: # ---------8<----------- # [memacs-example] # foo = 0 # bar = 1 # --------->8----------- # to read it out, just do following: # foo = self._get_config_option("foo") # bar = self._get_config_option("bar") # use logging.debug() for debug messages # use logging.error() for error messages # use logging.info() instead of print for informing user # # on an fatal error: # use logging.error() and sys.exit(1) timestamp = OrgFormat.datetime(time.gmtime(0)) # note: timestamp has to be a struct_time object # Orgproperties # Option 1: no properties given, specify argument for hashing data properties = OrgProperties("hashing data :ALKJ!@# should be unique") # Option 2: add properties which are all-together unique # properties.add("Category","fun") # properties.add("from","*****@*****.**") # properties.add("body","foo") self._writer.write_org_subitem(timestamp=timestamp, output="foo", properties=properties) # writes following: # ** <1970-01-01 Thu 00:00> foo # :PROPERTIES: # :ID: da39a3ee5e6b4b0d3255bfef95601890afd80709 # :END: notes = "bar notes\nfoo notes" p = OrgProperties(data_for_hashing="read comment below") # if a hash is not unique only with its :PROPERTIES: , then # set data_for_hasing string additional information i.e. the output # , which then makes the hash really unique # # if you *really*, *really* have already a unique id, # then you can call following method: # p.set_id("unique id here") p.add("DESCRIPTION", "foooo") p.add("foo-property", "asdf") tags = [u"tag1", u"tag2"] self._writer.write_org_subitem(timestamp=timestamp, output="bar", note=notes, properties=p, tags=tags)
def startElement(self, name, attrs):
    """
    at every <call> write to orgfile
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "call":
        call_number = attrs['number']
        call_duration = int(attrs['duration'])
        call_date = int(attrs['date']) / 1000  # unix epoch
        # backup-app call types: 1=incoming, 2=outgoing, 3=missed, 5=cancelled
        call_type = int(attrs['type'])
        call_incoming = call_type == 1
        call_outgoing = call_type == 2
        call_missed = call_type == 3
        call_cancelled = call_type == 5
        call_name = call_number
        if 'contact_name' in attrs:
            ## NOTE: older version of backup app did not insert contact_name into XML
            call_name = attrs['contact_name']
        output = "Phonecall "
        skip = False
        if call_incoming:
            output += "from "
            if self._ignore_incoming:
                skip = True
        elif call_outgoing:
            output += "to "
            if self._ignore_outgoing:
                skip = True
        elif call_missed:
            output += "missed "
            if self._ignore_missed:
                skip = True
        elif call_cancelled:
            output += "cancelled "
            if self._ignore_cancelled:
                skip = True
        else:
            raise Exception("Invalid Phonecall Type: %d", call_type)
        # "-1" is the backup app's marker for an unknown number
        call_number_string = ""
        if call_number != "-1":
            call_number_string = call_number
        else:
            call_number_string = "Unknown Number"
        # render a known contact as an Org "contact:" link
        name_string = ""
        if call_name != "(Unknown)":
            name_string = '[[contact:' + call_name + '][' + call_name + ']]'
        else:
            name_string = "Unknown"
        output += name_string
        # drop calls shorter than the configured minimum
        if call_duration < self._minimum_duration:
            skip = True
        timestamp = OrgFormat.datetime(time.gmtime(call_date))
        # end of the call = start + duration (UTC)
        end_datetimestamp = datetime.datetime.utcfromtimestamp(
            call_date + call_duration)
        logging.debug(
            "timestamp[%s] duration[%s] end[%s]" %
            (str(timestamp), str(call_duration), str(end_datetimestamp)))
        end_timestamp_string = OrgFormat.datetime(end_datetimestamp)
        logging.debug("end_time [%s]" % end_timestamp_string)
        data_for_hashing = output + timestamp
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        properties.add("NUMBER", call_number_string)
        properties.add("DURATION", call_duration)
        properties.add("NAME", call_name)
        if not skip:
            # timestamp range spans the whole call
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp + '-' + end_timestamp_string,
                                           properties=properties)
def _main(self): """ get's automatically called from Memacs class """ # do all the stuff # if you need something from config: # attention: foo will be unicode # foo = self._get_config_option("foo") logging.info("foo started") # how to handle config files ? # sample config file: # ---------8<----------- # [memacs-example] # foo = 0 # bar = 1 # --------->8----------- # to read it out, just do following: # foo = self._get_config_option("foo") # bar = self._get_config_option("bar") # use logging.debug() for debug messages # use logging.error() for error messages # use logging.info() instead of print for informing user # # on an fatal error: # use logging.error() and sys.exit(1) timestamp = OrgFormat.datetime(time.gmtime(0)) # note: timestamp has to be a struct_time object # Orgproperties # Option 1: no properties given, specify argument for hashing data properties = OrgProperties("hashing data :ALKJ!@# should be unique") # Option 2: add properties which are all-together unique # properties.add("Category","fun") # properties.add("from","*****@*****.**") # properties.add("body","foo") self._writer.write_org_subitem(timestamp=timestamp, output="foo", properties=properties) # writes following: #** <1970-01-01 Thu 00:00> foo # :PROPERTIES: # :ID: da39a3ee5e6b4b0d3255bfef95601890afd80709 # :END: notes = "bar notes\nfoo notes" p = OrgProperties(data_for_hashing="read comment below") # if a hash is not unique only with its :PROPERTIES: , then # set data_for_hasing string additional information i.e. the output # , which then makes the hash really unique # # if you *really*, *really* have already a unique id, # then you can call following method: # p.set_id("unique id here") p.add("DESCRIPTION", "foooo") p.add("foo-property", "asdf") tags = [u"tag1", u"tag2"] self._writer.write_org_subitem(timestamp=timestamp, output="bar", note=notes, properties=p, tags=tags)
def startElement(self, name, attrs):
    """
    at every <call> write to orgfile
    """
    logging.debug("Handler @startElement name=%s,attrs=%s", name, attrs)
    if name == "call":
        call_number = attrs['number']
        call_duration = int(attrs['duration'])
        call_date = int(attrs['date']) / 1000  # unix epoch
        # backup-app call types: 1=incoming, 2=outgoing, 3=missed, 5=cancelled
        call_type = int(attrs['type'])
        call_incoming = call_type == 1
        call_outgoing = call_type == 2
        call_missed = call_type == 3
        call_cancelled = call_type == 5
        call_name = call_number
        if 'contact_name' in attrs:
            ## NOTE: older version of backup app did not insert contact_name into XML
            call_name = attrs['contact_name']
        output = "Phonecall "
        skip = False
        if call_incoming:
            output += "from "
            if self._ignore_incoming:
                skip = True
        elif call_outgoing:
            output += "to "
            if self._ignore_outgoing:
                skip = True
        elif call_missed:
            output += "missed "
            if self._ignore_missed:
                skip = True
        elif call_cancelled:
            output += "cancelled "
            if self._ignore_cancelled:
                skip = True
        else:
            raise Exception("Invalid Phonecall Type: %d", call_type)
        # "-1" is the backup app's marker for an unknown number
        call_number_string = ""
        if call_number != "-1":
            call_number_string = call_number
        else:
            call_number_string = "Unknown Number"
        # render a known contact as an Org "contact:" link
        name_string = ""
        if call_name != "(Unknown)":
            name_string = '[[contact:' + call_name + '][' + call_name + ']]'
        else:
            name_string = "Unknown"
        output += name_string
        # drop calls shorter than the configured minimum
        if call_duration < self._minimum_duration:
            skip = True
        timestamp = OrgFormat.datetime(time.gmtime(call_date))
        # end of the call = start + duration (UTC)
        end_datetimestamp = datetime.datetime.utcfromtimestamp(call_date + call_duration)
        logging.debug("timestamp[%s] duration[%s] end[%s]" %
                      (str(timestamp), str(call_duration), str(end_datetimestamp)))
        end_timestamp_string = OrgFormat.datetime(end_datetimestamp)
        logging.debug("end_time [%s]" % end_timestamp_string)
        data_for_hashing = output + timestamp
        properties = OrgProperties(data_for_hashing=data_for_hashing)
        properties.add("NUMBER", call_number_string)
        properties.add("DURATION", call_duration)
        properties.add("NAME", call_name)
        if not skip:
            # timestamp range spans the whole call
            self._writer.write_org_subitem(output=output,
                                           timestamp=timestamp + '-' + end_timestamp_string,
                                           properties=properties
                                           )
def __write(self):
    """
    write attributes to writer (make an org_sub_item)

    Collects tags, heading, properties, note and timestamp from the
    parsed attributes and emits a single Org subitem.  Exits with
    status 2 on any timestamp parsing problem.
    """
    logging.debug("msg:%s", self.__msg)

    # getting tags
    if self.__attrtags:
        tags = self.__attrtags
        if self.__split:
            tags = tags.split(self.__split)
        else:
            tags = tags.split(' ')
        tags = tags[1:]
    elif self.__taging:
        tags = self.__taging
        if self.__split:
            tags = tags.split(self.__split)
        else:
            tags = tags.split(' ')
    else:
        tags = []
    # bug fix: the old code called tags.remove() while iterating the
    # same list, which skips elements; filter into a new list instead
    tags = [item for item in tags if item != '']

    def _linkify(text):
        # replace each bare URL with a short Org link followed by the URL
        rebuilt = ""
        for item in text.split(" "):
            if re.search("http[s]?://", item) is not None:
                short_link = OrgFormat.link(item, "link")
                rebuilt = rebuilt + " " + short_link + ": " + item
            else:
                rebuilt = rebuilt + " " + item
        return rebuilt[1:]

    # getting output
    if not self.__attroutput:
        output = "%s: %s" % (self.__author, self.__msg)
    else:
        output = self.__attroutput
    output = _linkify(output)

    # getting properties
    if not self.__attrproperties:
        properties = OrgProperties(data_for_hashing=self.__author
                                   + self.__msg + self.__date)
    else:
        properties = OrgProperties(data_for_hashing=self.__attrproperties)

    # getting notes
    if self.__attrnote:
        notes = self.__attrnote
    elif self.__notes:
        notes = self.__notes
    else:
        notes = ""
    if notes:
        notes = _linkify(notes)

    # prepare for most time formats + getting timestamp
    if self.__attrtime:
        # bug fix: was `self.attrtime`, which raises AttributeError
        # because the attribute is the name-mangled private __attrtime
        self.__date = self.__attrtime
    try:
        if (self.__time == 'YYYYMMDD' or self.__time == 'YYYY' or
                self.__time == 'YYYYMMDDTHHMMSSZ' or
                self.__time == 'YYYYMMDDTHHMMSST'):
            timestamp = OrgFormat.datetime(
                OrgFormat.datetupelutctimestamp(self.__date))
        elif (self.__time == ('YYYY-MM-DD')):
            timestamp = OrgFormat.datetime(
                OrgFormat.datetupeliso8601(self.__date))
        elif (self.__time == 'YYYY-MM-DDTHH.MM.SS' or
              self.__time == 'YYYY-MM-DDTHH.MM'):
            timestamp = OrgFormat.datetime(
                OrgFormat.datetimetupeliso8601(self.__date))
        elif (self.__time == 'timetuple'):
            time_tupel = time.localtime(time.mktime(
                parsedate(self.__date)))
            timestamp = OrgFormat.datetime(time_tupel)
        else:
            # bug fix: an unknown time format previously left
            # `timestamp` unbound and crashed with NameError below;
            # raise here so the existing handler exits cleanly
            raise ValueError("unknown time format: %r" % (self.__time,))
    except Exception:
        logging.debug("Write functione @timestamp timestamp=%s",
                      self.__date)
        logging.error("A timestamp problem occured")
        sys.exit(2)

    self._writer.write_org_subitem(output=output,
                                   timestamp=timestamp,
                                   note=notes,
                                   tags=tags,
                                   properties=properties)