def main():
    """Register a credential for every service listed in the local config.

    Reads the server port from the global config, points the RPC layer at
    the local Personal Container, then registers each (service, user) pair
    found in the [services] section of the local config.
    """
    gconfig = ConfigParser.ConfigParser()
    gconfig.read(Global_cf)
    uri = "http://localhost:%d/" % (gconfig.getint("network", "port"),)
    Perscon_utils.init_url(uri)
    lconfig = ConfigParser.ConfigParser()
    lconfig.read(Local_cf)
    # Iterate for side effects; the original map() built a throwaway list and
    # relied on tuple-parameter lambdas (removed in Python 3, PEP 3113).
    for svc, usr in lconfig.items("services"):
        register_credential(svc, usr)
def main():
    """Locate the default Adium log directory and parse every log file in it."""
    logdir = "%s/Library/Application Support/Adium 2.0/Users/Default/Logs/" % os.getenv("HOME")
    if not os.path.isdir(logdir):
        print >> sys.stderr, "Unable to find Adium log dir in: %s" % logdir
        sys.exit(1)
    # point the RPC layer at the local Personal Container
    Perscon_utils.init_url ("http://localhost:5985/")
    # walk the whole tree; every file is assumed to be a chat log
    for dirpath, _subdirs, filenames in os.walk(logdir):
        for name in filenames:
            parseLog(os.path.join(dirpath, name))
def __init__(self, reposname, account):
    """Initialize a PersConRepository object.

    Takes a URL to the server holding all the mail.
    """
    BaseRepository.__init__(self, reposname, account)
    self.ui = UIBase.getglobalui()
    # server URL comes from the repository configuration
    self.localurl = self.getconf('localurl')
    self.folders = None
    self.debug("PersConRepository initialized, sep is %r localurl=%r"
               % (self.getsep(), self.localurl))
    Perscon_utils.init_url(self.localurl)
    # verify the Personal Container is actually reachable before proceeding
    self.rpc("ping")
def __init__(self, reposname, account):
    """Initialize a PersConRepository object.

    Takes a URL to the server holding all the mail.
    """
    BaseRepository.__init__(self, reposname, account)
    self.ui = UIBase.getglobalui()
    # base URL of the Personal Container server, read from repository config
    self.localurl = self.getconf('localurl')
    # folder list is filled in lazily elsewhere
    self.folders = None
    self.debug("PersConRepository initialized, sep is " + repr(self.getsep()) + " localurl=" + repr(self.localurl))
    Perscon_utils.init_url (self.localurl)
    # test the Personal Container connection
    self.rpc("ping")
def main(argv = None):
    """ main entry point: log in to Picasa and upload every photo plus metadata """
    configfile = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                              "..", "..", "perscon", "perscon.conf")
    config.parse(configfile)
    service = "google.com"
    username = config.user(service)
    # password comes from the OS keyring, never from the config file
    password = keyring.get_password(service, username)
    gd_client = gdata.photos.service.PhotosService()
    gd_client.email = username
    gd_client.password = password
    gd_client.source = 'py-perscon-v01'
    gd_client.ProgrammaticLogin()
    uri = "http://localhost:5985/"
    Perscon_utils.init_url(uri)
    #####
    # Get pictures from Picasa
    #####
    albums = gd_client.GetUserFeed(user=username)
    # iterate over albums
    for album in albums.entry:
        print 'title: %s, number of photos: %s, id: %s' % (album.title.text, album.numphotos.text, album.gphoto_id.text)
        album_id = album.gphoto_id.text
        # iterate over pictures in this album
        photos = gd_client.GetFeed('/data/feed/api/user/%s/albumid/%s?kind=photo' % (username, album_id))
        for photo in photos.entry:
            print 'Photo title:', photo.title.text
            image_url = photo.content.src
            uid = photo.gphoto_id.text
            mime,mime_enc = mimetypes.guess_type(photo.content.src)
            if not mime: mime = 'application/octet-stream'
            # download the image bytes and push them as an attachment
            fin = urllib2.urlopen(image_url)
            data = fin.read()
            fin.close()
            Perscon_utils.rpc('att/'+uid,
                              headers={'Content-type':mime,'Content-length':len(data)},
                              data=data)
            tstamp = photo.timestamp.text
            m = {'origin':'com.google.picasa', 'mtime':tstamp, 'att': [uid], 'uid': uid, 'tags':[] }
            meta={}
            # NOTE(review): this chunk appears truncated here -- the metadata
            # upload (m['meta'] = meta; rpc('thing/...')) presumably follows.
def parseObject(entry, client): """Parses a Google Docs entry (document) and stores it.""" m = { 'origin':'com.google.docs' } # Parse the date stamp returned by the GDocs API # in the format 2010-01-31T17:07:39.183Z d = datetime.strptime(entry.updated.text, "%Y-%m-%dT%H:%M:%S.%fZ") m['mtime'] = time.mktime(d.timetuple()) info = { 'type': entry.GetDocumentType(), 'uri': entry.id.text } acl_feed = client.GetDocumentListAclFeed(entry.GetAclLink().href) readers = [] writers = [] for acl_entry in acl_feed.entry: # Set 'from' to be the document owner if (acl_entry.role.value == 'owner'): m['frm'] = [{ 'ty' : entry.GetDocumentType(), 'id': acl_entry.scope.value }] # Gather readers and writers elif (acl_entry.role.value == 'writer'): writers.append(acl_entry.scope.value) elif (acl_entry.role.value == 'reader'): readers.append(acl_entry.scope.value) else: print "ERROR: unrecognised ACL detected" print '%s - %s (%s)' % (acl_entry.role.value, acl_entry.scope.value, acl_entry.scope.type) # Map writers to 'to' field m['to'] = map(lambda x: { 'ty': entry.GetDocumentType(), 'id' : x }, writers) # Map readers to 'cc' field #m['cc'] = map(lambda x: { 'ty': entry.GetDocumentType(), 'id' : x }, readers) meta={} meta.update(info) m['meta'] = meta h = hashlib.sha1() h.update(entry.title.text) h.update(entry.resourceId.text) uid = h.hexdigest() m['uid'] = uid mj = simplejson.dumps(m,indent=2) # print mj Perscon_utils.rpc('thing/' + uid, data=mj)
def stash_tweets(service, account, tweets):
    """Store a batch of tweets/DMs for (service, account) in the Personal Container."""
    info = { 'origin': 'com.twitter', 'account': account, }
    for tw in tweets:
        if Verbose: print >>sys.stderr, "raw:", sj.dumps(tw, indent=2)
        # each tweet gets its own copy of the per-account metadata
        data = { 'meta': info.copy(), }
        data['meta']['type'] = TWTY.tweet
        data['meta']['text'] = tw['text']
        mtime = dateutil.parser.parse(tw['created_at'])
        data['meta']['mtime'] = time.mktime(mtime.timetuple())
        # uid is deterministic across runs: sha1 of service+account+tweet id
        uid = hashlib.sha1(service+account+str(tw['id'])).hexdigest()
        data['uid'] = uid
        if 'sender' in tw and tw['sender']:
            # direct message: explicit sender/recipient fields
            data['meta']['type'] = TWTY.direct
            data['frm'] = [ addr(service, tw['sender_screen_name']) ]
            data['to'] = [ addr(service, tw['recipient_screen_name']) ]
        else:
            # search-API results carry from_user/to_user; the REST API
            # nests the author under user.screen_name instead
            try:
                data['frm'] = [addr(service, tw['from_user'])]
            except KeyError:
                data['frm'] = [addr(service, tw['user']['screen_name'])]
            try:
                data['to'] = [addr(service, tw['to_user'])]
            except KeyError:
                data['to'] = [addr(service, None)]
        if 'in_reply_to_screen_name' in tw and tw['in_reply_to_screen_name']:
            # replies override the recipient computed above
            data['meta']['type'] = TWTY.reply
            data['to'] = [addr(service, tw['in_reply_to_screen_name'])]
        if 'retweeted_status' in tw and tw['retweeted_status']:
            # retweets record the original author and original creation time
            data['meta']['type'] = TWTY.retweet
            data['meta']['source'] = tw['retweeted_status']['user']['screen_name']
            ctime = dateutil.parser.parse(tw['retweeted_status']['created_at'])
            data['meta']['ctime'] = time.mktime(ctime.timetuple())
        dataj = sj.dumps(data, indent=2)
        if Verbose: print >>sys.stderr, dataj
        Perscon_utils.rpc("thing/%s" % (uid, ), data=dataj)
def main(argv = None): """ main entry point """ configfile = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "perscon", "perscon.conf") config.parse(configfile) service = "google.com" username = config.user(service) password = keyring.get_password(service, username) gd_client = gdata.docs.service.DocsService(source='py-perscon-v01') gd_client.ClientLogin(username, password) uri = "http://localhost:5985/" Perscon_utils.init_url (uri) feed = gd_client.GetDocumentListFeed() if not feed.entry: print 'No items found.\n' for entry in feed.entry: parseObject(entry, gd_client)
def main(argv = None): """ main entry point """ uri = "http://localhost:5985/" Perscon_utils.init_url (uri) book = AddressBook.ABAddressBook.sharedAddressBook() for p in book.people(): mtime_ts = getField(p, AddressBook.kABModificationDateProperty) mtime = datetime.fromtimestamp(mtime_ts) uid = getField(p, AddressBook.kABUIDProperty) tt = mtime.timetuple() m, services, att = writeRecord(p, uid, mtime_ts) mj = simplejson.dumps(m) # upload attachment first if att: try: l = len(att[0]) r = urllib2.Request(uri + "att/" + att[1]['uid'], data=att[0], headers={'content-type':att[1]['mime'], 'content-length':l}) urllib2.urlopen(r) except urllib2.HTTPError as e: print e.read () print repr(s) sys.exit(1) # then contacts try: urllib2.urlopen ("%speople/%s" % (uri, uid), data=mj) except urllib2.HTTPError as e: print e.read () print mj sys.exit(1) # finally services, which reference contacts for s in services: sj = simplejson.dumps(s, indent=2) try: urllib2.urlopen(uri + "service", data=sj) except urllib2.HTTPError as e: print e.read () print repr(s) sys.exit(1)
def main(): uri = "http://localhost:5985/" Perscon_utils.init_url (uri) configfile = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "perscon", "perscon.conf") config.parse(configfile) base = config.get('photofiles', 'base') print "base dir is %s" % base for root, dirs, files in os.walk(base): for f in files: skip = False fname = os.path.join(root, f) meta = {} root_name,ext = os.path.splitext(fname) fin = open(fname, 'rb') try: print "reading %s" % fname data = fin.read() fin.seek(0) exif_tags = EXIF.process_file(fin) except: print >> sys.stderr, "error reading: %s" % fname skip = True finally: fin.close() if skip or (exif_tags == {}): print "skipping" continue if exif_tags.has_key('EXIF DateTimeOriginal'): raw = str(exif_tags['EXIF DateTimeOriginal']) tm = dateutil.parser.parse(raw) tt = tm.timetuple() else: tt = datetime.fromtimestamp(os.path.getmtime(fname)).timetuple() tstamp = time.mktime(tt) guid = hashlib.md5(file(fname).read()).hexdigest() uid = guid + ext m = { 'type':'org.perscon.photofiles', 'mtime':tstamp, 'att': [uid], 'uid': guid, 'frm': [], 'to':[] } # rpath = relpath(root,base) print base print fname m['caption'] = os.path.join(base, os.path.basename(fname)) mime,mime_enc = mimetypes.guess_type(fname) Perscon_utils.rpc('att/'+uid, headers={'content-type': mime,'content-length': len(data)}, data=data) meta['file_path'] = fname m['meta'] = meta mj = simplejson.dumps(m, indent=2) print mj Perscon_utils.rpc('thing/' + uid, data=mj)
def register_credential(svc, usr):
    """Fetch the password for (svc, usr) from the OS keyring and store the
    credential record in the Personal Container."""
    pwd = keyring.get_password(svc, usr)
    # NOTE: the uid hashes only the service name, so at most one credential
    # per service survives -- a second user on the same service overwrites it
    uid = hashlib.sha1("%s" % (svc,)).hexdigest()
    record = {
        'uid': uid,
        'svc': svc,
        'usr': usr,
        'pwd': pwd,
    }
    print >>sys.stderr, "register_credential:", svc, usr, uid
    Perscon_utils.rpc("credential/%s" % (uid, ), data=sj.dumps(record, indent=2))
elif len(other_res) > 0: return other_res[0] raise ValueError, "couldnt determine your phone number from address book" def usage(ret=2): print "Usage: %s [-u <IPhone UUID>] -m [call|sms] <SMS sqlite.db>" % sys.argv[0] sys.exit(ret) def main(): try: opts, args = getopt.getopt(sys.argv[1:], "hu:m:o:") except getopt.GetoptError, err: print str(err) usage(2) uri = "http://localhost:5985/" Perscon_utils.init_url (uri) uid_prefix = "Default_iPhone" mode=None for o,a in opts: if o == '-h': usage(0) elif o == '-u': uid_prefix=a elif o == '-m': if a == 'sms': mode = 'SMS' elif a== 'call': mode = 'Call' else: usage() if len(args) != 1 or not mode:
body = lxml.html.fromstring(body).text_content() data['meta']['text'] = body # this message originated from the current user, so its from us # and to the participants data['frm'] = [{ 'ty' : service, 'id': sender }] if sender == account: data['to'] = map(lambda x: { 'ty': service, 'id' : x }, participants) else: data['to'] = [{ 'ty' :service, 'id': account }] uid = hashlib.sha1(service+account+sender+tm+body).hexdigest() data['uid'] = uid dataj = simplejson.dumps(data, indent=2) Perscon_utils.rpc('thing/%s' % (uid,), data=dataj) def main(): logdir = "%s/Library/Application Support/Adium 2.0/Users/Default/Logs/" % os.getenv("HOME") if not os.path.isdir(logdir): print >> sys.stderr, "Unable to find Adium log dir in: %s" % logdir sys.exit(1) uri = "http://localhost:5985/" Perscon_utils.init_url (uri) for root, dirs, files in os.walk(logdir): for f in files: logfile = os.path.join(root, f) parseLog(logfile) if __name__ == "__main__": main()
def _drain_pages(label, fetch, handler):
    """Fetch numbered pages via fetch(page) until an empty page is returned,
    passing each non-empty batch to handler. Retries are delegated to
    retryOnError; `label` prefixes the retry log message."""
    pg = 1
    while True:
        rs = retryOnError("%s %d" % (label, pg), lambda: fetch(pg))
        if len(rs) == 0:
            break
        handler(rs)
        pg += 1

def main():
    """Pull every reachable tweet class (mentions, own tweets, retweets,
    DMs both ways, friends' timelines) into the Personal Container."""
    global Verbose
    ## mort: this config stuff is a bit grim - really need a proper
    ## plugin interface
    configfile = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                              "..", "..", "perscon", "perscon.conf")
    config.parse(configfile)
    uri = "http://localhost:%d/" % (config.port(),)
    Perscon_utils.init_url(uri)
    service = "twitter.com"
    username, password = Perscon_utils.get_credentials(service)
    ## mort: also note that by using Basic authentication the
    ## username/password pair are essentially being passed in the clear
    t = twitter.Twitter(username, password)
    stash = lambda rs: stash_tweets(service, username, rs)

    ## 1. tweets mentioning us -- the search API wraps results differently,
    ## so it cannot share the generic pagination helper
    tsearch = twitter.Twitter(username, password, domain="search.twitter.com")
    pg = 1
    while True:
        rs = retryOnError("search pg=%d" % pg,
                          lambda: tsearch.search(rpp=90, page=pg, q=username))
        if len(rs['results']) == 0:
            break
        stash(rs['results'])
        pg += 1

    ## 2. our own tweets
    _drain_pages("own_tweets",
                 lambda pg: t.statuses.user_timeline(page=pg, count=200), stash)

    ## 3. our own retweets (stupid api - not included in above)
    # NOTE(review): flipping Verbose on here looks like leftover debugging;
    # kept to preserve existing behavior -- confirm before removing.
    Verbose = True
    _drain_pages("own_retweets",
                 lambda pg: t.statuses.retweeted_by_me(page=pg, count=200), stash)

    ## 4. direct messages we sent
    _drain_pages("direct_messages_sent",
                 lambda pg: t.direct_messages.sent(page=pg, count=200), stash)

    ## 5. direct messages we received
    _drain_pages("direct_messages_received",
                 lambda pg: t.direct_messages(page=pg, count=200), stash)

    ## 6. tweets from friends
    cr = -1
    friends = []
    while cr != 0:
        rs = retryOnError("get_friends cursor=%d" % cr,
                          lambda: t.friends.ids(cursor=cr))
        friends.extend(rs['ids'])
        cr = rs['next_cursor']
    print >> sys.stderr, "friends:", friends
    for friend in friends:
        _drain_pages("friend_timeline %s" % friend,
                     lambda pg: t.statuses.user_timeline(id=friend, page=pg, count=200),
                     stash)
        print >> sys.stderr, "friend: %s done" % friend
def parse_photos():
    """Read the local iPhoto 8.1 library and upload every image plus metadata.

    Face (person) tags come from iPhoto's sqlite databases (iPhotoMain.db /
    face.db) and are turned into 'to' entries; the Address Book "me" card
    supplies the 'from' identity.
    """
    home = os.getenv("HOME") or exit(1)
    # identify the current user from the Address Book "me" card
    book = AddressBook.ABAddressBook.sharedAddressBook()
    addrs = book.me().valueForProperty_(AddressBook.kABEmailProperty)
    myemail = addrs.valueAtIndex_(addrs.indexForIdentifier_(addrs.primaryIdentifier()))
    fname = book.me().valueForProperty_(AddressBook.kABFirstNameProperty)
    lname = book.me().valueForProperty_(AddressBook.kABLastNameProperty)
    name = "%s %s" % (fname, lname)  # currently unused
    from_info = { 'ty': 'email', 'id' : myemail }
    base = os.path.join(home, "Pictures/iPhoto Library")
    idb = os.path.join(base, 'iPhotoMain.db')
    fdb = os.path.join(base, 'face.db')
    # anonymous scratch connection; the real databases are ATTACHed below
    conn = sqlite3.connect('')
    c = conn.cursor()
    c.execute("attach database '%s' as i" % idb)
    c.execute("attach database '%s' as f" % fdb)
    # maps an image's relative path to the names/emails of faces detected in it
    sql = "select f.face_name.name,f.face_name.email,relativePath from i.SqFileInfo inner join i.SqFileImage on (i.SqFileImage.primaryKey = i.SqFileInfo.primaryKey) inner join i.SqPhotoInfo on (i.SqFileImage.photoKey = i.SqPhotoInfo.primaryKey) inner join f.detected_face on (f.detected_face.image_key = i.SqFileImage.photoKey) inner join f.face_name on (f.detected_face.face_key = f.face_name.face_key) where f.face_name.name != '' and relativePath=?"
    fname = "%s/Pictures/iPhoto Library/AlbumData.xml" % os.getenv("HOME")
    pl = plistlib.readPlist(fname)
    version="%s.%s" % (pl['Major Version'], pl['Minor Version'])  # currently unused
    app_version = pl['Application Version']
    # the AlbumData.xml layout assumed below is iPhoto 8.1 specific
    if not (app_version.startswith('8.1')):
        print >> sys.stderr, "This script only works with iPhoto 8.1, found version %s" % app_version
        exit(1)
    images = pl['Master Image List']
    keywords = pl['List of Keywords']
    rolls = pl['List of Rolls']
    for roll in rolls:
        roll_id = roll['RollID']  # currently unused
        for img_id in roll['KeyList']:
            img = images[img_id]
            # prefer the unedited original where iPhoto kept one
            if 'OriginalPath' in img:
                img_path = img['OriginalPath']
            else:
                img_path = img['ImagePath']
            rel_path = (relpath(img_path, base),)
            root,ext = os.path.splitext(img_path)
            uid = img['GUID'] + ext
            mime,mime_enc = mimetypes.guess_type(img_path)
            if not mime: mime = 'application/octet-stream'
            # upload the image bytes as an attachment first
            fin = open(img_path, 'rb')
            data = fin.read()
            fin.close()
            Perscon_utils.rpc('att/'+uid, headers={'Content-type':mime,'Content-length':len(data)}, data=data)
            tstamp,tt = ti_to_tt(img['DateAsTimerInterval'])
            m = {'origin':'com.apple.iphoto', 'mtime':tstamp, 'att': [uid], 'uid': uid, 'tags':[] }
            meta={}
            if 'Rating' in img: meta['rating'] = img['Rating']
            if 'Comment' in img and img['Comment'] != '': meta['comment'] = img['Comment']
            if 'Keywords' in img:
                kw = map(lambda x: keywords[x], img['Keywords'])
                m['tags'] = kw
            if 'Caption' in img: meta['caption'] = img['Caption']
            meta['file_path'] = relpath(img_path, base)
            # look up detected faces for this image
            c.execute(sql, rel_path)
            m['frm'] = [from_info]
            m['to'] = []
            # fin = open(img_path, 'rb')
            # try:
            #   mtags = EXIF.process_file(fin)
            # except:
            #   pass
            # fin.close()
            # m['tags'].extend(mtags)
            for row in c:
                fname=row[0]
                email=row[1]
                if email:
                    m['to'].append({'ty':'email', 'id':email})
            # NOTE(review): placement of this print is ambiguous in the
            # collapsed source; assumed to follow the face loop -- confirm.
            print m
            m['meta'] = meta
            mj = simplejson.dumps(m, indent=2)
            #print mj
            Perscon_utils.rpc('thing/' + uid, data=mj)
def main():
    """Point the RPC layer at the local Personal Container, then import iPhoto data."""
    Perscon_utils.init_url ("http://localhost:5985/")
    parse_photos()