def get_info(self, param=None):
    """Return the user's info record, loading it lazily.

    Lookup order: in-memory ``self.info`` -> the 'userinfo:<id>' cache
    (where dates are stored as ISO strings and parsed back) -> the
    database (result is cached with dates serialized to ISO strings).

    param -- optional field name; when given, return only that field
             (None if absent).  Without it, return the whole dict, or
             None when no info row exists.
    """
    if not self.info:
        res = cache_get('userinfo:%s' % self.id)
        if res:
            # Cached values keep dates as ISO strings; restore datetimes.
            for k in ('birthdate', 'created'):
                res[k] = dateutil.parser.parse(res[k]) if res[k] else None
            self.info = res
        else:
            res = db.fetchone("SELECT name, email, xmpp, icq, skype, "
                              "about, avatar, gender, "
                              "birthdate, location, homepage, created "
                              "FROM users.info WHERE id=%s;", [self.id])
            if res:
                self.info = dict(res)
                # Copy before mutating so self.info keeps datetime
                # objects while the cached copy holds ISO strings.
                res = dict(res)
                for k in ('birthdate', 'created'):
                    res[k] = res[k].isoformat() if res[k] else None
                cache_store('userinfo:%s' % self.id, res)
    if not self.info:
        return None
    if param:
        try:
            return self.info[param]
        except KeyError:
            return None
    else:
        return self.info
def get(table, param):
    """Fetch one profile field from <table> for the current user.

    Lookup order: in-memory ``self.profile`` -> per-table cache -> the
    database (an empty row is INSERTed first so a row always exists).
    Returns None when the table has no row at all.

    NOTE(review): closure — relies on ``self`` from the enclosing scope.
    """
    try:
        return self.profile[table][param]
    except KeyError:
        try:
            # FIXME: profile models
            # Table name is interpolated into the SQL string — safe only
            # while `table` comes from trusted internal callers.
            db.perform("INSERT INTO %s (id) VALUES (%%s);" % \
                       table, [self.id])
        except IntegrityError:
            pass  # row already exists — fine
        res = cache_get('profile:%s:%s' % (table, self.id))
        if res:
            self.profile[table] = res
        else:
            res = db.fetchone("SELECT * FROM %s WHERE id=%%s;" % \
                              table, [self.id])
            log.debug('RES %s %s' % (table, res))
            if res:
                self.profile[table] = dict(res)
                cache_store('profile:%s:%s' % (table, self.id),
                            self.profile[table])
            else:
                #try:
                #    return cls._profile[param]['default']
                #except KeyError:
                #    return None
                return None
        # FIXME: remove recursive call
        try:
            return self.profile[table][param]
        except KeyError:
            # Stale cache entry missing this field: drop it and retry
            # once from the database path.
            cache_del('profile:%s:%s' % (table, self.id))
            return get(table, param)
def subscriptions(self, type=None):
    """Return the users this user is subscribed to, sorted by login.

    type -- optional account type filter (e.g. 'user', 'feed').

    Returns [] for an anonymous user.  Results are cached per
    (user, type) for 120 seconds.
    """
    if not self.id:
        return []
    key = "subs:%s:%s" % (self.id, type or 'all')
    res = cache_get(key)
    if not res:
        values = [self.id]
        if type:
            type_filter = " AND u.type=%s"
            values.append(type)
        else:
            type_filter = ''
        res = db.fetchall("SELECT u.id, u.login, u.type, "
                          "i.name, i.gender, i.avatar, i.homepage "
                          "FROM subs.users s "
                          "JOIN users.logins u ON u.id=s.to_user_id "
                          "LEFT OUTER JOIN users.info i "
                          "ON i.id=s.to_user_id "
                          "WHERE s.user_id=%%s %s;" % type_filter,
                          values)
        # BUG FIX: was cache_store(res, 120) — the result was passed as
        # the cache *key* and nothing was ever stored under `key`.
        # Store plain dicts so the cached value round-trips cleanly.
        res = [dict(r) for r in res]
        cache_store(key, res, 120)
    users = []
    for r in res:
        u = User.from_data(r['id'], r['login'],
                           info={'name': r['name'],
                                 'gender': r['gender'],
                                 'avatar': r['avatar'],
                                 'homepage': r['homepage']})
        users.append(u)
    return sorted(users, key=lambda u: u.login.lower())
def markdown_filter(environ, text, post=None, comment=None, img=False):
    """Template filter: render Markdown `text` to HTML.

    When caching is enabled the result is keyed by post/comment id if
    given, otherwise by the MD5 of the text itself.
    """
    if not text:
        return ''
    caching = settings.cache_markdown
    if caching:
        if post:
            cache_key = 'md:%s' % post
            if comment:
                cache_key = '%s.%s' % (cache_key, comment)
        else:
            cache_key = 'md:%s' % md5(text.encode('utf-8')).hexdigest()
        cached = cache_get(cache_key)
        if cached:
            return cached
    rendered = md.convert(text)
    # reset() clears footnote definitions accumulated on the shared
    # converter instance; without it footnotes from earlier fragments
    # would leak into every later converted HTML fragment.  See
    # https://pythonhosted.org/Markdown/extensions/api.html#registerextension
    md.reset()
    if caching:
        cache_store(cache_key, rendered, settings.cache_markdown)
    return rendered
def get_info(self, param=None):
    """Return the user's info record, loading it lazily.

    Lookup order: in-memory ``self.info`` -> the 'userinfo:<id>' cache
    (dates stored as ISO strings are parsed back) -> the database
    (result is cached with dates serialized to ISO strings).

    param -- optional field name; when given, return only that field
             (None if absent).  Without it, return the whole dict, or
             None when no info row exists.
    """
    if not self.info:
        res = cache_get('userinfo:%s' % self.id)
        if res:
            # Cached values keep dates as ISO strings; restore datetimes.
            for k in ('birthdate', 'created'):
                res[k] = dateutil.parser.parse(res[k]) if res[k] else None
            self.info = res
        else:
            res = db.fetchone(
                "SELECT name, email, xmpp, icq, skype, "
                "about, avatar, gender, "
                "birthdate, location, homepage, created "
                "FROM users.info WHERE id=%s;", [self.id])
            if res:
                self.info = dict(res)
                # Copy before mutating so self.info keeps datetime
                # objects while the cached copy holds ISO strings.
                res = dict(res)
                for k in ('birthdate', 'created'):
                    res[k] = res[k].isoformat() if res[k] else None
                cache_store('userinfo:%s' % self.id, res)
    if not self.info:
        return None
    if param:
        try:
            return self.info[param]
        except KeyError:
            return None
    else:
        return self.info
def update_at(self, dt):
    """Schedule this feed for refresh at datetime `dt`.

    Pushes the feed id onto the per-minute queue 'fqueue:HHMM' and
    records the next update time in the cache.  No-op for a feed
    without an id.
    """
    if not self.id:
        return
    log.info('Feed #%s update_at %s' % (self.id, dt))
    queue_name = 'fqueue:%02d%02d' % (dt.hour, dt.minute)
    queue = Queue(queue_name, settings.feed_queue_socket, channels=False)
    queue.push({'id': self.id})
    cache_store('feed:next_update:%s' % self.id, dt.isoformat())
def comments_count(self):
    """Return the number of comments authored by this user.

    The count is cached for 30 seconds; returns 0 when it cannot be
    determined.
    """
    c = cache_get('comments_count:%s' % self.id)
    # BUG FIX: explicit None check so a legitimately cached count of 0
    # (falsy) no longer triggers a database query on every call.
    if c is not None:
        return c
    try:
        res = db.fetchone("SELECT count(id) FROM posts.comments "
                          "WHERE author=%s;", [self.id])
        # BUG FIX: fetchone() may return None; subscripting None raised
        # TypeError, which the `except IndexError` below never caught.
        c = res[0] if res else 0
    except IndexError:
        return 0
    cache_store('comments_count:%s' % self.id, c, 30)
    return c
def set_url(self, url):
    """Set the feed URL, refreshing the cache and persisting to the DB.

    For a feed without an id only the in-memory value is updated.
    """
    self._url = url
    if not self.id:
        return
    cache_store("feed_url:%s" % self.id, self._url, 3600)
    db.perform("UPDATE users.feeds SET url=%s WHERE id=%s;",
               [self._url, self.id])
def readers_count(self, cache=True):
    """Return the number of users subscribed to this user.

    cache -- when True (default), consult the cached value first.

    Returns 0 when the count cannot be determined.
    """
    # BUG FIX: the flag was inverted (`if not cache:`), so the cached
    # value was only consulted when the caller asked to BYPASS the
    # cache, and the default path always hit the database.
    if cache:
        c = cache_get('readers_count:%s' % self.id)
        # Explicit None check so a cached count of 0 is honored.
        if c is not None:
            return c
    try:
        res = db.fetchone("SELECT count(user_id) FROM subs.users "
                          "WHERE to_user_id=%s;", [self.id])
        c = res[0] if res else 0
    except IndexError:
        return 0
    cache_store('readers_count:%s' % self.id, c)
    return c
def alias_list(self):
    """Return command alias mappings as {'user': {...}, 'global': {...}}.

    The per-user map is present only for authenticated users and is
    cached indefinitely; the global map is cached for 300 seconds.
    """
    aliases = {}
    if self.id:
        user_key = 'aliases:%s' % self.id
        user_map = cache_get(user_key)
        if user_map is None:
            user_map = dict(db.fetchall(
                "SELECT alias, command FROM users.user_aliases "
                "WHERE user_id=%s;", [self.id]))
            cache_store(user_key, user_map)
        aliases['user'] = user_map
    global_map = cache_get('aliases:global')
    if global_map is None:
        global_map = dict(db.fetchall(
            "SELECT alias, command FROM users.aliases;"))
        cache_store('aliases:global', global_map, 300)
    aliases['global'] = global_map
    return aliases
def xhtmlim(environment, s):
    """Template filter: render `s` with Markdown for XHTML-IM output.

    Results are cached for an hour under the MD5 of the source text
    when markdown caching is enabled.
    """
    if not s:
        return ''
    if not settings.cache_markdown:
        return md.convert(s)
    digest = md5(s.encode('utf-8')).hexdigest()
    cached = cache_get('mdx:%s' % digest)
    if cached:
        return cached
    rendered = md.convert(s)
    cache_store('mdx:%s' % digest, rendered, 3600)
    return rendered
def markdown(text, img=False):
    """Render `text` from Markdown to HTML.

    Results are cached for an hour under the MD5 of the source text
    when markdown caching is enabled.  `img` is accepted for interface
    compatibility and not used here.
    """
    if not text:
        return ''
    if not settings.cache_markdown:
        return md.convert(text)
    digest = md5(text.encode('utf-8')).hexdigest()
    cached = cache_get('md:%s' % digest)
    if cached:
        return cached
    rendered = md.convert(text)
    cache_store('md:%s' % digest, rendered, 3600)
    return rendered
def striphtml_filter(environment, s):
    """Template filter: strip HTML tags from `s`.

    Results are cached for an hour under the MD5 of the source text
    when markdown caching is enabled.
    """
    if not s:
        return ''
    if not settings.cache_markdown:
        return striphtml(s)
    digest = md5(s.encode('utf-8')).hexdigest()
    cached = cache_get('h2t:%s' % digest)
    if cached:
        return cached
    stripped = striphtml(s)
    cache_store('h2t:%s' % digest, stripped, 3600)
    return stripped
def alias_list(self):
    """Return command alias mappings as {'user': {...}, 'global': {...}}.

    The per-user map is present only for authenticated users and is
    cached indefinitely; the global map is cached for 300 seconds.
    """
    result = {}
    if self.id:
        key = 'aliases:%s' % self.id
        cached = cache_get(key)
        if cached is None:
            cached = dict(db.fetchall(
                "SELECT alias, command FROM users.user_aliases "
                "WHERE user_id=%s;", [self.id]))
            cache_store(key, cached)
        result['user'] = cached
    cached = cache_get('aliases:global')
    if cached is None:
        cached = dict(
            db.fetchall("SELECT alias, command FROM users.aliases;"))
        cache_store('aliases:global', cached, 300)
    result['global'] = cached
    return result
def thumbnail(url):
    """Download the image at `url` and save a thumbnail to disk.

    The target path is <thumbnail_path>/<md5[:2]>/<md5>.  A short-lived
    cache flag ('thumbnail:<hash>', 60s) deduplicates concurrent
    attempts across workers.

    NOTE(review): the trailing `try:` block's except/finally clause is
    not visible in this chunk — the function continues past it.
    """
    log.debug('- URL %s %s' % (type(url), url))
    hash = md5(url).hexdigest()
    path = os.path.join(settings.thumbnail_path, hash[:2], hash)
    # NOTE(review): `os.stat(path) > 0` compares a stat_result object to
    # an int — on CPython 2 this is always true, so the size guard is a
    # no-op; probably meant `os.stat(path).st_size > 0`.  Confirm and fix.
    if os.path.isfile(path) and os.stat(path) > 0:
        log.debug('%s: thumbnail exists' % path)
        return
    log.info('Making thumbnail %s %s' % (path, url))
    # Another worker already claimed this URL recently — skip.
    if cache_get('thumbnail:%s' % hash):
        return
    cache_store('thumbnail:%s' % hash, 1, 60)
    try:
        dirname = os.path.join(settings.thumbnail_path, hash[:2])
        try:
            os.mkdir(dirname)
        except OSError, e:
            if e.errno == 17:  # EEXIST: directory already present — fine
                log.debug('OSError %s: %s %s'
                          % (e.errno, e.strerror, dirname))
            else:
                log.warn('OSError %s: %s %s'
                         % (e.errno, e.strerror, dirname))
        opener = urllib2.build_opener()
        # Some hosts reject the default urllib2 user agent.
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        resp = opener.open(url)
        #resp = urllib2.urlopen(url)
        buf = StringIO(resp.read())
        img = Image.open(buf)
        # Refuse to thumbnail images over the configured pixel budget.
        if img.size[0] * img.size[1] > settings.max_image_size:
            log.error('too big: %sx%s %s' % (img.size[0], img.size[1], url))
            return
        img.load()
        fmt = img.format
        if fmt == 'JPEG':
            # presumably applies EXIF orientation — confirm _rotate()
            img = _rotate(img)
        elif fmt == 'GIF':
            # Rewind animated GIFs to the first frame.
            img.seek(0)
            #img = img.copy()
        img.thumbnail(settings.thumbnail_size, Image.ANTIALIAS)
        img.save(path, fmt, **img.info)
def thumbnail(url):
    """Download the image at `url` and save a thumbnail to disk.

    The target path is <thumbnail_path>/<md5[:2]>/<md5>.  A short-lived
    cache flag ('thumbnail:<hash>', 60s) deduplicates concurrent
    attempts across workers.

    NOTE(review): the trailing `try:` block's except/finally clause is
    not visible in this chunk — the function continues past it.
    """
    log.debug('- URL %s %s' % (type(url), url))
    hash = md5(url).hexdigest()
    dirname = os.path.join(settings.thumbnail_path, hash[:2])
    path = os.path.join(dirname, hash)
    # NOTE(review): `os.stat(path) > 0` compares a stat_result object to
    # an int — on CPython 2 this is always true, so the size guard is a
    # no-op; probably meant `os.stat(path).st_size > 0`.  Confirm and fix.
    if os.path.isfile(path) and os.stat(path) > 0:
        log.debug('%s: thumbnail exists' % path)
        return
    log.info('Making thumbnail %s %s' % (path, url))
    # Another worker already claimed this URL recently — skip.
    if cache_get('thumbnail:%s' % hash):
        return
    cache_store('thumbnail:%s' % hash, 1, 60)
    try:
        try:
            os.mkdir(dirname)
        except OSError, e:
            if e.errno == 17:  # EEXIST: directory already present — fine
                log.debug('OSError %s: %s %s'
                          % (e.errno, e.strerror, dirname))
            else:
                log.warn('OSError %s: %s %s'
                         % (e.errno, e.strerror, dirname))
        opener = urllib2.build_opener()
        # Some hosts reject the default urllib2 user agent.
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        resp = opener.open(url)
        #resp = urllib2.urlopen(url)
        buf = StringIO(resp.read())
        img = Image.open(buf)
        # Refuse to thumbnail images over the configured pixel budget.
        if img.size[0] * img.size[1] > settings.max_image_size:
            log.error('too big: %sx%s %s' % (img.size[0], img.size[1], url))
            return
        img.load()
        fmt = img.format
        if fmt == 'JPEG':
            # presumably applies EXIF orientation — confirm _rotate()
            img = _rotate(img)
        elif fmt == 'GIF':
            # Rewind animated GIFs to the first frame.
            img.seek(0)
            #img = img.copy()
        img.thumbnail(settings.thumbnail_size, Image.ANTIALIAS)
        img.save(path, fmt, **img.info)
def get_url(self):
    """Return the feed URL, resolving it lazily.

    Lookup order: in-memory value -> 'feed_url:<id>' cache -> database
    (the cached copy lives for an hour).  Raises FeedNotFound when no
    feed row exists.
    """
    if self._url:
        return self._url
    cache_key = None
    if self.id:
        cache_key = "feed_url:%s" % self.id
        self._url = cache_get(cache_key)
    if self._url:
        return self._url
    row = db.fetchone("SELECT url FROM users.feeds WHERE id=%s;",
                      [self.id])
    if not row:
        raise FeedNotFound
    self._url = row[0]
    if cache_key:
        cache_store(cache_key, self._url, 3600)
    return self._url
def __init__(self, field, value=None):
    """Construct a Feed user.

    Accepted forms:
      Feed('url', <url>)      -- explicit URL
      Feed('<http(s)://...>') -- URL passed directly as first argument
      Feed(field, value)      -- anything else is delegated to
                                 User.__init__ unchanged

    Raises InvalidFeedUrl for malformed/unparseable URLs and
    FeedNotFound when no matching feed (or user) exists.
    """
    self._url = None
    if field == 'url':
        self._url = value
    elif isinstance(field, (str, unicode)):
        if field.startswith('http://') or field.startswith('https://'):
            self._url = field
        elif not value:
            # A bare non-URL string with no value cannot identify a feed.
            raise InvalidFeedUrl
    if self._url:
        if not parse_url(self._url, exact=True):
            raise InvalidFeedUrl
        # Resolve URL -> feed id, via cache first.
        key = 'url_feed_id:%s' % self._url
        id = cache_get(key)
        if not id:
            res = db.fetchone("SELECT id FROM users.feeds WHERE url=%s;",
                              [self._url])
            if res:
                id = res['id']
                cache_store(key, id)
        if not id:
            raise FeedNotFound
        try:
            User.__init__(self, long(id))
        except UserNotFound:
            # A feed id without a user row means the feed is gone.
            raise FeedNotFound
    else:
        try:
            User.__init__(self, field, value)
        except UserNotFound:
            raise FeedNotFound
    self._posts = []
def tags(self, limit=None, sort_by_name=False, all=False):
    """Return the user's tags with per-tag post counts.

    limit        -- cap on the number of tags returned
    sort_by_name -- order alphabetically instead of by count
    all          -- bypass the cache when reading (result still stored)

    Returns [] for an anonymous user.  Results are cached for 60s.
    """
    if not self.id:
        return []
    # BUG FIX: the cache key ignored sort_by_name, so an alphabetical
    # request could be served a cached count-ordered list (and vice
    # versa).  Include the ordering in the key.
    key = 'user_tags:%d:%s:%s' % (self.id, (limit or 'all'),
                                  'name' if sort_by_name else 'cnt')
    if not all:
        tags = cache_get(key)
        if tags:
            return tags
    order = 'tag ASC' if sort_by_name else 'cnt DESC'
    # Separate variable instead of rebinding the `limit` parameter.
    limit_clause = ("LIMIT %d" % limit) if limit else ''
    tags = db.fetchall("SELECT tag, count(post_id) AS cnt "
                       "FROM posts.tags WHERE user_id=%%s "
                       "GROUP BY tag ORDER BY %s "
                       "%s;" % (order, limit_clause), [self.id])
    cache_store(key, [dict(t) for t in tags], 60)
    return tags
def tags(self, limit=None, sort_by_name=False, all=False):
    """Return the user's tags with per-tag post counts.

    limit        -- cap on the number of tags returned
    sort_by_name -- order alphabetically instead of by count
    all          -- bypass the cache when reading (result still stored)

    Returns [] for an anonymous user.  Results are cached for 60s.
    """
    if not self.id:
        return []
    # BUG FIX: the cache key ignored sort_by_name, so an alphabetical
    # request could be served a cached count-ordered list (and vice
    # versa).  Include the ordering in the key.
    key = 'user_tags:%d:%s:%s' % (self.id, (limit or 'all'),
                                  'name' if sort_by_name else 'cnt')
    if not all:
        tags = cache_get(key)
        if tags:
            return tags
    order = 'tag ASC' if sort_by_name else 'cnt DESC'
    # Separate variable instead of rebinding the `limit` parameter.
    limit_clause = ("LIMIT %d" % limit) if limit else ''
    tags = db.fetchall(
        "SELECT tag, count(post_id) AS cnt "
        "FROM posts.tags WHERE user_id=%%s "
        "GROUP BY tag ORDER BY %s "
        "%s;" % (order, limit_clause), [self.id])
    cache_store(key, [dict(t) for t in tags], 60)
    return tags
def get_info(self, param=None):
    """Return the user's info (joined with privacy profile), or one field.

    Lookup order: in-memory ``self.info`` -> 'userinfo:<id>' cache
    (ISO date strings parsed back to datetimes) -> database (dates
    serialized before caching).  The 'tune' field is refreshed from
    get_tune() on every call.

    param -- optional field name; returns None when the field is absent
             or no info row exists.
    """
    if not self.info:
        res = cache_get('userinfo:%s' % self.id)
        if res:
            # Cached values keep dates as ISO strings; restore datetimes.
            for k in ('birthdate', 'created'):
                res[k] = dateutil.parser.parse(res[k]) if res[k] else None
            self.info = res
        else:
            res = db.fetchone(
                "SELECT i.name,p.private, p.deny_anonymous,"
                "i.email, i.xmpp, i.icq, i.skype, i.about, "
                "i.avatar, i.gender, i.birthdate, i.location,"
                "i.homepage, i.created "
                "FROM users.info i "
                "LEFT OUTER JOIN users.profile p "
                "ON (i.id = p.id) "
                "WHERE i.id=%s;", [self.id])
            if res:
                self.info = dict(res)
                # Copy before mutating so self.info keeps datetime
                # objects while the cached copy holds ISO strings.
                res = dict(res)
                for k in ('birthdate', 'created'):
                    res[k] = res[k].isoformat() if res[k] else None
                cache_store('userinfo:%s' % self.id, res)
    if not self.info:
        return None
    self.info.update({'tune': self.get_tune()})
    if param:
        try:
            return self.info[param]
        except KeyError:
            return None
    else:
        return self.info
def rename(self, login):
    """Change the user's login and invalidate related cache entries.

    A 'renamed:<id>' flag throttles renames to one per
    settings.user_rename_timeout.

    Raises NotAuthorized, RenameError (rate-limited), UserLoginError
    (invalid nickname) or UserExists (login taken).
    """
    if not self.id:
        raise NotAuthorized
    if cache_get('renamed:%s' % self.id):
        raise RenameError
    if not validate_nickname(login):
        raise UserLoginError
    previous_login = self.login
    self.login = login
    try:
        db.perform("UPDATE users.logins SET login=%s WHERE id=%s;",
                   [login, self.id])
    except IntegrityError:
        raise UserExists
    cache_store('renamed:%s' % self.id, 1, settings.user_rename_timeout)
    cache_del('id_login:%s' % previous_login.lower())
    # Drop the address-based lookup entries that resolve to the old login.
    for account_type in ACCOUNT_TYPES:
        for address in self.get_accounts(account_type):
            cache_del("addr_id_login:%s" % address)
def get_info(self, param=None):
    """Return the user's info (joined with privacy profile), or one field.

    Lookup order: in-memory ``self.info`` -> 'userinfo:<id>' cache
    (ISO date strings parsed back to datetimes) -> database (dates
    serialized before caching).  The 'tune' field is refreshed from
    get_tune() on every call.

    param -- optional field name; returns None when the field is absent
             or no info row exists.
    """
    if not self.info:
        res = cache_get('userinfo:%s' % self.id)
        if res:
            # Cached values keep dates as ISO strings; restore datetimes.
            for k in ('birthdate', 'created'):
                res[k] = dateutil.parser.parse(res[k]) if res[k] else None
            self.info = res
        else:
            res = db.fetchone("SELECT i.name,p.private, p.deny_anonymous,"
                              "i.email, i.xmpp, i.icq, i.skype, i.about, "
                              "i.avatar, i.gender, i.birthdate, i.location,"
                              "i.homepage, i.created "
                              "FROM users.info i "
                              "LEFT OUTER JOIN users.profile p "
                              "ON (i.id = p.id) "
                              "WHERE i.id=%s;", [self.id])
            if res:
                self.info = dict(res)
                # Copy before mutating so self.info keeps datetime
                # objects while the cached copy holds ISO strings.
                res = dict(res)
                for k in ('birthdate', 'created'):
                    res[k] = res[k].isoformat() if res[k] else None
                cache_store('userinfo:%s' % self.id, res)
    if not self.info:
        return None
    self.info.update({'tune': self.get_tune()})
    if param:
        try:
            return self.info[param]
        except KeyError:
            return None
    else:
        return self.info
class SupportWorker(object):
    """Worker that turns tagged support posts into Bitbucket issues.

    Subscribes to the 'msg' pubsub channel; for every public post
    carrying the 'point' tag plus a bug/feature-request tag it files a
    Bitbucket issue and replies to the post with the issue number.
    """

    def __init__(self):
        # NOTE: the constructor blocks forever on the pubsub listen
        # loop — instantiating this class runs the worker.
        proctitle('support-worker')
        log.info('support worker started with PID=%s' % os.getpid())
        pool = RedisPool(settings.pubsub_socket)
        pubsub = pool.pubsub()
        pubsub.subscribe(['msg'])
        for msg in pubsub.listen():
            try:
                data = json.loads(msg['data'])
            except TypeError:
                # Non-message events (subscribe confirmations etc.)
                # carry non-string payloads; skip them.
                continue
            if data['a'] in ('post', 'post_edited'):
                gevent.spawn(self.handle_post, data)

    def handle_post(self, data):
        """File an issue for a qualifying post and reply to it.

        data -- decoded pubsub payload; must contain 'private', 'tags',
                'post_id' and 'text'.
        """
        if data['private']:
            return
        # Only posts explicitly tagged 'point' are support candidates.
        if len(data['tags']) == 0 or 'point' not in data['tags']:
            return
        tagset = set(data['tags'])
        if tagset & FR_TAGS:
            issue_type = 'enhancement'
        elif tagset & BUG_TAGS:
            issue_type = 'bug'
        else:
            return
        # Deduplicate: at most one issue per post.
        if cache_get('issue-post:%s' % data['post_id']):
            return
        text = template('report.md', **data)
        args = {
            'kind': issue_type,
            'title': data['text'][:100],  # issue title: first 100 chars
            'content': text,
        }
        bb = Bitbucket(settings.api_login, settings.api_password,
                       settings.api_slug)
        issue = Issue(bb)
        status, resp = issue.create(**args)
        # Reply as the 'support' user; bail out if it doesn't exist.
        try:
            env.user = User('login', 'support')
        except UserNotFound:
            return
        reply = template('reply.txt', issue=resp['local_id'])
        try:
            add_comment(data['post_id'], None, text=reply,
                        dont_subscribe=True, force=True)
        except (PostError, CommentError), e:
            log.error(e)
            return
        # Mark the post as handled for 24 hours.
        cache_store('issue-post:%s' % data['post_id'], 1,
                    expire=60*60*24)
def update_tune_data(self, data):
    """Store the user's currently-playing tune in the cache.

    A falsy `data` clears the cached tune instead of storing it.
    """
    key = 'user_tune:%s' % self.id
    if not data:
        cache_del(key)
    else:
        cache_store(key, data)
def __init__(self, field, value=None):
    """Construct a User.

    Forms:
      User(<int id>)            -- resolve login from numeric id
      User('login', <login>)    -- resolve by login, case-insensitive
      User(<acc_type>, <addr>)  -- resolve by account address
      User(<field>, None)       -- empty/anonymous user, no lookup

    Raises UserNotFound when an id or login lookup fails; an unmatched
    account address leaves id/login as None.
    """
    self.id = None
    self.login = None
    self.accounts = []
    self.accounts_add = []
    self.accounts_del = []
    self.profile = {}
    self.profile_upd = {}
    self.info = {}
    self.info_upd = {}
    self.password = None
    self._private = None
    self.redis = RedisPool(settings.storage_socket)
    if isinstance(field, (int, long)):
        # Numeric id: resolve login (cache first).
        self.id = field
        self.login = cache_get('login:%s' % field)
        if not self.login:
            res = db.fetchone(
                "SELECT login FROM users.logins WHERE id=%s;", [field])
            if not res:
                raise UserNotFound
            self.login = res[0]
            cache_store('login:%s' % field, self.login)
        return
    if not value:
        #raise UserNotFound
        # empty user
        return
    if field == 'login':
        r = cache_get('id_login:%s' % value.lower())
        if r:
            try:
                self.id, self.login, self.type = r
            except ValueError:
                # Older cache entries lack the type element.
                self.id, self.login = r
                self.type = 'user'
        else:
            res = db.fetchone(
                "SELECT id, login, type FROM users.logins "
                "WHERE lower(login)=%s;", [str(value).lower()])
            if not res:
                raise UserNotFound(value)
            self.id, self.login, self.type = res
            cache_store('id_login:%s' % value.lower(),
                        [res[0], res[1], res[2]])
        return
    # Any other field is treated as an account type + address pair.
    r = cache_get('addr_id_login:%s' % value.lower())
    if r:
        self.id, self.login = r
    else:
        res = db.fetchone("SELECT u.id, u.login FROM users.accounts a "
                          "JOIN users.logins u ON u.id=a.user_id "
                          "WHERE a.type=%s AND lower(a.address)=%s;",
                          [field, value.lower()])  #, _cache=3600)
        if res:
            self.id, self.login = res
            cache_store('addr_id_login:%s' % value.lower(),
                        [res[0], res[1]])
else: filename = ('%s.%s' % (env.user.login, ext)).lower() make_avatar(env.request.files('avatar'), filename) env.user.set_info('avatar', '%s?r=%d' % (filename, randint(1000, 9999))) elif 'avatar' in info and info['avatar']: filename = ('%s.%s' % (env.user.login, 'jpg')).lower() make_avatar(info['avatar'], filename) env.user.set_info('avatar', '%s?r=%d' % (filename, randint(1000, 9999))) cache_store('reg-ok:%s' % env.request.remote_host, 1, 1800) env.user.save() env.user.authenticate() return Response(redirect=get_referer()) @catch_errors def ulogin(): if env.user.id: raise AlreadyAuthorized sess = Session() if env.request.method == 'POST': url = "http://ulogin.ru/token.php?token=%s&host=%s" % \
def load_stoplist():
    """Load the stop-word list from settings.stoplist_file.

    Each line is stripped and decoded as UTF-8.  The list is cached
    under 'stoplist' for settings.stoplist_expire seconds and returned.
    """
    # BUG FIX: use a context manager so the file handle is closed even
    # when stripping/decoding raises (the original leaked it on error).
    with open(settings.stoplist_file) as fd:
        slist = [s.strip().decode('utf-8') for s in fd]
    cache_store('stoplist', slist, settings.stoplist_expire)
    return slist
filename = ('%s.%s' % (env.user.login, ext)).lower() make_avatar(env.request.files('avatar'), filename) env.user.set_info('avatar', '%s?r=%d' % (filename, randint(1000, 9999))) elif 'avatar' in info and info['avatar']: filename = ('%s.%s' % (env.user.login, 'jpg')).lower() make_avatar(info['avatar'], filename) env.user.set_info('avatar', '%s?r=%d' % (filename, randint(1000, 9999))) cache_store('reg-ok:%s' % env.request.remote_host, 1, 1800) env.user.save() env.user.authenticate() return Response(redirect=get_referer()) @catch_errors def ulogin(): if env.user.id: raise AlreadyAuthorized sess = Session() if env.request.method == 'POST':
class XMPPBot(sleekxmpp.ClientXMPP): def __init__(self): proctitle('bot') log.info('bot started with PID=%d' % os.getpid()) self._jid = "%s/%s" % (settings.xmpp_jid, settings.xmpp_resource) sleekxmpp.ClientXMPP.__init__(self, self._jid, settings.xmpp_password) self.register_plugin('xep_0184') self.register_plugin('xep_0163') self.plugin['xep_0163'].add_interest('http://jabber.org/protocol/tune') self.plugin['xep_0060'].map_node_event( 'http://jabber.org/protocol/tune', 'user_tune') self.add_event_handler("session_start", self.session_start) self.add_event_handler("message", self.handle_message) self.add_event_handler("presence_subscribed", self.handle_subscription) self.add_event_handler("user_tune_publish", self.handle_tune) self.add_event_handler("got_offline", self.handle_disconnection) self.add_event_handler("receipt_received", self.handle_receipt) self.xin = Queue('xin', addr=settings.queue_socket) self.xout = Queue('xout', addr=settings.queue_socket) self.auto_authorize = True self.auto_subscribe = True spawn(self.listen_queue) def session_start(self, event): self.send_presence() self.get_roster() def handle_subscription(self, presence): key = 'presence:%s:%s' % (presence['type'], presence['from'].bare) data = cache_get(key) if data: cache_del(key) self.send_message(**data) def handle_receipt(self, msg): if msg['receipt']: receipt = msg['receipt'] elif msg['id']: receipt = msg['id'] if receipt and receipt.startswith('post_'): self.xin.push( json.dumps({ 'from': str(msg['from']), 'receipt': receipt[5:] })) def handle_message(self, msg): if msg['type'] in ('chat', 'normal'): if msg['id'] and msg['id'].startswith('post_'): _msg_id = msg['id'].strip() self.xin.push( json.dumps({ 'from': str(msg['from']), 'id': _msg_id })) try: jid, resource = str(msg['to']).split('/', 1) except ValueError: jid = settings.xmpp_jid resource = settings.xmpp_resource self.xin.push( json.dumps({ 'from': str(msg['from']), 'resource': resource, 'body': msg['body'].strip() })) def 
handle_tune(self, msg): tune = msg['pubsub_event']['items']['item']['payload'] tune_data = { tag_name_without_ns(el): el.text for el in tune.getchildren() } self.xin.push( json.dumps({ 'type': 'tune', 'from': str(msg['from']), 'tune': tune_data })) def handle_disconnection(self, presence): try: jid, resource = str(presence['from']).split('/', 1) except ValueError: jid = str(presence['from']) self.xin.push(json.dumps({'type': 'tune', 'from': jid, 'tune': {}})) def listen_queue(self): try: data = self.xout.pop() if data: data = json.loads(data) except Exception, e: log.error('%s %s %s' % (e.__class__.__name__, e.message, type(data), data)) data = None if not data: spawn_later(0.05, self.listen_queue) return try: html = None if 'html' in data and data['html']: html = sleekxmpp.xmlstream.ET.XML( '<div style="margin-top:0">%s</div>' % data['html']) #'<html xmlns="http://jabber.org/protocol/xhtml-im">' + \ #'<body xmlns="http://www.w3.org/1999/xhtml">%s</body></html>' if '_resource' in data and data['_resource']: mfrom = '%s/%s' % (settings.xmpp_jid, data['_resource']) else: mfrom = self._jid if self.check_subscription(data['to']): if '_presence' in data and data['_presence']: pstatus = data['_presence'] \ if isinstance(data['_presence'], (str, unicode)) \ else "I'm online" self.send_presence(pto=data['to'], pstatus=pstatus) mid = data['_msg_id'] if '_msg_id' in data else None self.send_message(mfrom=mfrom, mto=data['to'], mtype='chat', mid=mid, mbody=data['body'], mhtml=html) elif '_authorize' in data and data['_authorize']: # TODO: request subscription self.sendPresenceSubscription(pto=data['to']) cache_store( 'presence:subscribed:%s' % JID(data['to']).bare, { 'mfrom': mfrom, 'mto': data['to'], 'mtype': 'chat', 'mbody': data['body'], 'mhtml': html }, 3600 * 24 * 7) finally: spawn(self.listen_queue)
def __init__(self, field, value=None):
    """Construct a User.

    Forms:
      User(<int id>)            -- resolve login from numeric id
      User('login', <login>)    -- resolve by login, case-insensitive
      User(<acc_type>, <addr>)  -- resolve by account address
      User(<field>, None)       -- empty/anonymous user, no lookup

    Raises UserNotFound when an id or login lookup fails; an unmatched
    account address leaves id/login as None.
    """
    self.id = None
    self.login = None
    self.accounts = []
    self.accounts_add = []
    self.accounts_del = []
    self.profile = {}
    self.profile_upd = {}
    self.info = {}
    self.info_upd = {}
    self.password = None
    self._private = None
    self.redis = RedisPool(settings.storage_socket)
    if isinstance(field, (int, long)):
        # Numeric id: resolve login (cache first).
        self.id = field
        self.login = cache_get('login:%s' % field)
        if not self.login:
            res = db.fetchone("SELECT login FROM users.logins "
                              "WHERE id=%s;", [field])
            if not res:
                raise UserNotFound
            self.login = res[0]
            cache_store('login:%s' % field, self.login)
        return
    if not value:
        # Empty/anonymous user: no lookup performed.
        return
    if field == 'login':
        r = cache_get('id_login:%s' % value.lower())
        if r:
            try:
                self.id, self.login, self.type = r
            except ValueError:
                # Older cache entries lack the type element.
                self.id, self.login = r
                self.type = 'user'
        else:
            res = db.fetchone("SELECT id, login, type FROM users.logins "
                              "WHERE lower(login)=%s;",
                              [str(value).lower()])
            if not res:
                raise UserNotFound(value)
            self.id, self.login, self.type = res
            cache_store('id_login:%s' % value.lower(),
                        [res[0], res[1], res[2]])
        return
    # Any other field is treated as an account type + address pair.
    r = cache_get('addr_id_login:%s' % value.lower())
    if r:
        self.id, self.login = r
    else:
        # BUG FIX: the address comparison was case-sensitive while the
        # cache key is lowercased, so mixed-case addresses could miss
        # the row yet be cached under the lowercase key.  Match the
        # sibling implementation by comparing lower(a.address).
        res = db.fetchone("SELECT u.id, u.login FROM users.accounts a "
                          "JOIN users.logins u ON u.id=a.user_id "
                          "WHERE a.type=%s AND lower(a.address)=%s;",
                          [field, value.lower()])  #, _cache=3600)
        if res:
            self.id, self.login = res
            cache_store('addr_id_login:%s' % value.lower(),
                        [res[0], res[1]])
pass
# NOTE(review): the `pass` above is the tail of a try/except whose head
# lies outside this chunk — confirm against the full file.

from point.core.user import User
from importers.juick import Juick
from point.util import cache_get, cache_store

# Map of supported import source names to importer implementations.
importer_types = {"juick": Juick}

try:
    login, itype, path = sys.argv[1:4]
except (IndexError, ValueError):
    sys.stderr.write("Usage: %s <login> <type> <path>\n" % sys.argv[0])
    exit(1)

try:
    imp = importer_types[itype](User("login", login), path)
except KeyError:
    sys.stderr.write("%s: invalid source type\n" % itype)
    exit(1)

for ext_id, post, comments in imp.posts():
    # NOTE(review): this bare tuple expression has no visible effect —
    # possibly intended to force lazy attribute loading; confirm.
    post.archive, post.author, post.text, post.tags
    # Skip posts already imported in a previous run (dedup key).
    key = "imported:%s:%s:%s" % (itype, login.lower(), ext_id)
    print key
    if cache_get(key):
        continue
    post.save()
    for comment in comments:
        comment.save()
    cache_store(key, 1, settings.ids_cache_expire)
from importers.juick import Juick
from point.util import cache_get, cache_store

# Map of supported import source names to importer implementations.
importer_types = { "juick": Juick }

try:
    login, itype, path = sys.argv[1:4]
except (IndexError, ValueError):
    sys.stderr.write("Usage: %s <login> <type> <path>\n" % sys.argv[0])
    exit(1)

try:
    imp = importer_types[itype](User("login", login), path)
except KeyError:
    sys.stderr.write("%s: invalid source type\n" % itype)
    exit(1)

for ext_id, post, comments in imp.posts():
    # NOTE(review): this bare tuple expression has no visible effect —
    # possibly intended to force lazy attribute loading; confirm.
    post.archive, post.author, post.text, post.tags
    # Skip posts already imported in a previous run (dedup key).
    key = "imported:%s:%s:%s" % (itype, login.lower(), ext_id)
    print key
    if cache_get(key):
        continue
    post.save()
    for comment in comments:
        comment.save()
    cache_store(key, 1, settings.ids_cache_expire)