def get_reported_authors(cls, time = None, sort = None):
    reports = {}
    for t_cls in (Link, Comment, Message):
        q = t_cls._query(t_cls.c._spam == False,
                         t_cls.c.reported > 0,
                         data = True)
        q._sort = desc("_date")
        if time:
            q._filter(time)
        reports.update(Report.reported(things = list(q), amount = 0))

    # at this point, we have a full list of reports made on the interval
    # specified.  build up an author-to-report list
    authors = Account._byID([k[1].author_id
                             for k, v in reports.iteritems()],
                            data = True) if reports else []

    # and build up a report on each author
    author_rep = {}
    for (tattler, thing, amount), r in reports.iteritems():
        aid = thing.author_id
        if not author_rep.get(aid):
            author_rep[aid] = Storage(author = authors[aid])
            author_rep[aid].num_reports = 1
            author_rep[aid].acct_correct = tattler.report_correct
            author_rep[aid].acct_wrong = tattler.report_ignored
            author_rep[aid].most_recent = r._date
            author_rep[aid].reporters = set([tattler])
        else:
            author_rep[aid].num_reports += 1
            author_rep[aid].acct_correct += tattler.report_correct
            author_rep[aid].acct_wrong += tattler.report_ignored
            if author_rep[aid].most_recent < r._date:
                author_rep[aid].most_recent = r._date
            author_rep[aid].reporters.add(tattler)

    authors = author_rep.values()
    if sort == "hot":
        def report_hotness(a):
            # float() avoids Python 2 integer division, which would
            # truncate nearly every ratio to 0
            return float(a.acct_correct) / max(a.acct_wrong +
                                               a.acct_correct, 1)

        def better_reporter(a, b):
            q = report_hotness(b) - report_hotness(a)
            if q == 0:
                return b.acct_correct - a.acct_correct
            else:
                return 1 if q > 0 else -1

        authors.sort(better_reporter)
    elif sort == "top":
        authors.sort(lambda x, y: y.num_reports - x.num_reports)
    elif sort == "new":
        def newer_reporter(a, b):
            t = b.most_recent - a.most_recent
            t0 = datetime.timedelta(0)
            return 1 if t > t0 else -1 if t < t0 else 0

        authors.sort(newer_reporter)
    return authors
    ('MULTI_TOO_MANY_SUBREDDITS',
        _('no more space for subreddits in that multireddit')),
    ('MULTI_SPECIAL_SUBREDDIT',
        _("can't add special subreddit %(path)s")),
    ('JSON_PARSE_ERROR', _('unable to parse JSON data')),
    ('JSON_INVALID', _('unexpected JSON structure')),
    ('JSON_MISSING_KEY', _('JSON missing key: "%(key)s"')),
    ('NO_CHANGE_KIND', _("can't change post type")),
    ('INVALID_LOCATION', _("invalid location")),
    ('BANNED_FROM_SUBREDDIT', _('that user is banned from the subreddit')),
    ('GOLD_REQUIRED',
        _('you must have an active reddit gold subscription to do that')),
    ('INSUFFICIENT_CREDDITS', _("insufficient creddits")),
    ('SCRAPER_ERROR', _("unable to scrape provided url")),
))

errors = Storage([(e, e) for e in error_list.keys()])


def add_error_codes(new_codes):
    """Add error codes to the error enumeration.

    It is assumed that the incoming messages are marked for translation but
    not yet translated, so they can be declared before pylons.i18n is ready.

    """
    for code, message in new_codes.iteritems():
        error_list[code] = _(message)
        errors[code] = code


class RedditError(Exception):
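# A minimal usage sketch for add_error_codes above, e.g. from a plugin's
# setup hook.  The code and message are hypothetical; the message should be
# a marked-for-translation string, per the docstring.
add_error_codes({
    "MY_PLUGIN_ERROR": "something plugin-specific went wrong",
})
# errors maps each code to itself, so it can be used as an enum:
assert errors.MY_PLUGIN_ERROR == "MY_PLUGIN_ERROR"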
import datetime

from pylons import g

from r2.lib.db.queries import get_gilded_user_comments
from r2.lib.utils import Storage
from r2.models import GildingsByDay, Thing, Comment
from r2.models.query_cache import CachedQueryMutator


date = datetime.datetime.now(g.tz)
earliest_date = datetime.datetime(2012, 10, 1, tzinfo=g.tz)

already_seen = set()

with CachedQueryMutator() as m:
    while date > earliest_date:
        gildings = GildingsByDay.get_gildings(date)
        fullnames = [x["thing"] for x in gildings]
        things = Thing._by_fullname(fullnames, data=True, return_dict=False)
        comments = {t._fullname: t for t in things if isinstance(t, Comment)}

        for gilding in gildings:
            fullname = gilding["thing"]
            if fullname in comments and fullname not in already_seen:
                thing = gilding["thing"] = comments[fullname]
                gilding_object = Storage(gilding)
                m.insert(get_gilded_user_comments(thing.author_id),
                         [gilding_object])
                already_seen.add(fullname)

        date -= datetime.timedelta(days=1)
    },
    'multis': {
        'title': _('multis'),
    },
    'users': {
        'title': _('users'),
    },
    'wiki': {
        'title': _('wiki'),
    },
    'captcha': {
        'title': _('captcha'),
    },
}

api_section = Storage((k, k) for k in section_info)


def api_doc(section, uses_site=False, **kwargs):
    """
    Add documentation annotations to the decorated function.

    See ApidocsController.docs_from_controller for a list of annotation
    fields.
    """
    def add_metadata(api_function):
        doc = api_function._api_doc = getattr(api_function, '_api_doc', {})
        if 'extends' in kwargs:
            kwargs['extends'] = kwargs['extends']._api_doc
        doc.update(kwargs)
        doc['uses_site'] = uses_site
        doc['section'] = section
        # the decorator must hand back the original function, otherwise it
        # would replace the controller method with None
        return api_function
    return add_metadata
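# Hedged usage sketch: annotating a controller method with api_doc.  The
# method below is hypothetical; section names come from api_section above.
@api_doc(api_section.users, uses_site=False)
def GET_user_about(self, user):
    """Return information about the user."""
    pass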
def cast_vote(sub, obj, vote_info, timer, date):
    from r2.models.admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_sr_count

    names_by_dir = {True: "1", None: "0", False: "-1"}

    # `vote` mimics the old pg vote rel interface so downstream code doesn't
    # need to change. (but it totally needn't stay that way forever!)
    vote = Storage(
        _thing1=sub,
        _thing2=obj,
        _name=names_by_dir[vote_info["dir"]],
        _date=date,
        valid_thing=True,
        valid_user=True,
        ip=vote_info["ip"],
    )

    # these track how much ups/downs should change on `obj`
    ups_delta = 1 if int(vote._name) > 0 else 0
    downs_delta = 1 if int(vote._name) < 0 else 0

    # see if the user has voted on this thing before
    old_votes = VoteDetailsByThing.get_details(obj, [sub])
    old_vote = None
    if old_votes:
        old_vote = old_votes[0]
    timer.intermediate("cass_read_vote")

    if old_vote:
        vote._date = datetime.utcfromtimestamp(
            old_vote["date"]).replace(tzinfo=pytz.UTC)
        vote.valid_thing = old_vote["valid_thing"]
        vote.valid_user = old_vote["valid_user"]
        vote.ip = old_vote["ip"]

        if vote._name == old_vote["direction"]:
            # the old vote and new vote are the same. bail out.
            return vote

        # remove the old vote from the score
        old_direction = int(old_vote["direction"])
        ups_delta -= 1 if old_direction > 0 else 0
        downs_delta -= 1 if old_direction < 0 else 0

    # calculate valid_thing and valid_user
    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)

    if vote.valid_thing:
        vote.valid_thing = valid_thing(vote, karma, vote_info["cheater"],
                                       vote_info["info"])

    if vote.valid_user:
        vote.valid_user = vote.valid_thing and valid_user(vote, sr, karma)

    if kind == "link" and getattr(obj, "is_self", False):
        # self-posts do not generate karma
        vote.valid_user = False

    g.stats.simple_event("vote.valid_thing." + str(vote.valid_thing).lower())
    g.stats.simple_event("vote.valid_user." + str(vote.valid_user).lower())

    # update various score/karma/vote counts
    if not (not old_vote and obj.author_id == sub._id and vote._name == "1"):
        # newly created objects start out with _ups = 1, so we skip updating
        # their score here if this is the author's own initial vote on it.
        old_valid_thing = old_vote["valid_thing"] if old_vote else True
        update_score(obj, ups_delta, downs_delta, vote, old_valid_thing)
        timer.intermediate("pg_update_score")

    if vote.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, ups_delta - downs_delta)
        timer.intermediate("pg_incr_karma")

    if not old_vote and vote.valid_thing and kind == "link":
        if sub._id != obj.author_id:
            incr_sr_count(sr)
        timer.intermediate("incr_sr_counts")

    # write the vote to cassandra
    VotesByAccount.copy_from(vote, vote_info["info"])
    timer.intermediate("cassavotes")

    vote._thing2.update_search_index(boost_only=True)
    timer.intermediate("update_search_index")

    if "event" in vote_info and vote_info["event"]:
        g.events.vote_event(vote, old_vote, event_base=vote_info["event"])

    return vote
def valid_signature(payload, signature):
    """Check whether `signature` matches `payload`.

    `signature` (at least as of global version 1) must be of the form:

       {global_version}:{platform}:{version}:{epoch}:{signature}

    where:

     * global_version (currently hard-coded to be "1") can be used to change
       this header's underlying schema later if needs be.  As such, it can
       be treated as a protocol version.

     * platform is the client platform type (generally "ios" or "android")

     * version is the client's token version (can be updated and incremented
       per app build as needs be.)

     * epoch is the unix timestamp at which the client signed the request,
       used to reject stale signatures (see `valid_epoch`)

     * signature is the hmac of the request's POST body with the token
       derived from the above parameters via `get_secret_token`
    """
    result = Storage(
        global_version=-1,
        platform=None,
        version=-1,
        mac=None,
        valid=False,
        epoch=None,
        error=ERRORS.UNKNOWN,
    )

    sig_match = SIG_HEADER_RE.match(signature or "")
    if not sig_match:
        result.error = ERRORS.INVALID_FORMAT
        return result

    sig_header_dict = sig_match.groupdict()

    # we're matching \d so this shouldn't throw a TypeError
    result.global_version = int(sig_header_dict['global_version'])
    # incrementing this value is drastic.  We can't validate a token
    # protocol we don't understand.
    if result.global_version > GLOBAL_TOKEN_VERSION:
        result.error = ERRORS.UNKOWN_GLOBAL_VERSION
        return result

    # currently there's only one version, but here's where we'll eventually
    # patch in more.
    sig_match = SIG_CONTENT_V1_RE.match(sig_header_dict['payload'])
    if not sig_match:
        result.error = ERRORS.UNPARSEABLE
        return result

    result.update(sig_match.groupdict())
    result.version = int(result.version)
    result.epoch = int(result.epoch)

    # verify that the token provided hasn't been invalidated
    if is_invalid_token(result.platform, result.version):
        result.error = ERRORS.INVALIDATED_TOKEN
        return result

    if not valid_epoch(result.platform, result.epoch):
        result.error = ERRORS.EXPIRED_TOKEN
        return result

    # get the expected secret used to verify this request.
    secret_token = get_secret_token(
        result.platform,
        result.version,
        global_version=result.global_version,
    )
    result.valid = constant_time_compare(
        result.mac,
        versioned_hmac(
            secret_token,
            epoch_wrap(result.epoch, payload),
            result.global_version,
        ),
    )
    if result.valid:
        result.error = None
    else:
        result.error = ERRORS.SIGNATURE_MISMATCH

    return result
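# Hedged usage sketch: verifying an incoming request body against its signed
# header.  `request` here is hypothetical; SIGNATURE_BODY_HEADER is defined
# alongside the regexes in this module.
result = valid_signature(request.body,
                         request.headers.get(SIGNATURE_BODY_HEADER))
if not result.valid:
    # result.error is a SignatureError namedtuple with .code and .msg
    g.log.info("bad request signature: %s", result.error.msg)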
def cast_vote(sub, obj, vote_info, timer, date):
    from r2.models.admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_sr_count

    names_by_dir = {True: "1", None: "0", False: "-1"}

    # `vote` mimics the old pg vote rel interface so downstream code doesn't
    # need to change. (but it totally needn't stay that way forever!)
    vote = Storage(
        _thing1=sub,
        _thing2=obj,
        _name=names_by_dir[vote_info["dir"]],
        _date=date,
        valid_thing=True,
        valid_user=True,
        ip=vote_info["ip"],
    )

    # these track how much ups/downs should change on `obj`
    ups_delta = 1 if int(vote._name) > 0 else 0
    downs_delta = 1 if int(vote._name) < 0 else 0

    # see if the user has voted on this thing before
    old_votes = VoteDetailsByThing.get_details(obj, [sub])
    old_vote = None
    if old_votes:
        old_vote = old_votes[0]
    timer.intermediate("cass_read_vote")

    if old_vote:
        vote._date = datetime.utcfromtimestamp(
            old_vote["date"]).replace(tzinfo=pytz.UTC)
        vote.valid_thing = old_vote["valid_thing"]
        vote.valid_user = old_vote["valid_user"]
        vote.ip = old_vote["ip"]

        if vote._name == old_vote["direction"]:
            # the old vote and new vote are the same. bail out.
            return vote

        # remove the old vote from the score
        old_direction = int(old_vote["direction"])
        ups_delta -= 1 if old_direction > 0 else 0
        downs_delta -= 1 if old_direction < 0 else 0

    # calculate valid_thing and valid_user
    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)

    if vote.valid_thing:
        vote.valid_thing = valid_thing(vote, karma, vote_info["cheater"],
                                       vote_info["info"])

    if vote.valid_user:
        vote.valid_user = vote.valid_thing and valid_user(vote, sr, karma)

    if kind == "link" and getattr(obj, "is_self", False):
        # self-posts do not generate karma
        vote.valid_user = False

    g.stats.simple_event("vote.valid_thing." + str(vote.valid_thing).lower())
    g.stats.simple_event("vote.valid_user." + str(vote.valid_user).lower())

    # update various score/karma/vote counts
    if not (not old_vote and obj.author_id == sub._id and vote._name == "1"):
        # newly created objects start out with _ups = 1, so we skip updating
        # their score here if this is the author's own initial vote on it.
        old_valid_thing = old_vote["valid_thing"] if old_vote else True
        update_score(obj, ups_delta, downs_delta, vote, old_valid_thing)
        timer.intermediate("pg_update_score")

    if vote.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, ups_delta - downs_delta)
        timer.intermediate("pg_incr_karma")

    if not old_vote and vote.valid_thing and kind == "link":
        if sub._id != obj.author_id:
            incr_sr_count(sr)
        timer.intermediate("incr_sr_counts")

    # write the vote to cassandra
    VotesByAccount.copy_from(vote, vote_info["info"])
    timer.intermediate("cassavotes")

    num_votes = vote._thing2._ups + vote._thing2._downs
    if num_votes < 20 or num_votes % 10 == 0:
        # always update the search index if the thing has fewer than 20
        # votes; when the thing has more votes, queue an update less often
        vote._thing2.update_search_index(boost_only=True)
        timer.intermediate("update_search_index")

    event_data = vote_info.get("event_data")
    if event_data:
        g.events.vote_event(vote, old_vote, event_data["context"],
                            event_data["sensitive"])

    return vote
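# Hedged sketch of the `vote_info` dict this version of cast_vote expects,
# inferred from the keys read above (the values shown are hypothetical):
vote_info = {
    "dir": True,         # True = upvote, None = clear, False = downvote
    "ip": "10.0.0.1",
    "cheater": False,
    "info": None,        # detail passed to valid_thing and VotesByAccount
    "event_data": None,  # or {"context": ..., "sensitive": ...} for g.events
}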
from babel.numbers import format_currency

from r2.lib import promote
from r2.lib.db.sorts import epoch_seconds
from r2.lib.menus import menu
from r2.lib.menus import NavButton, NamedButton, PageNameNav, NavMenu
from r2.lib.pages.pages import Reddit, TimeSeriesChart, TabbedPane
from r2.lib.promote import cost_per_mille, cost_per_click
from r2.lib.template_helpers import format_number
from r2.lib.utils import Storage, to_date, timedelta_by_name
from r2.lib.wrapped import Templated
from r2.models import Thing, Link, PromoCampaign, traffic
from r2.models.subreddit import Subreddit, _DefaultSR


COLORS = Storage(UPVOTE_ORANGE="#ff5700",
                 DOWNVOTE_BLUE="#9494ff",
                 MISCELLANEOUS="#006600")


class TrafficPage(Reddit):
    """Base page template for pages rendering traffic graphs."""

    extension_handling = False
    extra_page_classes = ["traffic"]

    def __init__(self, content):
        Reddit.__init__(self, title=_("traffic stats"), content=content)

    def build_toolbars(self):
        main_buttons = [
            NavButton(menu.sitewide, "/"),
def _fast_query(cls, subject, objects, properties=None):
    # this is a compatibility shim for transition
    return {
        k: Storage(name=v)
        for k, v in cls.fast_query(subject, objects).iteritems()
    }
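# Sketch of what the shim buys callers: fast_query returns the vote
# direction name per key, and wrapping each name in Storage(name=...) lets
# code written against the old pg rel interface keep doing `rel.name`.
# The rel class, key shape, and things below are assumptions for
# illustration, not confirmed by this snippet.
rels = LinkVote._fast_query(user, links)
for key, rel in rels.iteritems():
    print key, rel.name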
keyspace = 'reddit'
thing_cache = g.thing_cache
disallow_db_writes = g.disallow_db_writes
tz = g.tz
log = g.log
read_consistency_level = g.cassandra_rcl
write_consistency_level = g.cassandra_wcl
debug = g.debug
make_lock = g.make_lock
db_create_tables = g.db_create_tables

thing_types = {}

# The available consistency levels
CL = Storage(ANY=ConsistencyLevel.ANY,
             ONE=ConsistencyLevel.ONE,
             QUORUM=ConsistencyLevel.QUORUM,
             ALL=ConsistencyLevel.ALL)

# the greatest number of columns that we're willing to accept over the
# wire for a given row (this should be increased if we start working
# with classes with lots of columns, like Account which has lots of
# karma_ rows, or we should not do that)
max_column_count = 10000


class CassandraException(Exception):
    """Base class for Exceptions in tdb_cassandra"""
    pass


class InvariantException(CassandraException):
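# `CL` simply gives attribute-style names to pycassa's ConsistencyLevel
# constants, so call sites can say CL.QUORUM rather than importing
# ConsistencyLevel everywhere.  A usage sketch (the column family `cf` is
# hypothetical; read_consistency_level is a standard pycassa get() kwarg):
row = cf.get("rowkey", read_consistency_level=CL.QUORUM)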
def api_wrapper(self, kw):
    return Storage(**kw)
class Email(object):
    handler = EmailHandler()

    Kind = ["SHARE", "FEEDBACK", "ADVERTISE", "OPTOUT", "OPTIN"]
    Kind = Storage((e, i) for i, e in enumerate(Kind))

    def __init__(self, user, thing, email, from_name, date, ip, banned_ip,
                 kind, msg_hash, body = '', from_addr = '', reply_to = ''):
        self.user = user
        self.thing = thing
        self.to_addr = email
        self.fr_addr = from_addr
        self._from_name = from_name
        self.date = date
        self.ip = ip
        self.banned_ip = banned_ip
        self.kind = kind
        self.sent = False
        self.body = body
        self.subject = ''
        self.msg_hash = msg_hash
        self.reply_to = reply_to

    def from_name(self):
        if not self.user:
            name = "%(name)s"
        elif self._from_name != self.user.name:
            name = "%(name)s (%(uname)s)"
        else:
            name = "%(uname)s"
        return name % dict(name = self._from_name,
                           uname = self.user.name if self.user else '')

    @classmethod
    def get_unsent(cls, max_date, batch_limit = 50, kind = None):
        for e in cls.handler.from_queue(max_date, batch_limit = batch_limit,
                                        kind = kind):
            yield cls(*e)

    def should_queue(self):
        return (not self.user or not self.user._spam) and \
               (not self.thing or not self.thing._spam) and \
               not self.banned_ip and \
               (self.kind == self.Kind.OPTOUT or
                not has_opted_out(self.to_addr))

    def set_sent(self, date = None, rejected = False):
        if not self.sent:
            from pylons import g
            self.date = date or datetime.datetime.now(g.tz)
            t = self.handler.reject_table if rejected \
                else self.handler.track_table
            t.insert().execute({
                t.c.account_id: self.user._id if self.user else 0,
                t.c.to_addr: self.to_addr,
                t.c.fr_addr: self.fr_addr,
                t.c.reply_to: self.reply_to,
                t.c.ip: self.ip,
                t.c.fullname: self.thing._fullname if self.thing else "",
                t.c.date: self.date,
                t.c.kind: self.kind,
                t.c.msg_hash: self.msg_hash,
            })
            self.sent = True

    def to_MIMEText(self):
        def utf8(s):
            return s.encode('utf8') if isinstance(s, unicode) else s

        fr = '"%s" <%s>' % (self.from_name(), self.fr_addr)
        if not fr.startswith('-') and not self.to_addr.startswith('-'):
            # security
            msg = MIMEText(utf8(self.body))
            msg.set_charset('utf8')
            msg['To'] = utf8(self.to_addr)
            msg['From'] = utf8(fr)
            msg['Subject'] = utf8(self.subject)
            if self.user:
                msg['X-Reddit-username'] = utf8(self.user.name)
            msg['X-Reddit-ID'] = self.msg_hash
            if self.reply_to:
                msg['Reply-To'] = utf8(self.reply_to)
            return msg
        return None
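# The rebinding of `Kind` above turns the list into an attribute-accessible
# enum: each kind name maps to its index in the original list, e.g.:
assert Email.Kind.SHARE == 0
assert Email.Kind.OPTOUT == 3
assert Email.Kind.OPTIN == 4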
def cast_vote(sub, obj, dir, ip, vote_info, cheater, timer, date):
    from r2.models.admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_sr_count
    from r2.lib.db import queries

    names_by_dir = {True: "1", None: "0", False: "-1"}

    # `vote` mimics the old pg vote rel interface so downstream code doesn't
    # need to change. (but it totally needn't stay that way forever!)
    vote = Storage(
        _thing1=sub,
        _thing2=obj,
        _name=names_by_dir[dir],
        _date=date,
        valid_thing=True,
        valid_user=True,
        ip=ip,
    )

    # these track how much ups/downs should change on `obj`
    ups_delta = 1 if int(vote._name) > 0 else 0
    downs_delta = 1 if int(vote._name) < 0 else 0

    # see if the user has voted on this thing before
    pgrel = Vote.rel(sub, obj)
    pgoldvote = pgrel._fast_query(sub, obj, ["-1", "0", "1"]).values()
    try:
        pgoldvote = filter(None, pgoldvote)[0]
    except IndexError:
        pgoldvote = None
    timer.intermediate("pg_read_vote")

    if pgoldvote:
        # old_vote is mimicking `{Link,Comment}VoteDetailsByThing` here
        # because that will eventually be exactly what it is
        old_vote = {
            "direction": pgoldvote._name,
            "valid_thing": pgoldvote.valid_thing,
            "valid_user": pgoldvote.valid_user,
            "ip": getattr(pgoldvote, "ip", None),
        }
        vote.valid_thing = old_vote["valid_thing"]
        vote.valid_user = old_vote["valid_user"]

        if vote._name == old_vote["direction"]:
            # the old vote and new vote are the same. bail out.
            return vote

        # remove the old vote from the score
        old_direction = int(old_vote["direction"])
        ups_delta -= 1 if old_direction > 0 else 0
        downs_delta -= 1 if old_direction < 0 else 0
    else:
        old_vote = None

    # calculate valid_thing and valid_user
    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)

    if vote.valid_thing:
        vote.valid_thing = valid_thing(vote, karma, cheater, vote_info)

    if vote.valid_user:
        vote.valid_user = vote.valid_thing and valid_user(vote, sr, karma)

    if kind == "link" and getattr(obj, "is_self", False):
        # self-posts do not generate karma
        vote.valid_user = False

    g.stats.simple_event("vote.valid_thing." + str(vote.valid_thing).lower())
    g.stats.simple_event("vote.valid_user." + str(vote.valid_user).lower())

    # write out the new/modified vote to postgres
    if pgoldvote:
        pgvote = pgoldvote
        pgvote._name = vote._name
    else:
        pgvote = pgrel(sub, obj, vote._name, date=vote._date, ip=ip)
    pgvote.valid_thing = vote.valid_thing
    pgvote.valid_user = vote.valid_user
    pgvote._commit()
    timer.intermediate("pg_write_vote")

    # update various score/karma/vote counts
    if not (not old_vote and obj.author_id == sub._id and vote._name == "1"):
        # newly created objects start out with _ups = 1, so we skip updating
        # their score here if this is the author's own initial vote on it.
        old_valid_thing = old_vote["valid_thing"] if old_vote else True
        update_score(obj, ups_delta, downs_delta, vote, old_valid_thing)
        timer.intermediate("pg_update_score")

    if vote.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, ups_delta - downs_delta)
        timer.intermediate("pg_incr_karma")

    if not old_vote and vote.valid_thing and kind == "link":
        if sub._id != obj.author_id:
            incr_sr_count(sr)
        timer.intermediate("incr_sr_counts")

    # write the vote to cassandra
    VotesByAccount.copy_from(vote, vote_info)
    timer.intermediate("cassavotes")

    # update the search index
    queries.changed(vote._thing2, boost_only=True)
    timer.intermediate("changed")

    return vote
def get_reported_authors(cls, time=None, sort=None):
    reports = {}
    for t_cls in (Link, Comment, Message):
        q = t_cls._query(t_cls.c._spam == False,
                         t_cls.c.reported != 0,
                         data=True)
        q._sort = desc("_date")
        if time:
            q._filter(time)
        reports.update(Report.reported(things=list(q), amount=0))

    # at this point, we have a full list of reports made on the interval
    # specified.  build up an author-to-report list
    authors = Account._byID(
        [k[1].author_id for k, v in reports.iteritems()],
        data=True) if reports else []

    # and build up a report on each author
    author_rep = {}
    for (tattler, thing, amount), r in reports.iteritems():
        aid = thing.author_id
        if not author_rep.get(aid):
            author_rep[aid] = Storage(author=authors[aid])
            author_rep[aid].num_reports = 1
            author_rep[aid].acct_correct = tattler.report_correct
            author_rep[aid].acct_wrong = tattler.report_ignored
            author_rep[aid].most_recent = r._date
            author_rep[aid].reporters = set([tattler])
        else:
            author_rep[aid].num_reports += 1
            author_rep[aid].acct_correct += tattler.report_correct
            author_rep[aid].acct_wrong += tattler.report_ignored
            if author_rep[aid].most_recent < r._date:
                author_rep[aid].most_recent = r._date
            author_rep[aid].reporters.add(tattler)

    authors = author_rep.values()
    if sort == "hot":
        def report_hotness(a):
            # float() avoids Python 2 integer division, which would
            # truncate nearly every ratio to 0
            return float(a.acct_correct) / max(a.acct_wrong +
                                               a.acct_correct, 1)

        def better_reporter(a, b):
            q = report_hotness(b) - report_hotness(a)
            if q == 0:
                return b.acct_correct - a.acct_correct
            else:
                return 1 if q > 0 else -1

        authors.sort(better_reporter)
    elif sort == "top":
        authors.sort(lambda x, y: y.num_reports - x.num_reports)
    elif sort == "new":
        def newer_reporter(a, b):
            t = b.most_recent - a.most_recent
            t0 = datetime.timedelta(0)
            return 1 if t > t0 else -1 if t < t0 else 0

        authors.sort(newer_reporter)
    return authors
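# Hedged usage sketch, assuming this is a classmethod on Report: `time` is
# an optional query filter and sort is one of "hot", "top", or "new".  Each
# returned item is a Storage with the fields built up above.
top_reported = Report.get_reported_authors(sort="top")
for rep in top_reported[:10]:
    print rep.author.name, rep.num_reports, len(rep.reporters)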
""" from pylons import g from httplib import HTTPSConnection from urlparse import urlparse import socket, re from BeautifulSoup import BeautifulStoneSoup from r2.lib.utils import iters, Storage from r2.models import NotFound from r2.models.bidding import CustomerID, PayID, ShippingAddress # list of the most common errors. Errors = Storage(TESTMODE="E00009", TRANSACTION_FAIL="E00027", DUPLICATE_RECORD="E00039", RECORD_NOT_FOUND="E00040", TOO_MANY_PAY_PROFILES="E00042", TOO_MANY_SHIP_ADDRESSES="E00043") class AuthorizeNetException(Exception): pass class SimpleXMLObject(object): """ All API transactions are done with authorize.net using XML, so here's a class for generating and extracting structured data from XML. """ _keys = []
__all__ = ['TRANSACTION_NOT_FOUND']

TRANSACTION_NOT_FOUND = 16

# useful test data:
test_card = dict(
    AMEX=("370000000000002", 1234),
    DISCOVER=("6011000000000012", 123),
    MASTERCARD=("5424000000000015", 123),
    VISA=("4007000000027", 123),
    # visa card which generates error codes based on the amount
    ERRORCARD=("4222222222222", 123))
test_card = Storage(
    (k, CreditCard(cardNumber=x, expirationDate="2011-11", cardCode=y))
    for k, (x, y) in test_card.iteritems())

test_address = Address(firstName="John",
                       lastName="Doe",
                       address="123 Fake St.",
                       city="Anytown",
                       state="MN",
                       zip="12346")


@export
def get_account_info(user, recursed=False):
    # if we don't have an ID for the user, try to make one
    if not CustomerID.get_id(user):
        cust_id = CreateCustomerProfileRequest(user).make_request()
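# With the Storage wrapper, tests can grab a card by name with attribute
# access, e.g. (assuming CreditCard keeps its constructor kwargs as
# attributes):
card = test_card.VISA
assert card.cardNumber == "4007000000027"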
import re

from collections import namedtuple

from pylons import app_globals as g

from r2.lib.utils import Storage, epoch_timestamp, constant_time_compare, tup


GLOBAL_TOKEN_VERSION = 1

SIGNATURE_UA_HEADER = "X-hmac-signed-result"
SIGNATURE_BODY_HEADER = "X-hmac-signed-body"
SIG_HEADER_RE = re.compile(r"^(?P<global_version>\d+?):(?P<payload>.*)$")
SIG_CONTENT_V1_RE = re.compile(
    r"^(?P<platform>.+?):(?P<version>\d+?):(?P<epoch>\d+?):(?P<mac>.*)$"
)

ERRORS = Storage()
SignatureError = namedtuple("SignatureError", "code msg")
for code, msg in (
    ("UNKNOWN", "default signature failure mode (shouldn't happen!)"),
    ("INVALID_FORMAT", "no signature header or completely unparsable"),
    ("UNKOWN_GLOBAL_VERSION", "token global version is from the future"),
    ("UNPARSEABLE", "couldn't parse signature for this global version"),
    ("INVALIDATED_TOKEN", "platform/version combination is invalid."),
    ("EXPIRED_TOKEN", "epoch provided is too old."),
    ("SIGNATURE_MISMATCH", "the payload's signature doesn't match the header"),
    ("MULTISIG_MISMATCH", "more than one version on multiple signatures!"),
):
    code = code.upper()
    ERRORS[code] = SignatureError(code, msg)
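# Reading the two regexes together, a complete v1 header is
# "{global_version}:{platform}:{version}:{epoch}:{mac}"; a client would
# send something like (values hypothetical):
#
#   X-hmac-signed-body: 1:ios:2:1430000000:deadbeef
#
example = "1:ios:2:1430000000:deadbeef"
header = SIG_HEADER_RE.match(example).groupdict()
assert header["global_version"] == "1"
content = SIG_CONTENT_V1_RE.match(header["payload"]).groupdict()
assert content["platform"] == "ios" and content["epoch"] == "1430000000"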
def response_func(self, **kw):
    return Storage(**kw)
    ProfileTransVoid,
    UpdateCustomerPaymentProfileRequest,
)

__all__ = []

# useful test data:
test_card = dict(AMEX       = ("370000000000002",  1234),
                 DISCOVER   = ("6011000000000012", 123),
                 MASTERCARD = ("5424000000000015", 123),
                 VISA       = ("4007000000027",    123),
                 # visa card which generates error codes based on the amount
                 ERRORCARD  = ("4222222222222",    123))
test_card = Storage((k, CreditCard(cardNumber=x,
                                   expirationDate="2011-11",
                                   cardCode=y))
                    for k, (x, y) in test_card.iteritems())

test_address = Address(firstName="John",
                       lastName="Doe",
                       address="123 Fake St.",
                       city="Anytown",
                       state="MN",
                       zip="12346")


@export
def get_account_info(user, recursed=False):
    # if we don't have an ID for the user, try to make one
    if not CustomerID.get_id(user):
        cust_id = CreateCustomerProfileRequest(user).make_request()