def _findexactmatches(repo, added, removed):
    '''find renamed files that have no changes

    Takes a list of new filectxs and a list of removed filectxs, and yields
    (before, after) tuples of exact matches.
    '''
    numfiles = len(added) + len(removed)

    # Get hashes of removed files.
    hashes = {}
    for i, fctx in enumerate(removed):
        repo.ui.progress(_('searching for exact renames'), i, total=numfiles)
        h = util.sha1(fctx.data()).digest()
        hashes[h] = fctx

    # For each added file, see if it corresponds to a removed file.
    for i, fctx in enumerate(added):
        repo.ui.progress(_('searching for exact renames'), i + len(removed),
                         total=numfiles)
        h = util.sha1(fctx.data()).digest()
        if h in hashes:
            yield (hashes[h], fctx)

    # Done
    repo.ui.progress(_('searching for exact renames'), None)

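# The function above is a content-hash join: hash every removed file once,
# then probe that table with each added file. A minimal standalone sketch of
# the same technique (hypothetical plain-dict inputs mapping path -> bytes;
# assumes only that util.sha1 wraps hashlib.sha1, as Mercurial's does):
import hashlib

def find_exact_renames(removed, added):
    by_digest = {}
    for path, data in removed.items():
        by_digest[hashlib.sha1(data).digest()] = path
    for path, data in added.items():
        old = by_digest.get(hashlib.sha1(data).digest())
        if old is not None:
            yield (old, path)
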
def unbundle(self, cg, heads, source):
    '''Send cg (a readable file-like object representing the
    changegroup to push, typically a chunkbuffer object) to the
    remote server as a bundle. Return an integer indicating the
    result of the push (see localrepository.addchangegroup()).'''

    if heads != ['force'] and self.capable('unbundlehash'):
        heads = encodelist(['hashed',
                            util.sha1(''.join(sorted(heads))).digest()])
    else:
        heads = encodelist(heads)

    ret, output = self._callpush("unbundle", cg, heads=heads)
    if ret == "":
        raise error.ResponseError(
            _('push failed:'), output)
    try:
        ret = int(ret)
    except ValueError:
        raise error.ResponseError(
            _('push failed (unexpected response):'), ret)

    for l in output.splitlines(True):
        self.ui.status(_('remote: '), l)
    return ret

def gitindex(text):
    if not text:
        return '0' * 40
    l = len(text)
    s = util.sha1('blob %d\0' % l)
    s.update(text)
    return s.hexdigest()

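# gitindex() above reproduces git's blob object id: the SHA-1 of the header
# 'blob <size>\0' followed by the content. A sketch using hashlib directly
# (util.sha1 is assumed to be an alias of hashlib.sha1):
import hashlib

def git_blob_id(data):
    h = hashlib.sha1('blob %d\0' % len(data))
    h.update(data)
    return h.hexdigest()

# git_blob_id('hello\n') == 'ce013625030ba8dba906f756967f9e9ca394464a',
# the same id that `echo hello | git hash-object --stdin` prints.
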
def unbundle(self, cg, heads, source):
    '''Send cg (a readable file-like object representing the
    changegroup to push, typically a chunkbuffer object) to the
    remote server as a bundle.

    When pushing a bundle10 stream, return an integer indicating the
    result of the push (see localrepository.addchangegroup()).

    When pushing a bundle20 stream, return a bundle20 stream.'''

    if heads != ['force'] and self.capable('unbundlehash'):
        heads = encodelist(['hashed',
                            util.sha1(''.join(sorted(heads))).digest()])
    else:
        heads = encodelist(heads)

    if util.safehasattr(cg, 'deltaheader'):
        # this is a bundle10, do the old style call sequence
        ret, output = self._callpush("unbundle", cg, heads=heads)
        if ret == "":
            raise error.ResponseError(
                _('push failed:'), output)
        try:
            ret = int(ret)
        except ValueError:
            raise error.ResponseError(
                _('push failed (unexpected response):'), ret)
        for l in output.splitlines(True):
            self.ui.status(_('remote: '), l)
    else:
        # bundle2 push. Send a stream, fetch a stream.
        stream = self._calltwowaystream('unbundle', cg, heads=heads)
        ret = bundle2.unbundle20(self.ui, stream)
    return ret

def test_remapping(metasync, opts):
    replication = 2
    config = [(0, 2), (1, 4), (2, 4), (3, 2)]
    hspace = 20
    detmap = DetMap2(config, hspace, replication)
    N = 50
    lst = []
    for _ in range(100):
        randstr = ''.join(random.choice(string.letters + string.digits)
                          for _ in range(N))
        hashid = util.sha1(randstr)
        lst.append(hashid)
        #lst = detmap.get_mapping(hashid)
        #for i in lst:
        #    count[i] += 1

    detmap.reconfig(config, 3)
    assert len(detmap.mapinfo) == 2
    added, removed = detmap.get_remapping(lst)
    for i in removed:
        assert len(removed[i]) == 0

    import copy
    detmap = DetMap2(config, hspace, replication)
    config = copy.copy(config)
    config.pop()
    lst3 = []
    for hv in lst:
        if 3 in detmap.get_mapping(hv):
            lst3.append(hv)
    detmap.reconfig(config)
    added, removed = detmap.get_remapping(lst)
    assert len(removed[3]) == len(lst3)

def post(self):
    name = self.get_argument("name", "")
    email = self.get_argument("email", "")
    password = self.get_argument("password", None)
    if not name or len(name) > 15:
        self.render("join.html", error=111, name=name, email=email)
        return
    match = re.search(r'[\w.-]+@[\w.-]+', email)
    if not match:
        self.render("join.html", error=112, name=name, email=email)
        return
    if not password:
        self.render("join.html", error=110, name=name, email=email)
        return
    user = self.user_dal.get({"email": email})
    if user:
        self.render("join.html", error=113, name=name, email=email)
        return
    user = self.user_dal.template()
    user["name"] = name
    user["email"] = email
    user["password"] = sha1(password)
    user["remote_ip"] = self.request.remote_ip
    user_id = self.user_dal.insert(user)
    if not user_id:
        self.render("join.html", error=114)
        return
    self.set_secure_cookie("user", str(user_id), expires_days=30)
    self.redirect(self.get_argument("next", "/"))

def old_login(username, md5_password):
    from api import agent_header
    exponent = int("010001", 16)
    modulus = int("AC69F5CCC8BDE47CD3D371603748378C9CFAD2938A6B021E0E191013975AD683F5CBF9ADE8BD7D46B4D2EC2D78A"
                  "F146F1DD2D50DC51446BB8880B8CE88D476694DFC60594393BEEFAA16F5DBCEBE22F89D640F5336E42F587DC4AF"
                  "EDEFEAC36CF007009CCCE5C1ACB4FF06FBA69802A8085C2C54BADD0597FC83E6870F1E36FD", 16)
    # the passWord and userName fields are %s placeholders, filled by the
    # six-element interpolation below
    param = '{"cmdID":1,"isCompressed":0,"rsaKey":{"n":"AC69F5CCC8BDE47CD3D371603748378C9CFAD2938A6B0' \
            '21E0E191013975AD683F5CBF9ADE8BD7D46B4D2EC2D78AF146F1DD2D50DC51446BB8880B8CE88D476694DFC60594393BEEFAA16F' \
            '5DBCEBE22F89D640F5336E42F587DC4AFEDEFEAC36CF007009CCCE5C1ACB4FF06FBA69802A8085C2C54BADD0597FC83E6870F1E3' \
            '6FD","e":"010001"},"businessType":%s,"passWord":"%s","loginType":0,"sdkVersion":177588,' \
            '"appName":"ANDROID-com.xunlei.redcrystalandroid","platformVersion":1,"devicesign":"%s",' \
            '"sessionID":"","protocolVersion":%s,"userName":"%s","extensionList":"","sequenceNo":%s,' \
            '"peerID":"","clientVersion":"1.0.0"}'
    _chars = "0123456789ABCDEF"
    deviceid = username
    device_id = md5(deviceid)
    appName = 'com.xunlei.redcrystalandroid'
    businessType = '61'
    key = 'C2049664-1E4A-4E1C-A475-977F0E207C9C'
    key_md5 = md5(key)
    device_sign = "div100.%s%s" % (device_id,
                                   md5(sha1("%s%s%s%s" % (device_id, appName,
                                                          businessType,
                                                          key_md5))))
    hash_password = hex(pow_mod(StrToInt(md5_password), exponent,
                                modulus))[2:].upper().zfill(256)
    params = param % (61, hash_password, device_sign, 108, username, 1000006)
    r = requests.post("https://login.mobile.reg2t.sandai.net/", data=params,
                      headers=agent_header, verify=False)
    login_status = json.loads(r.text)
    return login_status

def hv(self):
    # dirty, recompute the hv
    if self._dirty or self._hv is None:
        self._hv = util.sha1(self.dump())
        self._dirty = False
        self._updated()
    return self._hv

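# The hv() implementations in this section memoize a content hash behind a
# dirty flag so repeated reads do not rehash. A minimal self-contained sketch
# of the pattern (hypothetical Blob class; assumes util.sha1 returns a digest
# of its argument):
import hashlib

class Blob(object):
    def __init__(self, data=''):
        self._data = data
        self._hv = None       # cached hash value
        self._dirty = True    # content changed since last hash?

    def write(self, data):
        self._data = data
        self._dirty = True    # invalidate, but defer rehashing

    @property
    def hv(self):
        if self._dirty or self._hv is None:
            self._hv = hashlib.sha1(self._data).hexdigest()
            self._dirty = False
        return self._hv
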
def post(self):
    user = self.current_user
    password = self.get_argument("pwd", "")
    new_pwd = self.get_argument("new_pwd", "")
    if user["password"] != sha1(password):
        self.render("account/pwd.html", error=141)
        return
    if new_pwd == "":
        self.render("account/pwd.html", error=142)
        return
    user["password"] = sha1(new_pwd)
    result = self.user_dal.update_user(user)
    if result:
        self.render("account/pwd.html", error=143)
        return
    current_user = user
    self.render("account/pwd.html", error=140)

def connect(self):
    if hasattr(self, 'ui'):
        cacerts = self.ui.config('web', 'cacerts')
        if cacerts:
            cacerts = util.expandpath(cacerts)
    else:
        cacerts = None

    hostfingerprint = self.ui.config('hostfingerprints', self.host)
    if cacerts and not hostfingerprint:
        sock = _create_connection((self.host, self.port))
        self.sock = _ssl_wrap_socket(sock, self.key_file, self.cert_file,
                                     cert_reqs=CERT_REQUIRED,
                                     ca_certs=cacerts)
        msg = _verifycert(self.sock.getpeercert(), self.host)
        if msg:
            raise util.Abort(_('%s certificate error: %s '
                               '(use --insecure to connect '
                               'insecurely)') % (self.host, msg))
        self.ui.debug('%s certificate successfully verified\n' % self.host)
    else:
        httplib.HTTPSConnection.connect(self)
        if hasattr(self.sock, 'getpeercert'):
            peercert = self.sock.getpeercert(True)
            peerfingerprint = util.sha1(peercert).hexdigest()
            nicefingerprint = ":".join([peerfingerprint[x:x + 2]
                for x in xrange(0, len(peerfingerprint), 2)])
            if hostfingerprint:
                if peerfingerprint.lower() != \
                        hostfingerprint.replace(':', '').lower():
                    raise util.Abort(_('invalid certificate for %s '
                                       'with fingerprint %s') %
                                     (self.host, nicefingerprint))
                self.ui.debug('%s certificate matched fingerprint %s\n' %
                              (self.host, nicefingerprint))
            else:
                self.ui.warn(_('warning: %s certificate '
                               'with fingerprint %s not verified '
                               '(check hostfingerprints or web.cacerts '
                               'config setting)\n') %
                             (self.host, nicefingerprint))
        else: # python 2.5 ?
            if hostfingerprint:
                raise util.Abort(_('no certificate for %s '
                                   'with fingerprint') % self.host)
            self.ui.warn(_('warning: %s certificate not verified '
                           '(check web.cacerts config setting)\n') %
                         self.host)

def cachehash(repo, hideable):
    """return sha1 hash of repository data to identify a valid cache.

    We calculate a sha1 of repo heads and the content of the obsstore and
    write it to the cache. Upon reading we can easily validate by checking
    the hash against the stored one and discard the cache in case the
    hashes don't match.
    """
    h = util.sha1()
    h.update(''.join(repo.heads()))
    h.update(str(hash(frozenset(hideable))))
    return h.digest()

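# cachehash() supports the usual write-then-validate cache protocol: store
# the digest next to the cached payload, recompute on read, and discard on
# mismatch. A sketch of that round trip with hypothetical helper names:
def writecache(path, key, payload):
    with open(path, 'wb') as f:
        f.write(key)        # 20-byte sha1 digest header
        f.write(payload)

def readcache(path, expectedkey):
    with open(path, 'rb') as f:
        if f.read(20) != expectedkey:
            return None     # heads/obsstore changed; cache is stale
        return f.read()
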
def connect(self):
    self.sock = _create_connection((self.host, self.port))

    host = self.host
    if self.realhostport: # use CONNECT proxy
        something = _generic_proxytunnel(self)
        host = self.realhostport.rsplit(':', 1)[0]

    cacerts = self.ui.config('web', 'cacerts')
    hostfingerprint = self.ui.config('hostfingerprints', host)
    if cacerts and not hostfingerprint:
        cacerts = util.expandpath(cacerts)
        if not os.path.exists(cacerts):
            raise util.Abort(_('could not find '
                               'web.cacerts: %s') % cacerts)
        self.sock = _ssl_wrap_socket(self.sock, self.key_file,
                                     self.cert_file,
                                     cert_reqs=CERT_REQUIRED,
                                     ca_certs=cacerts)
        msg = _verifycert(self.sock.getpeercert(), host)
        if msg:
            raise util.Abort(_('%s certificate error: %s '
                               '(use --insecure to connect '
                               'insecurely)') % (host, msg))
        self.ui.debug('%s certificate successfully verified\n' % host)
    else:
        self.sock = _ssl_wrap_socket(self.sock, self.key_file,
                                     self.cert_file)
        if hasattr(self.sock, 'getpeercert'):
            peercert = self.sock.getpeercert(True)
            peerfingerprint = util.sha1(peercert).hexdigest()
            nicefingerprint = ":".join([peerfingerprint[x:x + 2]
                for x in xrange(0, len(peerfingerprint), 2)])
            if hostfingerprint:
                if peerfingerprint.lower() != \
                        hostfingerprint.replace(':', '').lower():
                    raise util.Abort(_('invalid certificate for %s '
                                       'with fingerprint %s') %
                                     (host, nicefingerprint))
                self.ui.debug('%s certificate matched fingerprint %s\n' %
                              (host, nicefingerprint))
            else:
                self.ui.warn(_('warning: %s certificate '
                               'with fingerprint %s not verified '
                               '(check hostfingerprints or web.cacerts '
                               'config setting)\n') %
                             (host, nicefingerprint))
        else: # python 2.5 ?
            if hostfingerprint:
                raise util.Abort(_('no certificate for %s with '
                                   'configured hostfingerprint') % host)
            self.ui.warn(_('warning: %s certificate not verified '
                           '(check web.cacerts config setting)\n') % host)

def add(self, fcl, fco, fca, fd, flags):
    hash = util.sha1(fcl.path()).hexdigest()
    self._repo.opener("merge/" + hash, "w").write(fcl.data())
    self._state[fd] = ['u', hash, fcl.path(), fca.path(),
                       hex(fca.filenode()), fco.path(), flags]
    self._write()

def old_login(username, md5_password):
    hash_password = rsa_encode(md5_password)
    _chars = "0123456789ABCDEF"
    peer_id = ''.join(random.sample(_chars, 16))
    device_id = md5("%s23333" % md5_password)  # just generate a 32-char hex string
    appName = 'com.xunlei.redcrystalandroid'
    key = 'C2049664-1E4A-4E1C-A475-977F0E207C9C'
    md5_key = md5(key)
    device_sign = 'div100.%s%s' % (device_id,
                                   md5(sha1("%s%s%s%s" % (device_id, appName,
                                                          61, md5_key))))
    payload = json.dumps({
        "protocolVersion": PROTOCOL_VERSION,
        "sequenceNo": 1000001,
        "platformVersion": 1,
        "sdkVersion": 177588,
        "peerID": peer_id,
        "businessType": 61,
        "clientVersion": APP_VERSION,
        "devicesign": device_sign,
        "isCompressed": 0,
        "cmdID": 1,
        "userName": username,
        "passWord": hash_password,
        "loginType": 0,
        "sessionID": "",
        "verifyKey": "",
        "verifyCode": "",
        "appName": "ANDROID-com.xunlei.redcrystalandroid",
        "rsaKey": {
            "e": "%06X" % rsa_pubexp,
            "n": long2hex(rsa_mod)
        },
        "extensionList": ""
    })
    headers = {'User-Agent': "Mozilla/5.0 (iPhone; CPU iPhone OS 5_1 like "
                             "Mac OS X) AppleWebKit/534.46 (KHTML, like "
                             "Gecko) Mobile/9B176 MicroMessenger/4.3.2"}
    r = requests.post("https://login.mobile.reg2t.sandai.net/",
                      data=payload, headers=headers, verify=False)
    login_status = json.loads(r.text)
    return login_status

def add(self, fcl, fco, fca, fd):
    hash = util.sha1(fcl.path()).hexdigest()
    self._repo.opener.write("merge/" + hash, fcl.data())
    self._state[fd] = ['u', hash, fcl.path(), fca.path(),
                       hex(fca.filenode()), fco.path(), fcl.flags()]
    self._dirty = True

def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    if not (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)

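# The ['hashed', ...] form accepted here is what the unbundle() methods above
# send when the server advertises the unbundlehash capability: both sides
# sort the binary head nodes, concatenate, and SHA-1 them, so the race check
# compares a single 20-byte digest instead of shipping the full head list.
# The shared computation, standalone (assumes heads is a list of byte
# strings):
import hashlib

def hashed_heads(heads):
    # sorting first makes the digest independent of head ordering
    return hashlib.sha1(''.join(sorted(heads))).digest()
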
def test_map_pack(metasync, opts):
    config = [(0, 2), (1, 10), (2, 4), (3, 2)]
    hspace = 100
    replication = 2
    N = 50
    detmap = DetMap2(config, hspace, replication)
    detmap2 = DetMap2(config, hspace, replication)
    detmap2.pack()
    for _ in range(100):
        randstr = ''.join(random.choice(string.letters + string.digits)
                          for _ in range(N))
        hashid = util.sha1(randstr)
        assert detmap.get_mapping(hashid) == detmap2.get_mapping(hashid)

def post(self):
    email = self.get_argument("email", "")
    password = self.get_argument("password", None)
    if email == "" or not password:
        self.render("login.html", error=100, email=email)
        return
    parameters = {"email": email, "password": sha1(password)}
    user = self.user_dal.get(parameters)
    if not user:
        self.render("login.html", error=100, email=email)
        return
    self.set_secure_cookie("user", str(user["_id"]), expires_days=30)
    self.redirect(self.get_argument("next", "/"))

def executeQuery(sparqlEndpoint, query, returnFormat=JSON):
    """
    Execute SPARQL @query on @sparqlEndpoint.
    Cache the query results temporarily in the tmp directory.
    """
    path = os.path.join(util.root(), "tmp",
                        util.sha1(sparqlEndpoint.endpoint + query))
    if os.path.exists(path):
        with open(path, "rb") as file:
            return pickle.load(file)
    else:
        sparqlEndpoint.setQuery(unicode(query.decode("UTF-8")))
        sparqlEndpoint.setReturnFormat(returnFormat)
        results = sparqlEndpoint.query().convert()
        with open(path, "wb") as file:
            pickle.dump(results, file)
        return results

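# executeQuery() keys its on-disk pickle cache by the SHA-1 of the endpoint
# URL plus the query text, so repeated identical queries skip the network.
# A hedged usage sketch, assuming the endpoint object is a SPARQLWrapper
# instance (whose .endpoint attribute and JSON return format the function
# relies on):
from SPARQLWrapper import SPARQLWrapper

endpoint = SPARQLWrapper("http://dbpedia.org/sparql")
results = executeQuery(endpoint, "SELECT ?s WHERE { ?s ?p ?o } LIMIT 10")
for binding in results["results"]["bindings"]:
    print binding["s"]["value"]
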
def add(self, fcl, fco, fca, fd):
    """add a new (potentially?) conflicting file to the merge state
    fcl: file context for local,
    fco: file context for remote,
    fca: file context for ancestors,
    fd:  file path of the resulting merge.

    note: also write the local version to the `.hg/merge` directory.
    """
    hash = util.sha1(fcl.path()).hexdigest()
    self._repo.vfs.write('merge/' + hash, fcl.data())
    self._state[fd] = ['u', hash, fcl.path(),
                       fca.path(), hex(fca.filenode()),
                       fco.path(), hex(fco.filenode()),
                       fcl.flags()]
    self._dirty = True

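# The add() variants in this section all key the saved local file by the
# SHA-1 of its repository path, flattening arbitrarily nested paths into
# fixed-length, filesystem-safe names under .hg/merge. The trick in
# isolation (hypothetical helper name):
import hashlib, os

def backupname(backupdir, path):
    return os.path.join(backupdir, hashlib.sha1(path).hexdigest())

# backupname('.hg/merge', 'deeply/nested/file.txt') -> '.hg/merge/<40 hex>'
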
def add(self, fcl, fco, fca, fd):
    """add a new (potentially?) conflicting file to the merge state
    fcl: file context for local,
    fco: file context for remote,
    fca: file context for ancestors,
    fd:  file path of the resulting merge.

    note: also write the local version to the `.hg/merge` directory.
    """
    hash = util.sha1(fcl.path()).hexdigest()
    self._repo.opener.write("merge/" + hash, fcl.data())
    self._state[fd] = ['u', hash, fcl.path(),
                       fca.path(), hex(fca.filenode()),
                       fco.path(), hex(fco.filenode()),
                       fcl.flags()]
    self._dirty = True

def test_mapping_fairness(metasync, opts):
    "test the fairness of mapping scheme"
    import string
    import random

    def evaluate(count, config):
        N = sum(count)
        C = sum(map(lambda x: x[1], config))
        score = 0.0
        for srv in config:
            score += (1.0 * count[srv[0]] / srv[1] - 1.0 * N / C) ** 2
        return score

    config = [(0, 2), (1, 7), (2, 10), (3, 15)]
    nspace = sum(map(lambda x: x[1], config))
    result = [['replication', 'factor', 'result', 'fairness', 'score']]
    N = 50
    random.seed(0)
    for replication in range(1, 4):
        for factor in range(100, 1001, 100):
            hspace = factor * nspace
            detmap = DetMap2(config, hspace, replication)
            count = [0, 0, 0, 0]
            for _ in range(5000):
                randstr = ''.join(random.choice(string.letters +
                                                string.digits)
                                  for _ in range(N))
                hashid = util.sha1(randstr)
                lst = detmap.get_mapping(hashid)
                for i in lst:
                    count[i] += 1
            fairness = [1.0 * count[i] / config[i][1] for i in range(4)]
            score = evaluate(count, config)
            row = [replication, factor, count, fairness, score]
            result.append(row)

    for row in result:
        for e in row:
            print "%s\t" % e,
        print

def _load(self):
    if self._entries is not None:
        return
    self._entries = {}
    pn = self.bs.get_path(self.hv)
    firstline = True
    for line in open(pn):
        # processing header line
        if firstline:
            self._eval_header(line)
            firstline = False
            continue
        # processing entries
        self._entries.update(self._eval_entry(line))
    # strong assert!
    assert self.hv == util.sha1(self.dump())

def simple_walk(folder):
    # simple_walk skips descending into folders that are not
    # tracked in the repo
    # note: tracked, unit and self come from the enclosing method's scope
    untracked = []
    changed = []

    for f in os.listdir(folder):
        if f == META_DIR:
            continue
        basename = os.path.basename(folder)
        if basename == '.' or basename == '':
            relpath = f
        else:
            relpath = os.path.join(folder, f)
        if relpath in tracked:
            if os.path.isdir(f):
                _untracked, _changed = simple_walk(relpath)
                untracked.extend(_untracked)
                changed.extend(_changed)
            else:
                fblob = tracked[relpath]
                # compare the file modified time with its metadata
                # blob modified time
                curr_mtime = os.path.getmtime(relpath)
                last_mtime = os.path.getmtime(
                    os.path.join(self.path_objs, fblob.hv))
                if curr_mtime > last_mtime:
                    # only load the file when its modified time is
                    # greater than the metadata modified time
                    fblob._load()
                    flag = False
                    # compare chunk hashes
                    for (offset, chunk) in util.each_chunk2(relpath, unit):
                        if util.sha1(chunk) != fblob.entries[offset].hv:
                            flag = True
                            break
                    if flag:
                        changed.append(relpath)
        else:
            if os.path.isdir(relpath):
                relpath = os.path.join(relpath, '')
            untracked.append(relpath)

    return untracked, changed

def _hashfiltered(self, repo):
    """build hash of revisions filtered in the current cache

    Tracking tipnode and tiprev is not enough to ensure validity of the
    cache as they do not help to distinguish caches that ignored various
    revisions below tiprev.

    To detect such differences, we build a hash of all ignored revisions.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= self.tiprev)
    if revs:
        s = util.sha1()
        for rev in revs:
            s.update('%s;' % rev)
        key = s.digest()
    return key

def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current
    repository. However, this is not sufficient for validating repoviews
    because the set of revisions in the view may change without the
    repository tiprev and tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = util.sha1()
        for rev in revs:
            s.update('%s;' % rev)
        key = s.digest()
    return key

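# filteredhash() and _hashfiltered() above build the same cache key: the
# SHA-1 over the sorted filtered revision numbers, each rendered as '<rev>;'.
# The keying scheme in isolation (hypothetical helper name):
import hashlib

def filteredkey(filteredrevs, maxrev):
    revs = sorted(r for r in filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%s;' % rev)   # e.g. hashes the string '3;7;42;'
    return s.digest()
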
def send_audit_logs(self, preimage, log):
    """
    Sends audit log file to the EB server for debugging
    :param preimage:
    :param log:
    :return:
    """
    api_data_req_body = {
        'username': self.config.username if self.config is not None else None,
        'apikey': self.config.apikey if self.config is not None else None
    }
    if api_data_req_body['apikey'] is None:
        api_data_req_body = None

    effort = {
        'preimage': preimage,
        'secondpreimage': util.sha1(preimage, as_hex=True),
        'collision': 20
    }

    req = SendLogRequest(api_data=api_data_req_body, effort=effort, log=log,
                         env=self.config.env, config=self.eb_config)
    self.audit.audit_request(req_type=req.__class__, data=api_data_req_body)

    try:
        resp = req.call()
    except Exception as e:
        self.audit.audit_exception(e)
        self.audit.audit_request(api_data=api_data_req_body, effort=effort,
                                 response=req.response, env=self.config.env,
                                 config=self.eb_config)
        logger.debug('API req: %s' % api_data_req_body)
        logger.debug('API req_effort: %s' % effort)
        logger.debug('API res: %s' % req.response)
        raise

    return resp

def check_heads():
    # closure: repo and their_heads come from the enclosing scope
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    return (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash])

def cmd_init(self, namespace, backend=None, nreplicas=None,
             encrypt_key=None):
    # already initialized?
    if self.check_sanity():
        dbg.err("already initialized %s (%s)"
                % (self.path_root, self.namespace))
        return False

    os.mkdir(self.path_meta)
    os.mkdir(self.path_objs)

    # build config opts
    conf = util.new_config()

    # core: unique/permanent info about local machine (often called client)
    # NOTE. not sure if encryption_key should be in core, or unchangeable
    conf.add_section('core')
    conf.set('core', 'namespace', namespace)
    conf.set('core', 'clientid', util.gen_uuid())
    conf.set('core', 'encryptkey', _get_conf_encryptkey(encrypt_key))

    # backend: info about sync service providers
    # XXX: Error handling
    conf.add_section('backend')
    try:
        services = _get_conf_services(backend)
        conf.set('backend', 'services', services)
        conf.set('backend', 'nreplicas',
                 _get_conf_nreplicas(nreplicas, len(services.split(","))))
    except:
        pass

    # flush
    with open(self.path_conf, "w") as fd:
        conf.write(fd)

    try:
        self._load()
    except NameError:
        shutil.rmtree(self.path_meta)
        return False

    # put config into remote
    conf.remove_option('core', 'clientid')
    conf.remove_option('core', 'encryptkey')
    with io.BytesIO() as out:
        conf.write(out)
        val = out.getvalue()
    configname = util.sha1(val)
    self._put_all_content(val,
                          self.get_remote_path("configs/%s"
                                               % configname[:6]),
                          True)

    #temporary --- move this to pPaxos
    #self._put_all_content(configname[:6], self.get_remote_path("config"), True)

    # Format for master: headhash.config[:6].version
    prev_master = "." + configname[:6] + ".0"

    # do we need both? or shall we put them into a file together.
    with open(self.get_head(), "w") as f:
        f.write(prev_master)
    with open(self.get_prev(), "w") as f:
        f.write(prev_master)

    self._put_all_dir(self.get_remote_path("objects"))
    # change to put_content
    self._put_all(self.get_head(),
                  self.get_remote_path(self.get_head_name()))
    self._put_all(self.get_prev(),
                  self.get_remote_path(self.get_prev_name()))

    from paxos import Proposer
    self.proposer = Proposer(None, self.services,
                             self.get_pPaxos_path(prev_master))
    self._join()

    return True

def sha1():
    # smoke test: print the sha1 of a sample string
    from util import sha1
    print sha1("daydayfree")

def hv(self):
    if self._hv is None:
        self._hv = util.sha1(self.chunk)
    return self._hv

def job_init(self):
    # Creating the new command
    # A bunch of directories we will need later on
    rootdir = util.abs_root_path()
    self.rootdir = rootdir
    self.working_dir = os.path.relpath(os.getcwd(), rootdir)
    self.resultsdir = os.path.join(rootdir, exp_common.RESULTS_DIR)

    if self.hsh is None:
        if self.code is None:
            self.new_cmd, deps = exp_common.expand_command(self.command,
                                                           self.params,
                                                           self.parents)
            self.hsh = util.sha1(self.commit + str(len(self.working_dir)) +
                                 self.working_dir + str(len(self.command)) +
                                 self.new_cmd)
            self.exp_results = os.path.join(self.resultsdir, self.hsh)
            self.expdir = os.path.join(rootdir, exp_common.EXP_DIR, self.hsh)
            self.new_cmd = self.new_cmd.replace('{}', self.exp_results)
            self.new_code = None
        else:
            # terrible terrible hack to prevent parameter
            # substitution for macros (since this syntax
            # interferes with Python list syntax). TODO: figure
            # out whether this is actually a good idea (hint: no).
            code = self.code.replace("[", "<---")
            code = code.replace("]", "--->")
            new_code, deps = exp_common.expand_command(code, self.params,
                                                       self.parents)
            new_code = new_code.replace("<---", "[")
            self.new_code = new_code.replace("--->", "]")
            deps = [x.hsh for x in self.parents]
            self.hsh = util.sha1(self.commit + str(len(self.working_dir)) +
                                 self.working_dir + str(len(self.code)) +
                                 self.new_code + repr(deps))
            self.exp_results = os.path.join(self.resultsdir, self.hsh)
            self.expdir = os.path.join(rootdir, exp_common.EXP_DIR, self.hsh)
            self.new_code = self.new_code.replace('{}', self.exp_results)
            self.new_cmd = None
        # try to read run info from disk
        self.info = load_info(self.hsh)
    else:
        self.info = load_info(self.hsh)
        if self.info is None:
            print "Error: could not load experiment %s." % (self.hsh)
            exit(1)
        self.new_cmd = self.info['final_command']
        self.new_code = self.info['final_code']
        self.deps = self.info['deps']
        #exp_common.expand_command(self.info["command"], self.info["params"], self.deps())
        self.desc = self.info['description']
        self.exp_results = os.path.join(self.resultsdir, self.hsh)
        self.expdir = os.path.join(rootdir, exp_common.EXP_DIR, self.hsh)

    # if not found, initialize from scratch
    if self.info is None:
        self.info = dict()
        self.info['description'] = self.desc  # description (string)
        self.info['working_dir'] = self.working_dir
        # TODO: figure out how to handle these implicit dependencies
        # The dependencies will be filled in later once the parents are
        # finished. See setup_env.
        self.info['deps'] = set([x.hsh for x in self.parents] + deps)
        self.info['command'] = self.command  # command to run (string)
        self.info['code'] = self.code        # code to execute
        self.info['commit'] = self.commit    # commit hash (string)
        self.info['date'] = time.time()
        self.info['params'] = self.params    # parameters to pass (dictionary)
        self.info['run_state'] = RUN_STATE_VIRGIN
        self.info['return_code'] = None
        self.info['final_command'] = self.new_cmd
        self.info['final_code'] = self.new_code
    else:
        if self.info['description'] != self.desc:
            print "Warning: job description '%s' differs from " \
                  "saved description '%s'; using '%s'" \
                  % (self.desc, self.info['description'],
                     self.info['description'])
        if self.rerun == True:
            self.info['run_state'] = RUN_STATE_VIRGIN
            self.info['return_code'] = None
            self.info['date'] = time.time()
            shutil.rmtree(self.exp_results)

    self.jobid = None

def add(self, fcl, fco, fca, fd, flags):
    hash = util.sha1(fcl.path()).hexdigest()
    self._repo.opener("merge/" + hash, "w").write(fcl.data())
    self._state[fd] = ['u', hash, fcl.path(), fca.path(),
                       hex(fca.filenode()), fco.path(), flags]
    self._dirty = True