def sync(self):
    """Push each configured site's published file to its sync endpoint.

    Iterates ``self.meta['sites']``; sites missing ``sync.url`` or
    ``sync.secret`` are skipped.  Each request carries a ``tsp``
    timestamp header and ``v = signature(tsp + secret)`` for
    authentication.  A failure for one site is logged and does not
    abort the remaining sites.
    """
    for key in self.meta['sites']:
        request_log.info('SYNC %s', key)
        publish = self.meta['sites'][key]
        if 'sync.url' not in publish or 'sync.secret' not in publish:
            request_log.info('SKIP %s', key)
            continue
        url = publish['sync.url']
        secret = publish['sync.secret']
        # Context manager so the handle is always closed (the original
        # leaked the file object returned by open()).
        path = os.path.join(self.repo_path, publish['filename'])
        with open(path, 'r', encoding='utf8') as stream:
            body = stream.read()
        tsp = str(int(time.mktime(time.localtime())))
        v = signature(tsp + secret)
        http_client = AsyncHTTPClient()
        try:
            response = yield http_client.fetch(
                HTTPRequest(url=url,
                            method='POST',
                            body=body,
                            headers={'tsp': tsp, 'v': v}))
            request_log.info('SYNC RESULT %d', response.code)
        except Exception:
            request_log.exception("FAIL")
def get(self):
    """Signed redirect endpoint.

    Sends the browser to the URL in the ``q`` parameter.  A request
    whose ``sig`` matches ``signature(url + id)`` gets an automatic
    permanent redirect; anything else gets an interstitial page, so
    the endpoint cannot be abused as an open redirector.
    """
    # destination url -- only http(s) targets are accepted
    url = self.request.get('q')
    if not url:
        self.error(400)
        return
    if not (url.startswith('http:') or url.startswith('https:')):
        self.error(400)
        return

    # id is optional -- for tracking clicks on individual items
    item_id = self.request.get('id') or ""

    sig = self.request.get('sig')
    expected_sig = utils.signature(url + item_id)
    logging.debug('url: %s s: %s xs: %s' % (url, sig, expected_sig))
    if sig == expected_sig:
        # verified link: permanent (301) redirect
        self.redirect(url, permanent=True)
        return

    # TODO: Use a proper template so this looks nicer.
    escaped = cgi.escape(url, True)
    response = ('<h1>Redirect</h1>' +
                'This page is sending you to <a href="%s">%s</a><p />' %
                (escaped, escaped))
    # TODO: Something more clever than go(-1), which doesn't work on new
    # windows, etc. Maybe check for 'referer' or send users to '/'.
    response += ('If you do not want to visit that page, you can ' +
                 '<a href="javascript:history.go(-1)">go back</a>.')
    self.response.out.write(response)
def post(self):
    """Authenticated bulk-update endpoint for the pricing store.

    The caller must come from a white-listed IP and present a valid
    ``v = signature(tsp + secret)`` header pair.  The body is a
    newline-separated list of ``set`` / ``hmset`` / ``del`` commands
    that are replayed against ``self.master``.
    """
    request_log.info('PRICE CONFIG START')
    try:
        safety = self.application.config.get('safety')
        if safety is None:
            request_log.error('CONFIG FAIL (NO SAFETY)')
            return self.send_error(500)
        # caller's IP must be white-listed
        if self.request.remote_ip not in safety['white_list']:
            request_log.error('CONFIG FAIL (NOT IN WHITELIST)')
            return self.send_error(500)
        # the v header must match our own signature over tsp + secret
        tsp_header = self.request.headers['tsp']
        given = self.request.headers['v']
        expected = signature(tsp_header + safety['secret'])
        if expected != given:
            request_log.error('CONFIG FAIL (SECRET FAIL)')
            return self.send_error(500)
        # Replay commands.  NOTE(review): prefix matching means e.g. a
        # "setex" line is handled by the "set" branch -- confirm callers
        # only ever send set/hmset/del here.
        for line in self.request.body.decode().split('\n'):
            request_log.debug(line)
            if line.startswith('set'):
                parts = line.split()
                self.master.set(parts[1], parts[2])
            elif line.startswith('hmset'):
                parts = line.split()
                fields = {
                    parts[i]: parts[i + 1]
                    for i in range(2, len(parts), 2)
                }
                self.master.hmset(parts[1], fields)
            elif line.startswith('del'):
                parts = line.split()
                self.master.delete(parts[1])
        return self.finish()
    except Exception:
        request_log.exception('CONFIG SYNC FAIL')
        self.send_error(500)
def call_refund(base_url, user_id, income, token, operator, notes):
    """Call the remote admin fund API to deposit a refund.

    :param base_url: host[:port] of the admin service.
    :param user_id: account to credit.
    :param income: refund amount (stringified into the request body).
    :param token: shared secret mixed into the request signature.
    :param operator: operator performing the refund.
    :param notes: free-form notes attached to the transaction.
    :returns: ``'success'`` when the API answers 200 with status
        ``ok``, ``'fail'`` on 200 with any other status, and
        ``'exception'`` on a non-200 response or any error.
    """
    # Sign over user_id + order_id + income + operator + token; the
    # order_id is deliberately empty for refunds.
    sign = signature('{user_id}{order_id}{income}{operator}{token}'.format(
        user_id=user_id, order_id='', income=income, operator=operator,
        token=token))
    body = json.dumps({
        'type': 'deposit',
        'user_id': user_id,
        'operator': operator,
        'token': token,
        'income': str(income),
        'notes': notes,
        'sign': sign,
        'order_id': ''
    })
    request_log.info('FUND REQ %s', body)
    http_client = AsyncHTTPClient()
    url = 'http://%s/admin/fund' % base_url
    try:
        request = HTTPRequest(url=url,
                              method='POST',
                              body=body,
                              request_timeout=120)
        response = yield http_client.fetch(request)
        if response.code == 200:
            resp_body = response.body.decode()
            request_log.info('FUND RESP %s', resp_body)
            resp = json.loads(resp_body)
            if resp['status'] == 'ok':
                return 'success'
            else:
                return 'fail'
    except Exception:
        # Log through the request logger with a traceback instead of
        # the original bare print() calls, so failures reach the
        # service logs.
        request_log.exception('CALL FUND FAIL')
    finally:
        http_client.close()
    # Non-200 response or exception falls through to here.
    return 'exception'
def get(self):
    """HTTP get method: render the admin console.

    Builds the template context (XSRF token, memcache statistics) and
    dispatches on the ``action`` query parameter.
    """
    # XSRF check: usig = signature of the user's login cookie.
    # Note: This is the logged in app engine user and uses
    # an internal implementation detail of appengine.
    usig = utils.signature(userinfo.get_cookie('ACSID') or
                           userinfo.get_cookie('dev_appserver_login'))
    template_values = {
        'logout_link': users.create_logout_url('/'),
        'msg': '',
        'action': '',
        'usig': usig,
        'version': os.getenv('CURRENT_VERSION_ID'),
        'private_keys': private_keys,
    }

    # Get memcache stats and calculate some useful percentages
    memcache_stats = memcache.get_stats()
    try:
        hits = memcache_stats['hits']
        misses = memcache_stats['misses']
        memcache_stats['hit_percent'] = '%4.1f%%' % ((100.0 * hits) /
                                                     (hits + misses))
    except ZeroDivisionError:
        # Don't think we'll ever hit this but just in case...
        # Use the same formatted-string shape as the success path (the
        # original stored a bare float here, inconsistent with above).
        memcache_stats['hit_percent'] = '100.0%'
    memcache_stats['size'] = memcache_stats['bytes'] / (1024*1024)
    memcache_stats['size_unit'] = 'MB'
    if memcache_stats['size'] < 1:
        # under a megabyte: report in KB instead
        memcache_stats['size'] = memcache_stats['bytes'] / 1024
        memcache_stats['size_unit'] = 'KB'
    template_values['memcache_stats'] = memcache_stats

    # dispatch on the requested admin action
    action = self.request.get('action')
    if not action:
        action = "mainmenu"
    template_values['action'] = action
    if action == "mainmenu":
        template_values['msg'] = ""
    elif action == "flush_memcache":
        memcache.flush_all()
        template_values['msg'] = "memcached flushed"
    elif action == 'moderators':
        self.admin_moderator(template_values)
    logging.debug("admin_view: %s" % template_values['msg'])
    self.response.out.write(render_template(ADMIN_TEMPLATE, template_values))
def post(self):
    """HTTP post method: enable/disable moderators (XSRF-protected)."""
    if self.request.get('action') != 'moderators':
        self.error(400)
        return

    # The posted usig must match a signature freshly derived from the
    # caller's login cookie; otherwise treat it as an XSRF attempt.
    usig = utils.signature(userinfo.get_cookie('ACSID') or
                           userinfo.get_cookie('dev_appserver_login'))
    if self.request.get('usig') != usig:
        self.error(400)
        logging.warning('views.admin.post XSRF attempt. %s!=%s',
                        usig, self.request.get('usig'))
        return

    keys_to_enable = self.request.POST.getall('enable')
    keys_to_disable = self.request.POST.getall('disable')
    now = datetime.isoformat(datetime.now())
    admin = users.get_current_user().email()

    def flip(keys, enabled, verb):
        # Set the moderator bit, append an audit note, and persist the
        # whole batch with a single datastore put.
        records = models.UserInfo.get_by_key_name(keys)
        for record in records:
            record.moderator = enabled
            if not record.moderator_request_admin_notes:
                record.moderator_request_admin_notes = ''
            record.moderator_request_admin_notes += \
                '%s: %s by %s.\n' % (now, verb, admin)
        db.put(records)
        return records

    users_to_enable = flip(keys_to_enable, True, 'Enabled')
    users_to_disable = flip(keys_to_disable, False, 'Disabled')

    self.response.out.write(
        '<div style="color: green">Enabled %s and '
        'disabled %s moderators.</div>' % (len(users_to_enable),
                                           len(users_to_disable)))
    self.response.out.write('<a href="%s?action=moderators&zx=%d">'
                            'Continue</a>' % (self.request.path_url,
                                              datetime.now().microsecond))
def assign_merge_keys():
    """Private helper for dedup(): stamp each result with merge metadata.

    Closes over ``self.results`` from the enclosing scope.
    """
    for item in self.results:
        # Merge keys are 'M' + md5(title + snippet), which keeps them
        # distinguishable from the stable IDs (a bare md5 digest).
        digest = hashlib.md5(safe_str(item.title) +
                             safe_str(item.snippet)).hexdigest()
        item.merge_key = 'M' + digest
        item.url_sig = utils.signature(item.url + item.merge_key)
        # we will be sorting & de-duping the merged results by start
        # date, so keep a struct_time form of the date around.
        item.t_startdate = item.startdate.timetuple()
        # month_day used by django, e.g. "January 5" (day not padded)
        day = str(int(time.strftime("%d", item.t_startdate)))
        item.month_day = time.strftime("%B", item.t_startdate) + " " + day
        # accumulators for any results later merged into this one
        item.merged_list = []
        item.merged_debug = []
def sync_pricing(self, core_lines, ui_lines):
    """Push pricing data to every configured site.

    ``ui_lines`` go to 'purus' publishers and ``core_lines`` to
    'madeira'; other publisher types are skipped.  Requests are
    authenticated with the tsp/v signature header pair, and a failure
    for one site does not stop the rest.
    """
    for key, publish in self.meta['sites'].items():
        request_log.info('SYNC PRICING %s' % key)
        if 'pricing.url' not in publish or 'sync.secret' not in publish:
            request_log.info('SKIP %s' % key)
            continue
        url = publish['pricing.url']
        secret = publish['sync.secret']
        publisher = publish['publisher']
        # pick the serialization matching the publisher type
        if publisher == 'purus':
            body = '\n'.join(ui_lines)
        elif publisher == 'madeira':
            body = '\n'.join(core_lines)
        else:
            request_log.info('UNKNOWN PUBLISHER %s', publisher)
            continue
        request_log.info('SYNC VIA %s' % url)
        tsp = str(int(time.mktime(time.localtime())))
        headers = {'tsp': tsp, 'v': signature(tsp + secret)}
        client = AsyncHTTPClient()
        try:
            response = yield client.fetch(
                HTTPRequest(url=url, method='POST', body=body,
                            headers=headers))
            request_log.info('SYNC RESULT %d', response.code)
        except Exception:
            request_log.exception("FAIL")
def post(self, path):
    """Admin route dispatcher (requires the 'admin-route' role).

    The interface/*, supply/* and product/* branches proxy the JSON
    body (stamped with the caller's domain_id) to the repo backend.
    The maintain/* and pool/* branches build redis-style command lines
    and push them to the partner's pricing shard, authenticated with
    tsp/v signature headers.

    NOTE(review): the proxy branches are near-identical copies and are
    candidates for extraction into a shared coroutine helper.
    """
    if 'admin-route' not in self.current_user['roles']:
        self.send_error(403)
        return
    http_client = AsyncHTTPClient()
    if path == 'interface/list':
        try:
            # scope the query to the caller's own domain
            self.json_args['domain_id'] = self.current_user['domain_id']
            #self.json_args['domain_id'] = 00000
            body = json.dumps(self.json_args)
            base_url = self.application.config['connection']['repo']
            url = base_url + '/api/route/interface/list'
            response = yield http_client.fetch(url, method='POST', body=body)
            resp = response.body.decode()
            self.finish(resp)
        finally:
            http_client.close()
    elif path == 'interface/price':
        try:
            self.json_args['domain_id'] = self.current_user['domain_id']
            body = json.dumps(self.json_args)
            base_url = self.application.config['connection']['repo']
            url = base_url + '/api/route/interface/price'
            response = yield http_client.fetch(url, method='POST', body=body)
            resp = response.body.decode()
            self.finish(resp)
        finally:
            http_client.close()
    elif path == 'interface/price/add':
        try:
            self.json_args['domain_id'] = self.current_user['domain_id']
            body = json.dumps(self.json_args)
            base_url = self.application.config['connection']['repo']
            url = base_url + '/api/route/interface/price/add'
            response = yield http_client.fetch(url, method='POST', body=body)
            resp = response.body.decode()
            self.finish(resp)
        finally:
            http_client.close()
    elif path == 'interface/price/remove':
        try:
            self.json_args['domain_id'] = self.current_user['domain_id']
            body = json.dumps(self.json_args)
            base_url = self.application.config['connection']['repo']
            url = base_url + '/api/route/interface/price/remove'
            response = yield http_client.fetch(url, method='POST', body=body)
            resp = response.body.decode()
            self.finish(resp)
        finally:
            http_client.close()
    elif path == 'interface/price/modify':
        try:
            self.json_args['domain_id'] = self.current_user['domain_id']
            body = json.dumps(self.json_args)
            base_url = self.application.config['connection']['repo']
            url = base_url + '/api/route/interface/price/modify'
            response = yield http_client.fetch(url, method='POST', body=body)
            resp = response.body.decode()
            self.finish(resp)
        finally:
            http_client.close()
    elif path == 'interface/score':
        try:
            self.json_args['domain_id'] = self.current_user['domain_id']
            body = json.dumps(self.json_args)
            base_url = self.application.config['connection']['repo']
            url = base_url + '/api/route/interface/score'
            response = yield http_client.fetch(url, method='POST', body=body)
            resp = response.body.decode()
            self.finish(resp)
        finally:
            http_client.close()
    elif path == 'supply/save_update':
        self.json_args['domain_id'] = self.current_user['domain_id']
        try:
            base_url = self.application.config['connection']['repo']
            body = json.dumps(self.json_args)
            url = base_url + '/api/route/supply/save_update'
            response = yield http_client.fetch(url, method='POST', body=body)
            resp = response.body.decode()
            self.finish(resp)
        finally:
            http_client.close()
    elif path == 'supply/delete':
        self.json_args['domain_id'] = self.current_user['domain_id']
        try:
            base_url = self.application.config['connection']['repo']
            body = json.dumps(self.json_args)
            url = base_url + '/api/route/supply/delete'
            response = yield http_client.fetch(url, method='POST', body=body)
            resp = response.body.decode()
            self.finish(resp)
        finally:
            http_client.close()
    elif path == 'product/list':
        self.json_args['domain_id'] = self.current_user['domain_id']
        try:
            base_url = self.application.config['connection']['repo']
            body = json.dumps(self.json_args)
            url = base_url + '/api/route/product/list'
            response = yield http_client.fetch(url, method='POST', body=body)
            resp = response.body.decode()
            self.finish(resp)
        finally:
            http_client.close()
    elif path == 'product/update':
        self.json_args['domain_id'] = self.current_user['domain_id']
        try:
            base_url = self.application.config['connection']['repo']
            body = json.dumps(self.json_args)
            url = base_url + '/api/route/product/update'
            response = yield http_client.fetch(url, method='POST', body=body)
            resp = response.body.decode()
            self.finish(resp)
        finally:
            http_client.close()
    elif path == 'maintain/remove':
        try:
            key = self.json_args.get('key')
            # redis-style delete command, pushed to the pricing shard
            body = 'del ' + key
            secret = self.application.config.get('safety').get('secret')
            tsp = str(int(time.mktime(time.localtime())))
            v = signature(tsp + secret)
            partner_id = self.current_user['partner_id']
            base_url = self.application.config['downstream'][partner_id][
                'shard']
            url = 'http://%s/admin/pricing' % base_url
            response = yield http_client.fetch(url,
                                               method='POST',
                                               body=body,
                                               headers={
                                                   'tsp': tsp,
                                                   'v': v
                                               },
                                               request_timeout=120)
            body = response.body.decode()
            self.finish(json.dumps({"msg": "删除成功"}))
        except Exception as e:
            request_log.exception('MAINTAIN FAIL')
            self.finish(json.dumps({"msg": "删除异常:" + repr(e)}))
        finally:
            http_client.close()
    elif path == 'maintain/add':
        try:
            route = self.json_args.get('route')
            carrier = self.json_args.get('carrier')
            area = self.json_args.get('area')
            user_id = self.json_args.get('user_id')
            ttl = self.json_args.get('ttl')
            ttl_value = self.json_args.get('ttl_value')
            # notes travel URL-encoded inside the command line
            notes = quote_plus(self.json_args.get('notes'))
            if notes == '':
                notes = quote_plus('维护')
            # normalize ttl_value to seconds; None means no expiry
            if ttl is None or ttl == '' or not ttl_value.isdigit():
                ttl_value = None
            elif ttl == 'hour':
                ttl_value = int(ttl_value) * 3600
            elif ttl == 'min':
                ttl_value = int(ttl_value) * 60
            elif ttl == 'day':
                ttl_value = int(ttl_value) * 3600 * 24
            key = 'maintain:%s:%s:%s' % (route, carrier, area)
            if user_id and user_id != '':
                key = key + ':' + user_id
            if ttl_value:
                body = 'setex %s %s %d' % (key, notes, ttl_value)
            else:
                body = 'set %s %s' % (key, notes)
            secret = self.application.config.get('safety').get('secret')
            tsp = str(int(time.mktime(time.localtime())))
            v = signature(tsp + secret)
            partner_id = self.current_user['partner_id']
            base_url = self.application.config['downstream'][partner_id][
                'shard']
            url = 'http://%s/admin/pricing' % base_url
            response = yield http_client.fetch(url,
                                               method='POST',
                                               body=body,
                                               headers={
                                                   'tsp': tsp,
                                                   'v': v
                                               },
                                               request_timeout=120)
            body = response.body.decode()
            self.finish(json.dumps({"msg": "添加成功"}))
        except Exception as e:
            request_log.exception('MAINTAIN FAIL')
            self.finish(json.dumps({"msg": "添加异常:" + repr(e)}))
        finally:
            http_client.close()
    elif path == 'pool/add':
        # NOTE(review): unlike the branches above, pool/add has no
        # finally: http_client.close() -- confirm whether the client
        # leak here is intentional.
        try:
            key = self.json_args.get('key')
            number = self.json_args.get('number')
            notes = self.json_args.get('notes')
            # notes not handled for now
            value = number
            body = 'set %s %s' % (key, value)
            secret = self.application.config.get('safety').get('secret')
            tsp = str(int(time.mktime(time.localtime())))
            v = signature(tsp + secret)
            partner_id = self.current_user['partner_id']
            base_url = self.application.config['downstream'][partner_id][
                'shard']
            url = 'http://%s/admin/pricing' % base_url
            response = yield http_client.fetch(url,
                                               method='POST',
                                               body=body,
                                               headers={
                                                   'tsp': tsp,
                                                   'v': v
                                               },
                                               request_timeout=120)
            body = response.body.decode()
            self.finish(json.dumps({"msg": "添加成功"}))
        except Exception as e:
            request_log.exception('ADD POOL FAIL')
            self.finish(json.dumps({"msg": "添加异常:" + repr(e)}))
    elif path == 'pool/remove':
        # NOTE(review): also missing finally: http_client.close().
        try:
            key = self.json_args.get('key')
            body = 'del ' + key
            secret = self.application.config.get('safety').get('secret')
            tsp = str(int(time.mktime(time.localtime())))
            v = signature(tsp + secret)
            partner_id = self.current_user['partner_id']
            base_url = self.application.config['downstream'][partner_id][
                'shard']
            url = 'http://%s/admin/pricing' % base_url
            response = yield http_client.fetch(url,
                                               method='POST',
                                               body=body,
                                               headers={
                                                   'tsp': tsp,
                                                   'v': v
                                               },
                                               request_timeout=120)
            body = response.body.decode()
            self.finish(json.dumps({"msg": "删除成功"}))
        except Exception as e:
            request_log.exception('DELETE POOL FAIL')
            self.finish(json.dumps({"msg": "删除异常:" + repr(e)}))
    else:
        self.send_error(404)
def get_usig(user):
    """Get a signature for the current user suitable for an XSRF token.

    Returns ``None`` when there is no user or no login cookie.
    """
    if not user:
        return None
    if not user.get_cookie():
        return None
    return utils.signature(user.get_cookie())
def commit(self, key=None, value=None, add=True, **kwargs):
    """Commit the index to the working tree.

    >>> repo.add('foo', 'my very special bar')
    >>> repo.commit()
    >>> foo = repo.show('foo')
    u'my very special bar'

    If a key and value are specified, will add them immediately before
    committing them.

    >>> repo.commit('fast', 'and easy, too!')
    >>> foo = repo.show('fast')
    u'and easy, too!'

    :param key: The key
    :type key: string
    :param value: The value of the key.
    :type value: anything that runs through :func:`json.dumps`
    :param message: (optional) Message for the commit.  Defaults to ''.
    :type message: string
    :param author: (optional) The signature for the author of the
        commit.  Defaults to git's `--global` `author.name` and
        `author.email`.
    :type author: pygit2.Signature
    :param committer: (optional) The signature for the committer of the
        commit.  Defaults to author.
    :type committer: pygit2.Signature
    :param parents: (optional) The parents of this commit.  Defaults to
        the last commit for this key if it already exists, or an empty
        list if not.
    :type parents: list of :class:`Commit <jsongit.wrappers.Commit>`
    :raises: :class:`NotJsonError <jsongit.NotJsonError>`
        :class:`InvalidKeyError <jsongit.InvalidKeyError>`
    """
    # When no key is given, commit every path currently in the index.
    keys = [key] if key is not None else [e.path for e in self._repo.index]
    message = kwargs.pop('message', '')
    parents = kwargs.pop('parents', None)
    author = kwargs.pop('author',
                        utils.signature(self._global_name,
                                        self._global_email))
    committer = kwargs.pop('committer', author)
    if kwargs:
        raise TypeError("Unknown keyword args %s" % kwargs)
    # a value without a key cannot be stored anywhere
    if key is None and value is not None:
        raise InvalidKeyError()
    # explicit parents must belong to this repository
    if parents is not None:
        for parent in parents:
            if parent.repo != self:
                raise DifferentRepoError()
    if add is True and key is not None and value is not None:
        self.add(key, value)
    # First, one commit on the repo-wide head covering the whole index.
    repo_head = self._repo_head()
    tree_id = self._repo.index.write_tree()
    self._repo.create_commit(self._head_target(), author, committer,
                             message, tree_id,
                             [repo_head.oid] if repo_head else [])
    # TODO This will create some keys but not others if there is a bad key
    # Then one commit per key on that key's own ref.
    # NOTE(review): `parents`, once defaulted for the first key, is
    # reused for every later key in this loop (it is never reset to
    # None) -- confirm that is intended when committing multiple keys.
    for key in keys:
        if parents is None:
            parents = [self.head(key)] if self.committed(key) else []
        try:
            # create a single-entry tree for the commit.
            blob_id = self._navigate_tree(tree_id, key)
            idx = pygit2.Index('')
            idx.add(pygit2.IndexEntry(key, blob_id,
                                      pygit2.GIT_FILEMODE_BLOB))
            key_tree_id = idx.write_tree(self._repo)
            self._repo.create_commit(self._key2ref(key), author, committer,
                                     message, key_tree_id,
                                     [parent.oid for parent in parents])
        except (pygit2.GitError, OSError) as e:
            # libgit2 reports invalid ref names / path components via
            # these messages; surface them as InvalidKeyError.
            if (str(e).startswith('Failed to create reference')
                    or 'directory' in str(e)):
                raise InvalidKeyError(e)
            else:
                raise e
def sync_pricing(session, domain_id, filter_user=None, filter_product=None):
    """Build and publish pricing lines for a domain's services.

    Loads products, users, specials, supplies and interface prices for
    ``domain_id``, renders pricing lines per (user, legacy product)
    pair, then POSTs the appropriate serialization to each service's
    ``sync_product`` URL, signed with tsp/v headers.  A RepoSyncLog row
    is written per successful push.

    :param session: SQLAlchemy session used for all queries.
    :param domain_id: domain whose data is synced.
    :param filter_user: (optional) restrict to one user_id.
    :param filter_product: (optional) restrict to one product_id.
    """
    request_log.info('SYNC PRICING [user=%s,prod=%s]', filter_user,
                     filter_product)
    price_lines = []
    route_lines = []
    product_lines = []
    # only users with 'product=true' in details feed this list
    product_madeira_lines = []
    # services may span multiple domains; keep those covering this one
    service_list = session.query(RepoService).all()
    service_list = [x for x in service_list if domain_id in x.domains]
    # product
    q_product = session.query(RepoProduct).filter(
        RepoProduct.domain_id == domain_id)
    if filter_product:
        # narrow to the legacy family of the requested product
        legacy_id = session.query(RepoProduct.legacy_id).filter(
            RepoProduct.domain_id == domain_id).filter(
                RepoProduct.product_id == filter_product).one()
        if legacy_id:
            legacy_id = legacy_id[0]
            q_product = q_product.filter(RepoProduct.legacy_id == legacy_id)
    product_list = list(
        q_product.order_by(RepoProduct.carrier, RepoProduct.area,
                           RepoProduct.scope, RepoProduct.value).all())
    # distinct legacy ids, preserving first-seen order
    legacy_list = []
    for product in product_list:
        if product.legacy_id not in legacy_list:
            legacy_list.append(product.legacy_id)
    request_log.info('LOAD PRODUCT %d(%d)', len(product_list),
                     len(legacy_list))
    # user
    q_user = session.query(RepoUser).filter(RepoUser.domain_id == domain_id)
    if filter_user:
        q_user = q_user.filter(RepoUser.user_id == filter_user)
    user_list = q_user.order_by(RepoUser.user_id).all()
    request_log.info('LOAD USER %d', len(user_list))
    # special
    q_special = session.query(RepoSpecial)
    if filter_user:
        q_special = q_special.filter(RepoSpecial.user_id == filter_user)
    special_list = q_special.order_by(RepoSpecial.id).all()
    # supply, keyed by stringified id for lookup in get_pricing_line
    supply_map = {}
    for supply in session.query(RepoRouteSupply).filter(
            RepoRouteSupply.domain_id == domain_id).all():
        supply_map[str(supply.id)] = supply
    # interface prices joined to their products
    interface_list = []
    for inf, prz, prod in session.query(
            RepoRouteInterface, RepoInterfacePrice, RepoProduct).filter(
                RepoRouteInterface.interface_id ==
                RepoInterfacePrice.interface_id).filter(
                    RepoInterfacePrice.product_id ==
                    RepoProduct.product_id).all():
        interface_list.append({
            'id': inf.interface_id,
            'product': prz.product_id,
            'value': prz.value,
            'score': prz.score,
            'legacy': prod.legacy_id,
            'area': prod.area
        })
    # render pricing lines per (user, legacy product family)
    for user in user_list:
        for legacy_id in legacy_list:
            user_special_list = list(
                filter(lambda s: s.user_id == user.user_id, special_list))
            this_product_list = list(
                filter(lambda p: p.legacy_id == legacy_id, product_list))
            price_line, route_line, product_line = get_pricing_line(
                this_product_list, user, user_special_list, supply_map,
                interface_list)
            product_lines += product_line
            price_lines += price_line
            route_lines += route_line
            if user.details and 'product=true' in user.details:
                product_madeira_lines += product_line
        # yield control to the IOLoop between users
        yield gen.moment
    for service in service_list:
        request_log.info('SYNC PRICING => %s' % service)
        if service.sync_key is None or service.sync_product is None:
            request_log.info('SKIP %s' % service)
            continue
        # each service type gets its own body serialization
        if service.type == 'purus':
            body = '\n'.join(product_lines)
            request_log.info('\n' + body)
        elif service.type == 'madeira':
            body = '\n'.join(price_lines + route_lines +
                             product_madeira_lines)
            request_log.info('\n' + body)
        else:
            request_log.warn('UNKNOWN TYPE %s', service.type)
            continue
        url = service.sync_product
        secret = service.sync_key
        request_log.info('SYNC VIA %s' % url)
        # timestamp + shared-secret signature headers authenticate us
        tsp = str(int(time.mktime(time.localtime())))
        v = signature(tsp + secret)
        http_client = AsyncHTTPClient()
        try:
            response = yield http_client.fetch(
                HTTPRequest(url=url,
                            method='POST',
                            body=body,
                            headers={
                                'tsp': tsp,
                                'v': v
                            },
                            request_timeout=120))
            request_log.info('SYNC RESULT %d', response.code)
            # audit trail of the sync attempt
            log = RepoSyncLog()
            log.service_id = service.service_id
            log.type = 'pricing'
            log.result = response.code
            log.create_time = dt.now()
            session.add(log)
            session.commit()
        except Exception as e:
            request_log.exception("FAIL")
def sync_user(session, domain_id):
    """Publish user/operator/domain data to every service of a domain.

    Builds a publisher-specific body per service type and POSTs it to
    the service's ``sync_user`` URL with tsp/v signature headers,
    recording a RepoSyncLog row per successful request.

    :param session: SQLAlchemy session used for all queries.
    :param domain_id: domain whose services receive the sync.
    """
    # service could contains multi-domains.
    service_list = session.query(RepoService).all()
    # filter by domain_id
    service_list = [x for x in service_list if domain_id in x.domains]
    # loading all data, filter in services
    user_list = session.query(RepoUser).order_by(RepoUser.user_id).all()
    operator_list = session.query(RepoOperator).order_by(
        RepoOperator.user_id).all()
    domain_list = session.query(RepoDomain).order_by(RepoDomain.id).all()
    interface_list = session.query(RepoRouteInterface).filter(
        RepoRouteInterface.domain_id == domain_id).order_by(
            RepoRouteInterface.id).all()
    for service in service_list:
        request_log.info('SYNC USER => %s' % service)
        if service.sync_key is None or service.sync_user is None:
            request_log.info('SKIP %s' % service.service_id)
            continue
        url = service.sync_user
        secret = service.sync_key
        # each publisher type gets its own serialization
        if service.type == 'purus':
            body = publish_purus(user_list, operator_list, domain_list,
                                 service_list, service, interface_list)
        elif service.type == 'madeira':
            body = publish_madeira(user_list, service, domain_list)
        elif service.type == 'truman':
            body = publish_truman(user_list, operator_list, service)
        elif service.type == 'forrestal':
            # forrestal additionally needs valid fuel accounts with a
            # default flag set ("!= None" is SQLAlchemy's IS NOT NULL).
            fuel_list = session.query(FuelAccount).filter(
                FuelAccount.status == 'valid').filter(
                    FuelAccount.is_default != None).order_by(
                        FuelAccount.user_id).all()
            body = publish_forrestal(user_list, service, fuel_list)
        else:
            request_log.error('UNKNOWN TYPE %s' % service.type)
            continue
        # timestamp + shared-secret signature headers authenticate us
        tsp = str(int(time.mktime(time.localtime())))
        v = signature(tsp + secret)
        http_client = AsyncHTTPClient()
        try:
            request_log.debug('BODY=\n%s', body)
            response = yield http_client.fetch(
                HTTPRequest(url=url,
                            method='POST',
                            body=body,
                            validate_cert=False,
                            headers={
                                'tsp': tsp,
                                'v': v
                            }))
            request_log.info('SYNC RESULT %d', response.code)
            # audit trail of the sync attempt
            log = RepoSyncLog()
            log.service_id = service.service_id
            log.result = response.code
            log.type = 'user'
            log.create_time = dt.now()
            session.add(log)
            session.commit()
        except Exception as e:
            request_log.exception("FAIL")
def post(self):
    """Authenticated hot-reload of the downstream configuration.

    The caller must come from a white-listed IP and present a valid
    tsp/v signature header pair.  The YAML body is sanity-checked
    against the currently loaded config, written atomically to
    downstream.yaml (with a timestamped backup), and swapped into the
    running application config.
    """
    request_log.info('CONFIG START')
    try:
        safety = self.application.config.get('safety')
        if safety is None:
            request_log.error('CONFIG FAIL (NO SAFETY)')
            return self.send_error(500)
        # verify ip in white list
        if self.request.remote_ip not in safety['white_list']:
            request_log.error("CONFIG FAIL ('%s'NOT IN WHITELIST)",
                              self.request.remote_ip)
            return self.send_error(500)
        # verify key: header signature must match our own computation
        tsp0 = self.request.headers['tsp']
        encrypted0 = self.request.headers['v']
        encrypted1 = signature(tsp0 + safety['secret'])
        if encrypted1 != encrypted0:
            request_log.error('CONFIG FAIL (SECRET FAIL)')
            return self.send_error(500)
        # decode (optional)
        # reload
        body = self.request.body.decode()
        # NOTE(review): yaml.load without an explicit Loader can run
        # arbitrary constructors; the body is signature-verified, but
        # yaml.safe_load would be safer -- confirm no custom tags are
        # needed before switching.
        cfg = yaml.load(body)
        if cfg:
            # basic sanity check: relative change in downstream count
            d1 = len(cfg.get('downstream'))
            d0 = len(self.application.config.get('downstream'))
            delta = abs((d1 - d0) * 100 / d0)
            request_log.info('CONFIG DELTA %.3f', delta)
            tsp = time.strftime("%m%d%H%M%S", time.localtime())
            # back up the current config with a timestamp suffix
            shutil.copy('downstream.yaml', 'downstream.yaml.%s' % tsp)
            # stage the new config in a temp file first
            with open('downstream.tmp', 'w', encoding='utf8') as stream:
                stream.write(body)
            # bail out just before overwriting if the change looks
            # suspiciously large (>10% and >10 entries)
            if delta > 10 and abs(d1 - d0) > 10:
                request_log.error('CONFIG FAIL DELTA %.3f (%d)', delta,
                                  (d1 - d0))
                return self.send_error(500)
            # atomically replace the live config file
            shutil.move('downstream.tmp', 'downstream.yaml')
            # swap the new sections into the running application
            self.application.config['downstream'] = cfg.get('downstream')
            self.application.config['user'] = cfg.get('user')
            self.application.config['domain'] = cfg.get('domain')
            self.application.config['interface'] = cfg.get('interface')
            request_log.info('CONFIG SYNCED (%dK)', len(body) / 1024)
        return self.finish(json.dumps({'status': 'ok'}))
    except Exception as e:
        request_log.exception('CONFIG SYNC FAIL')
        self.send_error(500)
def commit(self, key=None, value=None, add=True, **kwargs):
    """Commit the index to the working tree.

    >>> repo.add('foo', 'my very special bar')
    >>> repo.commit()
    >>> foo = repo.show('foo')
    u'my very special bar'

    If a key and value are specified, will add them immediately before
    committing them.

    >>> repo.commit('fast', 'and easy, too!')
    >>> foo = repo.show('fast')
    u'and easy, too!'

    :param key: The key
    :type key: string
    :param value: The value of the key.
    :type value: anything that runs through :func:`json.dumps`
    :param message: (optional) Message for the commit.  Defaults to ''.
    :type message: string
    :param author: (optional) The signature for the author of the
        commit.  Defaults to git's `--global` `author.name` and
        `author.email`.
    :type author: pygit2.Signature
    :param committer: (optional) The signature for the committer of the
        commit.  Defaults to author.
    :type committer: pygit2.Signature
    :param parents: (optional) The parents of this commit.  Defaults to
        the last commit for this key if it already exists, or an empty
        list if not.
    :type parents: list of :class:`Commit <jsongit.wrappers.Commit>`
    :raises: :class:`NotJsonError <jsongit.NotJsonError>`
        :class:`InvalidKeyError <jsongit.InvalidKeyError>`
    """
    # When no key is given, commit every path currently in the index.
    keys = [key] if key is not None else [e.path for e in self._repo.index]
    message = kwargs.pop('message', '')
    parents = kwargs.pop('parents', None)
    author = kwargs.pop(
        'author', utils.signature(self._global_name, self._global_email))
    committer = kwargs.pop('committer', author)
    if kwargs:
        raise TypeError("Unknown keyword args %s" % kwargs)
    # a value without a key cannot be stored anywhere
    if key is None and value is not None:
        raise InvalidKeyError()
    # explicit parents must belong to this repository
    if parents is not None:
        for parent in parents:
            if parent.repo != self:
                raise DifferentRepoError()
    if add is True and key is not None and value is not None:
        self.add(key, value)
    # First, one commit on the repo-wide head covering the whole index.
    repo_head = self._repo_head()
    tree_id = self._repo.index.write_tree()
    self._repo.create_commit(self._head_target(), author, committer,
                             message, tree_id,
                             [repo_head.oid] if repo_head else [])
    # TODO This will create some keys but not others if there is a bad key
    # Then one commit per key on that key's own ref.
    # NOTE(review): `parents`, once defaulted for the first key, is
    # reused for every later key in this loop (it is never reset to
    # None) -- confirm that is intended when committing multiple keys.
    for key in keys:
        if parents is None:
            parents = [self.head(key)] if self.committed(key) else []
        try:
            # create a single-entry tree for the commit.
            blob_id = self._navigate_tree(tree_id, key)
            idx = pygit2.Index('')
            idx.add(
                pygit2.IndexEntry(key, blob_id, pygit2.GIT_FILEMODE_BLOB))
            key_tree_id = idx.write_tree(self._repo)
            self._repo.create_commit(self._key2ref(key), author, committer,
                                     message, key_tree_id,
                                     [parent.oid for parent in parents])
        except (pygit2.GitError, OSError) as e:
            # libgit2 reports invalid ref names / path components via
            # these messages; surface them as InvalidKeyError.
            if (str(e).startswith('Failed to create reference')
                    or 'directory' in str(e)):
                raise InvalidKeyError(e)
            else:
                raise e