def test_latest_donation_list(self, check_status_psp):
    user_token = Token.objects.create(user=self.user1)
    response = self.client.get(self.fundraiser_donation_list_url,
                               token="Token {0}".format(user_token))
    self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)
    self.assertEqual(len(response.data['results']), 2)

    # Second donation (first in list), without fundraiser
    data1 = bunchify(response.data['results'][0])
    self.assertEqual(data1.id, self.donation2.id)
    self.assertEqual(data1.amount, Decimal('10'))
    self.assertEqual(data1.project.title, self.project.title)
    self.assertTrue(data1.project.country.name)
    self.assertEqual(data1.user.full_name, self.user2.get_full_name())
    self.assertEqual(data1.project.image, '')
    self.assertEqual(data1.project.owner.avatar, '')

    # First donation, made via the fundraiser
    data2 = bunchify(response.data['results'][1])
    self.assertEqual(data2['amount'], Decimal('15'))
    self.assertEqual(data2['fundraiser'], self.fundraiser.id)
def create_status_response(self, status='AUTHORIZED', payments=None, totals=None):
    if payments is None:
        payments = [{
            'id': 123456789,
            'paymentMethod': 'MASTERCARD',
            'authorization': {'status': status,
                              'amount': {'value': 1000, '_currency': 'EUR'}}
        }]
    default_totals = {
        'totalRegistered': 1000,
        'totalShopperPending': 0,
        'totalAcquirerPending': 0,
        'totalAcquirerApproved': 0,
        'totalCaptured': 0,
        'totalRefunded': 0,
        'totalChargedback': 0
    }
    if totals is not None:
        default_totals.update(totals)
    return bunchify({
        'payment': bunchify(payments),
        'approximateTotals': bunchify(default_totals)
    })
def db(method):
    for current_stage in env.stages:
        if isinstance(current_stage, basestring):
            stage = bunchify(stages[current_stage])
        else:
            stage = bunchify(current_stage)
        env.user = stage.user
        if method == "import":
            if env.host == "localhost":
                local("cd /tmp && mysql -u $MYSQL_USER -h localhost -p$MYSQL_PASS $MYSQL_DB < dump.sql")
            else:
                run("cd $HOME/tmp && mysql -u $MYSQL_USER -h localhost -p$MYSQL_PASS $MYSQL_DB < import.sql")
        if method == "dump":
            if env.host == "localhost":
                local("cd /tmp && mysqldump -u $MYSQL_USER -h localhost -p$MYSQL_PASS $MYSQL_DB > dump.sql")
            else:
                run("cd $HOME/tmp && mysqldump -u $MYSQL_USER -h localhost -p$MYSQL_PASS $MYSQL_DB > dump.sql")
        if method == "down":
            get("/home/" + stage.user + "/tmp/dump.sql", "/tmp/dump.sql")
        if method == "up":
            put("/tmp/dump.sql", "/home/" + stage.user + "/tmp/import.sql")
        if method == "sync":
            run("cd $HOME/tmp && mysqldump -u $MYSQL_USER -h localhost -p$MYSQL_PASS $MYSQL_DB > dump.sql")
            get("/home/" + stage.user + "/tmp/dump.sql", "/tmp/dump.sql")
            local("cd /tmp && mysql -u $MYSQL_USER -h localhost -p$MYSQL_PASS $MYSQL_DB < dump.sql")
def main():
    requests.packages.urllib3.disable_warnings()
    defaultConfig = {
        'url': 'https://stream.watsonplatform.net/text-to-speech/api',
        'user': '******',
        'password': '******',
        'voice': 'en-US_AllisonVoice',
        'chunk': 2048
    }
    home = os.path.expanduser("~")
    defaultConfigFile = home + '/.config-tts-watson.yml'
    parser = argparse.ArgumentParser(
        description='Text to speech using watson')
    parser.add_argument('-f', action='store', dest='configFile',
                        default=defaultConfigFile, help='config file',
                        required=False)
    parser.add_argument('text_to_transform', action='store', nargs='+')
    args = parser.parse_args()
    conf = anyconfig.container(defaultConfig)
    if not os.path.isfile(args.configFile):
        print "Config file '" + args.configFile + "' doesn't exist."
        print "Creating it ..."
        # NOTE: the credential prompts and the playback call are masked
        # ('******') in the source
        user = raw_input("Watson user: "******"Watson password: "******" ".join(args.text_to_transform))
def loads(self, data):
    """ :inherit.
    """
    loaded = super(BunchSerializer, self).loads(data)
    if isinstance(loaded, list):
        loaded = [bunchify(record) for record in loaded]
    elif isinstance(loaded, dict):
        loaded = bunchify(loaded)
    return loaded
def bunchified(self):
    """ Returns a bunchified (converted into bunch.Bunch) version of
    self.raw_request, deep copied if it's a dict (or a subclass). Note that
    it makes sense to use this method only with dicts or JSON input.
    """
    # We have a dict
    if isinstance(self.raw_request, dict):
        return bunchify(deepcopy(self.raw_request))

    # Must be JSON input then; loads raises an exception if it is not
    return bunchify(loads(self.raw_request))
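# A minimal, standalone sketch (not from the source) of the bunchify()
# semantics the two helpers above rely on; `bunch` provides bunchify on
# Python 2, and the `munch` fork offers the same API on Python 3.
from copy import deepcopy
from json import loads

from bunch import bunchify

raw_request = {'user': {'name': 'alice', 'roles': ['admin']}}

# Dicts are deep-copied first, so mutating the Bunch leaves the original
# request untouched.
req = bunchify(deepcopy(raw_request))
req.user.name = 'bob'
assert raw_request['user']['name'] == 'alice'

# JSON input is parsed first; loads() raises ValueError if it is not JSON.
req2 = bunchify(loads('{"user": {"name": "carol"}}'))
assert req2.user.name == 'carol'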
def main():
    defaultConfig = {
        'url': 'https://gateway.watsonplatform.net/dialog/api',
        'user': '******',
        'password': '******',
    }
    home = os.path.expanduser("~")
    defaultConfigFile = home + '/.config-dialog-watson.yml'
    parser = argparse.ArgumentParser(
        description='Text to speech using watson')
    parser.add_argument('-f', action='store', dest='configFile',
                        default=defaultConfigFile, help='config file',
                        required=False)
    parser.add_argument('dialog_file', action='store', nargs=1)
    parser.add_argument('-n', '--name', dest='dialog_name', action='store',
                        help='Dialog name', required=True)
    parser.add_argument('--clean', dest='clean', action='store_true')
    args = parser.parse_args()
    dialog_file = "".join(args.dialog_file)
    conf = anyconfig.container(defaultConfig)
    if not os.path.isfile(args.configFile):
        print "Config file '" + args.configFile + "' doesn't exist."
        print "Creating it ..."
        # NOTE: the credential prompts and the Watson client construction
        # are masked ('******') in the source
        user = raw_input("Watson user: "******"Watson password: "******"".join(args.dialog_name), bconf.url, os.path.dirname(dialog_file) + "/dialog_id_file.txt")
    if args.clean:
        watsonClient.clean_dialogs()
    resp = watsonClient.start_dialog()
    print ''
    print bcolors.WARNING + "Watson: " + bcolors.OKBLUE + "\n".join(resp.response) + bcolors.ENDC
    while True:
        userResponse = raw_input(bcolors.WARNING + "You: " + bcolors.OKGREEN)
        resp = watsonClient.converse(userResponse)
        print bcolors.WARNING + "Watson: " + bcolors.OKBLUE + "\n".join(resp.response) + bcolors.ENDC
        if userResponse == "bye":
            break
    print ""
    print "Your profile:"
    for name, value in watsonClient.get_profile().get_data().iteritems():
        print "\t" + name + ": " + value

if __name__ == "__main__":
    main()
def _prepare_data(self, info, photos, flickr_user, exif=None, geo=None):
    photoset = bunchify(info)
    photos = bunchify(photos['photoset']['photo'])
    data = {'flickr_id': photoset.id,
            'server': photoset.server,
            'secret': photoset.secret,
            'farm': photoset.farm,
            'primary': photoset.primary,
            'title': photoset.title._content,
            'description': photoset.description._content,
            'date_posted': ts_to_dt(photoset.date_create, flickr_user.tzoffset),
            'date_updated': ts_to_dt(photoset.date_update, flickr_user.tzoffset),
            'photos': photos,
            'last_sync': now(),
            }
    if flickr_user:
        data['user'] = flickr_user
    return data
def get_openapi_spec(self, data, needs_api_invoke, needs_rest_channels, api_invoke_path):
    data = bunchify(data)
    channel_data = self.server.worker_store.request_dispatcher.url_data.channel_data
    generator = OpenAPIGenerator(data, channel_data, needs_api_invoke,
                                 needs_rest_channels, api_invoke_path)
    return generator.generate()
def get(self, path, **params):
    """GET an API path and return bunchified result."""
    url = self.url(path)
    self.log.debug("GET from %r", url)
    response = self.session.get(url, params=params)
    response.raise_for_status()
    return bunchify(response.json())
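# A hedged, self-contained sketch (an assumption, not the source class)
# of the same pattern: HTTP GET -> raise on error -> bunchify the JSON body.
import logging

import requests
from bunch import bunchify


class JsonClient(object):
    def __init__(self, base_url):
        self.base_url = base_url.rstrip('/')
        self.session = requests.Session()
        self.log = logging.getLogger(__name__)

    def url(self, path):
        return self.base_url + path

    def get(self, path, **params):
        url = self.url(path)
        self.log.debug("GET from %r", url)
        response = self.session.get(url, params=params)
        response.raise_for_status()
        return bunchify(response.json())

# httpbin.org echoes query params back as JSON, handy as a smoke test:
#   args = JsonClient('https://httpbin.org').get('/get', q='1').args
#   assert args.q == '1'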
def init(self):
    try:
        json = loads(self.inner.text)
    except ValueError:
        msg = 'inner.status_code `{}`, JSON parsing error `{}`'.format(
            self.inner.status_code, self.inner.text)
        self.logger.error(msg)
        raise ValueError(msg)

    if 'zato_env' in json:
        has_zato_env = True
        self.details = json['zato_env']['details']
        self.sio_result = json['zato_env']['result']
        self.ok = self.sio_result == ZATO_OK
    else:
        has_zato_env = False
        self.details = self.inner.text
        self.ok = self.inner.ok

    if self.ok:
        if has_zato_env:
            # There will be two keys, zato_env and the actual payload
            for key, _value in json.items():
                if key != 'zato_env':
                    value = _value
                    break
        else:
            value = json

        if self.set_data(value, has_zato_env):
            self.has_data = True
            if self.to_bunch:
                self.data = bunchify(self.data)
def all_runs(result_dir):
    args = {
        'population_size': 100,
        'elite_size': 0,
        'selection': 'roulette',
        'tournament_size': 0,
        'crossover_rate': 0.8,
        'mutation_rate': 0.005,
        'chromosome_length': 30,
        'fitness_threshold': 0.98,
        'max_iterations': 1000,
        'rng_freeze': False,
        'noise_type': 'snp',
        'noise_param': 0.2,
        'dump_images': False,
        'output_file': 'output.json',
        'print_iterations': False,
    }
    pid = os.getpid()
    # Increase the population size by 10 at each step
    for population_size in xrange(10, 1000 + 10, 10):
        # Elitism - 1/10 of the population
        elite_size = int(population_size * 0.1)
        output_filename = "population-%i-%i.json" % (population_size, pid)
        filepath = os.path.join(result_dir, output_filename)
        args['output_file'] = filepath
        args['population_size'] = population_size
        args['elite_size'] = elite_size
        experiment.run(bunchify(args))
def parse(self):
    for impl_name, details in self.service_store_services.iteritems():
        details = bunchify(details)
        _should_include = self._should_handle(details.name, self.include)
        _should_exclude = self._should_handle(details.name, self.exclude)
        if (not _should_include) or _should_exclude:
            continue
        info = ServiceInfo(details.name, details.service_class, self.simple_io_config)
        self.services[info.name] = info

    for name, info in self.services.iteritems():
        self.invokes[name] = info.invokes

    for source, targets in self.invokes.iteritems():
        for target in targets:
            sources = self.invoked_by.setdefault(target, [])
            sources.append(source)

    for name, info in self.services.iteritems():
        info.invoked_by = self.invoked_by.get(name, [])
def broker(url, cmd):
    postdata = {}
    if 'password' in cmd:
        postdata['password'] = cmd['password']
        del cmd['password']
    postdata['json_data'] = json.dumps(cmd)
    rsp = requests.post(url, data=postdata)

    # Allow a 400 status code, as we will most likely get a message back
    # from the OpenShift API that makes more sense than an HTTP error
    if rsp.status_code != 200 and rsp.status_code != 400:
        rsp.raise_for_status()

    d = json.loads(rsp.text)
    if 'data' in d and len(d['data']) > 0:
        data = json.loads(d['data'])
        d['data'] = data

    rv = bunchify(d)
    if int(rv.exit_code) != 0:
        raise RuntimeError("ERROR <%(exit_code)d>: %(result)s.\n%(debug)s" % rv)
    return rv
def init(self):
    json = loads(self.inner.text)

    if 'zato_env' in json:
        has_zato_env = True
        self.details = json['zato_env']['details']
        self.sio_result = json['zato_env']['result']
        self.ok = self.sio_result == ZATO_OK
    else:
        has_zato_env = False
        self.details = self.inner.text
        self.ok = self.inner.ok

    if self.ok:
        if has_zato_env:
            # There will be two keys, zato_env and the actual payload
            for key, _value in json.items():
                if key != 'zato_env':
                    value = _value
                    break
        else:
            value = json

        if self.set_data(value, has_zato_env):
            self.has_data = True
            if self.to_bunch:
                self.data = bunchify(self.data)
def update_attrs(cls, name, attrs):
    attrs = bunchify(attrs)
    mod = getmodule(cls)

    attrs.elem = getattr(mod, 'elem')
    attrs.label = getattr(mod, 'label')
    attrs.model = getattr(mod, 'model')
    attrs.output_required_extra = getattr(mod, 'output_required_extra', [])
    attrs.output_optional_extra = getattr(mod, 'output_optional_extra', [])
    attrs.get_data_func = getattr(mod, 'list_func')
    attrs.def_needed = getattr(mod, 'def_needed', False)
    attrs.initial_input = getattr(mod, 'initial_input', {})
    attrs.skip_input_params = getattr(mod, 'skip_input_params', [])
    attrs.skip_output_params = getattr(mod, 'skip_output_params', [])
    attrs.pre_opaque_attrs_hook = getattr(mod, 'pre_opaque_attrs_hook', None)
    attrs.instance_hook = getattr(mod, 'instance_hook', None)
    attrs.response_hook = getattr(mod, 'response_hook', None)
    attrs.delete_hook = getattr(mod, 'delete_hook', None)
    attrs.broker_message_hook = getattr(mod, 'broker_message_hook', None)
    attrs.extra_delete_attrs = getattr(mod, 'extra_delete_attrs', [])
    attrs.input_required_extra = getattr(mod, 'input_required_extra', [])
    attrs.input_optional_extra = getattr(mod, 'input_optional_extra', [])
    attrs.create_edit_input_required_extra = getattr(
        mod, 'create_edit_input_required_extra', [])
    attrs.create_edit_rewrite = getattr(mod, 'create_edit_rewrite', [])
    attrs.check_existing_one = getattr(mod, 'check_existing_one', True)
    attrs.request_as_is = getattr(mod, 'request_as_is', [])
    attrs.sio_default_value = getattr(mod, 'sio_default_value', None)
    attrs.get_list_docs = getattr(mod, 'get_list_docs', None)
    attrs.delete_require_instance = getattr(mod, 'delete_require_instance', True)
    attrs._meta_session = None

    attrs.is_create = False
    attrs.is_edit = False
    attrs.is_create_edit = False
    attrs.is_delete = False

    if name == 'GetList':
        # get_sio sorts out what is required and what is optional.
        attrs.output_required = attrs.model
        attrs.output_optional = attrs.model
    else:
        attrs.broker_message = getattr(mod, 'broker_message')
        attrs.broker_message_prefix = getattr(mod, 'broker_message_prefix')

        if name in ('Create', 'Edit'):
            attrs.input_required = attrs.model
            attrs.input_optional = attrs.model
            attrs.is_create = name == 'Create'
            attrs.is_edit = name == 'Edit'
            attrs.is_create_edit = True
        elif name == 'Delete':
            attrs.is_delete = True

    return attrs
def updateResourceWithParams(cls, resource, params):
    resource = bunch.bunchify(resource)
    for key, val in params.iteritems():
        if val is not None:
            resource[key] = val
    return resource
def from_build_info_to_build_short_info(result):
    build_info = bunch.bunchify(result)

    def is_param(x):
        klass = x.get('_class', None)
        return klass == "hudson.model.ParametersAction"

    params = next(
        iter(x.parameters for x in filter(is_param, build_info.actions)), {})
    parameters_real = {p.name: p.value for p in params}
    info = bunch.Bunch({
        'queueId': result['queueId'],
        'number': result['number'],
        'timestamp': result['timestamp'],
        'displayName': result['displayName'],
        'artifacts': result['artifacts'],
        'duration': result['duration'],
        'result': result['result'],
        'fullDisplayName': result['fullDisplayName'],
        'parametersReal': parameters_real,
        'url': result['url']
    })
    return info
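# Hedged example of the Jenkins build JSON this converter expects; the
# field values below are made up, but hudson.model.ParametersAction is
# the real _class Jenkins reports for build parameters.
#
#   result = {
#       'queueId': 7, 'number': 42, 'timestamp': 1500000000000,
#       'displayName': '#42', 'artifacts': [], 'duration': 61000,
#       'result': 'SUCCESS', 'fullDisplayName': 'job #42',
#       'url': 'https://jenkins/job/job/42/',
#       'actions': [{'_class': 'hudson.model.ParametersAction',
#                    'parameters': [{'name': 'BRANCH', 'value': 'master'}]}],
#   }
#   info = from_build_info_to_build_short_info(result)
#   assert info.parametersReal == {'BRANCH': 'master'}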
def prepeare_hosts(config_dict, local_base): """Prepare env global variable""" hosts = Bunch() for name, values in config_dict.items(): if not values: continue h = bunchify(values) h.setdefault('port', 22) h.repo_path = os.path.join('/home', h.user, h.project_base, h.project_name) h.code_path = os.path.join(h.repo_path, h.code_subdir) h.host_string = '{user}@{remote_ip}:{port}'.format(**h) # Virtualenv h.venv_dir = '/home/{user}/.virtualenvs/{virtualenv}'.format(**h) h.venv_prefix = ( 'export export PIP_DOWNLOAD_CACHE=~/.pip_download_cache;' 'source ' '{venv_dir}/bin/activate'.format(**h)) # Proxy proxy = env.get('proxy', None) if proxy: h.proxy_settings = 'export http_proxy=%s export https_proxy=%s' else: h.proxy_settings = '' h.local_base = os.path.abspath(local_base) h.setdefault('proxy_command', '') hosts[name] = h env._hosts = hosts
def main():
    args = parser.parse_args()
    with open(args.constants, 'r') as f:
        constants = bunchify(yaml.load(f))

    logger.info("Loading image %s ..." % args.input)
    img = cv2.imread(args.input, flags=cv2.IMREAD_COLOR)
    # Scale the image into the 0-1 range
    img = img / 255.0

    logger.info("Generating dark channel prior ...")
    dark_channel = steps.generate_dark_channel(img, constants)

    logger.info("Estimating airlight ...")
    airlight = steps.estimate_airlight(img, dark_channel, constants)
    logger.info("Estimated airlight is %s", str(airlight))

    logger.info("Estimating transmission map ...")
    tmap = steps.estimate_tmap(dark_channel, constants)

    logger.info("Smoothing transmission map ...")
    tmap = steps.smooth_tmap(img, tmap, constants)

    logger.info("Dehazing image ...")
    dehazed = steps.dehaze(img, airlight, tmap, constants)
    tools.show_img([img, dehazed])
def prepare(hosts_datadir, workdir=None, repos=[], cachedir=None,
            backend=RUM.DEFAULT_BACKEND, backends=RUM.BACKENDS):
    """
    Scan and collect hosts' basic data (installed rpms list, etc.).

    :param hosts_datadir: Dir in which rpm db roots of hosts exist
    :param workdir: Working dir to save results
    :param repos: List of yum repos to get updateinfo data (errata and updates)
    :param cachedir: A dir to save metadata cache of yum repos
    :param backend: Backend module to use to get updates and errata
    :param backends: Backend list
    :return: A generator to yield a tuple, (host_identity, host_rpmroot or None)
    """
    if workdir is None:
        LOG.info(_("Set workdir to hosts_datadir: %s"), hosts_datadir)
        workdir = hosts_datadir
    else:
        if not os.path.exists(workdir):
            LOG.debug(_("Creating working dir: %s"), workdir)
            os.makedirs(workdir)

    for h, root in hosts_rpmroot_g(hosts_datadir):
        hworkdir = os.path.join(workdir, h)
        if not os.path.exists(hworkdir):
            os.makedirs(hworkdir)

        if root is None:
            touch(os.path.join(hworkdir, "RPMDB_NOT_AVAILABLE"))
            yield bunch.bunchify(dict(id=h, workdir=hworkdir, available=False))
        else:
            yield RUM.prepare(root, hworkdir, repos, h, cachedir, backend, backends)
def get(self, id):
    return bunchify(
        json.loads(
            self._make_request(self.resource_name,
                               resource_id=str(self.resource_id),
                               subresource=self.subresource,
                               subresource_id=str(id)).content))
def load_package_groups_data_g(paths=[], data=_DATA_0, profkey="system_profile"):
    """
    :param paths: A list of package group data dirs
    """
    sysprof = bunch.bunchify(data.get(profkey, {}))

    for path in paths:
        logging.debug("Loading profiles from: " + path)
        pgdata = load_profiles(path)

        for grp in pgdata.get("groups", []):
            instif = grp.get("install_if", '')
            grp["install_if"] = parse_install_pred(instif, sysprof, False)
            logging.debug("install_if: %s -> %s" % (instif, grp["install_if"]))

            # TODO: Is 'type' of the packages (mandatory | default | optional)
            # to be checked?
            inst_pkgs = RU.uniq((p["name"] for p in grp.get("packages", [])
                                 if grp["install_if"]), use_set=True)
            uninst_pkgs = RU.uniq((p["name"] for p in grp.get("packages", [])
                                   if not grp["install_if"]), use_set=True)

            grp["install_pkgs"] = inst_pkgs
            grp["remove_pkgs"] = uninst_pkgs

            yield grp
def _invoke(self, func, func_name, url_path, request, expect_ok, auth=None,
            _not_given='_test_not_given'):
    address = Config.server_address.format(url_path)
    request['current_app'] = Config.current_app
    data = dumps(request)

    logger.info('Invoking %s %s with %s', func_name, address, data)
    response = func(address, data=data, auth=auth)
    logger.info('Response received %s %s', response.status_code, response.text)

    data = loads(response.text)
    data = bunchify(data)

    # Most tests require status OK and CID
    if expect_ok:
        self.assertNotEquals(data.get('cid', _not_given), _not_given)
        self.assertEquals(data.status, status_code.ok)

    return data
def test_get_product_get_image(self, requests_mock):
    response_mock = Mock()
    response_mock.status_code = 200
    response_mock.content = json.dumps({'id': 991, 'name': 'test prod'})
    requests_mock.get.return_value = response_mock

    cli = APIClient('test_api_key', 'test user agent')
    p = ProductResource(cli, '46')
    res = p.get(991)
    self.assertEqual(991, res.id)
    requests_mock.get.assert_called_with(
        url='https://api.tiendanube.com/v1/46/products/991',
        headers={
            'Authentication': 'bearer test_api_key',
            'User-Agent': 'test user agent'
        },
        params=None)

    response_mock.content = json.dumps({
        'id': 1,
        'name': 'test prod image'
    })
    res = res.images.get(1)
    self.assertEqual(bunchify({'id': 1, 'name': 'test prod image'}), res)
    requests_mock.get.assert_called_with(
        url='https://api.tiendanube.com/v1/46/products/991/images/1',
        headers={
            'Authentication': 'bearer test_api_key',
            'User-Agent': 'test user agent'
        },
        params=None)
def test_list_webhooks_filter(self, requests_mock):
    response_mock = Mock()
    response_mock.status_code = 200
    response_mock.content = json.dumps([
        {'id': 47},
    ])
    requests_mock.get.return_value = response_mock

    cli = APIClient('test_api_key', 'test user agent')
    w = WebhookResource(cli, '46')
    res = w.list(filters={'since_id': 47}, fields='id')
    self.assertEqual(bunchify([
        {'id': 47},
    ]), res)
    requests_mock.get.assert_called_with(
        url='https://api.tiendanube.com/v1/46/webhooks',
        headers={
            'Authentication': 'bearer test_api_key',
            'User-Agent': 'test user agent'
        },
        params={
            'since_id': 47,
            'fields': 'id'
        })
def test_map(self):
    source = {'a': {'b': [1, 2, '3', 4], 'c': {'d': '123'}}}
    m = Mapper(source)

    # 1:1 mappings
    m.map('/aa', '/a/b')
    m.map('/bb', '/a/c/d')

    # Force conversion to int
    m.map('/cc/dd', 'int:/a/c/d')

    # Manually signal that /cc/ee/ff should be a list here ..
    m.set('/cc/ee/ff', [])
    m.map('/cc/ee/ff/19', 'int:/a/c/d')

    target = bunchify(m.target)

    self.assertListEqual(target.aa, [1, 2, '3', 4])
    self.assertEquals(target.bb, '123')
    self.assertEquals(target.cc.dd, 123)
    self.assertEquals(target.cc.ee.ff, [None] * 19 + [123])
def get_instances_with_retries(ec2_client, instance_ids):
    """
    AWS describe_instances with retries.
    """
    response = ec2_client.describe_instances(InstanceIds=instance_ids)
    return bunchify(response)
def get_instance_price(self, instance):
    """
    Given an Instance object, gets the price of that instance based on
    the InstanceType, AZ and StartTime.
    """
    current_time = datetime.now()
    if 'InstanceLifecycle' not in instance:
        return {
            str(current_time): self.bid_advisor.get_on_demand_price(instance.InstanceType)
        }

    # Align the query time with the instance's launch minute so the
    # spot-price lookup covers the current billing hour.
    query_time = current_time
    query_time = query_time.replace(minute=instance.LaunchTime.minute)
    query_time = query_time.replace(second=instance.LaunchTime.second)
    query_time = query_time.replace(microsecond=instance.LaunchTime.microsecond)
    if current_time.minute >= instance.LaunchTime.minute:
        query_time = query_time.replace(hour=current_time.hour)
    else:
        # Note: raises ValueError during the first hour after midnight,
        # when current_time.hour - 1 is negative
        query_time = query_time.replace(hour=current_time.hour - 1)

    response = self.ec2_client.describe_spot_price_history(
        EndTime=query_time,
        InstanceTypes=[instance.InstanceType],
        ProductDescriptions=['Linux/UNIX (Amazon VPC)'],
        AvailabilityZone=instance.Placement.AvailabilityZone,
        StartTime=query_time)
    assert response is not None, "Failed to get spot-instance prices"

    resp = bunchify(response)
    if len(resp.SpotPriceHistory) > 0:
        return {str(current_time): resp.SpotPriceHistory[0].SpotPrice}
    else:
        return {str(current_time): "-1"}
def _new(cls, api, data, err_handler=None, params=None, method=None): """ Create a new `Node` from a `Graph` and a JSON-decoded object. If the object is not a dictionary, it will be returned as-is. """ if isinstance(data, dict): if data.get("error"): code = data["error"].get("code") msg = data["error"]["message"] if code is None: code_match = code_re.match(msg) if code_match is not None: code = int(code_match.group(1)) e = GraphException(code, msg, graph=api, params=params, method=method) if err_handler: return err_handler(e=e) else: raise e return cls(api, bunch.bunchify(data)) return data
def json_call(service, data, config):
    """Make a call to a Zato service from a set list with JSON POST data.

    The service URLs are configured in the SERVICE_URLS module global
    dictionary, keyed by the service name as written in the header of the
    Zato service documentation page.

    @param service: service name
    @param data: dictionary of data to send as JSON post data
    @param config: configuration dictionary as read from 'deploy.conf'
    """
    address = 'http://%s:%s' % (config.lb_host, config.lb_port)
    try:
        path = SERVICE_URLS[service]
    except KeyError:
        log.error("Unknown zato JSON service name: %s", service)
        raise
    auth = (config.http_user, config.http_password)
    client = JSONClient(address, path, auth)
    log.debug("Invoking service at '%s' with data: %s", path, data)
    res = client.invoke(data)
    if not res.ok:
        raise JSONCallResponseError(
            "Zato non-successful result code: {}".format(res))
    return bunchify(res.data)
def get_dict_with_opaque(instance, to_bunch=False):
    opaque = parse_instance_opaque_attr(instance)
    out = instance._asdict() if hasattr(instance, '_asdict') else instance.asdict()
    for k, v in opaque.items():
        out[k] = v
    return bunchify(out) if to_bunch else out
def describe_asg_with_retries(ac_client, asgs=[]):
    """
    AWS describe_auto_scaling_groups with retries.
    """
    response = ac_client.describe_auto_scaling_groups(AutoScalingGroupNames=asgs)
    return bunchify(response)
def on_broker_msg_CHANNEL_WEB_SOCKET_EDIT(self, msg):
    # Each worker uses a unique bind port
    msg = bunchify(msg)
    self.web_socket_channel_create_edit(msg.old_name, msg, 'edit', 5, False)
def services_list(request, environment_id):
    services = []
    # Need to create a new session to see services deployed by other users
    session_id = Session.get_or_create(request, environment_id)
    get_environment = muranoclient(request).environments.get
    environment = get_environment(environment_id, session_id)
    try:
        reports = muranoclient(request).environments.last_status(environment_id, session_id)
    except HTTPNotFound:
        reports = {}

    for service_item in environment.services:
        service_data = service_item
        service_data["full_service_name"] = get_service_name(service_data["type"])

        if service_data["id"] in reports and reports[service_data["id"]]:
            last_operation = str(reports[service_data["id"]].text)
            time = reports[service_data["id"]].updated.replace("T", " ")
        else:
            last_operation = "Service draft created" if environment.version == 0 else ""
            time = service_data["updated"].replace("T", " ")[:-7]

        service_data["environment_id"] = environment_id
        service_data["environment_version"] = environment.version
        service_data["operation"] = last_operation
        service_data["operation_updated"] = time
        services.append(service_data)

    log.debug("Service::List")
    return [bunch.bunchify(service) for service in services]
def load(self, config_src, signal_update=True, namespace=None, monitor=False, sub_key=None):
    """
    Load config from source(s).

    :param config_src: URI(s) or dictionaries to load the config from. If
        config_src is a list, then the first config is loaded as the main
        config with subsequent configs merged into it.
    :type config_src: a string or dictionary or list of strings and/or dictionaries
    """
    namespace = self._get_namespace(namespace)
    merge_configs = []
    if isinstance(config_src, list):
        merge_configs = config_src[1:]
        config_src = config_src[0]
    if isinstance(config_src, basestring):
        if monitor:
            self.start_src_monitor(config_src)
        config_src = Loader.load(config_src)
    self._configs[namespace] = Config(bunchify(config_src))
    self.merge(merge_configs, False, namespace, monitor, False)
    self._sub_keys[namespace] = sub_key
    self._do_subs(namespace)
    self._configs[namespace]._freeze()
    if signal_update:
        self.signal_update(namespace)
def get_service_table_line(self, idx, name, docs, sio):
    name_fs_safe = 'service_{}'.format(fs_safe_name(name))
    file_name = '{}.rst'.format(name_fs_safe)

    summary = docs.summary
    if summary:
        summary = self._make_sphinx_safe(summary)

    return bunchify({
        'ns': str(idx),
        'orig_name': name,
        'sphinx_name': name.replace('_', '\_'),  # Needed for Sphinx to ignore underscores
        'name': name_fs_safe,
        'name_link': """:doc:`{} <./{}>`""".format(name, name_fs_safe),
        'file_name': file_name,
        'description': summary or no_value,
        'docs': docs,
        'sio': sio
    })
def merge(self, config_src, signal_update=False, namespace=None, monitor=False, do_subs=True):
    """
    Merge configs.

    :param config_src: URI(s) or dictionaries to load config(s) from to be
        merged into the main config
    :type config_src: a string or dictionary or list of strings and/or dictionaries
    """
    namespace = self._get_namespace(namespace)
    if self._configs.get(namespace, None) is None:
        raise ValueError('no config to merge with!')
    if not isinstance(config_src, list):
        config_src = [config_src]
    for config in config_src:
        if isinstance(config, basestring):
            if monitor:
                self.start_src_monitor(config)
            config = Loader.load(config)
        self._configs[namespace]._merge(bunchify(config))
    if do_subs:
        self._do_subs(namespace)
    if signal_update:
        self.signal_update(namespace)
def parse_instance_opaque_attr(instance):
    opaque = getattr(instance, GENERIC.ATTR_NAME)
    opaque = loads(opaque) if opaque else None
    if not opaque:
        return {}
    ElemsWithOpaqueMaker.process_config_dict(opaque)
    return bunchify(opaque)
def __init__(self):
    currentDir = os.path.dirname(os.path.realpath(__file__))
    configFile = currentDir + "/../" + self.CONFIG_FILE
    conf = anyconfig.load(configFile)
    bconf = bunch.bunchify(conf)
    self.ttsWatson = TtsWatson(bconf.user, bconf.password, bconf.voice,
                               bconf.url, bconf.chunk)
def validate(self, expected_username, token):
    """ Check if the given token is (still) valid.

    1. Look for the token in the cache without decrypting/decoding it.
    2a. If not found, return "invalid".
    2b. If found:
        3. Decrypt.
        4. Decode.
        5. Renew the cache expiration asynchronously
           (do not wait for the update confirmation).
        6. Return "valid" + the token contents.
    """
    if self.cache.get(token):
        decrypted = self.fernet.decrypt(token)
        token_data = bunchify(jwt.decode(decrypted, self.secret))
        if token_data.username == expected_username:
            # Renew the token expiration
            self.cache.put(token, token, token_data.ttl, is_async=True)
            return Bunch(valid=True, token=token_data, raw_token=token)
        else:
            return Bunch(valid=False, message='Unexpected user for token found')
    else:
        return Bunch(valid=False, message='Invalid token')
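# A hedged sketch (not from the source) of minting a token that validate()
# above would accept: sign the claims with PyJWT 1.x, then encrypt the JWT
# with Fernet. The claim names mirror what validate() reads (.username,
# .ttl); the key/secret wiring is an assumption for illustration.
import jwt  # PyJWT
from cryptography.fernet import Fernet


def create_token(fernet_key, secret, username, ttl):
    claims = {'username': username, 'ttl': ttl}
    signed = jwt.encode(claims, secret)        # sign (bytes under PyJWT 1.x)
    return Fernet(fernet_key).encrypt(signed)  # then encrypt

# fernet_key = Fernet.generate_key()
# token = create_token(fernet_key, 'server-side-secret', 'user1', 3600)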
def get_state(bunch=True):
    with open('ops/config/defaults.conf') as defaults_file:
        defaults_file_content = defaults_file.read()
    state = yaml.load(defaults_file_content)

    if os.path.isfile(os.environ['HOME'] + '/ops.conf'):
        with open(os.environ['HOME'] + '/ops.conf') as ops_file:
            ops_file_content = ops_file.read()
        ops = yaml.load(ops_file_content)
        state = dict_merge(state, ops)

    if os.path.isfile('ops/config/project.conf'):
        with open('ops/config/project.conf') as project_file:
            project_file_content = project_file.read()
        project = yaml.load(project_file_content)
        state = dict_merge(state, project)

    if os.path.isfile('ops/config/private.conf'):
        with open('ops/config/private.conf') as private_file:
            private_file_content = private_file.read()
        private = yaml.load(private_file_content)
        state = dict_merge(state, private)

    if bunch:
        return bunchify(state)
    else:
        return state
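# `dict_merge` is not defined in this snippet; a minimal recursive-merge
# sketch matching how it is used above (later configs override earlier
# ones, nested dicts merged key by key):
def dict_merge(base, override):
    merged = dict(base)
    for key, value in override.items():
        if (key in merged and isinstance(merged[key], dict)
                and isinstance(value, dict)):
            merged[key] = dict_merge(merged[key], value)
        else:
            merged[key] = value
    return merged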
def dispatcher_callback(self, event, ctx, **opaque):
    self.dispatcher_backlog.append(bunchify({
        'event_id': new_cid(),
        'event': event,
        'ctx': ctx,
        'opaque': opaque
    }))
def mail_pledge_platform_admin(donation):
    # Only process "one-off" type donations
    if donation.order.order_type != "one-off":
        return

    project_url = '/projects/{0}'.format(donation.project.slug)

    try:
        admin_email = properties.TENANT_MAIL_PROPERTIES.get('address')
    except AttributeError:
        admin_email = None
        logger.critical('No mail properties found for {0}'.format(
            connection.tenant.client_name))

    if admin_email:
        # Use the platform default language
        with TenantLanguage(properties.LANGUAGE_CODE):
            subject = _('A new pledge donation has been received')

        payment_method = get_payment_method(donation)

        # Create a fake user object for the mail
        receiver = bunchify({'email': admin_email})

        # Send email to the platform admin.
        send_mail(template_name='payments_pledge/mails/pledge_details.mail',
                  subject=subject,
                  to=receiver,
                  link=project_url,
                  donation=donation,
                  pledged=True,
                  payment_method=payment_method)
def set_config(self):
    """ Sets self attributes, as configured in shmem by our parent process.
    """
    config = self.config_ipc.get_config('zato-{}'.format(self.ipc_name))
    config = loads(config)
    config = bunchify(config)

    self.username = config.username
    self.password = config.password
    self.server_auth = (self.username, self.password)

    self.base_dir = config.base_dir
    self.port = config.port
    self.server_port = config.server_port
    self.server_path = config.server_path
    self.server_address = self.server_address.format(self.server_port, self.server_path)

    with open(config.logging_conf_path) as f:
        logging_config = yaml.load(f)

    if not 'zato_{}'.format(self.conn_type) in logging_config['loggers']:
        logging_config = get_logging_config(self.conn_type, self.logging_file_name)

    # Configure logging for this connector
    self.set_up_logging(logging_config)

    # Store our process's pidfile
    if config.needs_pidfile:
        self.store_pidfile(config.pidfile_suffix)
def post(self, request, *args, **kwargs):
    serializer = ShareSerializer(bunchify({}), data=request.DATA)
    if not serializer.is_valid():
        return response.Response(serializer.errors, status=400)

    args = self.project_args(serializer.data.get('project'))
    if args is None:
        return HttpResponseNotFound()

    sender_name = self.request.user.get_full_name() or self.request.user.username
    sender_email = self.request.user.email
    share_name = serializer.object.get('share_name', None)
    share_email = serializer.object.get('share_email', None)
    share_motivation = serializer.object.get('share_motivation', None)
    share_cc = serializer.object.get('share_cc')

    args.update(dict(
        template_name='utils/mails/share_flyer.mail',
        subject=_('%(name)s wants to share a project with you!') % dict(name=sender_name),
        to=namedtuple("Receiver", "email")(email=share_email),
        from_email=sender_email,
        share_name=share_name,
        share_email=share_email,
        share_motivation=share_motivation,
        sender_name=sender_name,
        sender_email=sender_email,
        cc=[sender_email] if share_cc else []
    ))

    result = send_mail(**args)
    return response.Response({}, status=201)
def load_settings(settings_filename):
    settings = bunchify(load(file(settings_filename)))
    return Bunch(sources_filename=settings.sources_filename,
                 logging_config_filename=settings.logging_config_filename,
                 oxtalks_username=settings.oxtalks.username,
                 oxtalks_password=settings.oxtalks.password,
                 oxtalks_hostname=settings.oxtalks.hostname)
def update(self, body=None, minor=True):
    """Update a page's content."""
    assert self.markup == 'storage', "Cannot update non-storage page markup!"
    if body is None:
        body = self.body
    if body == self._data.body[self.markup].value:
        return  # No changes

    data = {
        #'id': self._data.id,
        'type': 'page',
        'space': {'key': self.space_key},
        'title': self.title,
        'version': dict(number=self.version + 1, minorEdit=minor),
        'body': {
            'storage': {
                'value': body,
                'representation': self.markup,
            }
        },
        'expand': 'version',
    }
    response = self.cf.session.put(self._data._links.self, json=data)
    response.raise_for_status()
    ##page = response.json(); print(page)
    result = bunchify(response.json())
    self._data.body[self.markup].value = body
    self._data.version = result.version
    return result
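# Usage note (hedged): Confluence expects the new version number to be the
# current one plus 1, so `version.number = self.version + 1` above doubles
# as optimistic concurrency control; if another editor won the race, the
# PUT fails and raise_for_status() surfaces the HTTP error.
#
#   page.update('<p>Updated via API</p>', minor=False)  # `page` is assumed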
def _read_data(self):
    '''Read stored data.'''
    if not os.path.exists(self.data_file):
        self.data = INITIAL_DATA
        self._save_data()
    json_data = json.loads(open(self.data_file).read())
    self.data = bunchify(json_data)
def update_attrs(cls, name, attrs):
    attrs = bunchify(attrs)
    mod = getmodule(cls)

    attrs.elem = getattr(mod, 'elem')
    attrs.model = getattr(mod, 'model')
    attrs.output_required_extra = getattr(mod, 'output_required_extra', [])
    attrs.output_optional_extra = getattr(mod, 'output_optional_extra', [])
    attrs.get_data_func = getattr(mod, 'list_func')
    attrs.def_needed = getattr(mod, 'def_needed', False)

    if name == 'GetList':
        attrs.output_required = attrs.model
    else:
        attrs.broker_message = getattr(mod, 'broker_message')
        attrs.broker_message_prefix = getattr(mod, 'broker_message_prefix')

        if name in ('Create', 'Edit'):
            attrs.input_required = attrs.model
            attrs.input_optional = attrs.model
            attrs.is_create_edit = True
            attrs.is_edit = name == 'Edit'

    return attrs
def _new(cls, api, data, err_handler=None, params=None, method=None): """ Create a new `Node` from a `Graph` and a JSON-decoded object. If the object is not a dictionary, it will be returned as-is. """ if isinstance(data, dict): if data.get("error"): code = data["error"].get("code") if code is None: code = data["error"].get("error_code") msg = data["error"].get("message") if msg is None: msg = data["error"].get("error_msg") if code is None: code_match = code_re.match(msg) if code_match is not None: code = int(code_match.group(1)) e = GraphException(code, msg, graph=api, params=params, method=method) if err_handler: return err_handler(e=e) else: raise e return cls(api, bunch.bunchify(data)) return data
def messageReceived(self, msg):
    msg = bunchify(msg)
    if 'status' in msg:
        targets = [obj for obj in msg.proximity if obj.object_type == 'player']
        if targets:
            my_pos = Point2(*msg.gps.position)
            # Rotate, aiming at the target's predicted position
            t = targets[0]
            t_pos = Point2(t.position[0] + t.velocity[0] * POS_PER_VEL,
                           t.position[1] + t.velocity[1] * POS_PER_VEL)
            turn = relative_angle(my_pos.x, my_pos.y, t_pos.x, t_pos.y, msg.gps.angle)
        else:
            # Rotate a tick to the right
            turn = 0.1
        # Do the rotation and shoot
        self.command('turn', value=turn)
        self.command('fire')
def prepeare_hosts(config_dict, local_base): """Prepare env global variable""" hosts = Bunch() for name, values in config_dict.items(): if not values: continue h = bunchify(values) h.setdefault('port', 22) h.repo_path = os.path.join('/home', h.user, h.project_base, h.project_name) h.code_path = os.path.join(h.repo_path, h.code_subdir) h.host_string = '{user}@{remote_ip}:{port}'.format(**h) # Virtualenv h.venv_dir = '/home/{user}/.virtualenvs/{virtualenv}'.format(**h) h.venv_prefix = ('export export PIP_DOWNLOAD_CACHE=~/.pip_download_cache;' 'source ' '{venv_dir}/bin/activate'.format(**h)) # Proxy proxy = env.get('proxy', None) if proxy: h.proxy_settings = 'export http_proxy=%s export https_proxy=%s' else: h.proxy_settings = '' h.local_base = os.path.abspath(local_base) h.setdefault('proxy_command', '') hosts[name] = h env._hosts = hosts